code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _rename_duplicate_tabs(self, current, name, path): for i in range(self.count()): if self.widget(i)._tab_name == name and self.widget(i) != current: file_path = self.widget(i).file.path if file_path: parent_dir = os.path.split(os.path.abspath( ...
Rename tabs whose title is the same as the name
async def load(self, node_id=None):
    """Load nodes from the KLF 200 gateway.

    If *node_id* is given, only that node is loaded; otherwise every
    node is loaded.
    """
    if node_id is None:
        await self._load_all_nodes()
    else:
        await self._load_node(node_id=node_id)
Load nodes from the KLF 200; if no node_id is specified, all nodes are loaded.
def initialize_axes(self, boundary=0.05): bs = [(self.vals[:, i].max()-self.vals[:, i].min())*boundary for i in range(3)] self.x_lim = np.array([self.vals[:, 0].min()-bs[0], self.vals[:, 0].max()+bs[0]]) self.y_lim = np.array([self.vals[:, 1].min()-bs[1], self.vals[:, 1].max()+bs[1]]) se...
Set up the axes with the right limits and scaling.
def _EnsureRequesterStarted(self): if not self._analyzer_started: self._analyzer.start() self._analyzer_started = True
Checks if the analyzer is running and starts it if not.
def delete(self, event):
    """Mark the given event as deleted by attaching a 410 Gone response."""
    # Guard: only process events addressed to this receiver.
    assert self.receiver_id == event.receiver_id
    event.response_code = 410
    event.response = {'status': 410, 'message': 'Gone.'}
Mark event as deleted.
def render(self, *args, **kwargs):
    """Render the page with an HTML doctype prepended.

    Ensures every served HTML page declares a doctype ahead of the
    normally rendered output.
    """
    doctype_markup = self.doctype.render()
    return doctype_markup + super().render(*args, **kwargs)
Override so each HTML page served has a doctype.
def ajax_get_service(self): uid = self.request.form.get("uid", None) if uid is None: return self.error("Invalid UID", status=400) service = self.get_object_by_uid(uid) if not service: return self.error("Service not found", status=404) info = self.get_servi...
Returns the services information
def Sign(self, message, use_pss=False): precondition.AssertType(message, bytes) if not use_pss: padding_algorithm = padding.PKCS1v15() else: padding_algorithm = padding.PSS( mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH) return self._value.sign(message, padd...
Sign a given message.
def _set_zone_name(self, zoneid, name): zoneid -= 1 data = { '_set_zone_name': 'Set Name', 'select_zone': str(zoneid), 'zone_name': name, } self._controller.post(data)
Private method to override zone name.
def _context_string(context): msg = 'Code block context:\n ' lines = ['Selective arithmetic coding bypass: {0}', 'Reset context probabilities on coding pass boundaries: {1}', 'Termination on each coding pass: {2}', 'Vertically stripe causal context: {3}', ...
Produce a string to represent the code block context
def _fetchAllChildren(self): childItems = [] if self._array.ndim == 2: _nRows, nCols = self._array.shape if self._array is not None else (0, 0) for col in range(nCols): colItem = SliceRti(self._array[:, col], nodeName="channel-{}".format(col), ...
Adds an ArrayRti per column as children so that they can be inspected easily
def _compile_literal(self, data): if data is None: return 'nil' elif data is True: return 'yes' elif data is False: return 'no' else: return repr(data)
Write correct representation of literal.
def delaunay_2d(self, tol=1e-05, alpha=0.0, offset=1.0, bound=False, inplace=False): alg = vtk.vtkDelaunay2D() alg.SetProjectionPlaneMode(vtk.VTK_BEST_FITTING_PLANE) alg.SetInputDataObject(self) alg.SetTolerance(tol) alg.SetAlpha(alpha) alg.SetOffset(offset) alg.S...
Apply a delaunay 2D filter along the best fitting plane
def print_descr(rect, annot):
    """Print a short description to the right of an annot rect."""
    text = "'%s' annotation" % annot.type[1]
    annot.parent.insertText(rect.br + (10, 0), text, color=red)
Print a short description to the right of an annot rect.
def ssh_calc_aws(node): userlu = {"ubunt": "ubuntu", "debia": "admin", "fedor": "root", "cento": "centos", "openb": "root"} image_name = node.driver.get_image(node.extra['image_id']).name if not image_name: image_name = node.name usertemp = ['name'] + [value for key, value in list(...
Calculate the default ssh user based on the image-id of an AWS instance.
def team_info():
    """Returns a list of team information dictionaries.

    Each dictionary is a copy of one team element's XML attributes.
    """
    team_elements = __get_league_object().find('teams').findall('team')
    return [dict(team.attrib) for team in team_elements]
Returns a list of team information dictionaries
def preview(num):
    """Print the text of Project Euler problem *num*.

    The problem text is fetched before any output is produced, then a
    bold heading and the text are echoed via click.
    """
    problem_text = Problem(num).text
    click.secho("Project Euler Problem %i" % num, bold=True)
    click.echo(problem_text)
Prints the text of a problem.
def full_path(self):
    """Absolute system path to the node, built by walking up through parents."""
    if not self.parent:
        return self.name
    return os.path.join(self.parent.full_path, self.name)
Absolute system path to the node
def _match_iter_single(self, path_elements, start_at): length = len(path_elements) if length == 0: return if self.bound_end: start = length - 1 if start < start_at: return else: start = start_at if self.bound_start: ...
Implementation of match_iter optimized for self.elements of length 1
def lookup_forward(name): ip_addresses = {} addresses = list(set(str(ip[4][0]) for ip in socket.getaddrinfo( name, None))) if addresses is None: return ip_addresses for address in addresses: if type(ipaddress.ip_address(address)) is ipaddress.IPv4Address: ip_addresses...
Perform a forward lookup of a hostname.
def formatted_value(self): val = self.value pval = val ftype = self.value_type if ftype == "percentage": pval = int(round(val * 100)) if self.type == "negative": pval = 0 - (100 - pval) else: pval -= 100 elif fty...
Returns a formatted value as a string
def _remove_n(self):
    """Remove terminal Ns from sequencing results.

    For each result: find the longest 'N'-free run in its string form,
    locate that run's start via the project-specific ``locate`` API,
    and slice the result down to just that run.

    NOTE(review): this keeps only the longest non-N stretch, which also
    discards shorter internal segments — confirm that is intended.
    """
    for i, result in enumerate(self.results):
        largest = max(str(result).split('N'), key=len)
        start = result.locate(largest)[0][0]
        stop = start + len(largest)
        if start != stop:
            self.results[i] = self.results[i][start:stop]
Remove terminal Ns from sequencing results.
def powernode_data(self, name:str) -> Powernode: self.assert_powernode(name) contained_nodes = frozenset(self.nodes_in(name)) return Powernode( size=len(contained_nodes), contained=frozenset(self.all_in(name)), contained_pnodes=frozenset(self.powernodes_in(nam...
Return a Powernode object describing the given powernode
def _call_syndic(self, func, args=(), kwargs=None, master_id=None): if kwargs is None: kwargs = {} successful = False for master, syndic_future in self.iter_master_options(master_id): if not syndic_future.done() or syndic_future.exception(): log.error( ...
Wrapper to call a given func on a syndic, best effort to get the one you asked for
def _ParseYamlFromFile(filedesc):
    """Parses given YAML file.

    A falsy parse result is normalized to an empty OrderedDict so
    callers always receive a mapping.
    """
    parsed = yaml.Parse(filedesc.read())
    return parsed if parsed else collections.OrderedDict()
Parses given YAML file.
def construct_url(self): path = [self.path] path.extend([str(x) for x in self.params]) url = self.client.base_url + '/'.join(x for x in path if x) query = self.kwargs.get('query') if query: if type(query) is dict: query = query.items() quer...
Construct a full plex request URI, with `params`.
def save_block(self, data, dest):
    """Save *data* on *dest*, which is a file open in 'a' (append) mode.

    Delegates to write_csv with this writer's separator and format;
    'no-header' presumably suppresses the header row on each appended
    block — confirm against write_csv.
    """
    write_csv(dest, data, self.sep, self.fmt, 'no-header')
Save data to dest, which is a file open in 'a' (append) mode.
def create_equipamento(self):
    """Get an instance of the equipamento services facade.

    Builds it from this client's API URL and credentials, including
    the LDAP user.
    """
    return Equipamento(
        self.networkapi_url,
        self.user,
        self.password,
        self.user_ldap)
Get an instance of equipamento services facade.
def _file_name(self, dtype_out_time, extension='nc'): if dtype_out_time is None: dtype_out_time = '' out_lbl = utils.io.data_out_label(self.intvl_out, dtype_out_time, dtype_vert=self.dtype_out_vert) in_lbl = utils.io.data_in_label(self.intvl_...
Create the name of the aospy file.
def MakeRequest(self, data): stats_collector_instance.Get().IncrementCounter("grr_client_sent_bytes", len(data)) response = self.http_manager.OpenServerEndpoint( path="control?api=%s" % config.CONFIG["Network.api"], verify_cb=self.VerifyServerC...
Make a HTTP Post request to the server 'control' endpoint.
def _step_failure(self, exc):
    """Log failure of a step and advance the step counter.

    Emits a critical line naming the failed step, then logs the
    exception as an ExecuteTaskExecutionError.
    """
    message = u"STEP %d (%s) FAILURE" % (self.step_index, self.step_label)
    self.log_crit(message)
    self.step_index += 1
    self.log_exc(u"Unexpected error while executing task", exc, True, ExecuteTaskExecutionError)
Log failure of a step
def entity_tags_form(self, entity, ns=None):
    """Construct a form class with a field for tags in namespace `ns`.

    Falls back to the entity's default namespace when none is given.
    """
    namespace = ns if ns is not None else self.entity_default_ns(entity)
    tags_field = TagsField(label=_l("Tags"), ns=namespace)
    return type("EntityNSTagsForm", (_TagsForm,), {"tags": tags_field})
Construct a form class with a field for tags in namespace `ns`.
def setViewMode( self, state = True ): if self._viewMode == state: return self._viewMode = state if state: self._mainView.setDragMode( self._mainView.ScrollHandDrag ) else: self._mainView.setDragMode( self._mainView.RubberBandDrag ) self.emitVi...
Starts the view mode for moving around the scene.
def data_type_to_numpy(datatype, unsigned=False):
    """Convert an ncstream datatype to a numpy one.

    String/opaque types map directly; numeric types get the native
    byte-order prefix '=', with 'i' switched to 'u' when unsigned.
    """
    base = _dtypeLookup[datatype]
    if datatype in (stream.STRING, stream.OPAQUE):
        return np.dtype(base)
    if unsigned:
        base = base.replace('i', 'u')
    return np.dtype('=' + base)
Convert an ncstream datatype to a numpy one.
def getCiphertextLen(self, ciphertext):
    """Given a ciphertext with a valid header, return its total length.

    The total is the plaintext length plus the fixed ciphertext
    expansion overhead.
    """
    return self.getPlaintextLen(ciphertext) + Encrypter._CTXT_EXPANSION
Given a ``ciphertext`` with a valid header, returns the length of the ciphertext inclusive of ciphertext expansion.
def to_jd(year, month, day, method=None): method = method or 'equinox' if day < 1 or day > 30: raise ValueError("Invalid day for this calendar") if month > 13: raise ValueError("Invalid month for this calendar") if month == 13 and day > 5 + leap(year, method=method): raise ValueE...
Obtain Julian day from a given French Revolutionary calendar date.
def _execute_comprehension(self, node: Union[ast.ListComp, ast.SetComp, ast.GeneratorExp, ast.DictComp]) -> Any: args = [ast.arg(arg=name) for name in sorted(self._name_to_value.keys())] func_def_node = ast.FunctionDef( name="generator_expr", args=ast.arguments(args=args, kwonlya...
Compile the generator or comprehension from the node and execute the compiled code.
def _call_in_reactor_thread(self, f, *args, **kwargs):
    """Schedule f(*args, **kwargs) to run on the reactor thread.

    Thin wrapper over the Twisted-style callFromThread, which is the
    thread-safe way to hand work to the reactor from other threads.
    """
    self._reactor.callFromThread(f, *args, **kwargs)
Call the given function with args in the reactor thread.
def geturl(self):
    """Reassemble the parsed DSN components back into URL form."""
    parts = (self.scheme, self.netloc, self.path,
             self.params, self.query_str, self.fragment)
    return urlparse.urlunparse(parts)
return the dsn back into url form
async def get_constants(self):
    """Get clash royale constants.

    Fetches the '/constants' endpoint and wraps the payload in a
    Constants object.
    """
    payload = await self.request(self.BASE + '/constants')
    return Constants(self, payload)
Get clash royale constants.
def restart(ctx, **kwargs):
    """Restart a vaping process.

    Refreshes the context with the given options, builds the daemon
    from it, then stops and starts the daemon.
    """
    update_context(ctx, kwargs)
    daemon = mk_daemon(ctx)
    daemon.stop()
    daemon.start()
restart a vaping process
def init(cls, name):
    """Return an instance of this class or an appropriate subclass.

    Subclasses advertise themselves via a class-level ``name``
    attribute; a later subclass with a duplicate name wins, matching
    dict semantics. Falls back to *cls* when nothing matches.
    """
    registry = {}
    for subcls in cls.__subclasses__():
        if hasattr(subcls, 'name'):
            registry[subcls.name] = subcls
    chosen = registry.get(name, cls)
    return chosen(name)
Return an instance of this class or an appropriate subclass
def replace(self, p_todos):
    """Replace the whole todolist with the todo objects in *p_todos*.

    The current contents are erased first, then p_todos is added and
    the list is flagged as modified.
    """
    self.erase()
    self.add_todos(p_todos)
    self.dirty = True
Replaces whole todolist with todo objects supplied as p_todos.
def xpathNewContext(self):
    """Create a new xmlXPathContext.

    Raises xpathError when the underlying C call fails.
    """
    raw = libxml2mod.xmlXPathNewContext(self._o)
    if raw is None:
        raise xpathError('xmlXPathNewContext() failed')
    return xpathContext(_obj=raw)
Create a new xmlXPathContext
def parse_pdb_file(self): self.pdb_parse_tree = {'info': {}, 'data': { self.state: {}} } try: for line in self.pdb_lines: self.current_line = line record_name = li...
Runs the PDB parser.
def form_lines_valid(self, form): handled = 0 for inner_form in form: if not inner_form.cleaned_data.get(formsets.DELETION_FIELD_NAME): handled += 1 self.handle_inner_form(inner_form) self.log_and_notify_lines(handled) return http.HttpResponseR...
Handle a valid LineFormSet.
def randrange(seq):
    """Yields random values from @seq until @seq is empty.

    Works on a copy, so the caller's sequence is untouched.
    """
    pool = seq.copy()
    choose = rng().choice
    while pool:
        picked = choose(pool)
        pool.remove(picked)
        yield picked
Yields random values from @seq until @seq is empty
def process_fields(self, fields):
    """Turn prefixed field names into (name, direction) tuples.

    The first character of each field selects the direction from
    PREFIX_MAP (the '' entry is the default); all prefix characters
    are stripped from the returned field name.
    """
    prefix_chars = ''.join(self.PREFIX_MAP)
    return [
        (raw.lstrip(prefix_chars),
         self.PREFIX_MAP.get(raw[0], self.PREFIX_MAP['']))
        for raw in fields
    ]
Process a list of simple string field definitions and assign their order based on prefix.
def _update_range(self, response): header_value = response.headers.get('x-resource-range', '') m = re.match(r'\d+-\d+/(\d+)$', header_value) if m: self._count = int(m.group(1)) else: self._count = None
Update the query count property from the `X-Resource-Range` response header
def validate(self):
    """Run the form value through the validators.

    A pre-existing error short-circuits to False. Each validator
    returns a falsy value on success or an error otherwise; the first
    error is stored on self.error and stops validation.
    """
    if not self.error:
        for check in self.validators:
            self.error = check(self.value)
            if self.error:
                break
    return not self.error
Run the form value through the validators, and update the error field if needed
def convert_body_frame(self, phi, theta, phiDot, thetaDot, psiDot):
    """Convert Euler-angle rates (earth frame) to body-frame rates (p, q, r)."""
    sin_phi, cos_phi = math.sin(phi), math.cos(phi)
    cos_theta = math.cos(theta)
    p = phiDot - psiDot * math.sin(theta)
    q = cos_phi * thetaDot + sin_phi * psiDot * cos_theta
    r = cos_phi * psiDot * cos_theta - sin_phi * thetaDot
    return (p, q, r)
convert a set of roll rates from earth frame to body frame
def _from_dict(cls, _dict):
    """Initialize a QueryEntitiesResponse object from a json dictionary."""
    kwargs = {}
    if 'entities' in _dict:
        kwargs['entities'] = [
            QueryEntitiesResponseItem._from_dict(item)
            for item in _dict.get('entities')
        ]
    return cls(**kwargs)
Initialize a QueryEntitiesResponse object from a json dictionary.
def cleanup_temporary_directories(self): while self.build_directories: shutil.rmtree(self.build_directories.pop()) for requirement in self.reported_requirements: requirement.remove_temporary_source() while self.eggs_links: symbolic_link = self.eggs_links.pop()...
Delete the build directories and any temporary directories created by pip.
def force_update(self): if self.disabled or self.terminated or not self.enabled: return for meth in self.methods: self.methods[meth]["cached_until"] = time() if self.config["debug"]: self._py3_wrapper.log("clearing cache for method {}".format(meth)) ...
Forces an update of the module.
def delete_fw(self, fw_id):
    """Deletes the FW local attributes.

    *fw_id* identifies the firewall being removed but is not otherwise
    used here; only the locally cached state is reset.
    """
    self.fw_id = None
    self.fw_name = None
    self.active_pol_id = None
    self.fw_created = False
Deletes the FW local attributes.
def to_param_dict(self): param_dict = {} for index, dictionary in enumerate(self.value): for key, value in dictionary.items(): param_name = '{param_name}[{index}][{key}]'.format( param_name=self.param_name, ...
Sorts to ensure order is consistent for testing.
def _connect(self):
    """Connects the bot to the server and identifies itself.

    Creates the connection, spawns the connect call so it runs
    asynchronously, then sends the NICK and USER registration.
    """
    self.conn = self._create_connection()
    spawn(self.conn.connect)
    self.set_nick(self.nick)
    self.cmd(u'USER', u'{0} 3 * {1}'.format(self.nick, self.realname))
Connects the bot to the server and identifies itself.
def DFS(G): if not G.vertices: raise GraphInsertError("This graph have no vertices.") color = {} pred = {} reach = {} finish = {} def DFSvisit(G, current, time): color[current] = 'grey' time += 1 reach[current] = time for vertex in G.vertices[cu...
Algorithm for depth-first searching the vertices of a graph.
def sanitizer(name, replacements=None):
    """String sanitizer to avoid problematic characters in filenames.

    Args:
        name: the string to sanitize.
        replacements: iterable of (old, new) pairs; defaults to mapping
            ':', '/' and '\\' to '_'.

    Returns:
        The sanitized string.
    """
    # A mutable default list is shared across calls; use None as the
    # sentinel and fall back to an immutable tuple instead.
    if replacements is None:
        replacements = ((':', '_'), ('/', '_'), ('\\', '_'))
    for old, new in replacements:
        name = name.replace(old, new)
    return name
String sanitizer to avoid problematic characters in filenames.
def setup_sighandlers(self): signal.signal(signal.SIGINT, signal.SIG_IGN) signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGPROF, self.on_sigprof) signal.signal(signal.SIGABRT, self.stop) signal.siginterrupt(signal.SIGPROF, False) signal.siginterrupt(s...
Setup the stats and stop signal handlers.
def _is_vis(channel): if isinstance(channel, str): return channel == '00_7' elif isinstance(channel, int): return channel == 1 else: raise ValueError('Invalid channel')
Determine whether the given channel is a visible channel
def init_with_keytab(self): creds_opts = { 'usage': 'initiate', 'name': self._cleaned_options['principal'], } store = {} if self._cleaned_options['keytab'] != DEFAULT_KEYTAB: store['client_keytab'] = self._cleaned_options['keytab'] if self._cle...
Initialize credential cache with keytab
def dump(self, path): try: with open(path, "wb") as f: f.write(self.__str__().encode("utf-8")) except: pass with open(path, "wb") as f: pickle.dump(self.__data__, f)
dump DictTree data to json files.
def addreadergroup(self, newgroup): hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER) if 0 != hresult: raise error( 'Failed to establish context: ' + \ SCardGetErrorMessage(hresult)) try: hresult = SCardIntroduceReaderGroup(hconte...
Add a reader group
def phenotype_data(self): if self._phenotype_data is None: pheno_data = {} for gsm_name, gsm in iteritems(self.gsms): tmp = {} for key, value in iteritems(gsm.metadata): if len(value) == 0: tmp[key] = np.nan ...
Get the phenotype data for each of the sample.
def _create_path_if_not_exist(self, path): if path and not os.path.exists(path): os.makedirs(path)
Creates a folders path if it doesn't exist
def iiif_info_handler(prefix=None, identifier=None, config=None, klass=None, auth=None, **args): if (not auth or degraded_request(identifier) or auth.info_authz()): if (auth): logging.debug("Authorized for image %s" % identifier) i = IIIFHandler(prefix, identifier, ...
Handler for IIIF Image Information requests.
def _IndexedScan(self, i, max_records=None): self._ReadIndex() idx = 0 start_ts = 0 if i >= self._max_indexed: start_ts = max((0, 0), (self._index[self._max_indexed][0], self._index[self._max_indexed][1] - 1)) idx = self._max_indexed else: try: ...
Scan records starting with index i.
def flatten(cls, stats): flat_children = {} for _stats in spread_stats(stats): key = (_stats.name, _stats.filename, _stats.lineno, _stats.module) try: flat_stats = flat_children[key] except KeyError: flat_stats = flat_children[key] = cl...
Makes a flat statistics from the given statistics.
def mcube(self, **kwargs): kwargs_copy = self.base_dict.copy() kwargs_copy.update(**kwargs) kwargs_copy['dataset'] = kwargs.get('dataset', self.dataset(**kwargs)) kwargs_copy['component'] = kwargs.get( 'component', self.component(**kwargs)) self._replace_none(kwargs_c...
return the name of a model cube file
def _check_consistency(self): type_count = defaultdict(int) for _, section in sorted(self.sections.items()): type_count[section.section_type] += 1 if type_count[POINT_TYPE.SOMA] != 1: L.info('Have %d somas, expected 1', type_count[POINT_TYPE.SOMA])
see if the sections have obvious errors
def reset(self, relation_name=None):
    """Reset the transfer info for a particular relation, or for all.

    When *relation_name* is given only that relation's records are
    deleted; otherwise every record in "relations" goes. Returns self
    so calls can be chained.
    """
    condition = "1=1" if relation_name is None else dict(name=relation_name)
    self.data_access.delete("relations", condition)
    return self
Reset the transfer info for a particular relation, or if none is given, for all relations.
def dmsStrToDeg(dec):
    """Convert a sexagesimal DEC string ('[+-]DD:MM:SS.s') to float degrees.

    A missing sign is treated as '+'. Delegates the arithmetic to
    decTimeToDeg.
    """
    # Renamed locals so they no longer shadow the builtin `min`.
    sign_deg, minutes, seconds = dec.split(':')
    sign = sign_deg[0:1]
    if sign in ('+', '-'):
        deg = sign_deg[1:]
    else:
        sign = '+'
        deg = sign_deg
    return decTimeToDeg(sign, int(deg), int(minutes), float(seconds))
Convert a string representation of DEC into a float in degrees.
def create_delete_model(record): arn = f"arn:aws:s3:::{cloudwatch.filter_request_parameters('bucketName', record)}" LOG.debug(f'[-] Deleting Dynamodb Records. Hash Key: {arn}') data = { 'arn': arn, 'principalId': cloudwatch.get_principal(record), 'userIdentity': cloudwatch.get_user_i...
Create an S3 model from a record.
def field_datetime_from_json(self, json_val): if type(json_val) == int: seconds = int(json_val) dt = datetime.fromtimestamp(seconds, utc) elif json_val is None: dt = None else: seconds, microseconds = [int(x) for x in json_val.split('.')] ...
Convert a UTC timestamp to a UTC datetime.
def _summarize_result(self, root_action, leaf_eot): root_board = root_action.parent.board action_detail = root_action.position_pair score = self._relative_score(root_action, leaf_eot, root_action.parent.player, root_action...
Return a dict with useful information that summarizes this action.
def pauli_product(*elements: Pauli) -> Pauli: result_terms = [] for terms in product(*elements): coeff = reduce(mul, [term[1] for term in terms]) ops = (term[0] for term in terms) out = [] key = itemgetter(0) for qubit, qops in groupby(heapq.merge(*ops, key=key), key=key)...
Return the product of elements of the Pauli algebra
def install_egg(self, egg_name): if not os.path.exists(self.egg_directory): os.makedirs(self.egg_directory) self.requirement_set.add_requirement( InstallRequirement.from_line(egg_name, None)) try: self.requirement_set.prepare_files(self.finder) sel...
Install an egg into the egg directory
def _log_statistics(self): rows_per_second_trans = self._count_total / (self._time1 - self._time0) rows_per_second_load = self._count_transform / (self._time2 - self._time1) rows_per_second_overall = self._count_total / (self._time3 - self._time0) self._log('Number of rows processed ...
Log statistics about the number of rows and number of rows per second.
def _flush_stack(self):
    """Returns the final output and resets the machine's state."""
    text = self._postprocess_output(''.join(self.stack))
    self._clear_char()
    self._empty_stack()
    # On Python 2 the output is coerced to unicode.
    return text if not PYTHON_2 else unicode(text)
Returns the final output and resets the machine's state.
def _parse(read_method: Callable) -> HTTPResponse: response = read_method(4096) while b'HTTP/' not in response or b'\r\n\r\n' not in response: response += read_method(4096) fake_sock = _FakeSocket(response) response = HTTPResponse(fake_sock) response.begin() r...
Trick to standardize the API between sockets and SSLConnection objects.
def _ExtractList(self, fields, ignores=(",",), terminators=()): extracted = [] i = 0 for i, field in enumerate(fields): if field in ignores: continue if field in terminators: break extracted.append(field.strip("".join(ignores))) if not (field.endswith(",") or ...
Extract a list from the given fields.
def count_elements(doc): "Counts the number of times each element is used in a document" summary = {} for el in doc.iter(): try: namespace, element_name = re.search('^{(.+)}(.+)$', el.tag).groups() except: namespace = None element_name = el.tag if ...
Counts the number of times each element is used in a document
def find_bound_ports(self, ports): bound = [] for port in ports: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind((port.ip if port.ip is not NotSpecified else "127.0.0.1", port.host_port)) except socket.error as error: ...
Find any ports that are already bound and complain about them
def preScale(self, sx, sy):
    """Calculate pre scaling and replace current matrix.

    sx multiplies the a/b components, sy the c/d components. Returns
    self so calls can be chained.
    """
    self.a, self.b = self.a * sx, self.b * sx
    self.c, self.d = self.c * sy, self.d * sy
    return self
Calculate pre scaling and replace current matrix.
def available_state_for_gene(self, gene: Gene, state: State) -> Tuple[State, ...]: result: List[State] = [] active_multiplex: Tuple[Multiplex] = gene.active_multiplex(state) transition: Transition = self.find_transition(gene, active_multiplex) current_state: int = state[gene] don...
Return the state reachable from a given state for a particular gene.
def make_sh_address(script_string, witness=False, cashaddr=True):
    """str, bool, bool -> str

    Serializes the script and delegates to the script-hash address
    builder, forwarding the witness/cashaddr flags.
    """
    return _ser_script_to_sh_address(
        script_bytes=script_ser.serialize(script_string),
        witness=witness,
        cashaddr=cashaddr)
str, bool, bool -> str
def _at_exit(self): if self.process_exit: try: term = self.term if self.set_scroll: term.reset() else: term.move_to(0, term.height) self.term.feed() except ValueError: ...
Resets terminal to normal configuration
def add_log_options(self, verbose_func=None, quiet_func=None): if not verbose_func: def verbose_func(): return log.config(verbose=True) if not quiet_func: def quiet_func(): return log.config(quiet=True) self.option('-v, --verbose', 'show mo...
A helper for setting up log options
def setup(interactive, not_relative, dry_run, reset, root_dir, testuser): click.echo("[cellpy] (setup)") init_filename = create_custom_init_filename() userdir, dst_file = get_user_dir_and_dst(init_filename) if testuser: if not root_dir: root_dir = os.getcwd() click.echo(f"[ce...
This will help you to setup cellpy.
def download_file(url, path, session=None, params=None): if url[0:2] == '//': url = 'https://' + url[2:] tmp_path = path + '.tmp' if session and params: r = session.get( url, params=params, stream=True ) elif session and not params: r = session.get( url, stream=True ) else: ...
Download an individual file.
def getGenomeList():
    """Return the names of all imported genomes."""
    import rabaDB.filters as rfilt
    query = rfilt.RabaQuery(Genome_Raba)
    return [genome.name for genome in query.iterRun()]
Return the names of all imported genomes
def info(self):
    """Retrieve metadata and current price data for this ticker.

    Returns the first quote result from the finance endpoint, or an
    empty dict when the symbol yields no results.
    """
    url = "{}/v7/finance/quote?symbols={}".format(self._base_url, self.ticker)
    results = _requests.get(url=url).json()["quoteResponse"]["result"]
    return results[0] if results else {}
Retrieve metadata and current price data.
def delete(self, key):
    """Removes the specified key from the database.

    Deleting a missing key is a silent no-op (pop with a default),
    but the content is rewritten either way.
    """
    content = self._get_content()
    content.pop(key, None)
    self.write_data(self.path, content)
Removes the specified key from the database.
def _get(pseudodict, key, single=True): matches = [item[1] for item in pseudodict if item[0] == key] if single: return matches[0] else: return matches
Helper method for getting values from "multi-dict"s
def tick_clients(self):
    """Trigger the periodic tick for every client, lazily creating the ticker."""
    if not self._ticker:
        self._create_ticker()
    tick = self._ticker.tick
    for client in self.clients.values():
        tick(client)
Trigger the periodic tick function in the client.
def _ascii_find_urls(bytes, mimetype, extra_tokens=True):
    """This function finds URLs inside of ASCII bytes.

    NOTE(review): currently it just returns the raw token stream from
    _tokenize; no URL-specific filtering happens here.
    """
    return _tokenize(bytes, mimetype, extra_tokens=extra_tokens)
This function finds URLs inside of ASCII bytes.
def open_in_browser(self, outfile):
    """Open the given HTML file in a browser.

    'default' uses the system default browser; any other value is
    resolved through webbrowser.get().
    """
    target = 'file://%s' % outfile
    if self.browser == 'default':
        webbrowser.open(target)
    else:
        webbrowser.get(self.browser).open(target)
Open the given HTML file in a browser.
def updater(f):
    """Decorate a function with named arguments into updater for transact"""
    @functools.wraps(f)
    def _updater(keys, values):
        # f consumes the values positionally and produces new values;
        # pair them back with as many keys as results were returned.
        outputs = f(*values)
        trimmed_keys = keys[:len(outputs)]
        return (trimmed_keys, outputs)
    return _updater
Decorate a function with named arguments into updater for transact
def onlyOnce(fn):
    """Set up FN to only run once within an interpreter instance."""
    def wrap(*args, **kwargs):
        # A `called` attribute on fn is the once-only marker.
        if not hasattr(fn, 'called'):
            fn.called = 1
            return fn(*args, **kwargs)
    util.mergeFunctionMetadata(fn, wrap)
    return wrap
Set up FN to only run once within an interpreter instance