code
stringlengths
51
2.34k
sequence
stringlengths
1.16k
13.1k
docstring
stringlengths
11
171
def validate_registry_uri(uri: str) -> None: parsed = parse.urlparse(uri) scheme, authority, pkg_name, query = ( parsed.scheme, parsed.netloc, parsed.path, parsed.query, ) validate_registry_uri_scheme(scheme) validate_registry_uri_authority(authority) if query: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_registry_uri'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children'...
Raise an exception if the URI does not conform to the registry URI scheme.
def load_shp(self, shapefile_name): shp_ext = 'shp' try: self.shp = open("%s.%s" % (shapefile_name, shp_ext), "rb") except IOError: try: self.shp = open("%s.%s" % (shapefile_name, shp_ext.upper()), "rb") except IOError: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_shp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Attempts to load file with .shp extension as both lower and upper case
def remove_tenant_from_flavor(request, flavor, tenant): return _nova.novaclient(request).flavor_access.remove_tenant_access( flavor=flavor, tenant=tenant)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_tenant_from_flavor'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'childr...
Remove a tenant from the given flavor access list.
def run_section(self, name, input_func=_stdin_): print('\nStuff %s by the license:\n' % name) section = self.survey[name] for question in section: self.run_question(question, input_func)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run_section'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Run the given section.
def _contigs_dict_to_file(self, contigs, fname): f = pyfastaq.utils.open_file_write(fname) for contig in sorted(contigs, key=lambda x:len(contigs[x]), reverse=True): print(contigs[contig], file=f) pyfastaq.utils.close(f)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_contigs_dict_to_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':...
Writes dictionary of contigs to file
def disconnect(self, driver): self.log("SSH disconnect") try: self.device.ctrl.sendline('\x03') self.device.ctrl.sendline('\x04') except OSError: self.log("Protocol already disconnected")
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'disconnect'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Disconnect using the protocol specific method.
def register(): registry = { key: bake_html(key) for key in ('css', 'css-all', 'tag', 'text') } registry['xpath'] = bake_parametrized(xpath_selector, select_all=False) registry['xpath-all'] = bake_parametrized(xpath_selector, select_all=True) return registry
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '22', '34', '46']}; {'id...
Return dictionary of tranform factories
def _validate_args(env, args): if all([args['cpu'], args['flavor']]): raise exceptions.ArgumentError( '[-c | --cpu] not allowed with [-f | --flavor]') if all([args['memory'], args['flavor']]): raise exceptions.ArgumentError( '[-m | --memory] not allowed with [-f | --flavo...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_args'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Raises an ArgumentError if the given arguments are not valid.
def convert_items(self, items): return ((key, self.convert(value, self)) for key, value in items)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_items'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Generator like `convert_iterable`, but for 2-tuple iterators.
def value(self): if len(self._element): var = ''.join(map(str, self.trigger.agentml.parse_tags(self._element, self.trigger))) else: var = self._element.text or attribute(self._element, 'name') default = attribute(self._element, 'default') try: self._lo...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'value'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i...
Return the current value of a variable
def plotly_class(name=None, slug=None, da=None, prefix=None, postfix=None, template_type=None): 'Return a string of space-separated class names' da, app = _locate_daapp(name, slug, da) return app.extra_html_properties(prefix=prefix, postfix=postfix, ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '22']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'plotly_class'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '10', '13', '16', '19']}; {'id': '4', 'type': 'default_param...
Return a string of space-separated class names
def view_graph(graph_str, dest_file=None): from rez.system import system from rez.config import config if (system.platform == "linux") and (not os.getenv("DISPLAY")): print >> sys.stderr, "Unable to open display." sys.exit(1) dest_file = _write_graph(graph_str, dest_file=dest_file) v...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'view_graph'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'g...
View a dot graph in an image viewer.
def toProtocolElement(self): readGroupSet = protocol.ReadGroupSet() readGroupSet.id = self.getId() readGroupSet.read_groups.extend( [readGroup.toProtocolElement() for readGroup in self.getReadGroups()] ) readGroupSet.name = self.getLocalId() readG...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'toProtocolElement'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Returns the GA4GH protocol representation of this ReadGroupSet.
def chk_goids(goids, msg=None, raise_except=True): for goid in goids: if not goid_is_valid(goid): if raise_except: raise RuntimeError("BAD GO({GO}): {MSG}".format(GO=goid, MSG=msg)) else: return goid
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'chk_goids'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
check that all GO IDs have the proper format.
def destroyTempFiles(self): os.system("rm -rf %s" % self.rootDir) logger.debug("Temp files created: %s, temp files actively destroyed: %s" % (self.tempFilesCreated, self.tempFilesDestroyed))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'destroyTempFiles'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Destroys all temp temp file hierarchy, getting rid of all files.
def _iso_info(iso_year, iso_week): "Give all the iso info we need from one calculation" prev_year_start = _iso_year_start(iso_year-1) year_start = _iso_year_start(iso_year) next_year_start = _iso_year_start(iso_year+1) first_day = year_start + dt.timedelta(weeks=iso_week-1) last_day = first_day ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iso_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'is...
Give all the iso info we need from one calculation
def broker_url(self): return 'amqp://{}:{}@{}/{}'.format( self.user, self.password, self.name, self.vhost)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'broker_url'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}...
Returns a "broker URL" for use with Celery.
def normalize_arxiv(obj): obj = obj.split()[0] matched_arxiv_pre = RE_ARXIV_PRE_2007_CLASS.match(obj) if matched_arxiv_pre: return ('/'.join(matched_arxiv_pre.group("extraidentifier", "identifier"))).lower() matched_arxiv_post = RE_ARXIV_POST_2007_CLASS.match(obj) if matched_arxiv_post: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'normalize_arxiv'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'o...
Return a normalized arXiv identifier from ``obj``.
def acquisition_function(self, x): return self._penalized_acquisition(x, self.model, self.X_batch, self.r_x0, self.s_x0)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'acquisition_function'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Returns the value of the acquisition function at x.
def save_map(self, map_path, map_data): return self._client.send(save_map=sc_pb.RequestSaveMap( map_path=map_path, map_data=map_data))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save_map'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Save a map into temp dir so create game can access it in multiplayer.
def getRejectionReasonsItems(self): reasons = self.getRejectionReasons() if not reasons: return [] reasons = reasons[0] keys = filter(lambda key: key != "checkbox", reasons.keys()) return map(lambda key: reasons[key], sorted(keys)) or []
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getRejectionReasonsItems'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Return the list of predefined rejection reasons
def schemaSetValidOptions(self, options): ret = libxml2mod.xmlSchemaSetValidOptions(self._o, options) return ret
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'schemaSetValidOptions'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Sets the options to be used during the validation.
def account(self, key=None, address=None, name=None): if key: return self.client.account(key, wallet=self) if address: q = dict(address=address) elif name: q = dict(name=name) else: raise TypeError("Missing param: key, address, or name is r...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'account'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Query for an account by key, address, or name.
def parsePositionFile(filename): l=[] with open( filename, "rb" ) as theFile: reader = csv.DictReader( theFile ) for line in reader: mytime=dateparser.parse(line['time']) line['strtime']=mytime.strftime("%d %b %Y, %H:%M UTC") l.append(line) return l
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parsePositionFile'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Parses Android GPS logger csv file and returns list of dictionaries
def searchlog(self, argv): opts = cmdline(argv, FLAGS_SEARCHLOG) self.foreach(opts.args, lambda job: output(job.searchlog(**opts.kwargs)))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'searchlog'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Retrieve the searchlog for the specified search jobs.
def _convert_value_to_native(value): if isinstance(value, Counter32): return int(value.prettyPrint()) if isinstance(value, Counter64): return int(value.prettyPrint()) if isinstance(value, Gauge32): return int(value.prettyPrint()) if isinstance(value, Integer): return int(...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_convert_value_to_native'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Converts pysnmp objects into native Python objects.
def receive_message(self): with self.lock: assert self.can_receive_messages() message_type = self._read_message_type(self._file) message = message_type(self._file, self) self._message_received(message)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'receive_message'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Receive a message from the file.
def delete(self): if not HAS_SQL: return try: conn, c = self.connect() c.execute('DELETE FROM {0}'.format(self.table_name)) conn.commit() conn.close() except: log.traceback(logging.DEBUG)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'...
Clears all cached Session objects.
def outlays(self): return pd.DataFrame({x.name: x.outlays for x in self.securities})
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'outlays'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {...
Returns a DataFrame of outlays for each child SecurityBase
def make_form_or_formset_fields_not_required(form_or_formset): if isinstance(form_or_formset, BaseFormSet): for single_form in form_or_formset: make_form_fields_not_required(single_form) else: make_form_fields_not_required(form_or_formset)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_form_or_formset_fields_not_required'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'c...
Take a Form or FormSet and set all fields to not required.
def _set_flow_entry(self, datapath, actions, in_port, dst, src=None): set_flow = self._set_flow_func.get(datapath.ofproto.OFP_VERSION) assert set_flow set_flow(datapath, actions, in_port, dst, src)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_flow_entry'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', '...
set a flow entry.
def action_print(reader, *args): arg_count = len(args) if arg_count == 0: stop_after = 0 elif arg_count == 1: stop_after = int(args[0]) else: raise RuntimeError("0 or 1 arguments expected for action 'print'") for i, record in enumerate(reader, 1): print(record.to_mess...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'action_print'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Simply print the Flow Log records to output.
def sis_metadata(self): if not self.is_sis: return None tags = self.pages[0].tags result = {} try: result.update(tags['OlympusINI'].value) except Exception: pass try: result.update(tags['OlympusSIS'].value) except Ex...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sis_metadata'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Return Olympus SIS metadata from SIS and INI tags as dict.
def deploy(self, fobj, md5=None, sha1=None, parameters={}): return self._accessor.deploy(self, fobj, md5, sha1, parameters)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'deploy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children': []...
Upload the given file object to this path
def stderr_avail(self): data = self.interpreter.stderr_write.empty_queue() if data: self.write(data, error=True) self.flush(error=True)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stderr_avail'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Data is available in stderr, let's empty the queue and write it!
def _add_token_to_document(self, token_string, token_attrs=None): token_feat = {self.ns+':token': token_string} if token_attrs: token_attrs.update(token_feat) else: token_attrs = token_feat token_id = 'token_{}'.format(self.token_count) self.add_node(token...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_add_token_to_document'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children'...
add a token node to this document graph
def ProcessContent(self, strip_expansion=False): self._ParseFile() if strip_expansion: collection = None else: collection = MacroCollection() for section in self._sections: section.BindMacroCollection(collection) result = '' for section in self._sections: result += sectio...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ProcessContent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Processes the file contents.
def ansi(string, *args): ansi = '' for arg in args: arg = str(arg) if not re.match(ANSI_PATTERN, arg): raise ValueError('Additional arguments must be ansi strings') ansi += arg return ansi + string + colorama.Style.RESET_ALL
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ansi'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'string'...
Convenience function to chain multiple ColorWrappers to a string
def search(self, query: Optional[dict] = None, offset: Optional[int] = None, limit: Optional[int] = None, order_by: Union[None, list, tuple] = None) -> Sequence['IModel']: raise NotImplementedError
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '45', '51']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'search'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '14', '23', '32']}; {'id': '4', 'type': 'identifier', 'child...
return search result based on specified rulez query
def count(self): self.request_params.update({'sysparm_count': True}) response = self.session.get(self._get_stats_url(), params=self._get_formatted_query(fields=list(), limit=None, ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'count'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i...
Returns the number of records the query would yield
def partition(lst, n): q, r = divmod(len(lst), n) indices = [q*i + min(i, r) for i in xrange(n+1)] return [lst[indices[i]:indices[i+1]] for i in xrange(n)], \ [list(xrange(indices[i], indices[i+1])) for i in xrange(n)]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'partition'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ls...
Divide list into n equal parts
def update_hash_prefix_cache(self): try: self.storage.cleanup_full_hashes() self.storage.commit() self._sync_threat_lists() self.storage.commit() self._sync_hash_prefix_cache() except Exception: self.storage.rollback() r...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_hash_prefix_cache'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Update locally cached threat lists.
def register_token_getter(self, provider): app = oauth.remote_apps[provider] decorator = getattr(app, 'tokengetter') def getter(token=None): return self.token_getter(provider, token) decorator(getter)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_token_getter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Register callback to retrieve token from session
def away_abbreviation(self): abbr = re.sub(r'.*/teams/', '', str(self._away_name)) abbr = re.sub(r'/.*', '', abbr) return abbr
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'away_abbreviation'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Returns a ``string`` of the away team's abbreviation, such as 'NWE'.
def reduce_fit(interface, state, label, inp): import numpy as np out = interface.output(0) sum_etde = 0 sum_ete = [0 for _ in range(len(state["X_indices"]) + 1)] for key, value in inp: if key == "etde": sum_etde += value else: sum_ete[key] += value sum_ete...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reduce_fit'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Function joins all partially calculated matrices ETE and ETDe, aggregates them and it calculates final parameters.
def namedObject(name): classSplit = name.split('.') module = namedModule('.'.join(classSplit[:-1])) return getattr(module, classSplit[-1])
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'namedObject'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'name'...
Get a fully named module-global object.
def _initialize_trackbars(self): for parameter in self.block_matcher.parameter_maxima.keys(): maximum = self.block_matcher.parameter_maxima[parameter] if not maximum: maximum = self.shortest_dimension cv2.createTrackbar(parameter, self.window_name, ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_initialize_trackbars'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Initialize trackbars by discovering ``block_matcher``'s parameters.
def make_emoji_dict(self): emoji_dict = {} for line in self.emoji_full_filepath.split('\n'): (emoji, description) = line.strip().split('\t')[0:2] emoji_dict[emoji] = description return emoji_dict
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_emoji_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Convert emoji lexicon file to a dictionary
def iname(self) -> InstanceName: dp = self.data_parent() return (self.name if dp and self.ns == dp.ns else self.ns + ":" + self.name)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'iname'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}...
Return the instance name corresponding to the receiver.
def prep_input(self, read_list): logger.info("Prepping input.") i = 0 for content in read_list: quality_issue = self._check_content(content.get_text()) if quality_issue is not None: logger.warning("Skipping %d due to: %s" % (...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'prep_input'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Apply the readers to the content.
def color_greedy(self): n = self.num_vertices() coloring = np.zeros(n, dtype=int) for i, nbrs in enumerate(self.adj_list()): nbr_colors = set(coloring[nbrs]) for c in count(1): if c not in nbr_colors: coloring[i] = c break return coloring
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'color_greedy'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Returns a greedy vertex coloring as an array of ints.
def raw_diff(self): udiff_copy = self.copy_iterator() if self.__format == 'gitdiff': udiff_copy = self._parse_gitdiff(udiff_copy) return u''.join(udiff_copy)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'raw_diff'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ...
Returns raw string as udiff
def download_file(fName, time, dire=pDir()): gen_hash = get_hash(fName, 64, dire) if gen_hash == -1: return -1 user_agent = {'User-agent': 'SubDB/1.0 (sub/0.1; http://github.com/leosartaj/sub)'} param = {'action': 'download', 'hash': gen_hash, 'language': 'en'} try: r = requests.get(...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
download the required subtitle
def to_xdr_object(self): return Xdr.types.Memo(type=Xdr.const.MEMO_TEXT, text=self.text)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_xdr_object'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Creates an XDR Memo object for a transaction with MEMO_TEXT.
def job_started_message(self, job, queue): return '[%s|%s|%s] starting' % (queue._cached_name, job.pk.get(), job._cached_identifier)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'job_started_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [...
Return the message to log just befre the execution of the job
def admin_password(self, environment, target_name, password): try: remote_server_command( ["ssh", environment.deploy_target, "admin_password", target_name, password], environment, self, clean_up=True ) re...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'admin_password'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [...
Return True if password was set successfully
def to_dict(self): 'Convert a structure into a Python native type.' ctx = Context() ContextFlags = self.ContextFlags ctx['ContextFlags'] = ContextFlags if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: for key in self._ctx_debug: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {...
Convert a structure into a Python native type.
def execute_once(self): symbol = self.tape.get(self.head, self.EMPTY_SYMBOL) index = self.alphabet.index(symbol) rule = self.states[self.state][index] if rule is None: raise RuntimeError('Unexpected symbol: ' + symbol) self.tape[self.head] = rule[0] if rule[1]...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'execute_once'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
One step of execution.
def hostname(self): try: return self.data.get('identity').get('host_name') except (KeyError, AttributeError): return self.device_status_simple('')
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hostname'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ...
Return the hostname of the printer.
def _parse_arg(func, variables, arg_name, anno): if isinstance(anno, str): var = variables[anno] return var, var.read_latest elif (isinstance(anno, list) and len(anno) == 1 and isinstance(anno[0], str)): var = variables[anno[0]] return var, var.read_all raise Startu...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_arg'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Parse an argument's annotation.
def on_headers(self, response, exc=None): if response.status_code == 101: connection = response.connection request = response.request handler = request.websocket_handler if not handler: handler = WS() parser = request.client.frame_parse...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_headers'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Websocket upgrade as ``on_headers`` event.
def _log_thread(self, pipe, queue): def enqueue_output(out, q): for line in iter(out.readline, b''): q.put(line.rstrip()) out.close() t = threading.Thread(target=enqueue_output, args=(pipe, queue)) t.daemon = True ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_log_thread'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Start a thread logging output from pipe
def contains(polygon, point):
    """Test whether point lies within the polygon.

    A point covered by every interior ring is treated as being in a
    hole and therefore not contained.
    """
    if polygon.interiors:
        in_hole = all(ring.covers(point) for ring in polygon.interiors)
    else:
        in_hole = False
    return polygon.covers(point) and not in_hole
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'contains'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'pol...
Tests whether point lies within the polygon
def on_complete(cls, req): if not (req.status == 200 or req.status == 0): alert("Couldn't connect to authority base.") LogView.add( "Error when calling Aleph authority base: %s" % req.text ) AuthorBar.hide() return AuthorBar.hid...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_complete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Callback called when the request was received.
def fetch_last_receipt_number(self, point_of_sales, receipt_type): client = clients.get_client('wsfe', point_of_sales.owner.is_sandboxed) response_xml = client.service.FECompUltimoAutorizado( serializers.serialize_ticket( point_of_sales.owner.get_or_create_ticket('wsfe') ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fetch_last_receipt_number'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'childr...
Returns the number for the last validated receipt.
def delay(name, args, kwargs): args = args or [] kwargs = dict(k.split() for k in kwargs) if kwargs else {} if name not in celery.tasks: log.error('Job %s not found', name) job = celery.tasks[name] log.info('Sending job %s', name) async_result = job.delay(*args, **kwargs) log.info('J...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delay'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'n...
Run a job asynchronously
def monitor(app): heroku_app = HerokuApp(dallinger_uid=app) webbrowser.open(heroku_app.dashboard_url) webbrowser.open("https://requester.mturk.com/mturk/manageHITs") heroku_app.open_logs() check_call(["open", heroku_app.db_uri]) while _keep_running(): summary = get_summary(app) c...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'monitor'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'app'}; {'...
Set up application monitoring.
def find_root(self):
    """Traverse parent references up to the topmost command and return it."""
    node = self
    while node.parent:
        node = node.parent
    return node
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_root'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Traverse parent refs to top.
async def user_info(self, params=None, **kwargs):
    """Request the user's profile, forcing the fields Facebook requires."""
    params = params or {}
    params['fields'] = (
        'id,email,first_name,last_name,name,link,locale,'
        'gender,location'
    )
    return await super(FacebookClient, self).user_info(params=params, **kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'user_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Facebook required fields-param.
def team_profile_get(self, **kwargs) -> SlackResponse:
    """Retrieve a team's profile via the ``team.profile.get`` method."""
    self._validate_xoxp_token()
    return self.api_call(
        "team.profile.get", http_verb="GET", params=kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'team_profile_get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Retrieve a team's profile.
def tracker():
    """Start a tracker to register running models, serving on port 22222."""
    app = mmi.tracker.app()
    app.listen(22222)
    logger.info('serving at port 22222')
    tornado.ioloop.IOLoop.instance().start()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tracker'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '15', '22', '29']}; {'id'...
start a tracker to register running models
def until(self, regex):
    """Block until *regex* (multiline) is encountered on the telnet stream."""
    logger.debug('waiting for %s', regex)
    pattern = re.compile(regex, re.M)
    self.tn.expect([pattern])
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'until'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}...
Wait until the regex is encountered
def tables_with_counts(self): table_to_count = lambda t: self.count_rows(t) return zip(self.tables, map(table_to_count, self.tables))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tables_with_counts'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Return the number of entries in all tables.
def first_rec(ofile, Rec, file_type): keylist = [] opened = False while not opened: try: pmag_out = open(ofile, 'w') opened = True except IOError: time.sleep(1) outstring = "tab \t" + file_type + "\n" pmag_out.write(outstring) keystring = "" ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'first_rec'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
opens the file ofile as a magic template file with headers as the keys to Rec
async def join_voice_channel(self, guild_id, channel_id):
    """Join a voice channel through the known node's voice websocket."""
    ws = self.get_voice_ws(guild_id)
    await ws.voice_state(guild_id, channel_id)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'join_voice_channel'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []...
Alternative way to join a voice channel if node is known.
def urls(self, version=None): urls = [] for base_url, routes in self.api.http.routes.items(): for url, methods in routes.items(): for method, versions in methods.items(): for interface_version, interface in versions.items(): if inte...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'urls'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Returns all URLS that are mapped to this interface
def root_tokens(self):
    """Root tokens of word roots; runs the analysis tagger first if needed."""
    already_tagged = self.is_tagged(ANALYSIS)
    if not already_tagged:
        self.tag_analysis()
    return self.get_analysis_element(ROOT_TOKENS)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'root_tokens'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Root tokens of word roots.
def gather_hinting(config, rules, valid_paths): hinting = collections.defaultdict(list) for rule in rules: root, name = rule.code_path.split(':', 1) info = valid_paths[root][name] if info['hints-callable']: result = info['hints-callable'](config=config, **rule.arguments) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'gather_hinting'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Construct hint arguments for datanommer from a list of rules.
def bar_chart_mf(data, path_name): N = len(data) ind = np.arange(N) width = 0.8 fig, ax = pyplot.subplots() rects1 = ax.bar(ind, data, width, color='g') ax.set_ylabel('Population') ax.set_xticks(ind+width/2) labs = ['m='+str(i) for i in range(-N/2+1, N/2+1)] ax.set_xticklabels...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bar_chart_mf'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Make a bar chart for data on MF quantities.
def load(self, commit=None): git_info = self.record_git_info(commit) LOGGER.debug("Loading the result for commit '%s'.", git_info.hexsha) filename = self.get_filename(git_info) LOGGER.debug("Loading the result '%s'.", filename) result = super(RepoResultManager, self).load(filenam...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Load a result from the storage directory.
def save(self, filename):
    """Save the colormap figure to *filename* on a black background."""
    plt.savefig(filename, fig=self.fig,
                facecolor='black', edgecolor='black')
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
save colormap to file
def _load_api(self): self._add_url_route('get_scheduler_info', '', api.get_scheduler_info, 'GET') self._add_url_route('add_job', '/jobs', api.add_job, 'POST') self._add_url_route('get_job', '/jobs/<job_id>', api.get_job, 'GET') self._add_url_route('get_jobs', '/jobs', api.get_jobs, 'GET'...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_api'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Add the routes for the scheduler API.
def print_packet_count():
    """Print the number of packets grouped by packet name."""
    for name in archive.list_packet_names():
        total = sum(rec.count
                    for group in archive.list_packet_histogram(name)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(name, total))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'print_packet_count'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', ...
Print the number of packets grouped by packet name.
def _compile(self, expression): x = self.RE_PYTHON_VAR.sub('(?:\\1,)', expression) x = self.RE_SPACES.sub('', x) return re.compile(x)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_compile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Transform a class exp into an actual regex
def send_signals(self):
    """Shout for the world to hear whether a txn was successful.

    Dispatches ``invalid_ipn_received`` when the flag is set, otherwise
    ``valid_ipn_received``.
    """
    signal = invalid_ipn_received if self.flag else valid_ipn_received
    signal.send(sender=self)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'send_signals'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Shout for the world to hear whether a txn was successful.
def _pad_string(self, str, colwidth): width = self._disp_width(str) prefix = (colwidth - 1 - width) // 2 suffix = colwidth - prefix - width return ' ' * prefix + str + ' ' * suffix
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_pad_string'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Center-pads a string to the given column width using spaces.
def _update_images(self): wd_images = self.data['claims'].get('P18') if wd_images: if not isinstance(wd_images, list): wd_images = [wd_images] if 'image' not in self.data: self.data['image'] = [] for img_file in wd_images: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_images'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
add images from Wikidata
def split_input(cls, mapper_spec): filelists = mapper_spec.params[cls.FILES_PARAM] max_values_count = mapper_spec.params.get(cls.MAX_VALUES_COUNT_PARAM, -1) max_values_size = mapper_spec.params.get(cls.MAX_VALUES_SIZE_PARAM, -1) return [cls([0] * len(files), max_values_count, max_values_size) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'split_input'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Split input into multiple shards.
def mark_module_skipped(self, module_name): try: del self.modules[module_name] except KeyError: pass self.skip_modules[module_name] = True
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'mark_module_skipped'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Skip reloading the named module in the future
def _var(ins): output = [] output.append('%s:' % ins.quad[1]) output.append('DEFB %s' % ((int(ins.quad[2]) - 1) * '00, ' + '00')) return output
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_var'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ins'}; {'id'...
Defines a memory variable.
def get(self, key, recursive=False, sorted=False, quorum=False, wait=False, wait_index=None, timeout=None): url = self.make_key_url(key) params = self.build_args({ 'recursive': (bool, recursive or None), 'sorted': (bool, sorted or None), 'quorum': (bool, q...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '24']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15', '18', '21']}; {'id': '4', 'type': 'identifier',...
Requests to get a node by the given key.
def _get_privacy(self, table_name):
    """Return the current privacy of a table in lower case, or None if absent."""
    manager = DatasetManager(self.auth_client)
    try:
        dataset = manager.get(table_name)
        return dataset.privacy.lower()
    except NotFoundException:
        return None
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_privacy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
gets current privacy of a table
def _load_stats(self): for attempt in range(0, 3): try: with self.stats_file.open() as f: return json.load(f) except ValueError: if attempt < 2: time.sleep(attempt * 0.2) else: rai...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_stats'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Load the webpack-stats file
def reset(self):
    """Reset the Path back to its starting coordinates for the next use."""
    self._x, self._y = self._start_x, self._start_y
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reset'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i...
Reset the Path for use next time.
def size(self,value): if not self.params: self.params = dict(size=value) return self self.params['size'] = value return self
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'size'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
The number of hits to return. Defaults to 10
def aspage(self):
    """Return this virtual frame as a TiffPage read from the file.

    Raises ValueError when the frame has no file offset.
    """
    if self.offset is None:
        raise ValueError('cannot return virtual frame as page.')
    handle = self.parent.filehandle
    handle.seek(self.offset)
    return TiffPage(self.parent, index=self.index)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'aspage'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'...
Return TiffPage from file.
def convert_args(self, command, args): for wanted, arg in zip(command.argtypes(), args): wanted = wanted.type_ if(wanted == "const"): try: yield to_int(arg) except: if(arg in self.processor.constants): yield self.processor.constants[arg] else: yield arg if(wanted == "regist...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_args'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Converts ``str -> int`` or ``register -> int``.
def generate_direct_deps(self, target: Target):
    """Generate only the direct dependencies of `target`, in sorted name order."""
    for dep_name in sorted(target.deps):
        yield self.targets[dep_name]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_direct_deps'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Generate only direct dependencies of `target`.
def translate(self, instruction): try: trans_instrs = self.__translate(instruction) except NotImplementedError: unkn_instr = self._builder.gen_unkn() unkn_instr.address = instruction.address << 8 | (0x0 & 0xff) trans_instrs = [unkn_instr] self....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'translate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Return IR representation of an instruction.
def save(self, filename=None): filename = filename or self._filename o = open(filename, 'w') o.write(self.write()) o.close()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Save an arff structure to a file.