code
stringlengths
51
2.34k
sequence
stringlengths
1.16k
13.1k
docstring
stringlengths
11
171
def StringEncoder(field_number, is_repeated, is_packed): tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) local_EncodeVarint = _EncodeVarint local_len = len assert not is_packed if is_repeated: def EncodeRepeatedField(write, value): for element in value: encoded = element....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'StringEncoder'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va...
Returns an encoder for a string field.
def datetimes(self): if self._timestamps_data is None: self._calculate_timestamps() return tuple(DateTime.from_moy(moy, self.is_leap_year) for moy in self._timestamps_data)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'datetimes'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
A sorted list of datetimes in this analysis period.
def OnSortAscending(self, event): try: with undo.group(_("Sort ascending")): self.grid.actions.sort_ascending(self.grid.actions.cursor) statustext = _(u"Sorting complete.") except Exception, err: statustext = _(u"Sorting failed: {}").format(err) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'OnSortAscending'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Sort ascending event handler
def download(): ftp = ftplib.FTP(SITE) ftp.set_debuglevel(DEBUG) ftp.login(USER, PASSWD) ftp.cwd(DIR) filelist = ftp.nlst() filecounter = MANAGER.counter(total=len(filelist), desc='Downloading', unit='files') for filename in filelist: with Writer(fil...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '14', '21', '29', '36', ...
Download all files from an FTP share
def command_preflight_check(self): checks_pass, failures = self.environment.perform_preflight_check() if checks_pass: print('All checks pass.') else: sys.stderr.write('Problems encountered:\n') for msg in failures: sys.stderr.write(' - %s\n' % ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'command_preflight_check'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va...
Detects whether we have everything needed to mount sshfs filesystems.
def abort (aggregate): while True: try: aggregate.abort() aggregate.finish() aggregate.end_log_output(interrupt=True) break except KeyboardInterrupt: log.warn(LOG_CHECK, _("user abort; force shutdown")) aggregate.end_log_output(...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'abort'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'aggregate'}...
Helper function to ensure a clean shutdown.
def _fill_request(self, request, rdata): if not isinstance(rdata, dict): raise InvalidRequestError request['jsonrpc'] = self._get_jsonrpc(rdata) request['id'] = self._get_id(rdata) request['method'] = self._get_method(rdata) request['params'] = self._get_params(rdata)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fill_request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va...
Fills request with data from the jsonrpc call.
def timedeltaToString(delta): if delta.days == 0: sign = 1 else: sign = delta.days / abs(delta.days) delta = abs(delta) days = delta.days hours = int(delta.seconds / 3600) minutes = int((delta.seconds % 3600) / 60) seconds = int(delta.seconds % 60) output = '' if sign...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'timedeltaToString'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Convert timedelta to an ical DURATION.
def _parse( self, item_iter, state ): parsed_array = [] for i, item in enumerate(item_iter): state.push_location(self._item_processor.element_path, i) parsed_array.append(self._item_processor.parse_at_element(item, state)) state...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Parse the array data using the provided iterator of XML elements.
def obsres_from_oblock_id(self, obsid, configuration=None): este = self.ob_table[obsid] obsres = obsres_from_dict(este) _logger.debug("obsres_from_oblock_id id='%s', mode='%s' START", obsid, obsres.mode) try: this_drp = self.drps.query_by_name(obsres.instrument) excep...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'obsres_from_oblock_id'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':...
Override instrument configuration if configuration is not None
def predictive_probability_multistate(M_c, X_L_list, X_D_list, Y, Q): logprobs = [float(predictive_probability(M_c, X_L, X_D, Y, Q)) for X_L, X_D in zip(X_L_list, X_D_list)] return logmeanexp(logprobs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'predictive_probability_multistate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'id...
Returns the predictive probability, averaged over each sample.
def _set_session_cookie(self): LOGGER.debug('Setting session cookie for %s', self.session.id) self.set_secure_cookie(name=self._session_cookie_name, value=self.session.id, expires=self._cookie_expiration)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_session_cookie'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Set the session data cookie.
def prepare_mongod_server(server): log_info("Preparing server '%s' for use as configured..." % server.id) cluster = server.get_cluster() if server.supports_local_users(): users.setup_server_local_users(server) if not server.is_cluster_member() or server.is_standalone_config_server()...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'prepare_mongod_server'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Contains post start server operations
def apply(self): self.read_group_info() if self.tabs.count() == 0: self.button_color.setEnabled(False) self.button_del.setEnabled(False) self.button_apply.setEnabled(False) else: self.button_color.setEnabled(True) self.button_del.setEna...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'apply'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i...
Apply changes to the plots.
def local_timezone(self): if self._local_tz.zone in pytz.all_timezones: return self._local_tz.zone return self.timezone
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'local_timezone'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Returns the name of the local timezone.
def module_functions(modulestr): funcs = dict(inspect.getmembers(import_module(modulestr), inspect.isfunction)) return OrderedDict(sorted(funcs.items(), key=lambda f: f[0]))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'module_functions'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Return ordered dictionary of all functions declared in module
def create_node_tables(self): self.cursor.execute('PRAGMA foreign_keys=1') self.cursor.execute( ) self.cursor.execute( ) self.cursor.execute( ) self.cursor.execute( 'CREATE UNIQUE INDEX IF NOT EXISTS node ON nodes (value)' ) self.cursor.execute( ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_node_tables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Create node and link tables if they don't exist.
def _is_allowed_abbr(self, tokens): if len(tokens) <= 2: abbr_text = ''.join(tokens) if self.abbr_min <= len(abbr_text) <= self.abbr_max and bracket_level(abbr_text) == 0: if abbr_text[0].isalnum() and any(c.isalpha() for c in abbr_text): if re.match('...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_is_allowed_abbr'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Return True if text is an allowed abbreviation.
def dict_to_enum_fn(d: Dict[str, Any], enum_class: Type[Enum]) -> Enum: return enum_class[d['name']]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '22', '24']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dict_to_enum_fn'}; {'id': '3', 'type': 'parameters', 'children': ['4', '14']}; {'id': '4', 'type': 'typed_parameter', 'children...
Converts an ``dict`` to a ``Enum``.
def _get_fix_my_django_submission_url(self, tb_info, sanitized_tb): err_post_create_path = '/create/' url = '{0}{1}'.format(base_url, err_post_create_path) return '{url}?{query}'.format( url=url, query=urlencode({ 'exception_type': clean_exception_type(tb_...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_fix_my_django_submission_url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier',...
Links to the error submission url with pre filled fields
def _copy_params(self): cls = type(self) src_name_attrs = [(x, getattr(cls, x)) for x in dir(cls)] src_params = list(filter(lambda nameAttr: isinstance(nameAttr[1], Param), src_name_attrs)) for name, param in src_params: setattr(self, name, param._copy_new_parent(self))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_copy_params'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Copy all params defined on the class to current object.
def _factory(cls, constraints, op): pieces = [] for i, constraint in enumerate(constraints): pieces.append(constraint) if i != len(constraints) - 1: pieces.append(op) return cls(pieces)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_factory'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Factory for joining constraints with a single conjunction
def _size_map(size): try: if not isinstance(size, int): if re.search(r'[Kk]', size): size = 1024 * float(re.sub(r'[Kk]', '', size)) elif re.search(r'[Mm]', size): size = 1024**2 * float(re.sub(r'[Mm]', '', size)) size = int(size) re...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_size_map'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'size'};...
Map Bcache's size strings to real bytes
def change_last_time_step(self, **replace_time_step_kwargs): assert self._time_steps self._time_steps[-1] = self._time_steps[-1].replace( **replace_time_step_kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_last_time_step'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Replace the last time-steps with the given kwargs.
def skip(self): for pos, element in self.element_iter: tag, class_attr = _tag_and_class_attr(element) if tag == "div" and "thread" in class_attr and pos == "end": break
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'skip'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id...
Eats through the input iterator without recording the content.
def destroy(self): node = self.node if not config.is_node_destroyable(node.name): logger.error('node %s has non-destroyable prefix' % node.name) return False logger.info('destroying node %s' % node) return node.destroy()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'destroy'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {...
Insure only destroyable nodes are destroyed
def list_types(self): uri = "/notification_types" resp, resp_body = self.api.method_get(uri) return [CloudMonitorNotificationType(self, info) for info in resp_body["values"]]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_types'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}...
Returns a list of all available notification types.
def _get_float(data, position, dummy0, dummy1, dummy2): end = position + 8 return _UNPACK_FLOAT(data[position:end])[0], end
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_float'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': ...
Decode a BSON double to python float.
def _start_container(self, container, tool_d, s_containers, f_containers): section = tool_d[container]['section'] del tool_d[container]['section'] manifest = Template(self.manifest) try: c = self.d_client.containers.get(container) c.start() s_container...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_start_container'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'child...
Start container that was passed in and return status
def _active_mounts_openbsd(ret): for line in __salt__['cmd.run_stdout']('mount -v').split('\n'): comps = re.sub(r"\s+", " ", line).split() parens = re.findall(r'\((.*?)\)', line, re.DOTALL) if len(parens) > 1: nod = __salt__['cmd.run_stdout']('ls -l {0}'.format(comps[0])) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_active_mounts_openbsd'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
List active mounts on OpenBSD systems
def must_stop(self): return bool(self.terminate_gracefuly and self.end_signal_caught or self.num_loops >= self.max_loops or self.end_forced or self.wanted_end_date and datetime.utcnow() >= self.wanted_end_date)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'must_stop'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Return True if the worker must stop when the current loop is over.
def compare_digest(a, b): py_version = sys.version_info[0] if py_version >= 3: return _compare_digest_py3(a, b) return _compare_digest_py2(a, b)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compare_digest'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Compare 2 hash digest.
def generate(env): global PDFLaTeXAction if PDFLaTeXAction is None: PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR') global PDFLaTeXAuxAction if PDFLaTeXAuxAction is None: PDFLaTeXAuxAction = SCons.Action.Action(PDFLaTeXAuxFunction, st...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env'}; {...
Add Builders and construction variables for pdflatex to an Environment.
def bar3d(h2: Histogram2D, ax: Axes3D, **kwargs): density = kwargs.pop("density", False) data = get_data(h2, cumulative=False, flatten=True, density=density) if "cmap" in kwargs: cmap = _get_cmap(kwargs) _, cmap_data = _get_cmap_data(data, kwargs) colors = cmap(cmap_data) else: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bar3d'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6...
Plot of 2D histograms as 3D boxes.
def root_urns_for_deletion(self): roots = set() for urn in self._urns_for_deletion: new_root = True str_urn = utils.SmartUnicode(urn) fake_roots = [] for root in roots: str_root = utils.SmartUnicode(root) if str_urn.startswith(str_root): new_root = False ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'root_urns_for_deletion'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Roots of the graph of urns marked for deletion.
def _load_ssh(self, tag): for child in tag: if child.tag == "server": self._vardict["server"] = child.attrib elif child.tag == "codes": self._load_codes(child, True) elif child.tag == "mappings": self._load_mapping(child, True) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_ssh'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Loads the SSH configuration into the vardict.
def request(self, method, url, **kwargs): if not url.startswith('https'): url = '{}{}'.format(self.args.tc_api_path, url) return super(TcExSession, self).request(method, url, **kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Override request method disabling verify on token renewal if disabled on session.
def compose(func_list): def f(G, bim): for func in func_list: G, bim = func(G, bim) return G, bim return f
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compose'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'func_list...
composion of preprocessing functions
def user_can_add_attachments(self): if not self.global_attachments_allowed(): return False context = self.context pm = api.get_tool("portal_membership") return pm.checkPermission(AddAttachment, context)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'user_can_add_attachments'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Checks if the current logged in user is allowed to add attachments
def backlink(node): seen = set() to_see = [node] while to_see: node = to_see.pop() seen.add(node) for succ in node.next: succ.prev.add(node) if succ not in seen: to_see.append(succ)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'backlink'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'node'}; ...
Given a CFG with outgoing links, create incoming links.
def attr(aid): def _attr(ctx): return ctx.current_link[ATTRIBUTES].get(aid) return _attr
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'attr'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'aid'}; {'id'...
Action function generator to retrieve an attribute from the current link
def log_all(self, file): global rflink_log if file == None: rflink_log = None else: log.debug('logging to: %s', file) rflink_log = open(file, 'a')
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_all'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
Log all data received from RFLink to file.
def access_list(**kwargs): ctx = Context(**kwargs) ctx.execute_action('access:list', **{ 'unicorn': ctx.repo.create_secure_service('unicorn'), })
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'access_list'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'dictionary_splat_pattern', 'children': ['5']...
Shows services for which there are ACL specified.
def fix_e271(self, result): line_index = result['line'] - 1 target = self.source[line_index] offset = result['column'] - 1 fixed = fix_whitespace(target, offset=offset, replacement=' ') if fixed == target: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fix_e271'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Fix extraneous whitespace around keywords.
def bytes_needed(self): if self.native: ret = self.native.expected_length - self.buf_len() else: ret = self.expected_length - self.buf_len() if ret <= 0: return 1 return ret
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bytes_needed'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self...
return number of bytes needed for next parsing stage
def _installed_snpeff_genome(base_name, config): snpeff_config_file = os.path.join(config_utils.get_program("snpeff", config, "dir"), "snpEff.config") if os.path.exists(snpeff_config_file): data_dir = _find_snpeff_datadir(snpeff_config_file) dbs = [d for d i...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_installed_snpeff_genome'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [...
Find the most recent installed genome for snpEff with the given name.
def keys(self): keys = ttk.Label.keys(self) keys.extend(["link", "normal_color", "hover_color", "clicked_color"]) return keys
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'keys'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id...
Return a list of all resource names of this widget.
def Validate(self): ValidateMultiple(self.probe, "Method has invalid probes") Validate(self.target, "Method has invalid target") Validate(self.hint, "Method has invalid hint")
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'Validate'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ...
Check the Method is well constructed.
def resumable(self): jids = self.client.workers[self.client.worker_name]['jobs'] jobs = self.client.jobs.get(*jids) queue_names = set([queue.name for queue in self.queues]) return [job for job in jobs if job.queue_name in queue_names]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'resumable'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Find all the jobs that we'd previously been working on
def dl_cub(cub_url, cub_archive_name): with open(cub_archive_name, 'wb') as f: remote_file = urllib2.urlopen(cub_url) meta = remote_file.info() cl_header = meta.getheaders("Content-Length") remote_file_size = int(cl_header[0]) if len(cl_header) > 0 else None local_file_size =...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dl_cub'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cub_u...
Download cub archive from cub_url and store it in cub_archive_name
def generate_vector_color_map(self): vector_stops = [] if type(self.data) == str: self.data = geojson_to_dict_list(self.data) for row in self.data: color = color_map(row[self.color_property], self.color_stops, self.color_default) vector_stops.append([row[self....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_vector_color_map'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Generate color stops array for use with match expression in mapbox template
def can_use_cached_output(self, contentitem): return contentitem.plugin.search_output and not contentitem.plugin.search_fields \ and super(SearchRenderingPipe, self).can_use_cached_output(contentitem)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'can_use_cached_output'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Read the cached output - only when search needs it.
def using_ios_stash(): print('detected install path:') print(os.path.dirname(__file__)) module_names = set(sys.modules.keys()) return 'stash' in module_names or 'stash.system' in module_names
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'using_ios_stash'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '10', '22', '35']...
returns true if sys path hints the install is running on ios
def change_channel_group(self, group): assert self._probe is not None self._channels = _probe_channels(self._probe, group) self._positions = _probe_positions(self._probe, group)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_channel_group'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '...
Change the current channel group.
def retryable_writes_supported(self): return ( self._ls_timeout_minutes is not None and self._server_type in (SERVER_TYPE.Mongos, SERVER_TYPE.RSPrimary))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'retryable_writes_supported'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Checks if this server supports retryable writes.
def _get_context_id(self): from furious.context import get_current_context context_id = self._options.get('context_id') if context_id: return context_id try: context = get_current_context() except errors.NotInContextError: context = None ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_context_id'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
If this async is in a context set the context id.
def _prepare_app(self, app): for key in ['url', 'html', 'script', 'implies']: try: value = app[key] except KeyError: app[key] = [] else: if not isinstance(value, list): app[key] = [value] for key in [...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_prepare_app'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Normalize app data, preparing it for the detection phase.
def register_directory(self, dirpath, **kwargs): kwargs['file_extensions'] = kwargs.get("file_extensions", self.rdf_formats) files = list_files(file_directory=dirpath, **kwargs) for fileinfo in files: self.register_rml(fileinfo[-1], **kw...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_directory'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []...
Registers all of the files in the directory path
def start_block(self, stylestack=None):
    """Open a block entity, optionally applying a style definition first."""
    # Flush any pending inline output before opening the block.
    if self.dirty:
        self.escpos._raw('\n')
        self.dirty = False
    self.stack.append('block')
    if stylestack:
        self.style(stylestack)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'start_block'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
starts a block entity with an optional style definition
def infer_endpoint(rule_payload):
    """Infer which endpoint ("counts" or "search") suits the given rule payload.

    Accepts either a dict or a JSON string; payloads with a "bucket" key
    target the counts endpoint.
    """
    if isinstance(rule_payload, dict):
        payload = rule_payload
    else:
        payload = json.loads(rule_payload)
    if payload.get("bucket"):
        return "counts"
    return "search"
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'infer_endpoint'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ru...
Infer which endpoint should be used for a given rule payload.
def parse(self, node): self._attrs = {} vals = [] yielded = False for x in self._read_parts(node): if isinstance(x, Field): yielded = True x.attrs = self._attrs yield x else: vals.append(ustr(x).strip...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}...
Return generator yielding Field objects for a given node
def permissions(self, **kwargs):
    """Return the permission collection for the current tailored audience."""
    self._validate_loaded()
    account, audience_id = self.account, self.id
    return TailoredAudiencePermission.all(account, audience_id, **kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'permissions'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Returns a collection of permissions for the current tailored audience.
def _validate_nodes_with_data(self, names): names = names if isinstance(names, list) else [names] if not names: raise RuntimeError("Argument `nodes` is not valid") for ndict in names: if (not isinstance(ndict, dict)) or ( isinstance(ndict, dict) and (set(n...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_nodes_with_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': ...
Validate NodeWithData pseudo-type.
def _collect_names(handlers, scopes, user, client): results = set() data = {'user': user, 'client': client} def visitor(_scope_name, func): claim_names = func(data) if claim_names is not None: results.update(claim_names) _visit_handlers(handlers, visitor, 'scope', scopes) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_collect_names'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [...
Get the names of the claims supported by the handlers for the requested scope.
def changelog_cli(ctx):
    """Generate a changelog from commit messages (no-op when a subcommand runs)."""
    if not ctx.invoked_subcommand:
        # Imports deferred so subcommand dispatch stays cheap.
        from peltak.core import shell
        from . import logic
        shell.cprint(logic.changelog())
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'changelog_cli'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ctx...
Generate changelog from commit messages.
def hilite(s, ok=True, bold=False):
    """Return an ANSI-highlighted version of 's'.

    ok=True -> green, ok=False -> red, ok=None -> uncolored; bold adds the
    bold attribute. Returns 's' unchanged when the terminal lacks color support.
    """
    if not term_supports_colors():
        return s
    codes = []
    if ok is not None:
        codes.append('32' if ok else '31')
    if bold:
        codes.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(codes), s)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hilite'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Return an highlighted version of 'string'.
def _make_color_fn(color): def _color(text = ""): return (_color_sep + color + _color_sep2 + text + _color_sep + "default" + _color_sep2) return _color
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_color_fn'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'co...
Create a function that set the foreground color.
def list(self, wg_uuid, parent=None, flat=False, node_types=None): url = "%(base)s/%(wg_uuid)s/nodes" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid } param = [] if parent: if isinstance(parent, (list,)): if len(parent) >= 1: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Get a list of workgroup nodes.
def _error(self, message, start, end=None):
    """Raise an EfilterParseError with the offending source span highlighted."""
    raise errors.EfilterParseError(
        message=message,
        source=self.source,
        start=start,
        end=end,
    )
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_error'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Raise a nice error, with the token highlighted.
def _load_get_attr(self, name):
    """Return an internal attribute, first loading the header if necessary."""
    # In readable modes, an unset _N means the header has not been parsed yet.
    needs_header = self._mode in _allowed_read and self._N is None
    if needs_header:
        self._read_header()
    return getattr(self, name)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_get_attr'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Return an internal attribute after ensuring the header is loaded if necessary.
def _find_chain_name(self, mac): ipt_cmd = ['iptables', '-t', 'filter', '-S'] cmdo = dsl.execute(ipt_cmd, root_helper=self._root_helper, log_output=False) for o in cmdo.split('\n'): if mac in o.lower(): chain = o.split()[1] L...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_find_chain_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Find a rule associated with a given mac.
def write(name, keyword, domain, citation, author, description, species, version, contact, licenses, values, functions, output, value_prefix): write_namespace( name, keyword, domain, author, citation, values, namespace_description=description, namespace_species=species, nam...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17']...
Build a namespace from items.
def smsTextMode(self, textMode):
    """Set to True for the modem to use text mode for SMS, or False for PDU mode."""
    if textMode == self._smsTextMode:
        return  # already in the requested mode
    if self.alive:
        self.write('AT+CMGF={0}'.format(1 if textMode else 0))
    self._smsTextMode = textMode
    # SMS responses are parsed differently per mode, so rebuild the regexes.
    self._compileSmsRegexes()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'smsTextMode'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Set to True for the modem to use text mode for SMS, or False for it to use PDU mode
def low(self, fun, low):
    """Pass the cloud function and low data structure to run."""
    target = getattr(self, fun)
    call = salt.utils.args.format_call(target, low)
    args = call.get('args', ())
    kwargs = call.get('kwargs', {})
    return target(*args, **kwargs)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'low'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Pass the cloud function and low data structure to run
def probe_git(): try: repo = git.Repo() except git.InvalidGitRepositoryError: LOGGER.warning( "We highly recommend keeping your model in a git repository." " It allows you to track changes and to easily collaborate with" " others via online platforms such as h...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'probe_git'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '31', '54']}; {'id': '5...
Return a git repository instance if it exists.
def id(self):
    """The EPSG code for this CRS, taken from the tail of the gml:id attribute."""
    gml_id = self.element.attrib[GML_NS + 'id']
    # The code is the last dash-separated component, e.g. "...-4326" -> "4326".
    return gml_id.rsplit('-', 1)[-1]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'id'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id':...
The EPSG code for this CRS.
def delete_role_perm(role_id, perm_id,**kwargs): _get_perm(perm_id) _get_role(role_id) try: roleperm_i = db.DBSession.query(RolePerm).filter(RolePerm.role_id==role_id, RolePerm.perm_id==perm_id).one() db.DBSession.delete(roleperm_i) except NoResultFound: raise ResourceNotFoundErr...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_role_perm'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Remove a permission from a role
def add_toc_entry(self, title, level, slide_number):
    """Adds a new entry to the current presentation Table of Contents."""
    entry = {'title': title, 'number': slide_number, 'level': level}
    self.__toc.append(entry)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_toc_entry'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []...
Adds a new entry to current presentation Table of Contents.
def state_length(state, size):
    """Check that the state has exactly one entry per node in the network.

    Returns True on success; raises ValueError otherwise.
    """
    entries = len(state)
    if entries == size:
        return True
    raise ValueError('Invalid state: there must be one entry per '
                     'node in the network; this state has {} entries, but '
                     'there are {} nodes.'.format(entries, size))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'state_length'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Check that the state is the given size.
def Ctt_(self):
    """Covariance matrix of the time-shifted data."""
    self._check_estimated()
    running_covar = self._rc
    return running_covar.cov_YY(bessel=self.bessel)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'Ctt_'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id...
Covariance matrix of the time shifted data
def error(*args):
    """Display an error message: stderr when interactive, GUI notification otherwise."""
    if not sys.stdin.isatty():
        notify_error(*args)
    else:
        print('ERROR:', *args, file=sys.stderr)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'error'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'list_splat_pattern', 'children': ['5']}; {'id': '5...
Display error message via stderr or GUI.
def benchmark_mitdb_record(rec, detector, verbose): sig, fields = rdsamp(rec, pb_dir='mitdb', channels=[0]) ann_ref = rdann(rec, pb_dir='mitdb', extension='atr') qrs_inds = detector(sig=sig[:,0], fs=fields['fs'], verbose=verbose) comparitor = compare_annotations(ref_sample=ann_ref.sample[1:], ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'benchmark_mitdb_record'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children'...
Benchmark a single mitdb record
def list_roles():
    """Show a list of all available roles with their descriptions."""
    for role in lib.get_roles():
        fullname = role['fullname']
        padding = lib.get_margin(len(fullname))
        description = role.get('description', '(no description)')
        print("{0}{1}{2}".format(fullname, padding, description))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_roles'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', 'type': ...
Show a list of all available roles
def get(self, *args, **kwargs):
    """Call from main thread: pop the next RPC payload off the control buffer."""
    item = self.buf.get(*args, **kwargs)
    logger.debug("Removing RPC payload from ControlBuffer queue: %s", item)
    return item
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Call from main thread.
def push_bus(self, tokens): logger.debug("Pushing bus data: %s" % tokens) bus = Bus() bus.name = tokens["bus_no"] bus.v_magnitude = tokens["v_magnitude"] bus.v_angle = tokens["v_angle"] bus.v_magnitude = tokens["v_magnitude"] bus.v_angle = tokens["v_angle"] ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'push_bus'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Adds a Bus object to the case.
def create_payload(self):
    """Remove ``smart_class_parameter_id`` or ``smart_variable_id`` from the payload."""
    payload = super(OverrideValue, self).create_payload()
    # Only one of the two parent references may be set; drop the matching id.
    for attr, key in (('smart_class_parameter', 'smart_class_parameter_id'),
                      ('smart_variable', 'smart_variable_id')):
        if hasattr(self, attr):
            del payload[key]
    return payload
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_payload'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Remove ``smart_class_parameter_id`` or ``smart_variable_id``
def addFile(self, path, msg=""):
    """Adds a file to the version.

    NOTE(review): `msg` is currently unused here — kept for interface
    compatibility with callers; confirm whether it should reach addItem.
    """
    new_item = Item.from_path(repo=self.repo, path=path)
    self.addItem(new_item)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'addFile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Adds a file to the version
def getbranchcomponents(idf, branch, utest=False): fobjtype = 'Component_%s_Object_Type' fobjname = 'Component_%s_Name' complist = [] for i in range(1, 100000): try: objtype = branch[fobjtype % (i,)] if objtype.strip() == '': break objname = br...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getbranchcomponents'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [...
get the components of the branch
def ensure_bytes(str_or_bytes, encoding='utf-8', errors='strict'):
    """Ensure the input is bytes, encoding it when it is a text string."""
    if not isinstance(str_or_bytes, six.text_type):
        return str_or_bytes
    return str_or_bytes.encode(encoding, errors)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ensure_bytes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'va...
Ensures an input is bytes, encoding if it is a string.
def register_view(self, view, timestamp): if len(self.exporters) > 0: try: for e in self.exporters: e.on_register_view(view) except AttributeError: pass self._exported_views = None existing_view = self._registered_views....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_view'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va...
registers the view's measure name to View Datas given a view
def isubsample(full_sample, k, full_sample_len=None): if not full_sample_len: full_sample_len = len(full_sample) if not 0 <= k <= full_sample_len: raise ValueError('Required that 0 <= k <= full_sample_length') picked = 0 for i, element in enumerate(full_sample): prob = (k-picked) / (full...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'isubsample'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Down-sample an enumerable list of things
def flatten_list(l):
    """Flatten nested lists one level into a single list; strings are kept whole."""
    flat = []
    for item in l:
        if isinstance(item, str):
            flat.append(item)
        else:
            flat.extend(item)
    return flat
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'flatten_list'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'l'};...
Nested lists to single-level list, does not split strings
def remove_accounts_from_group(accounts_query, group):
    """Remove every non-deleted account in the query set from the group."""
    active_accounts = accounts_query.filter(date_deleted__isnull=True)
    for account in active_accounts:
        remove_account_from_group(account, group)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_accounts_from_group'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children':...
Remove accounts from group.
def cli(env, columns, sortby, volume_id): file_storage_manager = SoftLayer.FileStorageManager(env.client) legal_centers = file_storage_manager.get_replication_locations( volume_id ) if not legal_centers: click.echo("No data centers compatible for replication.") else: table = ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
List suitable replication datacenters for the given volume.
def _apply_data(self, f, ts, reverse=False): if isinstance(ts, (int, float)): d = ts * np.ones(self.shape[0]) elif ts is None: d = None elif np.array_equal(ts.index, self.index): d = ts.values else: d = ts._retime(self.index) if not...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_apply_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [],...
Convenience function for all of the math stuff.
def _sorted_actions(self): for a in filter(lambda _: not _.last and \ not self.is_action(_, 'parsers'), self._actions): yield a for a in filter(lambda _: _.last and \ not self.is_action(_, 'parsers'), self._actions): yield a ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_sorted_actions'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Generate the sorted list of actions based on the "last" attribute.
def plot_and_save(self, data, w=800, h=420, filename='chart', overwrite=True):
    """Save the rendered html to a file and return an IFrame to display the plot
    in the notebook."""
    self.save(data, filename, overwrite)
    html_path = filename + '.html'
    return IFrame(html_path, w, h)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'plot_and_save'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15']}; {'id': '4', 'type': 'identifier', '...
Saves the rendered html to a file and returns an IFrame to display the plot in the notebook.
def parse(self, func, *args, **kwargs): result = [] for element in self.xpath('child::node()'): if isinstance(element, Parser): children = element.parse(func, *args, **kwargs) element_result = func(element, children, *args, **kwargs) if element...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Parse element with given function
def insertCallSet(self, callSet): try: models.Callset.create( id=callSet.getId(), name=callSet.getLocalId(), variantsetid=callSet.getParentContainer().getId(), biosampleid=callSet.getBiosampleId(), attributes=json.dumps(...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'insertCallSet'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Inserts a the specified callSet into this repository.
def removeUrl(self, url): root = self.etree t_urls = root.find('urls') if not t_urls: return False for t_url in t_urls.findall('url'): if t_url.text == url.strip(): t_urls.remove(t_url) if url in self.urls: self....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'removeUrl'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Remove passed url from a binder