code
stringlengths
51
2.34k
sequence
stringlengths
1.16k
13.1k
docstring
stringlengths
11
171
def format_docstring(elt, arg_comments:dict={}, alt_doc_string:str='', ignore_warn:bool=False)->str: "Merge and format the docstring definition with `arg_comments` and `alt_doc_string`." parsed = "" doc = parse_docstring(inspect.getdoc(elt)) description = alt_doc_string or f"{doc['short_description']} {...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20', '22']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_docstring'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '10', '15']}; {'id': '4', 'type': 'identifier', 'c...
Merge and format the docstring definition with `arg_comments` and `alt_doc_string`.
def _image_loop(self): if self.progress_bar and 'tqdm' in self.progress_bar.lower(): return tqdm(self.imgs, desc='Saving PNGs as flat PDFs', total=len(self.imgs), unit='PDFs') else: return self.imgs
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_image_loop'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Retrieve an iterable of images either with, or without a progress bar.
def remaining_bytes(self, meta=True): pos, self._pos = self._pos, len(self.buffer) return self.buffer[pos:]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remaining_bytes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Returns the remaining, unread bytes from the buffer.
def authenticate(self, request, **kwargs): self.request = request if not self.request: return None state = self.request.GET.get('state') code = self.request.GET.get('code') nonce = kwargs.pop('nonce', None) if not code or not state: return None ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'authenticate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Authenticates a user based on the OIDC code flow.
def _get_shells(): start = time.time() if 'sh.last_shells' in __context__: if start - __context__['sh.last_shells'] > 5: __context__['sh.last_shells'] = start else: __context__['sh.shells'] = __salt__['cmd.shells']() else: __context__['sh.last_shells'] = start...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_shells'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '13', '63']}; {'id': ...
Return the valid shells on this system
def _checksum(self): for line in [self._line1, self._line2]: check = 0 for char in line[:-1]: if char.isdigit(): check += int(char) if char == "-": check += 1 if (check % 10) != int(line[-1]): ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_checksum'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Performs the checksum for the current TLE.
def _forward_mode(self, *args): X: np.ndarray dX: np.ndarray X, dX = self._parse_dicts(*args) if X is not None: val = X else: val = self.X if dX is not None: diff = dX else: diff = np.ones_like(val) return (v...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_forward_mode'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Forward mode differentiation for variables
def from_apps(cls, apps): "Takes in an Apps and returns a VersionedProjectState matching it" app_models = {} for model in apps.get_models(include_swapped=True): model_state = VersionedModelState.from_model(model) app_models[(model_state.app_label, model_state.name.lower()...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_apps'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cl...
Takes in an Apps and returns a VersionedProjectState matching it
def backend_add(cls, name, backend): oper = cls.call( 'hosting.rproxy.server.create', cls.usable_id(name), backend) cls.echo('Adding backend %s:%s into webaccelerator' % (backend['ip'], backend['port'])) cls.display_progress(oper) cls.echo('Backend added') ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'backend_add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Add a backend into a webaccelerator
def clear(self): value = self._sum / tf.cast(self._count, self._dtype) with tf.control_dependencies([value]): reset_value = self._sum.assign(tf.zeros_like(self._sum)) reset_count = self._count.assign(0) with tf.control_dependencies([reset_value, reset_count]): return tf.identity(value)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clear'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i...
Return the mean estimate and reset the streaming statistics.
def _load_lines(self, filename, line_generator, suite, rules): line_counter = 0 for line in line_generator: line_counter += 1 if line.category in self.ignored_lines: continue if line.category == "test": suite.addTest(Adapter(filename, l...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_lines'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children':...
Load a suite with lines produced by the line generator.
def unpack(data): size, position = decoder._DecodeVarint(data, 0) envelope = wire.Envelope() envelope.ParseFromString(data[position:position+size]) return envelope
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unpack'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'data'}; {'...
unpack from delimited data
def getDefaultTMParams(self, inputSize, numInputBits): sampleSize = int(1.5 * numInputBits) if numInputBits == 20: activationThreshold = 18 minThreshold = 18 elif numInputBits == 10: activationThreshold = 8 minThreshold = 8 else: activationThreshold = int(numInputBits * .6)...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getDefaultTMParams'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []...
Returns a good default set of parameters to use in the TM region.
def _create_rubber_bands_action(self): icon = resources_path('img', 'icons', 'toggle-rubber-bands.svg') self.action_toggle_rubberbands = QAction( QIcon(icon), self.tr('Toggle Scenario Outlines'), self.iface.mainWindow()) message = self.tr('Toggle rubber bands showing scen...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_create_rubber_bands_action'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [],...
Create action for toggling rubber bands.
def shapeExprFor(self, id_: Union[ShExJ.shapeExprLabel, START]) -> Optional[ShExJ.shapeExpr]: rval = self.schema.start if id_ is START else self.schema_id_map.get(str(id_)) return rval
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17', '25']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'shapeExprFor'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Return the shape expression that corresponds to id
def maybe_resume_consumer(self): if self._consumer is None or not self._consumer.is_paused: return if self.load < self.flow_control.resume_threshold: self._consumer.resume() else: _LOGGER.debug("Did not resume, current load is %s", self.load)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'maybe_resume_consumer'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Check the current load and resume the consumer if needed.
def _fromdata(self, code, dtype, count, value, name=None): self.code = int(code) self.name = name if name else str(code) self.dtype = TIFF_DATA_TYPES[dtype] self.count = int(count) self.value = value
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fromdata'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'childr...
Initialize instance from arguments.
def _read_message(self): size = int(self.buf.read_line().decode("utf-8")) return self.buf.read(size).decode("utf-8")
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_read_message'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Reads a single size-annotated message from the server
def _chunk_filter(self, extensions): if isinstance(extensions, six.string_types): extensions = extensions.split() def _filter(chunk): name = chunk['name'] if extensions is not None: if not any(name.endswith(e) for e in extensions): ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_chunk_filter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Create a filter from the extensions and ignore files
def getextensibleindex(bunchdt, data, commdct, key, objname): theobject = getobject(bunchdt, key, objname) if theobject == None: return None theidd = iddofobject(data, commdct, key) extensible_i = [ i for i in range(len(theidd)) if 'begin-extensible' in theidd[i]] try: extens...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getextensibleindex'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'chi...
get the index of the first extensible item
def multi_session(self): _val = 0 if "multi_session" in self._dict: _val = self._dict["multi_session"] if str(_val).lower() == 'all': _val = -1 return int(_val)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'multi_session'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
convert the multi_session param a number
def random_draft(card_class: CardClass, exclude=[]): from . import cards from .deck import Deck deck = [] collection = [] for card in cards.db.keys(): if card in exclude: continue cls = cards.db[card] if not cls.collectible: continue if cls.type == CardType.HERO: continue if cls.card_class and c...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'random_draft'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '...
Return a deck of 30 random cards for the \a card_class
def peak_interval(data, alpha=_alpha, npoints=_npoints): peak = kde_peak(data,npoints) x = np.sort(data.flat); n = len(x) window = int(np.rint((1.0-alpha)*n)) starts = x[:n-window]; ends = x[window:] widths = ends - starts select = (peak >= starts) & (peak <= ends) widths = widths[select] ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'peak_interval'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Identify interval using Gaussian kernel density estimator.
def _normalize(image): offset = tf.constant(MEAN_RGB, shape=[1, 1, 3]) image -= offset scale = tf.constant(STDDEV_RGB, shape=[1, 1, 3]) image /= scale return image
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_normalize'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'image'...
Normalize the image to zero mean and unit variance.
def remove_stale_sockets(self): if self.opts.max_idle_time_seconds is not None: with self.lock: while (self.sockets and self.sockets[-1].idle_time_seconds() > self.opts.max_idle_time_seconds): sock_info = self.sockets.pop() ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_stale_sockets'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Removes stale sockets then adds new ones if pool is too small.
def dynamic_import(import_string): lastdot = import_string.rfind('.') if lastdot == -1: return __import__(import_string, {}, {}, []) module_name, attr = import_string[:lastdot], import_string[lastdot + 1:] parent_module = __import__(module_name, {}, {}, [attr]) return getattr(parent_module, ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dynamic_import'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'im...
Dynamically import a module or object.
def file_envs(self, load=None): if load is None: load = {} load.pop('cmd', None) return self.envs(**load)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'file_envs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Return environments for all backends for requests from fileclient
def dump(self): for modpath in sorted(self.map): title = 'Imports in %s' % modpath print('\n' + title + '\n' + '-'*len(title)) for name, value in sorted(self.map.get(modpath, {}).items()): print(' %s -> %s' % (name, ', '.join(sorted(value))))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dump'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id...
Prints out the contents of the import map.
def plot_prep_methods(df, prep, prepi, out_file_base, outtype, title=None, size=None): samples = df[(df["bamprep"] == prep)]["sample"].unique() assert len(samples) >= 1, samples out_file = "%s-%s.%s" % (out_file_base, samples[0], outtype) df = df[df["category"].isin(cat_labels)] ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'plot_prep_methods'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '12']}; {'id': '4', 'type': 'identi...
Plot comparison between BAM preparation methods.
def roots(cls, degree, domain, kind): basis_coefs = cls._basis_monomial_coefs(degree) basis_poly = cls.functions_factory(basis_coefs, domain, kind) return basis_poly.roots()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'roots'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Return optimal collocation nodes for some orthogonal polynomial.
def current_version(): import setuptools version = [None] def monkey_setup(**settings): version[0] = settings['version'] old_setup = setuptools.setup setuptools.setup = monkey_setup import setup reload(setup) setuptools.setup = old_setup return version[0]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'current_version'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '8', '13', '27', ...
Get the current version number from setup.py
def renew_session(self): if ((not 'user_uid' in self.cookieInterface.cookies) or self.cookieInterface.cookies['user_uid']!=self.session_uid) and (not self.expired): self.on_session_expired() if self.expired: self.session_uid = str(random.randint(1,999999999)) self.cookieI...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'renew_session'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Have to be called on user actions to check and renew session
def branchlist2branches(data, commdct, branchlist): objkey = 'BranchList'.upper() theobjects = data.dt[objkey] fieldlists = [] objnames = [obj[1] for obj in theobjects] for theobject in theobjects: fieldlists.append(list(range(2, len(theobject)))) blists = extractfields(data, commdct, ob...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'branchlist2branches'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [...
get branches from the branchlist
def image_undo(): if len(image_undo_list) <= 0: print("no undos in memory") return [image, Z] = image_undo_list.pop(-1) image.set_array(Z) _pylab.draw()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'image_undo'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '19', '31', '38']}; {'...
Undoes the last coarsen or smooth command.
def _read_cache_from_file(self): cache = {} try: with(open(self._cache_file_name, 'r')) as fp: contents = fp.read() cache = simplejson.loads(contents) except (IOError, JSONDecodeError): pass return cache
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_read_cache_from_file'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Read the contents of the cache from a file on disk.
def _setToDefaults(self): try: tmpObj = cfgpars.ConfigObjPars(self._taskParsObj.filename, associatedPkg=\ self._taskParsObj.getAssocPkg(), setAllToDefaults=self.taskName, ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_setToDefaults'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Load the default parameter settings into the GUI.
def lookup(self, topic): nsq.assert_valid_topic_name(topic) return self._request('GET', '/lookup', fields={'topic': topic})
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'lookup'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Returns producers for a topic.
def _ExecuteTransaction(self, transaction): def Action(connection): connection.cursor.execute("START TRANSACTION") for query in transaction: connection.cursor.execute(query["query"], query["args"]) connection.cursor.execute("COMMIT") return connection.cursor.fetchall() return sel...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_ExecuteTransaction'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Get connection from pool and execute transaction.
def linkorcopy(self, src, dst): if os.path.isdir(dst): log.warn('linkorcopy given a directory as destination. ' 'Use caution.') log.debug('src: %s dst: %s', src, dst) elif os.path.exists(dst): os.unlink(dst) elif not os.path.exists(os.pat...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'linkorcopy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
hardlink src file to dst if possible, otherwise copy.
def load(self, model): self._dawg.load(find_data(model)) self._loaded_model = True
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Load pickled DAWG from disk.
def accel_toggle_transparency(self, *args): self.transparency_toggled = not self.transparency_toggled self.settings.styleBackground.triggerOnChangedValue( self.settings.styleBackground, 'transparency' ) return True
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'accel_toggle_transparency'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': ...
Callback to toggle transparency.
def shutdown_connections(self): if not self.is_shutting_down: self.set_state(self.STATE_SHUTTING_DOWN) for name in self.connections: if self.connections[name].is_running: self.connections[name].shutdown()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'shutdown_connections'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
This method closes the connections to RabbitMQ.
def create_command( principal, permissions, endpoint_plus_path, notify_email, notify_message ): if not principal: raise click.UsageError("A security principal is required for this command") endpoint_id, path = endpoint_plus_path principal_type, principal_val = principal client = get_client()...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'childre...
Executor for `globus endpoint permission create`
def without_extra_phrases(self): name = re.sub(r'\s*\([^)]*\)?\s*$', '', self.name) name = re.sub(r'(?i)\s* formerly.*$', '', name) name = re.sub(r'(?i)\s*and its affiliates$', '', name) name = re.sub(r'\bet al\b', '', name) if "-" in name: hyphen_parts = name.rsplit(...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'without_extra_phrases'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Removes parenthethical and dashed phrases
def from_cli_multi_ifos(opt, length_dict, delta_f_dict, low_frequency_cutoff_dict, ifos, strain_dict=None, **kwargs): psd = {} for ifo in ifos: if strain_dict is not None: strain = strain_dict[ifo] else: strain = None ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_cli_multi_ifos'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '12']}; {'id': '4', 'type': 'iden...
Get the PSD for all ifos when using the multi-detector CLI
def trunc_str(s: str) -> str: if len(s) > max_str_size: i = max(0, (max_str_size - 3) // 2) j = max(0, max_str_size - 3 - i) s = s[:i] + "..." + s[-j:] return s
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'trunc_str'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6']...
Truncate strings to maximum length.
def register_plugin_dir(path): import glob for f in glob.glob(path + '/*.py'): for k, v in load_plugins_from_module(f).items(): if k: global_registry[k] = v
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_plugin_dir'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'...
Find plugins in given directory
def removeFileSafely(filename,clobber=True): if filename is not None and filename.strip() != '': if os.path.exists(filename) and clobber: os.remove(filename)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'removeFileSafely'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
Delete the file specified, but only if it exists and clobber is True.
def xor(a, b): return bytearray(i ^ j for i, j in zip(a, b))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'xor'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'a'}; {'i...
Bitwise xor on equal length bytearrays.
def command(state, args): if len(args) > 1: print(f'Usage: {args[0]}') return db = state.db _refresh_incomplete_anime(db) _fix_cached_completed(db)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'stat...
Fix cache issues caused by schema pre-v4.
def data_layout(self): with ffi.OutputString(owned=False) as outmsg: ffi.lib.LLVMPY_GetDataLayout(self, outmsg) return str(outmsg)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'data_layout'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
This module's data layout specification, as a string.
async def on_isupport_invex(self, value): if not value: value = INVITE_EXCEPT_MODE self._channel_modes.add(value) self._channel_modes_behaviour[rfc1459.protocol.BEHAVIOUR_LIST].add(value)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_isupport_invex'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Server allows invite exceptions.
def put_coord_inside(lattice, cart_coordinate): fc = lattice.get_fractional_coords(cart_coordinate) return lattice.get_cartesian_coords([c - np.floor(c) for c in fc])
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'put_coord_inside'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu...
converts a cartesian coordinate such that it is inside the unit cell.
def pattern_logic_srt(): if Config.options.pattern_files and Config.options.regex: return prep_regex(prep_patterns(Config.options.pattern_files)) elif Config.options.pattern_files: return prep_patterns(Config.options.pattern_files) elif Config.options.regex: return prep_regex(Config....
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pattern_logic_srt'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', '...
Return patterns to be used for searching srt subtitles.
def non_fluents(self) -> Dict[str, PVariable]: return { str(pvar): pvar for pvar in self.pvariables if pvar.is_non_fluent() }
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'non_fluents'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ...
Returns non-fluent pvariables.
def generate_filename(self, mark, **kwargs): kwargs = kwargs.copy() kwargs['opacity'] = int(kwargs['opacity'] * 100) kwargs['st_mtime'] = kwargs['fstat'].st_mtime kwargs['st_size'] = kwargs['fstat'].st_size params = [ '%(original_basename)s', 'wm', ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_filename'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],...
Comes up with a good filename for the watermarked image
def repeat(obj, times=None): if times is None: return AsyncIterWrapper(sync_itertools.repeat(obj)) return AsyncIterWrapper(sync_itertools.repeat(obj, times))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'repeat'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'obj'}...
Make an iterator that returns object over and over again.
def __pull_image_info(self, title, imageinfo, normalized): for info in imageinfo: info.update({'title': title}) _from = None for norm in normalized: if title == norm['to']: _from = norm['from'] info['metadata'] = {} ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__pull_image_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children'...
Pull image INFO from API response and insert
def srandmember(self, key, count=None, *, encoding=_NOTSET): args = [key] count is not None and args.append(count) return self.execute(b'SRANDMEMBER', *args, encoding=encoding)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'srandmember'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '10']}; {'id': '4', 'type': 'identifier', 'children...
Get one or multiple random members from a set.
def add(cls, name, value): attr = cls(value) attr._name = name setattr(cls, name, attr)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls...
Add a name-value pair to the enumeration.
def raw(config): client = Client() client.prepare_connection() audit_api = API(client) print(audit_api.raw())
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'raw'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'config'}; {'i...
Dump the contents of LDAP to console in raw format.
def blockshapes(self): if self._blockshapes is None: if self._filename: self._populate_from_rasterio_object(read_image=False) else: self._blockshapes = [(self.height, self.width) for z in range(self.num_bands)] return self._blockshapes
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'blockshapes'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Raster all bands block shape.
def relation_for_unit(unit=None, rid=None): unit = unit or remote_unit() relation = relation_get(unit=unit, rid=rid) for key in relation: if key.endswith('-list'): relation[key] = relation[key].split() relation['__unit__'] = unit return relation
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'relation_for_unit'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'children': ...
Get the json represenation of a unit's relation
def copy_with_new_str(self, new_str): old_atts = dict((att, value) for bfs in self.chunks for (att, value) in bfs.atts.items()) return FmtStr(Chunk(new_str, old_atts))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copy_with_new_str'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Copies the current FmtStr's attributes while changing its string.
def _get_build_env(env): env_override = '' if env is None: return env_override if not isinstance(env, dict): raise SaltInvocationError( '\'env\' must be a Python dictionary' ) for key, value in env.items(): env_override += '{0}={1}\n'.format(key, value) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_build_env'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'en...
Get build environment overrides dictionary to use in build process
def check_images(data): if isinstance(data, ndarray): data = fromarray(data) if not isinstance(data, Images): data = fromarray(asarray(data)) if len(data.shape) not in set([3, 4]): raise Exception('Number of image dimensions %s must be 2 or 3' % (len(data.shape))) return data
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_images'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'data...
Check and reformat input images if needed
def from_jd(jd): d = jd - EPOCH baktun = trunc(d / 144000) d = (d % 144000) katun = trunc(d / 7200) d = (d % 7200) tun = trunc(d / 360) d = (d % 360) uinal = trunc(d / 20) kin = int((d % 20)) return (baktun, katun, tun, uinal, kin)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_jd'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'jd'}; {'i...
Calculate Mayan long count from Julian day
def readinto(self, buf): got = 0 vbuf = memoryview(buf) while got < len(buf): if self._cur_avail == 0: if not self._open_next(): break cnt = len(buf) - got if cnt > self._cur_avail: cnt = self._cur_avail ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'readinto'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel...
Zero-copy read directly into buffer.
def PrintAllTables(self): goodlogging.Log.Info("DB", "Database contents:\n") for table in self._tableDict.keys(): self._PrintDatabaseTable(table)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'PrintAllTables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Prints contents of every table.
def delete_role(self, name): client = self._client('iam') inline_policies = client.list_role_policies( RoleName=name )['PolicyNames'] for policy_name in inline_policies: self.delete_role_policy(name, policy_name) client.delete_role(RoleName=name)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_role'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '...
Delete a role by first deleting all inline policies.
def mail_admins(subject, message, fail_silently=False, connection=None, html_message=None): if not settings.ADMINS: return mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message, settings.SERVER_EMAIL, [a[1] for a in set...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'mail_admins'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children...
Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting.
async def on_raw_375(self, message): await self._registration_completed(message) self.motd = message.params[1] + '\n'
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_raw_375'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Start message of the day.
def redirect_uris(self, value): if isinstance(value, six.text_type): value = value.split("\n") value = [v.strip() for v in value] for v in value: validate_redirect_uri(v) self._redirect_uris = "\n".join(value) or ""
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'redirect_uris'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Validate and store redirect URIs for client.
def runstring(self): cfile = self.template % self.last self.last += 1 return cfile
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'runstring'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
Return the run number and the file name.
def ship_move(ship, x, y, speed): click.echo('Moving ship %s to %s,%s with speed %s' % (ship, x, y, speed))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ship_move'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Moves SHIP to the new location X,Y.
def GetDictToFormat(self): d = {} for k, v in self.__dict__.items(): d[k] = util.EncodeUnicode(v) return d
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'GetDictToFormat'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Return a copy of self as a dict, suitable for passing to FormatProblem
def id_to_did(did_id, method='op'): if isinstance(did_id, bytes): did_id = Web3.toHex(did_id) if isinstance(did_id, str): did_id = remove_0x_prefix(did_id) else: raise TypeError("did id must be a hex string or bytes") if Web3.toBytes(hexstr=did_id) == b'': did_id = '0' ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'id_to_did'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'di...
Return an Ocean DID from given a hex id.
def _flush(self): if not self.enabled: return try: try: self.lock.acquire() self.flush() except Exception: self.log.error(traceback.format_exc()) finally: if self.lock.locked(): self.l...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_flush'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'...
Decorator for flushing handlers with an lock, catching exceptions
def find_shape(self, canvas_x, canvas_y): shape_x, shape_y, w = self.canvas_to_shapes_transform.dot([canvas_x, canvas_y, 1]) if hasattr(self.space, 'point_query_first'): ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_shape'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Look up shape based on canvas coordinates.
def on_message(self, message_id_service, contact_id_service, content): try: live_chat = Chat.live.get( Q(agent__id_service=contact_id_service) | Q(asker__id_service=contact_id_service)) except ObjectDoesNotExist: self._new_chat_processing(message_id_se...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], '...
To use as callback in message service backend
def no_duplicates_sections2d(sections2d, prt=None): no_dups = True ctr = cx.Counter() for _, hdrgos in sections2d: for goid in hdrgos: ctr[goid] += 1 for goid, cnt in ctr.most_common(): if cnt == 1: break no_dups = False if prt is not None: ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'no_duplicates_sections2d'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [...
Check for duplicate header GO IDs in the 2-D sections variable.
def profile_delete(self): self.validate_profile_exists() profile_data = self.profiles.get(self.args.profile_name) fqfn = profile_data.get('fqfn') with open(fqfn, 'r+') as fh: data = json.load(fh) for profile in data: if profile.get('profile_name') ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'profile_delete'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se...
Delete an existing profile.
def update_forum_redirects_counter(sender, forum, user, request, response, **kwargs): if forum.is_link and forum.link_redirects: forum.link_redirects_count = F('link_redirects_count') + 1 forum.save()
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_forum_redirects_counter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': ...
Handles the update of the link redirects counter associated with link forums.
def change_view(self, request, object_id, **kwargs): page = get_object_or_404(Page, pk=object_id) content_model = page.get_content_model() kwargs.setdefault("extra_context", {}) kwargs["extra_context"].update({ "hide_delete_link": not content_model.can_delete(request), ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_view'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], ...
Enforce custom permissions for the page instance.
def show_rich_text(self, text, collapse=False, img_path=''): self.switch_to_plugin() self.switch_to_rich_text() context = generate_context(collapse=collapse, img_path=img_path, css_path=self.css_path) self.render_sphinx_doc(text, context)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'show_rich_text'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': ...
Show text in rich mode
def binary_size(self): return ( 1 + 1 + len(self.name.encode('utf-8')) + 2 + 1 + len(self.desc.encode('utf-8')) + sum(p.binary_size() for p in self.params.values()))
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'binary_size'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'...
Return the number of bytes to store this group and its parameters.
def log_pin_request(self): if self.pin_logging and self.pin is not None: _log( "info", " * To enable the debugger you need to enter the security pin:" ) _log("info", " * Debugger pin code: %s" % self.pin) return Response("")
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_pin_request'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's...
Log the pin if needed.
def to_native_units(self, motor): assert abs(self.rotations_per_minute) <= motor.max_rpm,\ "invalid rotations-per-minute: {} max RPM is {}, {} was requested".format( motor, motor.max_rpm, self.rotations_per_minute) return self.rotations_per_minute/motor.max_rpm * motor.max_speed
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_native_units'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value...
Return the native speed measurement required to achieve desired rotations-per-minute
def check_dissociated(self, cutoff=1.2): dissociated = False if not len(self.B) > self.nslab + 1: return dissociated adsatoms = [atom for atom in self.B[self.nslab:]] ads0, ads1 = set(atom.symbol for atom in adsatoms) bond_dist = get_ads_dist(self.B, ads0, ads1) ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_dissociated'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Check if adsorbate dissociates
def _reduction_output_shape(x, output_shape, reduced_dim): if output_shape is None: if reduced_dim is None: return Shape([]) else: if reduced_dim not in x.shape.dims: raise ValueError( "reduced_dim=%s not in x.shape.dims=%s" % (reduced_dim, x.shape)) return x.shape - redu...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_reduction_output_shape'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children...
Helper function to reduce_sum, etc.
def will_tag(self): wanttags = self.retrieve_config('Tag', 'no') if wanttags == 'yes': if aux.staggerexists: willtag = True else: willtag = False print(("You want me to tag {0}, but you have not installed " "t...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'will_tag'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ...
Check whether the feed should be tagged
def side_task(pipe, *side_jobs): assert iterable(pipe), 'side_task needs the first argument to be iterable' for sj in side_jobs: assert callable(sj), 'all side_jobs need to be functions, not {}'.format(sj) side_jobs = (lambda i:i ,) + side_jobs for i in map(pipe, *side_jobs): yield i[0]
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'side_task'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'pi...
allows you to run a function in a pipeline without affecting the data
def _calculate_session_expiry(self, request, user_info): access_token_expiry_timestamp = self._get_access_token_expiry(request) id_token_expiry_timestamp = self._get_id_token_expiry(user_info) now_in_seconds = int(time.time()) earliest_expiration_timestamp = min(access_token_expiry_times...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_calculate_session_expiry'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'childr...
Returns the number of seconds after which the Django session should expire.
def _extract_mask_distance(image, mask = slice(None), voxelspacing = None): if isinstance(mask, slice): mask = numpy.ones(image.shape, numpy.bool) distance_map = distance_transform_edt(mask, sampling=voxelspacing) return _extract_intensities(distance_map, mask)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_extract_mask_distance'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '11']}; {'id': '4', 'type': 'identifier', 'childre...
Internal, single-image version of `mask_distance`.
def check_vip_ip(self, ip, environment_vip): uri = 'api/ipv4/ip/%s/environment-vip/%s/' % (ip, environment_vip) return super(ApiNetworkIPv4, self).get(uri)
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_vip_ip'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val...
Check available ip in environment vip
def refresh(self): table = self.tableType() if table: table.markTableCacheExpired() self.uiRecordTREE.searchRecords(self.uiSearchTXT.text())
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'refresh'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {...
Commits changes stored in the interface to the database.
def tags(self, extra_params=None): return self.api._get_json( Tag, space=self, rel_path=self._build_rel_path('tags'), extra_params=extra_params, )
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tags'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};...
All Tags in this Space
def log_output(f): @wraps(f) def wrapper_fn(*args, **kwargs): res = f(*args, **kwargs) logging.debug("Logging result %s.", res) return res return wrapper_fn
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_output'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'f'}; {...
Logs the output value.
def _notify_single_item(self, item): triggered_channels = set() for key_set in self.watch_keys: plucked = { key_name: item[key_name] for key_name in key_set if key_name in item } route_keys = expand_dict_as_keys(plucked) for...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_notify_single_item'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v...
Route inbound items to individual channels
def _handle_files(self, data): initial = data.get("set", False) files = data["files"] for f in files: try: fobj = File( self.room, self.conn, f[0], f[1], type=f[2], ...
{'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_files'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':...
Handle new files being uploaded