code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def extract(filename_url_filelike_or_htmlstring):
html_tree = get_html_tree(filename_url_filelike_or_htmlstring)
subtrees = get_textnode_subtrees(html_tree)
avg, _, _ = calcavg_avgstrlen_subtrees(subtrees)
filtered = [subtree for subtree in subtrees
if subtree.ttl_strlen > avg]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extract'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'filename_... | An "improved" algorithm over the original eatiht algorithm |
def _check_required_settings(batches):
required_settings = [
'sawtooth.consensus.algorithm.name',
'sawtooth.consensus.algorithm.version']
for batch in batches:
for txn in batch.transactions:
txn_header = TransactionHeader()
txn_header.ParseFromString(txn.header)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_check_required_settings'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Ensure that all settings required at genesis are set. |
def _build_offset(offset, kwargs, default):
if offset is None:
if not kwargs:
return default
else:
return _td_check(datetime.timedelta(**kwargs))
elif kwargs:
raise ValueError('Cannot pass kwargs and an offset')
elif isinstance(offset, datetime.timedelta):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_offset'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Builds the offset argument for event rules. |
def action_update(self):
order = []
form = self.request.form
attachments = form.get("attachments", [])
for attachment in attachments:
values = dict(attachment)
uid = values.pop("UID")
obj = api.get_object_by_uid(uid)
if values.pop("delete",... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'action_update'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Form action enpoint to update the attachments |
def _find_observable_paths(extra_files=None):
rv = set(
os.path.dirname(os.path.abspath(x)) if os.path.isfile(x) else os.path.abspath(x)
for x in sys.path
)
for filename in extra_files or ():
rv.add(os.path.dirname(os.path.abspath(filename)))
for module in list(sys.modules.values... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_find_observable_paths'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': [... | Finds all paths that should be observed. |
def align(doc):
validate_doc(doc)
def evaluator(indent, column, page_width, ribbon_width):
return Nest(column - indent, doc)
return contextual(evaluator) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'align'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'doc'}; {'id... | Aligns each new line in ``doc`` with the first new line. |
def clean_ufo(path):
if path.endswith(".ufo") and os.path.exists(path):
shutil.rmtree(path) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clean_ufo'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'path'};... | Make sure old UFO data is removed, as it may contain deleted glyphs. |
def hmget(self, key, field, *fields, encoding=_NOTSET):
return self.execute(b'HMGET', key, field, *fields, encoding=encoding) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hmget'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '9']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Get the values of all the given fields. |
def removeApplicationManifest(self, pchApplicationManifestFullPath):
fn = self.function_table.removeApplicationManifest
result = fn(pchApplicationManifestFullPath)
return result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'removeApplicationManifest'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': ... | Removes an application manifest from the list to load when building the list of installed applications. |
def learn(self, numEpochs, batchsize):
for epoch in range(numEpochs):
print('epoch %d' % epoch)
indexes = np.random.permutation(self.trainsize)
for i in range(0, self.trainsize, batchsize):
x = Variable(self.x_train[indexes[i: i + batchsize]])
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'learn'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Train the classifier for a given number of epochs, with a given batchsize |
def _get_csv_cells_gen(self, line):
digest_types = self.digest_types
for j, value in enumerate(line):
if self.first_line:
digest_key = None
digest = lambda x: x.decode(self.encoding)
else:
try:
digest_key = diges... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_csv_cells_gen'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Generator of values in a csv line |
def from_json(self, value):
if value is None:
return None
if isinstance(value, six.binary_type):
value = value.decode('utf-8')
if isinstance(value, six.text_type):
if value == "":
return None
try:
value = dateutil.pa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Parse the date from an ISO-formatted date string, or None. |
def handle_starttag(self, tag, attrs):
if tag == 'a' and ( ('class', 'download-pdf') in attrs or ('id', 'download-pdf') in attrs ):
for attr in attrs:
if attr[0] == 'href':
self.download_link = 'http://www.nature.com' + attr[1] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_starttag'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], '... | PDF link handler; never gets explicitly called by user |
def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return u'1'
else:
return u'0' | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'I'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id': ... | 1' if Daylight Savings Time, '0' otherwise. |
def _get_registerd_func(name_or_func):
if callable(name_or_func):
func = register_array_xcorr(name_or_func)
else:
func = XCOR_FUNCS[name_or_func or 'default']
assert callable(func), 'func is not callable'
if not hasattr(func, 'registered'):
func = register_array_xcorr(func)
r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_registerd_func'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | get a xcorr function from a str or callable. |
def stn(s, length, encoding, errors):
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stn'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Convert a string to a null-terminated bytes object. |
def getdrawings():
infos = Info.query.all()
sketches = [json.loads(info.contents) for info in infos]
return jsonify(drawings=sketches) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getdrawings'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '15', '30']}; {'id': ... | Get all the drawings. |
def make_prefix(api_version, manipulator, auth_type):
prefix = "%s_%s" % (api_version, manipulator)
if (auth_type and auth_type != 'none'):
prefix += '_' + auth_type
return prefix | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_prefix'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Make prefix string based on configuration parameters. |
def expandf(m, format):
_assert_expandable(format, True)
return _apply_replace_backrefs(m, format, flags=FORMAT) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'expandf'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'm'};... | Expand the string using the format replace pattern or function. |
def call_output(cmd, stdin=None, encoding_errors="replace", **kwargs):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
stdout, stderr, retcode = [], [], None
while retcode is None:
if stdin is not None:
logger.log_prefix("<0 ", stdin.rstrip())
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'call_output'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': []... | Run command and read output. |
def maybe_start_recording(tokens, index):
if tokens[index].type == TokenType.BeginInlineRST:
return _InlineRSTRecorder(index) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'maybe_start_recording'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Return a new _InlineRSTRecorder when its time to record. |
def recent_articles(limit=10, exclude=None):
queryset = Article.objects.filter(published=True).order_by('-modified')
if exclude:
if hasattr(exclude, '__iter__'):
queryset = queryset.exclude(pk__in=exclude)
else:
queryset = queryset.exclude(pk=exclude)
return queryset | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'recent_articles'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'children': ['... | Returns list of latest article |
async def shutdown(self):
"Force stop the output stream, if there are more data to download, shutdown the connection"
if self.stream:
if not self.stream.dataeof and not self.stream.dataerror:
self.stream.close(self.scheduler)
await self.connection.shutdown()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'shutdown'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Force stop the output stream, if there are more data to download, shutdown the connection |
def _get_cmd(command, arguments):
if arguments is None:
arguments = []
if command.endswith(".py") or command.endswith(".pyw"):
return [sys.executable, command] + list(arguments)
else:
return [command] + list(arguments) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_cmd'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'com... | Merge command with arguments. |
def have_graph(name):
for g in mestate.graphs:
if g.name == name:
return True
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'have_graph'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'name'}... | return true if we have a graph of the given name |
def split_timesteps(data, consistent_abmn=False):
if has_multiple_timesteps(data):
grouped = data.groupby("timestep")
return [group[1] for group in grouped]
else:
return data | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'split_timesteps'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Split data into multiple timesteps. |
def make_stmt(stmt_cls, tf_agent, target_agent, pmid):
ev = Evidence(source_api='trrust', pmid=pmid)
return stmt_cls(deepcopy(tf_agent), deepcopy(target_agent),
evidence=[ev]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_stmt'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Return a Statement based on its type, agents, and PMID. |
def make_module_reload_func(module_name=None, module_prefix='[???]', module=None):
module = _get_module(module_name, module, register=False)
if module_name is None:
module_name = str(module.__name__)
def rrr(verbose=True):
if not __RELOAD_OK__:
raise Exception('Reloading has been... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_module_reload_func'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '10']}; {'id': '4', 'type': 'default_parameter', ... | Injects dynamic module reloading |
def triangle(self, verts=True, lines=True):
tf = vtk.vtkTriangleFilter()
tf.SetPassLines(lines)
tf.SetPassVerts(verts)
tf.SetInputData(self.poly)
tf.Update()
return self.updateMesh(tf.GetOutput()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'triangle'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Converts actor polygons and strips to triangles. |
def regxy(pattern, response, supress_regex, custom):
try:
matches = re.findall(r'%s' % pattern, response)
for match in matches:
verb('Custom regex', match)
custom.add(match)
except:
supress_regex = True | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'regxy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Extract a string based on regex pattern supplied by user. |
def tricu(P, k=0):
tri = numpy.sum(numpy.mgrid[[slice(0,_,1) for _ in P.shape]], 0)
tri = tri<len(tri) + k
if isinstance(P, Poly):
A = P.A.copy()
B = {}
for key in P.keys:
B[key] = A[key]*tri
return Poly(B, shape=P.shape, dim=P.dim, dtype=P.dtype)
out = P*tri
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tricu'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'P'}; {... | Cross-diagonal upper triangle. |
def filespecs(self):
filespecs = {'globs': self._file_globs}
exclude_filespecs = self._exclude_filespecs
if exclude_filespecs:
filespecs['exclude'] = exclude_filespecs
return filespecs | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'filespecs'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Return a filespecs dict representing both globs and excludes. |
def export_project(self):
output = copy.deepcopy(self.generated_project)
data_for_make = self.workspace.copy()
self.exporter.process_data_for_makefile(data_for_make)
output['path'], output['files']['makefile'] = self.gen_file_jinja('makefile_gcc.tmpl', data_for_make, 'Makefile', data_for... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'export_project'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Processes groups and misc options specific for eclipse, and run generator |
def word_count(ctx, text, by_spaces=False):
text = conversions.to_string(text, ctx)
by_spaces = conversions.to_boolean(by_spaces, ctx)
return len(__get_words(text, by_spaces)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'word_count'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Returns the number of words in the given text string |
def heating_values(self):
heating_dict = {
'level': self.heating_level,
'target': self.target_heating_level,
'active': self.now_heating,
'remaining': self.heating_remaining,
'last_seen': self.last_seen,
}
return heating_dict | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'heating_values'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Return a dict of all the current heating values. |
def _transform_snapshot(
raw_snapshot: str,
storage: SQLiteStorage,
cache: BlockHashCache,
) -> str:
snapshot = json.loads(raw_snapshot)
block_number = int(snapshot['block_number'])
snapshot['block_hash'] = cache.get(block_number)
pending_transactions = snapshot['pending_transact... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_transform_snapshot'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12']}; {'id': '4', 'type': 'typed_parameter', ... | Upgrades a single snapshot by adding the blockhash to it and to any pending transactions |
def _score_macro_average(self, n_classes):
all_fpr = np.unique(np.concatenate([self.fpr[i] for i in range(n_classes)]))
avg_tpr = np.zeros_like(all_fpr)
for i in range(n_classes):
avg_tpr += interp(all_fpr, self.fpr[i], self.tpr[i])
avg_tpr /= n_classes
self.fpr[MACRO... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_score_macro_average'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Compute the macro average scores for the ROCAUC curves. |
def main(argv=None):
arguments = cli_common(__doc__, argv=argv)
es_export = ESExporter(arguments['CAMPAIGN-DIR'], arguments['--es'])
es_export.export()
if argv is not None:
return es_export | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6']}; {'id':... | ben-elastic entry point |
def errored_tasks(self):
etasks = []
for status in [self.S_ERROR, self.S_QCRITICAL, self.S_ABICRITICAL]:
etasks.extend(list(self.iflat_tasks(status=status)))
return set(etasks) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'errored_tasks'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | List of errored tasks. |
def _validate_configuration(self):
if not self.access_token:
raise ConfigurationException(
'You will need to initialize a client with an Access Token'
)
if not self.api_url:
raise ConfigurationException(
'The client configuration needs ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_configuration'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Validates that required parameters are present. |
def memory_usage_psutil():
process = psutil.Process(os.getpid())
mem = process.memory_info()[0] / float(2 ** 20)
mem_vms = process.memory_info()[1] / float(2 ** 20)
return mem, mem_vms | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'memory_usage_psutil'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '18', '35', '... | Return the current process memory usage in MB. |
def del_key(self, k):
if k not in self.mirror:
raise KeyError
del self.proxy[k]
if '_config' in self.proxy and k in self.proxy['_config']:
del self.proxy['_config'][k] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'del_key'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Delete the key and any configuration for it |
def append_text(self, txt):
with open(self.fullname, "a") as myfile:
myfile.write(txt) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'append_text'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | adds a line of text to a file |
def main(argv=None):
arguments = cli_common(__doc__, argv=argv)
driver = CampaignDriver(arguments['CAMPAIGN-DIR'], expandcampvars=False)
driver(no_exec=True)
if argv is not None:
return driver | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6']}; {'id':... | ben-umb entry point |
def read_file(filename: PathLike = "experiment.yml") -> Dict[str, Any]:
logger.debug("Input file: %s", filename)
with open(filename, "r") as stream:
structure = yaml.safe_load(stream)
return structure | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_file'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_default_parameter', 'children': ['... | Read and parse yaml file. |
def convert_all(self):
for url_record in self._url_table.get_all():
if url_record.status != Status.done:
continue
self.convert_by_record(url_record) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_all'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Convert all links in URL table. |
def process_view(self, request, view_func, view_args, view_kwargs):
try:
if ignore_path(request.path):
TrackedRequest.instance().tag("ignore_transaction", True)
view_name = request.resolver_match._func_path
span = TrackedRequest.instance().current_span()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_view'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children'... | Capture details about the view_func that is about to execute |
def stop(self):
if not self.running.wait(0.2):
return
self._logger.debug('runner disabled: %s', self)
with self._lock:
self.running.clear()
self._stopped.wait() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stop'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Stop execution of all current and future payloads |
def add(self, sensor):
if isinstance(sensor, (list, tuple)):
for sss in sensor:
self.add(sss)
return
if not isinstance(sensor, Sensor):
raise TypeError("pysma.Sensor expected")
if sensor.name in self:
old = self[sensor.name]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Add a sensor, warning if it exists. |
def execute_locally(self):
self.make_script()
with open(self.kwargs['out_file'], 'w') as handle:
sh.python(self.script_path, _out=handle, _err=handle) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'execute_locally'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Runs the equivalent command locally in a blocking way. |
def load_image(name, n, m=None, gpu=None, square=None):
if m is None:
m = n
if gpu is None:
gpu = 0
if square is None:
square = 0
command = ('Shearlab.load_image("{}", {}, {}, {}, {})'.format(name,
n, m, gpu, square))
return j.eval(command) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_image'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children'... | Function to load images with certain size. |
def _is_device_active(device):
cmd = ['dmsetup', 'info', device]
dmsetup_info = util.subp(cmd)
for dm_line in dmsetup_info.stdout.split("\n"):
line = dm_line.split(':')
if ('State' in line[0].strip()) and ('ACTIVE' in line[1].strip()):
return True
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_is_device_active'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Checks dmsetup to see if a device is already active |
def token_meta(opt):
meta = {
'via': 'aomi',
'operation': opt.operation,
'hostname': socket.gethostname()
}
if 'USER' in os.environ:
meta['unix_user'] = os.environ['USER']
if opt.metadata:
meta_bits = opt.metadata.split(',')
for meta_bit in meta_bits:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'token_meta'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'opt'};... | Generates metadata for a token |
def monthly_clear_sky_conditions(self):
if self._monthly_tau_diffuse is [] or self._monthly_tau_beam is []:
return [OriginalClearSkyCondition(i, 21) for i in xrange(1, 13)]
return [RevisedClearSkyCondition(i, 21, x, y) for i, x, y in zip(
list(xrange(1, 13)), self._monthly_tau_be... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'monthly_clear_sky_conditions'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': []... | A list of 12 monthly clear sky conditions that are used on the design days. |
def _interface_exists(self, interface):
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
itfcs_raw = parse.find_lines("^interface " + interface)
return len(itfcs_raw) > 0 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_interface_exists'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Check whether interface exists. |
def maybe_download_and_extract(dest_directory, cifar_classnum):
assert cifar_classnum == 10 or cifar_classnum == 100
if cifar_classnum == 10:
cifar_foldername = 'cifar-10-batches-py'
else:
cifar_foldername = 'cifar-100-python'
if os.path.isdir(os.path.join(dest_directory, cifar_foldernam... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'maybe_download_and_extract'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children':... | Download and extract the tarball from Alex's website. Copied from tensorflow example |
def _get_or_create_service_key(self):
keys = self.service._get_service_keys(self.name)
for key in keys['resources']:
if key['entity']['name'] == self.service_name:
return self.service.get_service_key(self.name,
self.service_name)
self.service.c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_or_create_service_key'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Get a service key or create one if needed. |
def main():
global args
args = parse_args()
if not args:
return 1
state = MyState(args)
for path in args.paths:
if os.path.isdir(path):
walk_dir(path, args, state)
else:
safe_process_files(os.path.dirname(path), [os.path.basename(path)], args, state)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '7', '13', '19', '26', '82',... | Main function when running as a program. |
def try_fix_dataset(dataset):
if isinstance(dataset, numpy.ndarray):
if len(dataset.shape) == 3:
if dataset.shape[-1] == 3:
return dataset.transpose((2, 0, 1))
elif len(dataset.shape) == 4:
if dataset.shape[-1] == 3:
return dataset.transpose((0... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'try_fix_dataset'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'd... | Transpose the image data if it's in PIL format. |
def _print_napps(cls, napp_list):
mgr = NAppsManager()
enabled = mgr.get_enabled()
installed = mgr.get_installed()
napps = []
for napp, desc in sorted(napp_list):
status = 'i' if napp in installed else '-'
status += 'e' if napp in enabled else '-'
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_print_napps'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Format the NApp list to be printed. |
def lineage(self):
    """Return all nodes from the root down to this node, inclusive.

    The root comes first and this node last.
    """
    chain = [self]
    node = self.parent
    while node:
        chain.append(node)
        node = node.parent
    chain.reverse()
    return chain
def _load_info(self):
if self._info is None:
try:
self._info = self._api.tables_get(self._name_parts)
except Exception as e:
raise e | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_info'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Loads metadata about this table. |
def standard_exc_info(self):
    """Standard python exc_info tuple for re-raising.

    The first frame may be either a real traceback or a wrapper exposing
    one via its ``tb`` attribute; unwrap in the latter case.
    """
    frame = self.frames[0]
    # TracebackType cannot be subclassed, so isinstance == exact type check.
    tb = frame if isinstance(frame, TracebackType) else frame.tb
    return self.exc_type, self.exc_value, tb
def list_team_codes():
cleanlist = sorted(TEAM_DATA, key=lambda k: (k["league"]["name"], k["code"]))
leaguenames = sorted(list(set([team["league"]["name"] for team in cleanlist])))
for league in leaguenames:
teams = [team for team in cleanlist if team["league"]["name"] == league]
click.secho... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_team_codes'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '26', '47']}; {'i... | List team names in alphabetical order of team ID, per league. |
def _encode_personality(self, personality):
if personality is None:
personality = []
else:
personality = utils.coerce_to_list(personality)
for pfile in personality:
if "contents" in pfile:
pfile["contents"] = base64.b64encode(pfile[... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_encode_personality'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Personality files must be base64-encoded before transmitting. |
def GetReportData(self, get_report_args, token):
report = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType
.PIE_CHART)
graph_series = client_report_utils.FetchMostRecentGraphSeries(
get_report_args.client_label,
rdf_stats.C... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'GetReportData'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Extract only the operating system type from the active histogram. |
def findfirst(f, coll):
    """Return the first element of ``coll`` for which ``f`` is falsy, else None.

    ``f`` acts as a drop-while predicate: elements are skipped while it
    holds. Unlike the previous ``list(dropwhile(...))`` version, this stops
    at the first hit instead of materializing the whole remainder of
    ``coll``, so it is O(1) extra memory and works with infinite iterators.
    """
    return next(dropwhile(f, coll), None)
def getSubOrder(existing):
alpha = list(zip(*sorted(((k, v['rec']['label']) for k, v in existing.items()), key=lambda a: a[1])))[0]
depths = {}
def getDepth(id_):
if id_ in depths:
return depths[id_]
else:
if id_ in existing:
names_above = getDepth(exi... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getSubOrder'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'exist... | Alpha sort by the full chain of parents. |
def reading_order(e1, e2):
    """Comparator sorting bboxes top-to-bottom, then left-to-right.

    Boxes sharing (rounded) top or bottom edges are treated as the same
    line and ordered by their left edge.
    """
    box_a, box_b = e1.bbox, e2.bbox
    same_top = round(box_a[y0]) == round(box_b[y0])
    same_bottom = round(box_a[y1]) == round(box_b[y1])
    if same_top or same_bottom:
        return float_cmp(box_a[x0], box_b[x0])
    return float_cmp(box_a[y0], box_b[y0])
def indicator_constraints(self,x):
x = np.atleast_2d(x)
I_x = np.ones((x.shape[0],1))
if self.constraints is not None:
for d in self.constraints:
try:
exec('constraint = lambda x:' + d['constraint'], globals())
ind_x = (constrai... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'indicator_constraints'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Returns array of ones and zeros indicating if x is within the constraints |
def submit_row(context):
opts = context['opts']
change = context['change']
is_popup = context['is_popup']
save_as = context['save_as']
return {
'onclick_attrib': (opts.get_ordered_objects() and change
and 'onclick="submitOrderForm();"' or ''),
'show_delete... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'submit_row'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'contex... | Displays the row of buttons for delete and save. |
def populate_from_settings(self):
    """Merge aliases from the ``THUMBNAIL_ALIASES`` setting into this store."""
    configured = settings.THUMBNAIL_ALIASES
    if not configured:
        return
    for target, aliases in configured.items():
        # Existing entries for the target are kept; new aliases are layered on.
        self._aliases.setdefault(target, {}).update(aliases)
def decipher_block (self, state):
if len(state) != 16:
Log.error(u"Expecting block of 16")
self._add_round_key(state, self._Nr)
for i in range(self._Nr - 1, 0, -1):
self._i_shift_rows(state)
self._i_sub_bytes(state)
self._add_round_key(state, i)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'decipher_block'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Perform AES block decipher on input |
def dist_to(self, p2):
    """Return the Euclidean distance from this point to ``p2``."""
    dx = self.x - p2.x
    dy = self.y - p2.y
    return math.hypot(dx, dy)
def serialize_on_parent(
self,
parent,
value,
state
):
if value is None and self.required:
state.raise_error(MissingValue, self._missing_value_message(parent))
if not value and self.omit_empty:
return
element = _element_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'serialize_on_parent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childre... | Serialize the value and add it to the parent element. |
async def _remote_close(self, exc=None):
if self.state in (STATE_CLOSING, STATE_CLOSED):
return
log.info("close session: %s", self.id)
self.state = STATE_CLOSING
if exc is not None:
self.exception = exc
self.interrupted = True
try:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_remote_close'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | close session from remote. |
def add_str(self, oid, value, label=None):
    """Add a STRING-typed value to the MIB subtree at ``oid``."""
    self.add_oid_entry(oid, 'STRING', value, label=label)
def recruit(self):
    """Recruit one participant while some network has room; otherwise stop.

    When no non-full network remains, recruitment is closed instead.
    """
    has_open_network = self.networks(full=False)
    if not has_open_network:
        self.recruiter.close_recruitment()
    else:
        self.recruiter.recruit(n=1)
def DeleteRequest(self, request):
    """Delete ``request`` and its responses from the flow state queue."""
    self.requests_to_delete.append(request)
    # Only dequeue when the wrapper actually carries an embedded client request.
    has_embedded = bool(request) and request.HasField("request")
    if has_embedded:
        self.DeQueueClientRequest(request.request)
    data_store.DB.DeleteRequest(request)
def AddChild(self, path_info):
if self._path_type != path_info.path_type:
message = "Incompatible path types: `%s` and `%s`"
raise ValueError(message % (self._path_type, path_info.path_type))
if self._components != path_info.components[:-1]:
message = "Incompatible path components, expected `%... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'AddChild'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Makes the path aware of some child. |
def _get_destination(script_parts):
for part in script_parts:
if part not in {'ln', '-s', '--symbolic'} and os.path.exists(part):
return part | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_destination'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | When arguments order is wrong first argument will be destination. |
def _fetch_app_role_token(vault_url, role_id, secret_id):
url = _url_joiner(vault_url, 'v1/auth/approle/login')
resp = requests.post(url, data={'role_id': role_id, 'secret_id': secret_id})
resp.raise_for_status()
data = resp.json()
if data.get('errors'):
raise VaultEx... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fetch_app_role_token'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':... | Get a Vault token, using the RoleID and SecretID |
def put_abs(self, r, c, ch):
    """Write one character at 1-indexed (row, col), clamped to the screen.

    Bytes input is decoded first; in either case only the first character
    of ``ch`` is stored.
    """
    row = constrain(r, 1, self.rows)
    col = constrain(c, 1, self.cols)
    first = self._decode(ch)[0] if isinstance(ch, bytes) else ch[0]
    self.w[row - 1][col - 1] = first
def simple_filter(self, key, value):
    """Return the set of keys whose stored values contain ``key: value``."""
    wanted = {key: value}
    # A value matches when intersecting it with the query reproduces the query.
    return {k for k, v in self.data.items()
            if intersect(wanted, v) == wanted}
def _bypass_non_decrypted_field_exception(self):
if getattr(settings, 'PGPFIELDS_BYPASS_NON_DECRYPTED_FIELD_EXCEPTION', False):
return True
if getattr(settings, 'PGPFIELDS_BYPASS_FIELD_EXCEPTION_IN_MIGRATIONS', False):
if {'manage.py', 'migrate'}.issubset(sys.argv):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_bypass_non_decrypted_field_exception'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'chil... | Bypass exception if some field was not decrypted. |
def upload_files_in_folder(self, dirname, fnames):
if utils.match_pattern(dirname, self.ignore):
return False
good_names = (nm for nm in fnames
if not utils.match_pattern(nm, self.ignore))
for fname in good_names:
if self.client._should_abort_folder_upload... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'upload_files_in_folder'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children'... | Handles the iteration across files within a folder. |
def min_order_amount(self) -> Money:
    """Minimum amount to place an order on this market."""
    fetch = self._fetch('minimum order amount', self.market.code)
    return fetch(self._min_order_amount)()
def nocache(func):
    """Decorator that disables client-side caching for the wrapped view."""
    def no_cache_view(*args, **kwargs):
        response = make_response(func(*args, **kwargs))
        # Force intermediaries/browsers to revalidate instead of caching.
        response.cache_control.no_cache = True
        return response
    return update_wrapper(no_cache_view, func)
def schedule_hourly():
if not config.get('ENABLE_SCHEDULED_EMAIL_REPORTS'):
logging.info('Scheduled email reports not enabled in config')
return
resolution = config.get('EMAIL_REPORTS_CRON_RESOLUTION', 0) * 60
start_at = datetime.now(tzlocal()).replace(microsecond=0, second=0, minute=0)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'schedule_hourly'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '22', '34', '58',... | Celery beat job meant to be invoked hourly |
def change_speed(body, speed=1):
    """Change the voice speed of the wave body.

    Rescales the sample sequence to ``len(body) * speed`` slots, filling
    each output span with the corresponding input sample (duplicating
    samples when speed > 1, dropping some when speed < 1). Returns
    ``body`` unchanged when speed == 1, otherwise a new bytearray.
    """
    if speed == 1:
        return body
    out_len = int(len(body) * speed)
    out = bytearray(out_len)
    pos = 0.0
    for sample in body:
        # Fill output slots [pos, pos + speed), clipped to the buffer end.
        for idx in range(int(pos), min(int(pos + speed), out_len)):
            out[idx] = sample
        pos += speed
    return out
def convert_to_node(instance, xml_node: XmlNode, node_globals: InheritedDict = None)\
        -> InstanceNode:
    """Wrap ``instance`` in an InstanceNode bound to its xml node and globals."""
    node = InstanceNode(instance, xml_node, node_globals)
    return node
def check_valid_varname(varname,
custom_units,
custom_structs,
constants,
pos,
error_prefix="Variable name invalid.",
exc=None):
exc = VariableDeclarationException if exc i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_valid_varname'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '12']}; {'id': '4', 'type': 'iden... | Handle invalid variable names |
def _read_python_source(self, filename):
try:
f = open(filename, "rb")
except IOError as err:
self.log_error("Can't open %s: %s", filename, err)
return None, None
try:
encoding = tokenize.detect_encoding(f.readline)[0]
finally:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_read_python_source'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Do our best to decode a Python source file correctly. |
def toBCD(n):
    """Convert a non-negative integer ``n`` into Binary Coded Decimal.

    Each decimal digit occupies one 4-bit nibble of the result, least
    significant digit in the lowest nibble (e.g. 1234 -> 0x1234).
    Fixes the original ``if n is 0`` identity comparison, which relies on
    CPython's small-int cache and raises SyntaxWarning on Python >= 3.8;
    value equality is the correct test.
    """
    bcd = 0
    shift = 0
    while True:
        n, digit = divmod(n, 10)
        bcd |= digit << shift
        if n == 0:
            break
        shift += 4
    return bcd
def types(self):
    """Fetch the list of known event types from the evaluator service."""
    response = requests.get(self.evaluator_url + 'types')
    # Surface HTTP errors instead of trying to parse an error body.
    response.raise_for_status()
    return response.json()
def chain_user_names(users, exclude_user, truncate=35):
    """Tag returning a truncated, comma-separated chain of user names.

    Returns '' when ``users`` is empty/falsy or ``exclude_user`` is not a
    user-model instance; ``exclude_user`` itself is omitted from the chain.
    """
    if not users or not isinstance(exclude_user, get_user_model()):
        return ''
    others = users.exclude(pk=exclude_user.pk)
    joined = ', '.join(u'{}'.format(user) for user in others)
    return truncatechars(joined, truncate)
def threshold(np, acc, stream_raster, threshold=100., workingdir=None,
mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None):
fname = TauDEM.func_name('threshold')
return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
{'-s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '28']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'threshold'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13', '16', '19', '22', '25']}; {'id': '4', 'ty... | Run threshold for stream raster |
def update_field_forward_refs(field: 'Field', globalns: Any, localns: Any) -> None:
    """Resolve a ForwardRef on ``field`` (and its sub-fields, recursively).

    Only exact ForwardRef instances are evaluated; after resolution the
    field is re-prepared so downstream validators see the real type.
    """
    # Exact-type check (== on type objects is identity for ForwardRef).
    if type(field.type_) is ForwardRef:
        field.type_ = field.type_._evaluate(globalns, localns or None)
        field.prepare()
    for sub_field in (field.sub_fields or ()):
        update_field_forward_refs(sub_field, globalns, localns)
def _get_vcpu_field_and_address(self, field_name, x, y, p):
vcpu_struct = self.structs[b"vcpu"]
field = vcpu_struct[six.b(field_name)]
address = (self.read_struct_field("sv", "vcpu_base", x, y) +
vcpu_struct.size * p) + field.offset
pack_chars = b"<" + field.pack_chars... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_vcpu_field_and_address'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifi... | Get the field and address for a VCPU struct field. |
def extract_version_from_filename(filename):
    """Extract the version number from an sdist filename.

    Strips the directory and outer extension, plus a trailing ``.tar``
    (so ``pkg-1.0.tar.gz`` reduces to ``pkg-1.0``), then returns the text
    after the LAST dash. Using ``rpartition`` fixes multi-word project
    names: ``my-pkg-1.0.tar.gz`` now yields ``1.0`` instead of
    ``pkg-1.0``. Returns '' when no dash is present (matching the old
    ``partition`` behavior). Assumes versions themselves contain no dash.
    """
    stem = os.path.splitext(os.path.basename(filename))[0]
    if stem.endswith('.tar'):
        stem = os.path.splitext(stem)[0]
    _, sep, version = stem.rpartition('-')
    return version if sep else ''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.