code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def _update_access_key_pair(self, access_key_id, key, val):
current_access_key = self.get_access_key(access_key_id)
payload_dict = KeenApi._build_access_key_dict(current_access_key)
payload_dict[key] = val
return self.update_access_key_full(access_key_id, **payload_dict) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_access_key_pair'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chi... | Helper for updating access keys in a DRY fashion. |
def fields(self) -> GraphQLInputFieldMap:
try:
fields = resolve_thunk(self._fields)
except GraphQLError:
raise
except Exception as error:
raise TypeError(f"{self.name} fields cannot be resolved: {error}")
if not isinstance(fields, dict) or not all(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fields'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Get provided fields, wrap them as GraphQLInputField if needed. |
def _on_scan(_loop, adapter, _adapter_id, info, expiration_time):
info['validity_period'] = expiration_time
adapter.notify_event_nowait(info.get('connection_string'), 'device_seen', info) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_on_scan'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': []... | Callback when a new device is seen. |
def cleanup_on_delete(self, sender, document, **kwargs):
if not self.follow or sender is not self.owner_document:
return
slug = getattr(document, self.db_field)
namespace = self.owner_document.__name__
SlugFollow.objects(namespace=namespace, new_slug=slug).delete() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cleanup_on_delete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children'... | Clean up slug redirections on object deletion |
def _translate_timeperiod(self, timeperiod):
if self.time_grouping == 1:
return timeperiod
year, month, day, hour = time_helper.tokenize_timeperiod(timeperiod)
if self.time_qualifier == QUALIFIER_HOURLY:
stem = self._do_stem_grouping(timeperiod, int(hour))
res... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_translate_timeperiod'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | method translates given timeperiod to the grouped timeperiod |
def _init_date_range(self, start_date=None, end_date=None):
self.end_date = end_date
self.start_date = start_date
if self.end_date is None:
today = now_utc().date()
end_date = self.event.end_dt.date()
self.end_date = end_date if end_date < today else today
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_date_range'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [],... | Set date range defaults if no dates are passed |
def execute(self, method, args, ref):
response = {'result': None, 'error': None, 'ref': ref}
fun = self.methods.get(method)
if not fun:
response['error'] = 'Method `{}` not found'.format(method)
else:
try:
response['result'] = fun(*args)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'execute'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Execute the method with args |
def cublasZtrsm(handle, side, uplo, transa, diag, m, n, alpha, A, lda, B, ldb):
status = _libcublas.cublasZtrsm_v2(handle,
_CUBLAS_SIDE_MODE[side],
_CUBLAS_FILL_MODE[uplo],
_CUBLAS_OP[trans],
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cublasZtrsm'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15']}; {'i... | Solve complex triangular system with multiple right-hand sides. |
def drawCurve(self, p1, p2, p3):
kappa = 0.55228474983
p1 = Point(p1)
p2 = Point(p2)
p3 = Point(p3)
k1 = p1 + (p2 - p1) * kappa
k2 = p3 + (p2 - p3) * kappa
return self.drawBezier(p1, k1, k2, p3) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'drawCurve'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Draw a curve between points using one control point. |
def init_report(self, reporter=None):
self.options.report = (reporter or self.options.reporter)(self.options)
return self.options.report | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init_report'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Initialize the report instance. |
def url_join(base, *args):
scheme, netloc, path, query, fragment = urlsplit(base)
path = path if len(path) else "/"
path = posixpath.join(path, *[('%s' % x) for x in args])
return urlunsplit([scheme, netloc, path, query, fragment]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'url_join'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'bas... | Helper function to join an arbitrary number of url segments together. |
def emailComment(comment, obj, request):
if not obj.author.frog_prefs.get().json()['emailComments']:
return
if obj.author == request.user:
return
html = render_to_string('frog/comment_email.html', {
'user': comment.user,
'comment': comment.comment,
'object': obj,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'emailComment'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Send an email to the author about a new comment |
def excel_key(index):
X = lambda n: ~n and X((n // 26)-1) + chr(65 + (n % 26)) or ''
return X(int(index)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'excel_key'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'index'}... | create a key for index by converting index into a base-26 number, using A-Z as the characters. |
def OnTextFont(self, event):
fontchoice_combobox = event.GetEventObject()
idx = event.GetInt()
try:
font_string = fontchoice_combobox.GetString(idx)
except AttributeError:
font_string = event.GetString()
post_command_event(self, self.FontMsg, font=font_str... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'OnTextFont'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Text font choice event handler |
def _GetMetric(self, metric_name):
if metric_name in self._counter_metrics:
return self._counter_metrics[metric_name]
elif metric_name in self._event_metrics:
return self._event_metrics[metric_name]
elif metric_name in self._gauge_metrics:
return self._gauge_metrics[metric_name]
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_GetMetric'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Fetches the metric object corresponding to the given name. |
def SaveData(self, raw_data):
if self.filename is None:
raise IOError("Unknown filename")
logging.info("Writing back configuration to file %s", self.filename)
try:
os.makedirs(os.path.dirname(self.filename))
except (IOError, OSError):
pass
try:
mode = os.O_WRONLY | os.O_CREAT... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'SaveData'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Store the raw data as our configuration. |
def focus_last_reply(self):
mid = self.get_selected_mid()
newpos = self._tree.last_child_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'focus_last_reply'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | move focus to last reply to currently focussed message |
def check_dependencies():
print 'Checking dependencies...'
if not HAS_VIRTUALENV:
print 'Virtual environment not found.'
if HAS_EASY_INSTALL:
print 'Installing virtualenv via easy_install...',
run_command(['easy_install', 'virtualenv'],
die_message... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_dependencies'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '7', '61']}; {... | Make sure virtualenv is in the path. |
def get(cls, filter=None, **kwargs):
document = cls(cls.find_one(filter, **kwargs))
return document if document.document else None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cl... | Returns a Document if any document is filtered, returns None otherwise |
def update_hparams_from_hparams(target_hparams, source_hparams, prefix):
for (param_name, param_value) in six.iteritems(source_hparams.values()):
if param_name.startswith(prefix):
target_hparams.set_hparam(param_name[len(prefix):], param_value) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_hparams_from_hparams'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'chil... | Copy a subset of hparams to target_hparams. |
def _initialize_trunk_interfaces_to_none(self, switch_ip, replay=True):
try:
switch_ifs = self._mdriver._get_switch_interfaces(
switch_ip, cfg_only=(False if replay else True))
if not switch_ifs:
LOG.debug("Skipping switch %s which has no configured "
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_initialize_trunk_interfaces_to_none'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifie... | Initialize all nexus interfaces to trunk allowed none. |
def release_subnet(self, cidr, direc):
if direc == 'in':
self.service_in_ip.release_subnet(cidr)
else:
self.service_out_ip.release_subnet(cidr) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'release_subnet'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Routine to release a subnet from the DB. |
def _check_asset_node_def(node_def):
if node_def.op != "Const":
raise TypeError("Asset node must be of type constant.")
if tf.as_dtype(node_def.attr["dtype"].type) != tf.string:
raise TypeError("Asset node must be of dtype string.")
if len(node_def.attr["value"].tensor.string_val) != 1:
raise TypeErro... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_check_asset_node_def'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Raises TypeError if `node_def` does not match the expectations. |
def _readData(self, id3, data):
for reader in self._framespec:
if len(data) or reader.handle_nodata:
try:
value, data = reader.read(id3, self, data)
except SpecError as e:
raise ID3JunkFrameError(e)
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_readData'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Raises ID3JunkFrameError; Returns leftover data |
def ensure_directory(directory):
directory = os.path.expanduser(directory)
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise e | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ensure_directory'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Create the directories along the provided directory path that do not exist. |
def cli(env):
manager = PlacementManager(env.client)
routers = manager.get_routers()
env.fout(get_router_table(routers))
rules = manager.get_all_rules()
env.fout(get_rule_table(rules)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env'}; {'id':... | List options for creating a placement group. |
def check_dependee_exists(self, depender, dependee, dependee_id):
shutit_global.shutit_global_object.yield_to_draw()
if dependee is None:
return 'module: \n\n' + dependee_id + '\n\nnot found in paths: ' + str(self.host['shutit_module_path']) + ' but needed for ' + depender.module_id + '\nCheck your --shutit_modu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_dependee_exists'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'child... | Checks whether a depended-on module is available. |
def _drop_ignored(gold, pred, ignore_in_gold, ignore_in_pred):
keepers = np.ones_like(gold).astype(bool)
for x in ignore_in_gold:
keepers *= np.where(gold != x, 1, 0).astype(bool)
for x in ignore_in_pred:
keepers *= np.where(pred != x, 1, 0).astype(bool)
gold = gold[keepers]
pred = p... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_drop_ignored'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []... | Remove from gold and pred all items with labels designated to ignore. |
def parse_compounds(self):
if 'compounds' in self._model:
for compound in parse_compound_list(
self._context, self._model['compounds']):
yield compound | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_compounds'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Yield CompoundEntries for defined compounds |
def configured_class(cls):
base = cls.configurable_base()
if base.__dict__.get('_Configurable__impl_class') is None:
base.__impl_class = cls.configurable_default()
return base.__impl_class | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'configured_class'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Returns the currently configured class. |
def format_item(self, item):
"Construct result dictionary for the match item."
result = {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'label': self.get_item_label(item),
}
for key in settings.SELECTABLE_ESCAPED_KEYS:
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_item'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Construct result dictionary for the match item. |
def hex_to_rgb(self, h):
rgb = (self.hex_to_red(h), self.hex_to_green(h), self.hex_to_blue(h))
return rgb | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hex_to_rgb'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Converts a valid hex color string to an RGB array. |
def authentications_spec(self):
return container_spec(authentication_objs.Authentication
, dictof(string_spec(), set_options(
reading = optional_spec(authentication_spec())
, writing = optional_spec(authentication_spec())
)
)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'authentications_spec'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Spec for a group of authentication options |
def cmd_ppp(self, args):
usage = "ppp <command|start|stop>"
if len(args) == 0:
print(usage)
return
if args[0] == "command":
if len(args) == 1:
print("ppp.command=%s" % " ".join(self.command))
else:
self.command = arg... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cmd_ppp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | set ppp parameters and start link |
def normalize_role(role, config):
if role.startswith(config["scm_repo_prefix"]):
role_name = role.replace(config["scm_repo_prefix"], "")
else:
if "." in role:
galaxy_prefix = "{0}.".format(config["scm_user"])
role_name = role.replace(galaxy_prefix, "")
elif "-" in... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'normalize_role'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Normalize a role name. |
def require_isis_version(major, minor=0, patch=0):
def decorator(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
check_isis_version(major, minor, patch)
return fn(*args, **kwargs)
return wrapper
return decorator | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'require_isis_version'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children':... | Decorator that ensures a function is called with a minimum isis version. |
def cancel_pending_tasks(self):
for task in self._pending_tasks:
task.cancel()
if not self._loop.is_running():
try:
self._loop.run_until_complete(task)
except asyncio.CancelledError:
pass
except Excep... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cancel_pending_tasks'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Cancel all pending tasks. |
def update_domain_smarthost(self, domainid, serverid, data):
return self.api_call(
ENDPOINTS['domainsmarthosts']['update'],
dict(domainid=domainid, serverid=serverid),
body=data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_domain_smarthost'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chi... | Update a domain smarthost |
def unpack_zipfile(filename):
with open(filename, "rb") as fzip:
z = zipfile.ZipFile(fzip)
for name in z.namelist():
print((" extracting {}".format(name)))
ensure_dirs(name)
z.extract(name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unpack_zipfile'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'fi... | Unpack a zipfile, using the names in the zip. |
def uint32_to_uint8(cls, img):
return np.flipud(img.view(dtype=np.uint8).reshape(img.shape + (4,))) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'uint32_to_uint8'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Cast uint32 RGB image to 4 uint8 channels. |
def to_dict(self):
data = self.extract_fields()
for key, attr in self.attributes.iteritems():
if key in self.ignore:
continue
value = getattr(self.context, attr, None)
if value is None:
value = getattr(self, attr, None)
if c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | extract the data of the content and return it as a dictionary |
def fromimporterror(cls, bundle, importerid, rsid, exception, endpoint):
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.IMPORT_ERROR,
bundle,
importerid,
rsid,
None,
None,
exception,
endpoint,
) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fromimporterror'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', '... | Creates a RemoteServiceAdminEvent object from an import error |
def _add_assert(self, **kwargs):
screenshot = kwargs.get('screenshot')
is_success = kwargs.get('success')
screenshot = (not is_success) if screenshot is None else screenshot
kwargs['screenshot'] = self._take_screenshot(screenshot=screenshot, name_prefix='assert')
action = kwargs.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_add_assert'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | if screenshot is None, only failed case will take screenshot |
def read(self, filehandle):
return self.__import(json.load(filehandle, **self.kwargs)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Read JSON from `filehandle`. |
def report_role(self, role):
self.yaml_files = []
fields = {
"state": "skipped",
"total_files": self.gather_files(),
"total_lines": self.gather_lines(),
"total_facts": self.gather_facts(),
"total_defaults": self.gather_defaults(),
"... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'report_role'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return the fields gathered. |
def drain(self, p):
self.logging.info('Started.')
fd = os.open(p, os.O_RDWR | os.O_NONBLOCK)
gevent_os.make_nonblocking(fd)
while self.loop():
try:
lines = gevent_os.nb_read(fd, 4096).splitlines()
if len(lines) == 0:
sleep(0... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'drain'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Reads the named pipe. |
def put_cache_results(self, key, func_akw, set_cache_cb):
args, kwargs = func_akw
func_results = self.func(*args, **kwargs)
if set_cache_cb(func_results):
self[key] = func_results
return func_results | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'put_cache_results'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children'... | Put function results into cache. |
def system(self):
if self._base == 2:
return "NIST"
elif self._base == 10:
return "SI"
else:
raise ValueError("Instances mathematical base is an unsupported value: %s" % (
str(self._base))) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'system'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | The system of units used to measure an instance |
def publish(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
payload = func(self, *args, **kwargs)
payload.pop('self', None)
self._publish(func.__name__, payload)
return None
wrapper.is_publish = True
return wrapper | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'publish'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'func'}; {... | publish the return value of this function as a message from this endpoint |
def tag_audio(filename, tracklisting):
if not(tag_audio_file(filename + '.m4a', tracklisting) or
tag_audio_file(filename + '.mp3', tracklisting)):
print("Cannot find or access any relevant M4A or MP3 audio file.")
print("Trying to save a text file instead.")
write_text(filename, t... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tag_audio'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'fi... | Return True if audio tagged successfully; handle tagging audio. |
def _gather_active_forms(self):
for stmt in self.statements:
if isinstance(stmt, ActiveForm):
base_agent = self.agent_set.get_create_base_agent(stmt.agent)
agent_to_add = stmt.agent
if stmt.agent.activity:
new_agent = fast_deepcopy(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_gather_active_forms'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Collect all the active forms of each Agent in the Statements. |
def Uptime():
uptime = ''
try:
uptime = check_output(['uptime'], close_fds=True).decode('utf-8')[1:]
except Exception as e:
logger.error('Could not get current uptime ' + str(e))
return uptime | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'Uptime'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9', '49']}; {'id': '5', '... | Get the current uptime information |
def _expand_path(path):
path = os.path.expandvars(path)
path = os.path.expanduser(path)
return path | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_expand_path'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'path... | Expand both environment variables and user home in the given path. |
def close(self):
if self.parent != ():
self.parent.flush()
self.parent.close()
if self.hdf5:
self.hdf5.flush()
self.hdf5.close()
self.hdf5 = () | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'close'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Close the underlying hdf5 file |
def delete(self):
if not self._ddoc_id:
raise CloudantArgumentError(125)
if not self._name:
raise CloudantArgumentError(126)
ddoc_id = self._ddoc_id
if ddoc_id.startswith('_design/'):
ddoc_id = ddoc_id[8:]
url = '/'.join((self.index_url, ddoc_i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Removes the current index from the remote database. |
def build_reference_wcs(inputs, sciname='sci'):
wcslist = []
for img in inputs:
nsci = countExtn(img)
for num in range(nsci):
extname = (sciname, num + 1)
if sciname == 'sci':
extwcs = wcsutil.HSTWCS(img, ext=extname)
else:
extw... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_reference_wcs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Create the reference WCS based on all the inputs for a field |
def connect(self):
    """Create connection to server.

    Resolves the best address parameters for ``self.host``/``self.port``,
    opens a socket with ``self.timeout`` applied, stores it on
    ``self.sock``, and connects.
    """
    params = self.best_connection_params(self.host, self.port)
    family, socktype, _proto, _cname, sockaddr = params
    sock = socket.socket(family, socktype)
    sock.settimeout(self.timeout)
    self.sock = sock
    sock.connect(sockaddr)
def current_branch(self):
    """Return the current branch name."""
    head_ref = git(self.gitdir, self.gitwd, "symbolic-ref", "HEAD")
    # Drop the "refs/heads/" prefix and surrounding whitespace to get
    # the bare branch name.
    return head_ref.replace('refs/heads/', '').strip()
def _get_item_from_search_response(self, response, type_):
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_item_from_search_response'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'c... | Returns either a Song or Artist result from search_genius_web |
def _format_type(cls):
    """Format a type name for printing.

    Built-in types render as their bare name; everything else is
    qualified with its defining module.
    """
    module, name = cls.__module__, cls.__name__
    if module == _BUILTIN_MODULE:
        return name
    return '%s.%s' % (module, name)
def from_path(filename):
    """Creates a sourcemap view from a file path.

    Raises:
        ValueError: if the path contains an embedded null byte.
    """
    raw = to_bytes(filename)
    if NULL_BYTE in raw:
        raise ValueError('null byte in path')
    # The native call expects a NUL-terminated byte string.
    ptr = rustcall(_lib.lsm_proguard_mapping_from_path, raw + b'\x00')
    return ProguardView._from_ptr(ptr)
def _fft_convolve_gpu(data_g, h_g, res_g = None,
plan = None, inplace = False,
kernel_is_fft = False):
assert_bufs_type(np.complex64,data_g,h_g)
if data_g.shape != h_g.shape:
raise ValueError("data and kernel must have same size! %s vs %s "%(str(data_g.shape),... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fft_convolve_gpu'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15']}; {'id': '4', 'type': 'identifier... | fft convolve for gpu buffer |
def getkey(stype, site_id=None, key=None):
'Returns the cache key depending on its type.'
base = '{0}.feedjack'.format(settings.CACHE_MIDDLEWARE_KEY_PREFIX)
if stype == T_HOST: return '{0}.hostcache'.format(base)
elif stype == T_ITEM: return '{0}.{1}.item.{2}'.format(base, site_id, str2md5(key))
elif stype == T_ME... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getkey'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Returns the cache key depending on its type. |
def _rewrite_q(self, q):
    """Rewrite field names inside Q call.

    A ``(lookup, value)`` pair gets its lookup key rewritten; a ``Node``
    has each of its children rewritten recursively in place.
    """
    if isinstance(q, tuple) and len(q) == 2:
        lookup, value = q
        return rewrite_lookup_key(self.model, lookup), value
    if isinstance(q, Node):
        q.children = [self._rewrite_q(child) for child in q.children]
    return q
def GET_save_timegrid(self) -> None:
    """Save the current simulation period.

    Stores a deep copy of the active simulation timegrid under this
    handler's id so it can be restored later.
    """
    current_sim = hydpy.pub.timegrids.sim
    state.timegrids[self._id] = copy.deepcopy(current_sim)
def _set_init_vars_and_dims(self, data_vars, coords, compat):
both_data_and_coords = [k for k in data_vars if k in coords]
if both_data_and_coords:
raise ValueError('variables %r are found in both data_vars and '
'coords' % both_data_and_coords)
if isinst... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_init_vars_and_dims'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chi... | Set the initial value of Dataset variables and dimensions |
def getPatches(self):
    """Get patches as a dictionary.

    Without validation the raw store is returned directly; otherwise the
    patch store is merged with the patches of every sub-store.
    """
    if not self.mustValidate:
        return self.getStore()
    patches = dict(self.patchStore)
    for name, store in self.subStores.items():
        patches[name] = store.getPatches()
    return patches
def _keep_assembled_chrom(bam_file, genome, config):
fai = "%s.fai" % genome
chrom = []
with open(fai) as inh:
for line in inh:
c = line.split("\t")[0]
if c.find("_") < 0:
chrom.append(c)
chroms = " ".join(chrom)
out_file = utils.append_stem(bam_file, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_keep_assembled_chrom'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':... | Remove contigs from the BAM file |
def _preprocess(self, data, train):
if train:
inputs, labels = data
self.data_mean = inputs.mean(axis=0)
self.data_std = inputs.std(axis=0)
self.labels_mean = labels.mean(axis=0)
self.labels_std = labels.std(axis=0)
return ((inputs-self.da... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_preprocess'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Zero-mean, unit-variance normalization by default |
def to_dict(self):
viewconf = json.loads(json.dumps(self.viewconf))
for track in self.tracks:
if track.position is None:
raise ValueError(
"Track has no position: {}".format(track.viewconf["type"])
)
viewconf["tracks"][track.pos... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Convert the existing track to a JSON representation. |
def decode_pc11_message(raw_string):
data = {}
spot = raw_string.split("^")
data[const.FREQUENCY] = float(spot[1])
data[const.DX] = spot[2]
data[const.TIME] = datetime.fromtimestamp(mktime(strptime(spot[3]+" "+spot[4][:-1], "%d-%b-%Y %H%M")))
data[const.COMMENT] = spot[5]
data[const.SPOTTER]... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'decode_pc11_message'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Decode PC11 message, which usually contains DX Spots |
def cast_bytes(s, encoding='utf8', errors='strict'):
    """Cast str or bytes to bytes.

    Bytes pass through untouched; text is encoded with the given
    encoding/errors policy; anything else raises ``TypeError``.
    """
    if isinstance(s, str):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        return s
    raise TypeError("Expected unicode or bytes, got %r" % s)
def brightness_to_hex(self, level):
    """Convert numeric brightness percentage into hex for insteon.

    ``level`` is a 0-100 percentage; the result is a two-digit
    uppercase hex string for the 0-255 device range.
    """
    scaled = int((int(level) * 255) / 100)
    hex_level = format(scaled, '02X')
    self.logger.debug("brightness_to_hex: %s to %s", level, str(hex_level))
    return str(hex_level)
def add_rule_to_model(model, rule, annotations=None):
    """Add a Rule to a PySB model and handle duplicate component errors.

    When the rule's name already exists in the model, the duplicate is
    skipped with a debug log message instead of raising.
    """
    try:
        model.add_component(rule)
        if annotations:
            model.annotations += annotations
    except ComponentDuplicateNameError:
        logger.debug("Rule %s already in model! Skipping." % rule.name)
def insert(self, index, value):
    """Validate item insertion to list.

    The element is checked by the owning field's validator before being
    delegated to ``list.insert``.
    """
    field = self.__field
    field.validate_element(value)
    return list.insert(self, index, value)
def error_info():
    """Return information about failed tasks.

    Combines the error messages for the current driver with the
    driver-independent ones (nil driver id).
    """
    global_worker.check_connected()
    driver_errors = global_state.error_messages(
        driver_id=global_worker.task_driver_id)
    shared_errors = global_state.error_messages(driver_id=DriverID.nil())
    return driver_errors + shared_errors
def defaultFile(self):
    """Produce a reasonable default.

    Returns the current frame's source filename, substituting the main
    script path when the frame was compiled from a string.
    """
    filename = self.curframe.f_code.co_filename
    if filename != '<string>' or not self.mainpyfile:
        return filename
    return self.mainpyfile
def _createtoken(self, type_, value, flags=None):
    """Create a token with position information.

    Pops the two most recent entries from ``self._positions`` (the most
    recently recorded one comes off first) and attaches them as the
    token's two-element position list.
    """
    # Fixed: dropped the dead ``pos = None`` store the original carried.
    assert len(self._positions) >= 2, (type_, value)
    later = self._positions.pop()
    earlier = self._positions.pop()
    return token(type_, value, [earlier, later], flags)
def index_exists(self):
headers = {'Content-Type': 'application/json', 'DB-Method': 'GET'}
url = '/v2/exchange/db/{}/{}/_search'.format(self.domain, self.data_type)
r = self.tcex.session.post(url, headers=headers)
if not r.ok:
self.tcex.log.warning('The provided index was not... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'index_exists'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Check to see if index exists. |
def coerce(self, value):
if self.type == Style:
return value
elif self.type == list:
return self.type(
map(self.subtype, map(lambda x: x.strip(), value.split(',')))
)
elif self.type == dict:
rv = {}
for pair in value.spl... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'coerce'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Cast a string into this key type |
def _load_dataset(dsid, ds_info, file_handlers, dim='y'):
slice_list = []
failure = True
for fh in file_handlers:
try:
projectable = fh.get_dataset(dsid, ds_info)
if projectable is not None:
slice_list.append(projectable)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_dataset'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [... | Load only a piece of the dataset. |
def process_task(self):
    """Called when the registration request should be sent to the BBMD."""
    registration = RegisterForeignDevice(self.bbmdTimeToLive)
    # Address the registration PDU to the configured BBMD.
    registration.pduDestination = self.bbmdAddress
    self.request(registration)
def _parse_value(self):
indent = 0
while self._cur_token['type'] is TT.ws:
indent = self._skip_whitespace()
self._skip_newlines()
if self._cur_token['type'] is TT.id:
return self._parse_key(indent)
elif self._cur_token['type'] is TT.hyphen:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_value'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Parse the value of a key-value pair. |
def _axes(self):
    """Set the _force_vertical flag when rendering axes.

    The flag is restored in a ``finally`` block so one failed render
    cannot leave the view stuck in forced-vertical mode (the original
    skipped the reset if the parent call raised).
    """
    self.view._force_vertical = True
    try:
        super(HorizontalGraph, self)._axes()
    finally:
        self.view._force_vertical = False
def check(self):
for name, valItem, dtype in self.values:
val = valItem.text()
if dtype:
try:
val = dtype(val)
except:
msgBox = QtWidgets.QMessageBox()
msgBox.setText(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | check whether all attributes are setted and have the right dtype |
def write_stats (self):
self.writeln(u'<br/><i>%s</i><br/>' % _("Statistics"))
if self.stats.number > 0:
self.writeln(_(
"Content types: %(image)d image, %(text)d text, %(video)d video, "
"%(audio)d audio, %(application)d application, %(mail)d mail"
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_stats'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Write check statistic infos. |
def _parse(yr, mo, day):
yr = '20'+yr
yr = int(yr)
mo = int(mo)
day = int(day)
return pds.datetime(yr, mo, day) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Basic parser to deal with date format of the Kp file. |
def content(self):
toolbars = OrderedDict()
for id, toolbar in DebugToolbar._store.items():
content = {}
for panel in toolbar.panels:
panel_id = None
nav_title = ''
nav_subtitle = ''
try:
panel_id... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'content'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Content of the panel when it's displayed in full screen. |
def scan_results(self):
bsses = self._wifi_ctrl.scan_results(self._raw_obj)
if self._logger.isEnabledFor(logging.INFO):
for bss in bsses:
self._logger.info("Find bss:")
self._logger.info("\tbssid: %s", bss.bssid)
self._logger.info("\tssid: %s",... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'scan_results'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Return the scan result. |
def genome(self):
    """"genome" dictionary ready for pybedtools, based on the BAM header.

    Maps each reference name to a ``(0, length)`` interval tuple.
    """
    bam = self.adapter.fileobj
    return {name: (0, size) for name, size in zip(bam.references, bam.lengths)}
def price_humanized(value, inst, currency=None):
    """Return a humanized price.

    ``None`` values render as the translated placeholder; otherwise the
    number is formatted with the given (or default CZK) currency.
    """
    if value is None:
        return ugettext('(None)')
    if currency is None:
        currency = ugettext('CZK')
    return natural_number_with_currency(value, currency)
async def on_raw_notice(self, message):
nick, metadata = self._parse_user(message.source)
target, msg = message.params
if is_ctcp(msg):
self._sync_user(nick, metadata)
type, response = parse_ctcp(msg)
attr = 'on_ctcp_' + pydle.protocol.identifierify(type) + '_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_raw_notice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Modify NOTICE to redirect CTCP messages. |
def clonerepo(barerepo, userrepo):
    """Clone a bare base repo to a user.

    Clones ``barerepo`` into ``userrepo`` and wraps the fresh clone in an
    ActiveGit working copy, which is returned.
    """
    git.clone(barerepo, userrepo)
    # NOTE(review): the original bound this to an unused local and
    # discarded it; returning the instance is backward compatible and
    # exposes the initialized working copy to callers.
    return activegit.ActiveGit(userrepo)
def update_from_pypi(self):
package = pypi.Package(self.package_name)
self.licence = package.licence()
if self.is_parseable:
self.latest_version = package.latest_version()
self.next_version = package.next_version(self.current_version)
self.diff_status = pypi.v... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_from_pypi'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Call get_latest_version and then save the object. |
def _shouldOwn(self, param):
if not (self.uid == param.parent and self.hasParam(param.name)):
raise ValueError("Param %r does not belong to %r." % (param, self)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_shouldOwn'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Validates that the input param belongs to this Params instance. |
def nr_genes(self):
    """Return the number of genes.

    Falls back to counting ``gene_symbols`` when the ``genes`` entry is
    not truthy.
    """
    genes = self['genes']
    return len(genes) if genes else len(self['gene_symbols'])
def _reindex_multi(self, axes, copy, fill_value):
new_index, row_indexer = self.index.reindex(axes['index'])
new_columns, col_indexer = self.columns.reindex(axes['columns'])
if row_indexer is not None and col_indexer is not None:
indexer = row_indexer, col_indexer
new_val... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_reindex_multi'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [... | We are guaranteed non-Nones in the axes. |
def create_description(self, complib=None, complevel=None,
fletcher32=False, expectedrows=None):
if expectedrows is None:
expectedrows = max(self.nrows_expected, 10000)
d = dict(name='table', expectedrows=expectedrows)
d['description'] = {a.cname: a.typ for... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_description'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', '... | create the description of the table from the axes & values |
def reload(self):
    """Reload the metadata for this cluster.

    Fetches the latest app-profile protobuf from the admin API and
    refreshes this object's fields from it.
    """
    latest_pb = self.instance_admin_client.get_app_profile(self.name)
    self._update_from_pb(latest_pb)
def connect(self):
self.urlobj = getImageObject(self.url, self.referrer, self.session)
content_type = unquote(self.urlobj.headers.get('content-type', 'application/octet-stream'))
content_type = content_type.split(';', 1)[0]
if '/' in content_type:
maintype, subtype = content_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'connect'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Connect to host and get meta information. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.