code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def terminal_type(cls):
what = sys.platform
kind = 'UNDEFINED_TERMINAL_TYPE'
if 'linux' in what:
kind = 'linux'
elif 'darwin' in what:
kind = 'darwin'
elif 'cygwin' in what:
kind = 'cygwin'
elif 'windows' in what:
kind = 'wi... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'terminal_type'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls... | returns darwin, cygwin, cmd, or linux |
def pandoc(args, filein=None, fileout=None):
cmd = [u'pandoc']
if filein:
cmd.append(filein)
if fileout:
cmd.append('-o')
cmd.append(fileout)
cmd.extend(args.split())
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pandoc'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Execute pandoc with the given arguments |
def load_mode_builder(obs_mode, node):
nval1 = node.get('builder')
if nval1 is not None:
if isinstance(nval1, str):
newmethod = import_object(nval1)
obs_mode.build_ob = newmethod.__get__(obs_mode)
else:
raise TypeError('builder must be None or a string')
e... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_mode_builder'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Load observing mode OB builder |
def _init_flds_cur(self):
flds = []
flds0 = ['GO', 'NS', 'enrichment', self.pval_fld, 'dcnt', 'tinfo', 'depth',
'ratio_in_study', 'ratio_in_pop', 'name']
flds_p = [f for f in self.flds_all if f[:2] == 'p_' and f != self.pval_fld]
flds.extend(flds0)
if flds_p:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_flds_cur'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Choose fields to print from a multitude of available fields. |
def ms_rotate(self, viewer, event, data_x, data_y, msg=True):
if not self.canrotate:
return True
msg = self.settings.get('msg_rotate', msg)
x, y = self.get_win_xy(viewer)
if event.state == 'move':
self._rotate_xy(viewer, x, y)
elif event.state == 'down':
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ms_rotate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'childr... | Rotate the image by dragging the cursor left or right. |
def validate_relation_data(self, sentry_unit, relation, expected):
actual = sentry_unit.relation(relation[0], relation[1])
return self._validate_dict_data(expected, actual) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_relation_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chil... | Validate actual relation data based on expected relation data. |
def _assemble_regulate_activity(stmt):
subj_str = _assemble_agent_str(stmt.subj)
obj_str = _assemble_agent_str(stmt.obj)
if stmt.is_activation:
rel_str = ' activates '
else:
rel_str = ' inhibits '
stmt_str = subj_str + rel_str + obj_str
return _make_sentence(stmt_str) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_assemble_regulate_activity'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [],... | Assemble RegulateActivity statements into text. |
def delete_node_1ton(node_list, begin, node, end):
if end is None:
assert end is not None
end = node.successor
elif not isinstance(end, list):
end = [end]
if any(e_.in_or_out for e_ in end):
begin.out_redirect(node.single_input, node.single_output)... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_node_1ton'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children':... | delete the node which has 1-input and n-output |
def _reuse_pre_installed_setuptools(env, installer):
if not env.setuptools_version:
return
reuse_old = config.reuse_old_setuptools
reuse_best = config.reuse_best_setuptools
reuse_future = config.reuse_future_setuptools
reuse_comment = None
if reuse_old or reuse_best or reuse_future... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_reuse_pre_installed_setuptools'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'child... | Return whether a pre-installed setuptools distribution should be reused. |
def update_docs(self, iface, module):
key = "{}.{}".format(module.name, iface.name)
if key in module.predocs:
iface.docstring = self.docparser.to_doc(module.predocs[key][0], iface.name)
iface.docstart, iface.docend = (module.predocs[key][1], module.predocs[key][2]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_docs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Updates the documentation for the specified interface using the module predocs. |
def remove_router_from_hosting_device(self, client, hosting_device_id,
router_id):
res_path = hostingdevice.HostingDevice.resource_path
return client.delete((res_path + DEVICE_L3_ROUTERS + "/%s") % (
hosting_device_id, router_id)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_router_from_hosting_device'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identif... | Remove a router from hosting_device. |
def interrupt(self):
if(self.device.read(9) & 0x01):
self.handle_request()
self.device.clear_IR() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'interrupt'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Invoked on a write operation into the IR of the RendererDevice. |
def _start_server(self, *args):
self.log("Starting server", args)
secure = self.certificate is not None
if secure:
self.log("Running SSL server with cert:", self.certificate)
else:
self.log("Running insecure server without SSL. Do not use without SSL proxy in prod... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_start_server'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Run the node local server |
def matches(target, entry):
for t, e in itertools.zip_longest(target, entry):
if e and t != e:
return False
return entry[0] and entry[1] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'matches'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'targ... | Does the target match the whitelist entry? |
def _ExtractHuntIdFromPath(entry, event):
match = re.match(r".*hunt/([^/]+).*", entry.http_request_path)
if match:
event.urn = "aff4:/hunts/{}".format(match.group(1)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_ExtractHuntIdFromPath'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | Extracts a Hunt ID from an APIAuditEntry's HTTP request path. |
def delete_url(self, url):
for decompress in [False, True]:
key = (url, decompress)
if key in self._local_paths:
path = self._local_paths[key]
remove(path)
del self._local_paths[key]
path = self.local_path(
url, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Delete local files downloaded from given URL |
def create(cls, bucket, key, value):
with db.session.begin_nested():
obj = cls(
bucket_id=as_bucket_id(bucket),
key=key,
value=value
)
db.session.add(obj)
return obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Create a new tag for bucket. |
def render_field_previews(self, id_and_obj_list, admin, request, field_name):
obj_preview_list = []
for obj_id, obj in id_and_obj_list:
try:
if obj is None:
obj_preview = self.render_field_error(
obj_id, obj, None, request
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'render_field_previews'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', '... | Override this to customise the preview representation of all objects. |
def cl_mutect(self, params, tmp_dir):
gatk_jar = self._get_jar("muTect", ["mutect"])
jvm_opts = config_utils.adjust_opts(self._jvm_opts,
{"algorithm": {"memory_adjust":
{"magnitude": 1.1, "direction": ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cl_mutect'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Define parameters to run the mutect paired algorithm. |
def _find_stages(self):
stages = []
end = last_user_found = None
for part in reversed(self.dfp.structure):
if end is None:
end = part
if part['instruction'] == 'USER' and not last_user_found:
last_user_found = part['content']
if... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_find_stages'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Find limits of each Dockerfile stage |
def flush(self):
if not self._emit_partial and len(self._state) != self._state.maxlen:
self.notify(tuple(self._state))
self._state.clear() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'flush'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Flush the queue - this will emit the current queue |
def _cron_id(cron):
cid = None
if cron['identifier']:
cid = cron['identifier']
else:
cid = SALT_CRON_NO_IDENTIFIER
if cid:
return _ensure_string(cid) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_cron_id'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cron'}; ... | SAFETYBELT, Only set if we really have an identifier |
def _store_credentials(self, username, password, remember=False):
if username and password and remember:
CONF.set('main', 'report_error/username', username)
try:
keyring.set_password('github', username, password)
except Exception:
if self._show... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_store_credentials'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childre... | Store credentials for future use. |
def _parse_tag(self, name):
from_ = self._get_from(b'tag')
tagger = self._get_user_info(b'tag', b'tagger',
accept_just_who=True)
message = self._get_data(b'tag', b'message')
return commands.TagCommand(name, from_, tagger, message) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_tag'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Parse a tag command. |
def sequences_from_fasta(path):
from Bio import SeqIO
return {x.description: x.seq for x in SeqIO.parse(path, 'fasta')} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sequences_from_fasta'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Extract multiple sequences from a FASTA file. |
def headers_as_dict(cls, resp):
if six.PY2:
pairs = [header.split(':', 1) for header in resp.msg.headers]
return dict([(k, v.strip()) for k, v in pairs])
else:
return dict([(k, v.strip()) for k, v in resp.msg._headers]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'headers_as_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Turns an array of response headers into a dictionary |
def can_undo(self):
return bool(self._undo) or bool(self._open and self._open[0]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'can_undo'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Are there actions to undo? |
def cached(size):
def decorator(func):
cached_func = _Cached(func, size)
return lambda *a, **kw: cached_func(*a, **kw)
return decorator | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cached'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'size'}; {'... | A caching decorator based on parameter objects |
def _align_mem(fastq_file, pair_file, ref_file, out_file, names, rg_info, data):
with postalign.tobam_cl(data, out_file, pair_file != "") as (tobam_cl, tx_out_file):
cmd = ("unset JAVA_HOME && "
"%s | %s" % (_get_bwa_mem_cmd(data, out_file, ref_file, fastq_file, pair_file), tobam_cl))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_align_mem'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10']}; {'id': '4', 'type': 'identifier', ... | Perform bwa-mem alignment on supported read lengths. |
def __catalina_home():
locations = ['/usr/share/tomcat*', '/opt/tomcat']
for location in locations:
folders = glob.glob(location)
if folders:
for catalina_home in folders:
if os.path.isdir(catalina_home + "/bin"):
return catalina_home
return Fa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__catalina_home'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11', '45']}; {'i... | Tomcat paths differ depending on packaging |
def short_repr(item, max_length=15):
item = repr(item)
if len(item) > max_length:
item = '{}...{}'.format(item[:max_length - 3], item[-1])
return item | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'short_repr'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'i... | Short representation of item if it is too long |
def filter_string(n: Node, query: str) -> str:
return _scalariter2item(n, query, str) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'filter_string'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': ... | Filter and ensure that the returned value is of string type. |
def update_contributions(sender, instance, action, model, pk_set, **kwargs):
if action != 'pre_add':
return
else:
for author in model.objects.filter(pk__in=pk_set):
update_content_contributions(instance, author) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_contributions'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifie... | Creates a contribution for each author added to an article. |
def parse_access_token(self):
access_file = os.path.join(self.file_path, 'access_token')
if os.path.isfile(access_file):
access_list = list()
with open(access_file, 'r') as access_token:
for line in access_token:
value, data = line.split('=')
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_access_token'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Extract the secret and token values from the access_token file |
def prepare(self, ansi='', ensure_trailing_newline=False):
body, styles = self.apply_regex(ansi)
if ensure_trailing_newline and _needs_extra_newline(body):
body += '\n'
self._attrs = {
'dark_bg': self.dark_bg,
'line_wrap': self.line_wrap,
'font_siz... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'prepare'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Load the contents of 'ansi' into this object |
def flatten_unique(l: Iterable) -> List:
rval = OrderedDict()
for e in l:
if not isinstance(e, str) and isinstance(e, Iterable):
for ev in flatten_unique(e):
rval[ev] = None
else:
rval[e] = None
return list(rval.keys()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'flatten_unique'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5',... | Return a list of UNIQUE non-list items in l |
def setLocation(self, x, y):
self.x = int(x)
self.y = int(y)
return self | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setLocation'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Set the location of this object to the specified coordinates. |
def lastId(self) -> BaseReference:
if self.childIds is not None:
if len(self.childIds) > 0:
return self.childIds[-1]
return None
else:
raise NotImplementedError | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'lastId'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Last child's id of current TextualNode |
def FileHacks(self):
if sys.platform == "win32":
import win32api
if self.path == "/":
self.files = win32api.GetLogicalDriveStrings().split("\x00")
self.files = [drive.rstrip("\\") for drive in self.files if drive]
elif re.match(r"/*\\\\.\\[^\\]+\\?$", self.path) is not None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'FileHacks'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Hacks to make the filesystem look normal. |
def extend(self, content, zorder):
if zorder not in self._content:
self._content[zorder] = []
self._content[zorder].extend(content) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extend'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Extends with a list and a z-order |
def build_stop_ids(shape_id):
return [cs.SEP.join(['stp', shape_id, str(i)]) for i in range(2)] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_stop_ids'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sh... | Create a pair of stop IDs based on the given shape ID. |
def build_specfile_sections(spec):
str = ""
mandatory_sections = {
'DESCRIPTION' : '\n%%description\n%s\n\n', }
str = str + SimpleTagCompiler(mandatory_sections).compile( spec )
optional_sections = {
'DESCRIPTION_' : '%%description -l %s\n%s\n\n',
'CHANGELOG' : ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_specfile_sections'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Builds the sections of a rpm specfile. |
def _pdist(p):
index, ref, ampl, cutoff, beta = p[:5]
if cutoff == 0.0:
pdist = models.PowerLaw(
ampl * 1e30 * u.Unit("1/eV"), ref * u.TeV, index
)
else:
pdist = models.ExponentialCutoffPowerLaw(
ampl * 1e30 * u.Unit("1/eV"),
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_pdist'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'p'}; {'id'... | Return PL or ECPL instance based on parameters p |
def connect_event_handlers(self):
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'connect_event_handlers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Connects event handlers to the figure. |
def handle_profile_save(self, sender, instance, **kwargs):
self.handle_save(instance.user.__class__, instance.user) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_profile_save'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childre... | Custom handler for user profile save |
def scipy_sparse_to_spmatrix(A):
coo = A.tocoo()
SP = spmatrix(coo.data.tolist(), coo.row.tolist(), coo.col.tolist(), size=A.shape)
return SP | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'scipy_sparse_to_spmatrix'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Efficient conversion from scipy sparse matrix to cvxopt sparse matrix |
def _get_appoptics(options):
conn = appoptics_metrics.connect(
options.get('api_token'),
sanitizer=appoptics_metrics.sanitize_metric_name,
hostname=options.get('api_url'))
log.info("Connected to appoptics.")
return conn | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_appoptics'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'op... | Return an appoptics connection object. |
def _missing_imageinfo(self):
if 'image' not in self.data:
return
missing = []
for img in self.data['image']:
if 'url' not in img:
missing.append(img['file'])
return list(set(missing)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_missing_imageinfo'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | returns list of image filenames that are missing info |
def strip_prompt_login(path):
uri = urlsplit(path)
query_params = parse_qs(uri.query)
prompt_list = query_params.get('prompt', '')[0].split()
if 'login' in prompt_list:
prompt_list.remove('login')
query_params['prompt'] = ' '.join(prompt_list)
if not query_params['prompt']:
d... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'strip_prompt_login'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Strips 'login' from the 'prompt' query parameter. |
def generate_output_path(args, project_path):
milisec = datetime.now().microsecond
dirname = 'results_{}_{}'.format(time.strftime('%Y.%m.%d_%H.%M.%S', time.localtime()), str(milisec))
return os.path.join(project_path, 'results', dirname) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_output_path'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Generate default output directory |
def next(self, type=None):
i = self.index + 1
s = self.sentence
while i < len(s):
if type in (s[i].type, None):
return s[i]
i += 1 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'next'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Returns the next word in the sentence with the given type. |
def _check_import_source():
path_rel = '~/cltk_data/greek/software/greek_software_tlgu/tlgu.h'
path = os.path.expanduser(path_rel)
if not os.path.isfile(path):
try:
corpus_importer = CorpusImporter('greek')
corpus_importer.import_corpus('greek_software... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_check_import_source'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9', '20']};... | Check if tlgu imported, if not import it. |
async def wait_for_election_success(cls):
if cls.leader is None:
cls.leader_future = asyncio.Future(loop=cls.loop)
await cls.leader_future | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'wait_for_election_success'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Await this function if your cluster must have a leader |
def _validate_required(self, attributes):
required_fulfilled = set(self._required).issubset(set(attributes))
if not required_fulfilled:
raise ValueError(
"Not all required attributes fulfilled. Required: {required}".format(required=set(self._required))
) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_required'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Ensure required attributes are present. |
def refresh(self):
if not self._client:
return
current_networks = self._client.networks()
self.clear()
self.update((net['Name'], net['Id'])
for net in current_networks) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'refresh'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Fetches all current network names from the client, along with their id. |
def clicked(self, event):
    """Print the group name and number of items in the clicked bin."""
    artist = event.artist
    count = artist._mt_n
    group = artist._mt_group
    # num2date converts the bin's matplotlib date number to a datetime.
    start = num2date(artist._mt_bin)
    print("%4i %s events in %s sec beginning at %s"
          % (count, group, self.bucketsize, start.strftime("%b %d %H:%M:%S")))
def __parse_affiliations_yml(self, affiliations):
enrollments = []
for aff in affiliations:
name = self.__encode(aff['organization'])
if not name:
error = "Empty organization name"
msg = self.GRIMOIRELAB_INVALID_FORMAT % {'error': error}
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__parse_affiliations_yml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [... | Parse identity's affiliations from a yaml dict. |
def _decorate_axes(ax, freq, kwargs):
if not hasattr(ax, '_plot_data'):
ax._plot_data = []
ax.freq = freq
xaxis = ax.get_xaxis()
xaxis.freq = freq
if not hasattr(ax, 'legendlabels'):
ax.legendlabels = [kwargs.get('label', None)]
else:
ax.legendlabels.append(kwargs.get('la... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_decorate_axes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Initialize axes for time-series plotting |
def _get_name_map(saltenv='base'):
    """Return a reverse map of full pkg names to the names winrepo recognizes.

    On Python 3 the repo map is returned as-is; on Python 2 a fresh dict is
    built from it (preserving the original's copy-on-PY2 behavior).
    """
    name_map = get_repo_data(saltenv).get('name_map', {})
    if not six.PY2:
        return name_map
    return {key: name_map[key] for key in name_map}
def bundlestate_to_str(state):
states = {
pelix.Bundle.INSTALLED: "INSTALLED",
pelix.Bundle.ACTIVE: "ACTIVE",
pelix.Bundle.RESOLVED: "RESOLVED",
pelix.Bundle.STARTING: "STARTING",
pelix.Bundle.STOPPING: "STOPPING",
pelix.Bundle.UNINSTALLED:... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bundlestate_to_str'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Converts a bundle state integer to a string |
def layout(self, rect=None, width=0, height=0, fontsize=11):
if self.isClosed or self.isEncrypted:
raise ValueError("operation illegal for closed / encrypted doc")
val = _fitz.Document_layout(self, rect, width, height, fontsize)
self._reset_page_refs()
self.initData()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'layout'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', 'children': [... | Re-layout a reflowable document. |
def setter_generator(field_name):
    """Generate a set_<field_name> method that writes *field_name* on a translation.

    The generated callable fetches (or creates) the translation for the
    given language code and sets the captured field on it.
    """
    def set_translation_field(cls, value, language_code=None):
        translation = cls.get_translation(language_code, True)
        setattr(translation, field_name, value)
    set_translation_field.short_description = "set " + field_name
    return set_translation_field
def convert_time(time):
split_time = time.split()
try:
am_pm = split_time[1].replace('.', '')
time_str = '{0} {1}'.format(split_time[0], am_pm)
except IndexError:
return time
try:
time_obj = datetime.strptime(time_str, '%I:%M %p')
except ValueError:
time_obj =... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_time'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'time... | Convert a time string into 24-hour time. |
def api_walk(uri, per_page=100, key="login"):
page = 1
result = []
while True:
response = get_json(uri + "?page=%d&per_page=%d" % (page, per_page))
if len(response) == 0:
break
else:
page += 1
for r in response:
if key == USER_LOGIN... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'api_walk'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | For a GitHub URI, walk all the pages until there's no more content |
def add_checkpoint_file(self, filename):
    """Register *filename* as a checkpoint file for this DAG job (no duplicates)."""
    files = self.__checkpoint_files
    if filename not in files:
        files.append(filename)
def complete(text, state):
global completion_results
if state == 0:
line = readline.get_line_buffer()
if line.startswith(':'):
completion_results = complete_control_command(line, text)
else:
if line.startswith('!') and text and line.startswith(text):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'complete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'tex... | On tab press, return the next possible completion |
def handle_input(self, proxy, event_type, event, refcon):
self.update_timeval()
self.events = []
if event_type in (1, 2, 3, 4, 25, 26, 27):
self.handle_button(event, event_type)
if event_type == 22:
self.handle_scrollwheel(event)
self.handle_absolute(event... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_input'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children'... | Handle an input event. |
def identify_pycbc_live(origin, filepath, fileobj, *args, **kwargs):
    """Identify a PyCBC Live file: an HDF5 file whose basename matches PYCBC_FILENAME."""
    if not identify_hdf5(origin, filepath, fileobj, *args, **kwargs):
        return False
    return filepath is not None and bool(PYCBC_FILENAME.match(basename(filepath)))
def _parse_regr_response(self, response, uri=None, new_authzr_uri=None,
terms_of_service=None):
links = _parse_header_links(response)
if u'terms-of-service' in links:
terms_of_service = links[u'terms-of-service'][u'url']
if u'next' in links:
n... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_regr_response'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', ... | Parse a registration response from the server. |
def string_to_sign(self):
    """The AWS SigV4 string to sign.

    Joins the algorithm identifier, request timestamp, credential scope
    and the SHA-256 hex digest of the canonical request with newlines.
    """
    hashed_request = sha256(self.canonical_request.encode("utf-8")).hexdigest()
    parts = (AWS4_HMAC_SHA256,
             self.request_timestamp,
             self.credential_scope,
             hashed_request)
    return "\n".join(parts)
def _clean_cleaned_data(self):
reference_node_id = 0
if '_ref_node_id' in self.cleaned_data:
reference_node_id = self.cleaned_data['_ref_node_id']
del self.cleaned_data['_ref_node_id']
position_type = self.cleaned_data['_position']
del self.cleaned_data['_position... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_clean_cleaned_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | delete auxilary fields not belonging to node model |
def idf2txt(txt):
astr = nocomment(txt)
objs = astr.split(';')
objs = [obj.split(',') for obj in objs]
objs = [[line.strip() for line in obj] for obj in objs]
objs = [[_tofloat(line) for line in obj] for obj in objs]
objs = [tuple(obj) for obj in objs]
objs.sort()
lst = []
for obj in... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'idf2txt'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'txt'}; {'... | convert the idf text to a simple text |
def pre_operations(self, mode=None):
    """Return the pre-operations of the version mode selected by *mode*."""
    return self._get_version_mode(mode=mode).pre_operations
def arg_int(name, default=None):
    """Fetch query argument *name* as an int, or *default* when absent/invalid."""
    try:
        # get() returns None for a missing arg; int(None) raises TypeError,
        # a non-numeric string raises ValueError -- both yield the default.
        return int(request.args.get(name))
    except (ValueError, TypeError):
        return default
def kill_clients():
clients = request.form.get('clients').split(',')
kill_dead = request.args.get('kill_dead', default=False)
kill_dead = kill_dead and kill_dead in ['true', '1']
if not kill_dead and not clients:
return jsonify({'Error': 'no clients provided'})
for client in list(drivers.key... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'kill_clients'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '21', '35', '45', '6... | Force kill driver and other objects for a perticular clien |
def generate_additional_context(self, matching_datasets):
    """Return the three most frequent tags across *matching_datasets*."""
    top_tags = (
        Tag.objects
        .filter(dataset__in=matching_datasets)
        .annotate(tag_count=Count('word'))
        .order_by('-tag_count')[:3]
    )
    return {'top_tags': top_tags}
def _request_toc_element(self, index):
logger.debug('Requesting index %d on port %d', index, self.port)
pk = CRTPPacket()
if self._useV2:
pk.set_header(self.port, TOC_CHANNEL)
pk.data = (CMD_TOC_ITEM_V2, index & 0x0ff, (index >> 8) & 0x0ff)
self.cf.send_packet... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_request_toc_element'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Request information about a specific item in the TOC |
def sys_mem_limit(self):
    """Determine the default memory limit for the current service unit.

    32-bit ARM machines get a smaller limit; everything else defaults to 4G.
    """
    size = '2700M' if platform.machine() in ['armv7l'] else '4G'
    return self.human_to_bytes(size)
def close(self, filehandle):
with self.lock:
if filehandle in self.files:
self.files[filehandle] -= 1
index = 0
size = len(self.past)
while size > self.size and index < size:
filehandle = self.past[index]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'close'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Close openend file if no longer used. |
def _swaplch(LCH):
"Reverse the order of an LCH numpy dstack or tuple for analysis."
try:
L,C,H = np.dsplit(LCH,3)
return np.dstack((H,C,L))
except:
L,C,H = LCH
return H,C,L | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_swaplch'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'LCH'}; {... | Reverse the order of an LCH numpy dstack or tuple for analysis. |
def _pull_content_revision_parent(self):
if self._revision_id is None:
query_params = {
"prop": "extracts|revisions",
"explaintext": "",
"rvprop": "ids",
}
query_params.update(self.__title_query_param())
request = se... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_pull_content_revision_parent'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [... | combine the pulling of these three properties |
def load_data(self, filename, *args, **kwargs):
data = super(ParameterizedXLS, self).load_data(filename)
parameter_name = self.parameterization['parameter']['name']
parameter_values = self.parameterization['parameter']['values']
parameter_units = str(self.parameterization['parameter']['u... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '8']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Load parameterized data from different sheets. |
def getContactByUsername(cls, username):
pc = api.portal.get_tool("portal_catalog")
contacts = pc(portal_type=cls.portal_type,
getUsername=username)
if len(contacts) == 0:
return None
if len(contacts) > 1:
logger.error("User '{}' is bound to ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getContactByUsername'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Convenience Classmethod which returns a Contact by a Username |
def _get_entities(self, user, ids):
queryset = get_objects_for_user(user, 'view_entity', Entity.objects.filter(id__in=ids))
actual_ids = queryset.values_list('id', flat=True)
missing_ids = list(set(ids) - set(actual_ids))
if missing_ids:
raise exceptions.ParseError(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_entities'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Return entities queryset based on provided entity ids. |
def clone(src, **kwargs):
    """Shallow-clone *src*, optionally overriding instance attributes.

    Creates a new instance of the same class without calling __init__,
    copies the instance dict, then applies keyword overrides on top.
    """
    duplicate = object.__new__(type(src))
    duplicate.__dict__.update({**src.__dict__, **kwargs})
    return duplicate
def _is_allowed_command(self, command):
cmds = self._meta_data['allowed_commands']
if command not in self._meta_data['allowed_commands']:
error_message = "The command value {0} does not exist. " \
"Valid commands are {1}".format(command, cmds)
raise In... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_is_allowed_command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Checking if the given command is allowed on a given endpoint. |
def add_new_devices_callback(self, callback):
    """Register *callback* to be invoked when new devices are added."""
    callbacks = self._new_devices_callbacks
    callbacks.append(callback)
    _LOGGER.debug('Added new devices callback to %s', callback)
def create_router(self, context, router):
new_router = super(AristaL3ServicePlugin, self).create_router(
context,
router)
try:
self.driver.create_router(context, new_router)
return new_router
except Exception:
with excutils.save_and_rer... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_router'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Create a new router entry in DB, and create it Arista HW. |
def _update_config_file(username, password, email, url, config_path):
try:
config = json.load(open(config_path, "r"))
except ValueError:
config = dict()
if not config.get('auths'):
config['auths'] = dict()
if not config['auths'].get(url):
c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_config_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'ch... | Update the config file with the authorization. |
def on_menu_clear_interpretation(self, event):
for sp in list(self.Data.keys()):
del self.Data[sp]['pars']
self.Data[sp]['pars'] = {}
self.Data[sp]['pars']['lab_dc_field'] = self.Data[sp]['lab_dc_field']
self.Data[sp]['pars']['er_specimen_name'] = self.Data[sp]['e... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_menu_clear_interpretation'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children... | clear all current interpretations. |
def _job_to_text(self, job):
next_run = self._format_date(job.get('next_run', None))
tasks = ''
for task in job.get('tasks', []):
tasks += self._task_to_text(task)
tasks += '\n\n'
return '\n'.join(['Job name: %s' % job.get('name', None),
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_job_to_text'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Return a standard formatting of a Job serialization. |
def main():
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
args = parser.parse_args()
logging.getLogger(__package__.split(".")[0]).setLevel(args.d... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '14', '23', '31', '36', '41'... | Send Slack notification to a configured channel. |
def disconnect(self):
    """Stop and close the kazoo client connection."""
    logger.info("Disconnecting from Zookeeper.")
    client = self.client
    client.stop()
    client.close()
def get(self):
    """Render the list-of-analyses overview page.

    Renders ``index.html`` with the databench version, the per-analysis
    meta information, and any extra keys from ``self.info`` as template
    context.
    """
    return self.render(
        'index.html',
        databench_version=DATABENCH_VERSION,
        meta_infos=self.meta_infos(),
        **self.info
    )
def _Start_refresh_timer(self):
if self._refreshPeriod > 60:
interval = self._refreshPeriod - 60
else:
interval = 60
self._refreshTimer = Timer(self._refreshPeriod, self.Refresh)
self._refreshTimer.setDaemon(True)
self._refreshTimer.start() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_Start_refresh_timer'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Internal method to support auto-refresh functionality. |
def strtype(self):
    """Return a string form of this value element's type (with kind, if set)."""
    if self.kind is None:
        return self.dtype
    return "{}({})".format(self.dtype, self.kind)
def load_shared_data(path: typing.Union[str, None]) -> dict:
if path is None:
return dict()
if not os.path.exists(path):
raise FileNotFoundError('No such shared data file "{}"'.format(path))
try:
with open(path, 'r') as fp:
data = json.load(fp)
except Exception:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_shared_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['... | Load shared data from a JSON file stored on disk |
def allowed_values(self):
if self._allowed_values is None:
self._allowed_values = ValueList()
for val in self.scraper._fetch_allowed_values(self):
if isinstance(val, DimensionValue):
self._allowed_values.append(val)
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'allowed_values'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Return a list of allowed values. |
def _shutdown(self):
global sconf_global, _ac_config_hs
if not self.active:
raise SCons.Errors.UserError("Finish may be called only once!")
if self.logstream is not None and not dryrun:
self.logstream.write("\n")
self.logstream.close()
self.logstre... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_shutdown'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Private method. Reset to non-piped spawn |
def my_archieve(self):
    """Return the my_archieve attribute of the BFD file being processed.

    Raises:
        BfdException: if the underlying BFD pointer was never initialized.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    # Delegates to the native binding to read the attribute from the BFD handle.
    return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.MY_ARCHIEVE)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.