code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def _check_configs(self):
configs = set(self._find_configs())
known_configs = set(self.configs.keys())
new_configs = configs - known_configs
for cfg in (known_configs - configs):
self.log.debug("Compass configuration has been removed: " + cfg)
del self.configs[cfg... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_check_configs'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Reloads the configuration files. |
def verify_secret(self, form_instance, secret):
warn_untested()
if not check_secret(form_instance, secret):
self.set_flag("Invalid secret. (%s)") % secret
self.save() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'verify_secret'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Verifies an IPN payment over SSL using EWP. |
def profile(curr_step, start_step, end_step, profile_name='profile.json',
early_exit=True):
if curr_step == start_step:
mx.nd.waitall()
mx.profiler.set_config(profile_memory=False, profile_symbolic=True,
profile_imperative=True, filename=profile_name,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'profile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10']}; {'id': '4', 'type': 'identifier', 'children': [... | profile the program between [start_step, end_step). |
def update(self, stats):
all_stats = stats.getAllExportsAsDict(plugin_list=self.plugins_to_export())
if self.first_line:
csv_header = ['timestamp']
csv_data = [time.strftime('%Y-%m-%d %H:%M:%S')]
for plugin in self.plugins_to_export():
if isinstance(all_stats[plug... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Update stats in the CSV output file. |
def check_python_matlab_architecture(bits, lib_dir):
if not os.path.isdir(lib_dir):
raise RuntimeError("It seem that you are using {bits} version of Python, but there's no matching MATLAB installation in {lib_dir}.".format(bits=bits, lib_dir=lib_dir)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_python_matlab_architecture'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'chil... | Make sure we can find corresponding installation of Python and MATLAB. |
def update(self, other, **kwargs):
if other is None: return
if not isinstance(other, dict):
other = other.to_dict()
self.__dict__.update(other, **kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | A dict-like update for Struct attributes. |
def read_py(self, fin_txt, get_goids_only, exclude_ungrouped, prt=sys.stdout):
goids_fin = self._read_py(fin_txt, get_goids_only, exclude_ungrouped)
sections = self._read_finish(goids_fin, prt)
if prt is not None:
self._prt_read_msg(prt, fin_txt, exclude_ungrouped)
return sec... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_py'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': []... | Read GO IDs or sections data from a Python file. |
def _pip_search(stdout, stderr):
result = {}
lines = to_text_string(stdout).split('\n')
while '' in lines:
lines.remove('')
for line in lines:
if ' - ' in line:
parts = line.split(' - ')
name = parts[0].strip()
descr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_pip_search'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Callback for pip search. |
def listen_init(self):
self.dispatcher = ObjectDispatch(self)
self.factory = MsgPackProtocolFactory(self.dispatcher)
self.server = UnixServer(self.loop, self.factory, self.path)
self.server.start() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'listen_init'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Setup the service to listen for clients. |
def openstack_undercloud_install(self):
instack_undercloud_ver, _ = self.run('repoquery --whatprovides /usr/share/instack-undercloud/puppet-stack-config/puppet-stack-config.pp')
if instack_undercloud_ver.rstrip('\n') == 'instack-undercloud-0:2.2.0-1.el7ost.noarch':
LOG.warn('Workaround for B... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'openstack_undercloud_install'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': []... | Deploy an undercloud on the host. |
def distancemodulus(d):
if type(d)==Quantity:
x = d.to('pc').value
else:
x = d
if np.size(x)>1:
d = np.atleast_1d(x)
return 5*np.log10(x/10) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'distancemodulus'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'd... | Returns distance modulus given d in parsec. |
def xml_row(row, lang):
for elem in row:
name = elem.get('name')
child = elem[0]
ftype = re.sub(r'\{[^}]+\}', '', child.tag)
if ftype == 'literal':
ftype = '{}, {}'.format(ftype, child.attrib.get(XML_LANG, 'none'))
yield (name, (child.text, ftype)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'xml_row'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'row'... | Generator for an XML row |
def visit_for(self, node):
fors = "for %s in %s:\n%s" % (
node.target.accept(self),
node.iter.accept(self),
self._stmt_list(node.body),
)
if node.orelse:
fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
return fors | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'visit_for'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | return an astroid.For node as string |
def _locate_file(f, base_dir):
if base_dir == None:
return f
file_name = os.path.join(base_dir, f)
real = os.path.realpath(file_name)
return real | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_locate_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Utility method for finding full path to a filename as string |
def create_ssl_context():
ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
return ssl_context | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_ssl_context'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '18', '24', '3... | Create and return SSL Context. |
def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
mappings=None):
if python_name and json_name:
raise exceptions.InvalidDataError(
'Cannot specify both python_name and json_name '
'for %s remapping' % mapping_type)
if not (python_n... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_FetchRemapping'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'chil... | Common code for fetching a key or value from a remapping dict. |
def run_normalization(self):
for index, media_file in enumerate(
tqdm(
self.media_files,
desc="File",
disable=not self.progress,
position=0
)):
logger.info("Normalizing file {} ({} of {})"... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run_normalization'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Run the normalization procedures |
def run(wf, *, display, n_threads=1):
worker = dynamic_exclusion_worker(display, n_threads)
return noodles.Scheduler(error_handler=display.error_handler)\
.run(worker, get_workflow(wf)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Run the workflow using the dynamic-exclusion worker. |
def show_some(items:Collection, n_max:int=5, sep:str=','):
"Return the representation of the first `n_max` elements in `items`."
if items is None or len(items) == 0: return ''
res = sep.join([f'{o}' for o in items[:n_max]])
if len(items) > n_max: res += '...'
return res | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'show_some'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '13']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5'... | Return the representation of the first `n_max` elements in `items`. |
def clear(self, startBlock, endBlock):
for block in qutepart.iterateBlocksFrom(startBlock):
self._setBlockMarked(block, False)
if block == endBlock:
break | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clear'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Clear bookmarks on block range including start and end |
def _parse_columns(self):
column_map = {}
for key, value in self.model.keyMap.items():
record_key = key[1:]
if record_key:
if self.item_key.findall(record_key):
pass
else:
if value['value_datatype'] == 'map':... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_columns'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | a helper method for parsing the column properties from the record schema |
def url_for(**options):
url_parts = get_url_parts(**options)
image_hash = hashlib.md5(b(options['image_url'])).hexdigest()
url_parts.append(image_hash)
return "/".join(url_parts) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'url_for'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'dictionary_splat_pattern', 'children': ['5']}; {... | Returns the url for the specified options |
def single(wosclient, wos_query, xml_query=None, count=5, offset=1):
result = wosclient.search(wos_query, count, offset)
xml = _re.sub(' xmlns="[^"]+"', '', result.records, count=1).encode('utf-8')
if xml_query:
xml = _ET.fromstring(xml)
return [el.text for el in xml.findall(xml_query)]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'single'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children': []... | Perform a single Web of Science query and then XML query the results. |
def model(self):
if self.z == 0:
m = self._model
else:
if self._internal_wave_unit.physical_type == 'length':
rs = self._redshift_model.inverse
else:
rs = self._redshift_model
if self.z_type == 'wavelength_only':
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'model'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Model of the spectrum with given redshift. |
def format_summary(self):
chunks = [chunk.format_chunk_summary()
for chunk in self._progress_chunks]
return "/".join(chunks) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_summary'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Generate a summary string for the progress bar. |
def cookie_signature(seed, *parts):
sha1 = hmac.new(seed, digestmod=hashlib.sha1)
for part in parts:
if part:
sha1.update(part)
return sha1.hexdigest() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cookie_signature'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Generates a cookie signature. |
def remove_old_dumps(connection, container: str, days=None):
if not days:
return
if days < 20:
LOG.error('A minimum of 20 backups is stored')
return
options = return_file_objects(connection, container)
for dt, o_info in options:
now = datetime.datetime.now()
delta... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_old_dumps'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'children': [],... | Remove dumps older than x days |
def diff(self):
if not self.present:
if self.existing:
return DEL
return NOOP
is_diff = NOOP
if self.present and self.existing:
a_obj = self.config.copy()
if self.config and diff_dict(a_obj, self.existing, True):
is_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'diff'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Determines if changes are needed for the Vault backend |
def normalize_path():
whole_path = [
os.path.abspath(path) for path in sys.path if os.path.exists(path)
]
whole_set = collections.OrderedDict((("", 1), (os.getcwd(), 1)))
for path in whole_path:
if path not in whole_set:
whole_set[path] = 1
sys.path = list(whole_set)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'normalize_path'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '31', '50', '65', ... | Normalizes sys.path to avoid the use of relative folders |
def safe_record(ctx, item):
if isinstance(item, basestring):
return ctx.env.ref(item)
return item | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'safe_record'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Make sure we get a record instance even if we pass an xmlid. |
def _build_text_filter(self):
text_filter = TextFilter(logger=self.logger)
self.log(u"Created TextFilter object")
for key, cls, param_name in [
(
gc.PPN_TASK_IS_TEXT_FILE_IGNORE_REGEX,
TextFilterIgnoreRegex,
"regex"
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_text_filter'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Build a suitable TextFilter object. |
def embeddedFileCount(self):
if self.isClosed or self.isEncrypted:
raise ValueError("operation illegal for closed / encrypted doc")
return _fitz.Document_embeddedFileCount(self) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'embeddedFileCount'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Return number of embedded files. |
def int_subtype(i, bits, signed) :
"returns integer i after checking that it fits in the given number of bits."
if not isinstance(i, int) :
raise TypeError("value is not int: %s" % repr(i))
if signed :
lo = - 1 << bits - 1
hi = (1 << bits - 1) - 1
else... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'int_subtype'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | returns integer i after checking that it fits in the given number of bits. |
def DumpCurrentSchema(cursor):
cursor.execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES "
"WHERE table_schema = (SELECT DATABASE())")
defs = []
for table, in sorted(cursor.fetchall()):
cursor.execute("SHOW CREATE TABLE `{}`".format(table))
rows = cursor.fetchall()
defs.append(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'DumpCurrentSchema'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Dumps current database schema. |
def _secret_yaml(loader, node):
fname = os.path.join(os.path.dirname(loader.name), "secrets.yaml")
try:
with open(fname, encoding="utf-8") as secret_file:
secrets = YAML(typ="safe").load(secret_file)
except FileNotFoundError:
raise ValueError("Secrets file {} not found".format(fn... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_secret_yaml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Load secrets and embed it into the configuration YAML. |
def experiment(self):
if self._experiment is None:
self._experiment = list(self.config.experiments.keys())[-1]
return self._experiment | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'experiment'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | The identifier or the experiment that is currently processed |
def main(argv):
del argv
engine = make_gtp_instance(FLAGS.load_file,
cgos_mode=FLAGS.cgos_mode,
kgs_mode=FLAGS.kgs_mode,
minigui_mode=FLAGS.minigui_mode)
dbg("GTP engine ready\n")
for msg in sys.stdin:
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'argv'}; {'id... | Run Minigo in GTP mode. |
def no_content_response(response):
"Cautious assessment of the response body for no content."
if not hasattr(response, '_container'):
return True
if response._container is None:
return True
if isinstance(response._container, (list, tuple)):
if len(response._container) == 1 and no... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'no_content_response'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Cautious assessment of the response body for no content. |
def setOrga(request, hproPk=None):
if settings.PIAPI_STANDALONE:
request.session['plugit-standalone-organame'] = request.GET.get('name')
request.session['plugit-standalone-orgapk'] = request.GET.get('pk')
else:
(_, _, hproject) = getPlugItObject(hproPk)
from organizations.models ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setOrga'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'requ... | Change the current orga |
def _html(self, text):
html = URL_REGEX.sub(self._parse_urls, text)
html = USERNAME_REGEX.sub(self._parse_users, html)
html = LIST_REGEX.sub(self._parse_lists, html)
return HASHTAG_REGEX.sub(self._parse_tags, html) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_html'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Parse a Tweet and generate HTML. |
def function_parser(function, parser):
parser.set_defaults(func=function)
help_text = inspect.getdoc(function)
main_text, params_help = parser_help_text(help_text)
args, varargs, keywords, defaults = inspect.getargspec(function)
if args is None:
args = []
if defaults is None:
def... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'function_parser'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | This function parses a function and adds its arguments to the supplied parser |
def _repair_column(self):
check_for_title = True
for column_index in range(self.start[1], self.end[1]):
table_column = TableTranspose(self.table)[column_index]
column_start = table_column[self.start[0]]
if check_for_title and is_empty_cell(column_start):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_repair_column'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Same as _repair_row but for columns. |
def export_serving(model_path):
pred_config = PredictConfig(
session_init=get_model_loader(model_path),
model=InferenceOnlyModel(),
input_names=['input_img_bytes'],
output_names=['prediction_img_bytes'])
ModelExporter(pred_config).export_serving('/tmp/exported') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'export_serving'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'mo... | Export trained model to use it in TensorFlow Serving or cloudML. |
def _collect_state_names(self, variable):
"Return a list of states that the variable takes in the data"
states = sorted(list(self.data.ix[:, variable].dropna().unique()))
return states | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_collect_state_names'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Return a list of states that the variable takes in the data |
def ensure_unicode(text):
u
if isinstance(text, str):
try:
return text.decode(pyreadline_codepage, u"replace")
except (LookupError, TypeError):
return text.decode(u"ascii", u"replace")
return text | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ensure_unicode'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'te... | u"""helper to ensure that text passed to WriteConsoleW is unicode |
def load_stats(self, cache=None, wait=None):
if cache is None:
cache = not self.debug
if wait is None:
wait = self.debug
if not cache or self._stats is None:
self._stats = self._load_stats()
start = time.time()
while wait and self._stat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_stats'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Load and cache the webpack-stats file |
def blocks(self, lines):
state = markdown.blockparser.State()
blocks = []
state.set('start')
currblock = 0
for line in lines:
line += '\n'
if state.isstate('start'):
if line[:3] == '```':
state.set('```')
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'blocks'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Groups lines into markdown blocks |
def _create_attach_records(self, ids, attributes):
records = []
timed = self._has_pivot_column(self.created_at()) or self._has_pivot_column(
self.updated_at()
)
for key, value in enumerate(ids):
records.append(self._attacher(key, value, attributes, timed))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_create_attach_records'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children'... | Create a list of records to insert into the pivot table. |
def prioritize(self, item, force=False):
with self.condition:
if item in self.working or item in self.force:
return
self.queue.remove(item)
if force:
self.force.append(item)
else:
self.queue.appendleft(item)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'prioritize'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Moves the item to the very left of the queue. |
async def command(dev, service, method, parameters):
params = None
if parameters is not None:
params = ast.literal_eval(parameters)
click.echo("Calling %s.%s with params %s" % (service, method, params))
res = await dev.raw_command(service, method, params)
click.echo(res) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Run a raw command. |
def meminfo():
f = open("/proc/meminfo")
hwinfo = {}
for line in f.readlines():
meml = line.split()
if (meml[0] == "MemTotal:"):
mem = int(meml[1])
hwinfo["Mem_MiB"] = mem/1024
elif (meml[0] == "SwapTotal:"):
swap = int(meml[1])
hwinfo[... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'meminfo'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '12', '16', '82', '88']};... | Get the amount of memory and swap, Mebibytes |
def record_event(self, event):
with open(self._path, 'a') as file_:
file_.write(str(event) + '\n') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'record_event'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Records the ``KindleEvent`` `event` in the store |
def _parallel_predict(estimators, estimators_features, X, n_classes, combination, estimators_weight):
n_samples = X.shape[0]
pred = np.zeros((n_samples, n_classes))
n_estimators = len(estimators)
for estimator, features, weight in zip(estimators, estimators_features, estimators_weight):
predicti... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parallel_predict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier',... | Private function used to compute predictions within a job. |
def kill(self, jid):
greenlet = self.greenlets.get(jid)
if greenlet is not None:
logger.warn('Lost ownership of %s' % jid)
greenlet.kill() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'kill'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Stop the greenlet processing the provided jid |
def process_environment_settings(default_dictionary: dict, settings: typing.Optional[dict]=None,
presets: typing.Optional[dict]=None):
settings = settings if settings is not None else {}
presets = presets if presets is not None else {}
env_keys = sorted(set(default_dictionar... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '26']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_environment_settings'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '17']}; {'id': '4', 'type': 'typed_parameter... | Process a dictionary of env settings |
def _log_app_data(self):
if self.install_json:
app_commit_hash = self.install_json.get('commitHash')
app_features = ','.join(self.install_json.get('features', []))
app_min_ver = self.install_json.get('minServerVersion', 'N/A')
app_name = self.install_json.get('dis... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_log_app_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Log the App data information. |
def sorted(self, wantdirs=False):
def add_dir(dirs, d):
dirs.add(d)
logger.debug('add_dir added %s', d)
if d != self.base:
parent, _ = os.path.split(d)
assert parent not in ('', '/')
add_dir(dirs, parent)
result = set(se... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sorted'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Return sorted files in directory order |
def update_rbac_policy(self, rbac_policy_id, body=None):
    """Update a RBAC policy identified by *rbac_policy_id* via HTTP PUT."""
    path = self.rbac_policy_path % rbac_policy_id
    return self.put(path, body=body)
def plantloopfieldlists(data):
objkey = 'plantloop'.upper()
numobjects = len(data.dt[objkey])
return [[
'Name',
'Plant Side Inlet Node Name',
'Plant Side Outlet Node Name',
'Plant Side Branch List Name',
'Demand Side Inlet Node Name',
'Demand Side Outlet Node ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'plantloopfieldlists'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | return the plantloopfield list |
def _compute_dlt(self):
res = super()._compute_dlt()
for rec in self:
ltaf_to_apply = self.env['ddmrp.adjustment'].search(
rec._ltaf_to_apply_domain())
if ltaf_to_apply:
ltaf = 1
values = ltaf_to_apply.mapped('value')
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_compute_dlt'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Apply Lead Time Adj Factor if existing |
def load(self, fileobj):
    """Load the dict from *fileobj*, trying pickle, JSON, then CSV.

    Each candidate parser is tried in turn on a rewound file; the first
    one whose result is accepted by ``initial_update`` wins.

    Raises:
        ValueError: if no supported format could parse the file.
    """
    for loader in (pickle.load, json.load, csv.reader):
        fileobj.seek(0)
        try:
            # A parse failure (or a payload rejected by initial_update)
            # simply means we fall through to the next, more permissive
            # format; the original bound the exception without using it.
            return self.initial_update(loader(fileobj))
        except Exception:
            continue
    raise ValueError('File not in a supported format')
def _lob_start_handler(c, ctx):
assert c == _OPEN_BRACE
c, self = yield
trans = ctx.immediate_transition(self)
quotes = 0
while True:
if c in _WHITESPACE:
if quotes > 0:
_illegal_character(c, ctx)
elif c == _DOUBLE_QUOTE:
if quotes > 0:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_lob_start_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Handles tokens that begin with two open braces. |
def combination_memo(n, r):
    """Calculate nCr using a memoized Pascal's-rule recursion."""
    cache = {}

    def choose(m, k):
        # Base cases: choosing everything or nothing.
        if k == 0 or m == k:
            return 1
        key = (m, k)
        if key not in cache:
            cache[key] = choose(m - 1, k - 1) + choose(m - 1, k)
        return cache[key]

    return choose(n, r)
def _is_broken_ref(key1, value1, key2, value2):
    """True if this is a broken reference; False otherwise."""
    if (key1, key2) != ('Link', 'Str'):
        return False
    # pandoc 1.16 shifted the link text one element to the right.
    n = 1 if _PANDOCVERSION >= '1.16' else 0
    content = value1[n][0]['c']
    if isinstance(content, list):
        return False
    return bool(_REF.match(content + value2))
def _to_solr(self, data):
return self._dest.index_json(self._dest_coll, json.dumps(data,sort_keys=True)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_to_solr'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Sends data to a Solr instance. |
def defvar(varname):
    """Returns true if CL variable is defined."""
    iraf = None
    if 'pyraf' in sys.modules:
        from pyraf import iraf
    # Fall back to 0 (falsy) when pyraf is not loaded.
    _irafdef = iraf.envget(varname) if iraf else 0
    return varname in _varDict or varname in os.environ or _irafdef
def _get_agg_font(self, prop):
if __debug__: verbose.report('RendererAgg._get_agg_font',
'debug-annoying')
key = hash(prop)
font = RendererAgg._fontd.get(key)
if font is None:
fname = findfont(prop)
font = RendererAgg._fontd.ge... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_agg_font'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Get the font for text instance t, cacheing for efficiency |
def to_json(self):
    """Returns the JSON representation of the space."""
    payload = super(Space, self).to_json()
    payload['name'] = self.name
    return payload
def _get_metadata_as_string(self):
metalist = []
for metaname, meta in iteritems(self.metadata):
message = "Single value in metadata dictionary should be a list!"
assert isinstance(meta, list), message
for data in meta:
if data:
met... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_metadata_as_string'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Get the metadata as SOFT formatted string. |
def _repr_values(self):
def getattr_better(obj, field):
try:
return getattr(obj, field)
except AttributeError as e:
try:
return getattr(obj, '_' + field)
except AttributeError:
raise e
return ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_repr_values'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Return values that are to be shown in repr string. |
def extract(self, file_path):
import tarfile
print('Extracting {}'.format(file_path))
if not os.path.exists(self.extracted_data_directory):
os.makedirs(self.extracted_data_directory)
def track_progress(members):
sys.stdout.write('.')
for member in memb... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extract'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Extract a tar file at the specified file path. |
def init(self):
self.target.halt()
self.target.reset_and_halt()
result = self._call_function_and_wait(self.flash_algo['pc_init'], init=True)
if result != 0:
logging.error('init error: %i', result)
self.erase_sector(0x01000000)
time.sleep(.5)
self.targe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Download the flash algorithm in RAM |
def extern_store_utf8(self, context_handle, utf8_ptr, utf8_len):
    """Given a context and UTF8 bytes, return a new Handle to represent the content."""
    context = self._ffi.from_handle(context_handle)
    raw = self._ffi.string(utf8_ptr, utf8_len)
    return context.to_value(raw.decode('utf-8'))
def start_process(self, key):
if key in self.processes and key in self.paused:
os.killpg(os.getpgid(self.processes[key].pid), signal.SIGCONT)
self.queue[key]['status'] = 'running'
self.paused.remove(key)
return True
elif key not in self.processes:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'start_process'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Start a specific processes. |
def remove_callback(obj, handle):
    """Remove a callback from an object."""
    registered = obj._callbacks
    if registered is handle:
        # A single callback is stored directly on the object.
        obj._callbacks = None
        return
    if isinstance(registered, dllist):
        registered.remove(handle)
        if not registered:
            obj._callbacks = None
def constraints(self, chunk):
    """Return the constraints matching *chunk*, deduplicated in order.

    Each word of the chunk is looked up in the internal index-to-constraint
    map; only the first occurrence of every constraint is kept.
    """
    matches = [self._map1[w.index] for w in chunk.words if w.index in self._map1]
    unique = []
    for constraint in matches:
        # Preserve first-seen order while dropping duplicates (the original
        # abused a list comprehension for its append side effects).
        if constraint not in unique:
            unique.append(constraint)
    return unique
def inverse(self):
    """The inverse of this transform, built lazily and cached."""
    cached = self._inverse
    if cached is None:
        cached = InverseTransform(self)
        self._inverse = cached
    return cached
def _set_data(self, data):
if type(data) == bytearray:
self._data = data
elif type(data) == str:
if sys.version_info < (3,):
self._data = bytearray(data)
else:
self._data = bytearray(data.encode('ISO-8859-1'))
elif type(data) ==... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Set the packet data |
def ip_registrant_monitor(self, query, days_back=0, search_type="all", server=None, country=None, org=None, page=1,
include_total_count=False, **kwargs):
return self._results('ip-registrant-monitor', '/v1/ip-registrant-monitor', query=query,
days_back=da... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '29']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ip_registrant_monitor'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15', '18', '21', '24', '27']}; {'i... | Query based on free text query terms |
def deploy(verbose, app):
    """Deploy app using Heroku to MTurk."""
    config = PsiturkConfig()
    config.load_config()
    # Flip the experiment out of sandbox/debug settings for a live run.
    for section, option, value in (
        ("Experiment Configuration", "mode", "deploy"),
        ("Server Parameters", "logfile", "-"),
        ("Shell Parameters", "launch_in_sandbox_mode", "false"),
    ):
        config.set(section, option, value)
    deploy_sandbox_shared_setup(verbose=verbose, app=app)
def decode_signature(sigb64):
    """Decode a base64-encoded 64-byte signature into its (r, s) integers.

    Args:
        sigb64: base64 string holding r and s as two big-endian 32-byte values.

    Returns:
        Tuple ``(r, s)`` of ints.

    Raises:
        ValueError: if the decoded payload is not exactly 64 bytes.
    """
    sig_bin = base64.b64decode(sigb64)
    if len(sig_bin) != 64:
        raise ValueError("Invalid base64 signature")
    # bytes.encode('hex') was Python 2 only and raises AttributeError on
    # Python 3; parse each 32-byte half directly instead.
    sig_r = int.from_bytes(sig_bin[:32], 'big')
    sig_s = int.from_bytes(sig_bin[32:], 'big')
    return sig_r, sig_s
def currentVersion(self):
    """Return the current version of the site, loading it on first access."""
    version = self._currentVersion
    if version is None:
        # Lazily (re)initialize from the stored URL to populate the version.
        self.__init(self._url)
        version = self._currentVersion
    return version
def _is_complex(pe):
    """Return True if the physical entity is a complex."""
    # Short-circuit on the interface class before checking the impl class.
    if isinstance(pe, _bp('Complex')):
        return True
    return isinstance(pe, _bpimpl('Complex'))
def normalise_key(self, key):
    """Make sure key is a valid python attribute"""
    normalised = key.replace('-', '_')
    return normalised[4:] if normalised.startswith('noy_') else normalised
def start_batch(job, input_args):
    """Queue the download-and-transfer child job for every sample in the SRA manifest."""
    samples = parse_sra(input_args['sra'])
    job.addChildJobFn(download_and_transfer_sample, input_args, samples,
                      cores=1, disk='30')
def annotatedcore(self):
logging.info('Calculating annotated core')
self.total_core()
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
sample[self.analysistype].coreset = set()
if sample.general.referencegenus == 'Escherichia':
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'annotatedcore'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Calculates the core genome of organisms using custom databases |
def period(start, end, absolute=False):
    """Create a Period instance spanning *start* to *end*."""
    return Period(start, end, absolute=absolute)
def _save_cache(self):
    """Save data to the cache file, logging (not raising) on failure."""
    safe_makedirs(self.cache_dir)
    try:
        with open(self.cache_file, 'wb') as cache:
            pickle.dump(self.data, cache)
    except Exception as exc:
        # Best-effort persistence: a write failure must not crash the caller.
        logger.error("Cannot write version to cache file {} ({})".format(self.cache_file, exc))
def GoZero(self, speed):
' Go to Zero position '
self.ReleaseSW()
spi.SPI_write_byte(self.CS, 0x82 | (self.Dir & 1))
spi.SPI_write_byte(self.CS, 0x00)
spi.SPI_write_byte(self.CS, speed)
while self.IsBusy():
pass
time.sleep(0.3)
self.ReleaseSW... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'GoZero'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Go to Zero position |
def _handle_single_tag_end(self):
stack = self._stack
depth = 1
for index, token in enumerate(stack[2:], 2):
if isinstance(token, tokens.TagOpenOpen):
depth += 1
elif isinstance(token, tokens.TagCloseOpen):
depth -= 1
if dep... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_single_tag_end'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Handle the stream end when inside a single-supporting HTML tag. |
def match(self, *args):
    """Whether or not to enter a given case statement"""
    if not self.fall:
        # An empty arg list is the default case; otherwise test membership.
        self.fall = (not args) or (self.value in args)
    return self.fall
def read(self):
if self.lines and self.chunksize:
obj = concat(self)
elif self.lines:
data = to_str(self.data)
obj = self._get_object_parser(
self._combine_lines(data.split('\n'))
)
else:
obj = self._get_object_parser(se... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Read the whole JSON input into a pandas object. |
def api_notifications():
event_type = request.values['Event.1.EventType']
assignment_id = request.values['Event.1.AssignmentId']
db.logger.debug('rq: Queueing %s with id: %s for worker_function',
event_type, assignment_id)
q.enqueue(worker_function, event_type, assignment_id, None)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'api_notifications'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '13', '21', '32... | Receive MTurk REST notifications. |
def _parse_certificate(cls, response):
links = _parse_header_links(response)
try:
cert_chain_uri = links[u'up'][u'url']
except KeyError:
cert_chain_uri = None
return (
response.content()
.addCallback(
lambda body: messages.C... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_certificate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Parse a response containing a certificate resource. |
def process_exception(self, request, exception):
log_format = self._get_log_format(request)
if log_format is None:
return
params = self._get_parameters_from_request(request, True)
params['message'] = exception
params['http_status'] = '-'
self.OPERATION_LOG.inf... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_exception'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | Log error info when exception occurred. |
def start(cls):
    """Start background thread if not already started"""
    if cls._thread is not None:
        return
    worker = threading.Thread(target=cls._run, name="Heartbeat")
    worker.daemon = True
    cls._thread = worker
    worker.start()
def open_fd(cls, name):
    """Open *name* read-write, creating it first when it does not exist."""
    try:
        # Attempt exclusive creation first.
        return os.open(name, os.O_CREAT | os.O_RDWR | os.O_EXCL)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
        # File already exists: reopen it without O_CREAT.
        return os.open(name, os.O_RDWR | os.O_EXCL)
def format(self, formatstring, *args):
    """Presentation Information from the Plugin"""
    if not self.incoming_section:
        return
    # Announce the pending section once, then clear the flag.
    self.SendMessage(['s', {'name': args}])
    self.incoming_section = False
def count_elements_exactly_by_selector(self, number, selector):
    """Assert n elements exist matching the given selector."""
    matched = find_elements_by_jquery(world.browser, selector)
    expected = int(number)
    if len(matched) != expected:
        raise AssertionError("Expected {} elements, found {}".format(
            expected, len(matched)))
def iterdirty(self):
    """Ordered iterator over dirty elements: new first, then modified."""
    dirty = chain(itervalues(self._new), itervalues(self._modified))
    return iter(dirty)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.