code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def check_udp(helper, host, port, session):
open_ports = walk_data(session, '.1.3.6.1.2.1.7.5.1.2', helper)[0]
if scan:
print "All open UDP ports at host " + host
for port in open_ports:
print "UDP: \t" + port
quit()
if port in open_ports:
udp_status = "OP... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_udp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | the check logic for UDP ports |
def close(self):
if not self.connected:
return None
if self.config is not None:
if self.config.changed() and not self.config.committed():
try:
self.config.discard()
except pyPluribus.exceptions.ConfigurationDiscardError as disca... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'close'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Closes the SSH connection if the connection is UP. |
def _check(self):
import time
if self.expires_in is None or self.authenticated is None:
return False
current = time.time()
expire_time = self.authenticated + self.expires_in
return expire_time > current | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_check'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Check if the access token is expired or not. |
def cpp_spec():
return {
INDENTATION : '\t',
BEG_BLOCK : '{',
END_BLOCK : '}',
BEG_LINE : '',
END_LINE : '\n',
BEG_ACTION : '',
END_ACTION : ';',
BEG_CONDITION : 'if(',
END_CONDITION : ')',
LOGICAL_AND... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cpp_spec'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', 'type': 'r... | C++ specification, provided for example, and java compatible. |
def bytes_from_decode_data(s):
if isinstance(s, (str, unicode)):
try:
return s.encode('ascii')
except UnicodeEncodeError:
raise NotValidParamError(
'String argument should contain only ASCII characters')
if isinstance(s, bytes_types):
return s
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bytes_from_decode_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | copy from base64._bytes_from_decode_data |
def add_tags(self, md5, tags):
if not tags: return
tag_set = set(self.get_tags(md5)) if self.get_tags(md5) else set()
if isinstance(tags, str):
tags = [tags]
for tag in tags:
tag_set.add(tag)
self.data_store.store_work_results({'tags': list(tag_set)}, 'tag... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_tags'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Add tags to this sample |
def _create_thumbnail(self, source_image, geometry_string, options,
thumbnail):
logger.debug('Creating thumbnail file [%s] at [%s] with [%s]',
thumbnail.name, geometry_string, options)
ratio = default.engine.get_image_ratio(source_image, options)
ge... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_create_thumbnail'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'chil... | Creates the thumbnail by using default.engine |
def publish_scene_name(self, scene_id, name):
self.sequence_number += 1
self.publisher.send_multipart(msgs.MessageBuilder.scene_name(self.sequence_number, scene_id, name))
return self.sequence_number | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'publish_scene_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []... | publish a changed scene name |
def unmarshall_key(self, key, cls):
return setattr(cls, key, self.get(key)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unmarshall_key'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Takes a single key and unmarshalls it into a class. |
def resolve(self):
model = self.copy()
for ct in model.component_types:
model.resolve_component_type(ct)
for c in model.components:
if c.id not in model.fat_components:
model.add(model.fatten_component(c))
for c in ct.constants:
c2 = c.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'resolve'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Resolves references in this model. |
def snooze(self, from_email, duration):
if from_email is None or not isinstance(from_email, six.string_types):
raise MissingFromEmail(from_email)
endpoint = '/'.join((self.endpoint, self.id, 'snooze'))
add_headers = {'from': from_email, }
return self.__class__.create(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'snooze'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Snooze this incident for `duration` seconds. |
def was_packet_accepted(self, packet):
self._validatepacket(packet)
cmd = ord(packet[2])
if cmd == Vendapin.ACK:
return True
elif cmd == Vendapin.NAK:
print('NAK - Rejected/Negative Status')
return False
elif cmd == Vendapin.INC:
ra... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'was_packet_accepted'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | parse the "command" byte from the response packet to get a "response code" |
def apply(self, arr):
for t in self.cpu_transforms:
arr = t.apply(arr)
return arr | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'apply'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Apply all CPU transforms on an array. |
async def update_status(self):
if self._operation.async_url:
self._response = await self.request_status(self._operation.async_url)
self._operation.set_async_url_if_present(self._response)
self._operation.get_status_from_async(self._response)
elif self._operation.locat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_status'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Update the current status of the LRO. |
def replace_name(expr: AST, old_name: str, new_name: str) -> AST:
return _NameReplacer(old_name, new_name).visit(deepcopy(expr)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'replace_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12']}; {'id': '4', 'type': 'typed_parameter', 'childr... | Replace all Name nodes named `old_name` with nodes named `new_name`. |
def __put_key(self, local_file, target_file, acl='public-read', del_after_upload=False, overwrite=True, source="filename"):
action_word = "moving" if del_after_upload else "copying"
try:
self.k.key = target_file
if source == "filename":
self.k.set_contents_from_fi... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '19']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__put_key'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13', '16']}; {'id': '4', 'type': 'identifier',... | Copy a file to s3. |
def update_release(id, **kwargs):
data = update_release_raw(id, **kwargs)
if data:
return utils.format_json(data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_release'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Update an existing ProductRelease with new information |
def user_deleted(sender, **kwargs):
if kwargs.get('created'):
write('user_variations', {'variation': 1}, tags={'action': 'created'})
write('user_count', {'total': User.objects.count()}) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'user_deleted'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | collect metrics about new users signing up |
def maiden(self):
if self._dialect == DIALECT_DEFAULT:
for name in self._names:
if name.type == "maiden":
return name.value[1]
if self._primary and len(self._primary.value) > 3:
return self._primary.value[3]
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'maiden'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Maiden last name, can be None |
def find_hwpack_dir(root):
root = path(root)
log.debug('files in dir: %s', root)
for x in root.walkfiles():
log.debug(' %s', x)
hwpack_dir = None
for h in (root.walkfiles('boards.txt')):
assert not hwpack_dir
hwpack_dir = h.parent
log.debug('found hwpack: %s', hwpack... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_hwpack_dir'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'r... | search for hwpack dir under root. |
def on_any_event(self, event):
self.updated = True
if self._changed:
self._changed() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_any_event'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Called whenever a FS event occurs. |
def merge_users(merge_to, merge_from):
assert(merge_to.username.endswith('umail.ucsb.edu'))
for u2g in merge_from.groups_assocs[:]:
merge_to.group_with(merge_from, u2g.project, bypass_limit=True)
merge_to.classes.extend(merge_from.classes)
merge_to.files.extend(merge_from.files)
for sub in S... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'merge_users'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Merge a non-umail account with a umail account. |
def as_bitcode(self):
ptr = c_char_p(None)
size = c_size_t(-1)
ffi.lib.LLVMPY_WriteBitcodeToString(self, byref(ptr), byref(size))
if not ptr:
raise MemoryError
try:
assert size.value >= 0
return string_at(ptr, size.value)
finally:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'as_bitcode'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Return the module's LLVM bitcode, as a bytes object. |
def next(self):
if self._current_index < len(self._collection):
value = self._collection[self._current_index]
self._current_index += 1
return value
elif self._next_cursor:
self.__fetch_next()
return self.next()
else:
self._c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'next'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Returns the next item in the cursor. |
def update_hash_from_str(hsh, str_input):
byte_input = str(str_input).encode("UTF-8")
hsh.update(byte_input) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_hash_from_str'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Convert a str to object supporting buffer API and update a hash with it. |
def call(self, method, *args, **kwargs):
tried_reconnect = False
for _ in range(2):
try:
self._send_call(self.deluge_version, self.deluge_protocol_version, method, *args, **kwargs)
return self._receive_response(self.deluge_version, self.deluge_protocol_version... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'call'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Calls an RPC function |
def BitVecFuncVal(
value: int,
func_name: str,
size: int,
annotations: Annotations = None,
input_: Union[int, "BitVec"] = None,
) -> BitVecFunc:
raw = z3.BitVecVal(value, size)
return BitVecFunc(raw, func_name, input_, annotations) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '32', '34']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'BitVecFuncVal'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12', '16', '21']}; {'id': '4', 'type': 'typed_parame... | Creates a new bit vector function with a concrete value. |
def feasible_set(self):
for y in itertools.product(*[range(1, k + 1) for k in self.K]):
yield np.array(y) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'feasible_set'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Iterator over values in feasible set |
def in_region(rname, rstart, target_chr, target_start, target_end):
return (rname == target_chr) and \
(target_start <= rstart <= target_end) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'in_region'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': [... | Quick check if a point is within the target region. |
def _sunos_memdata():
grains = {'mem_total': 0, 'swap_total': 0}
prtconf = '/usr/sbin/prtconf 2>/dev/null'
for line in __salt__['cmd.run'](prtconf, python_shell=True).splitlines():
comps = line.split(' ')
if comps[0].strip() == 'Memory' and comps[1].strip() == 'size:':
grains['me... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_sunos_memdata'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '15', '19', '80', ... | Return the memory information for SunOS-like systems |
def _convert_to_boolean(self, value):
if value.lower() not in self.BOOLEAN_STATES:
raise ValueError('Not a boolean: %s' % value)
return self.BOOLEAN_STATES[value.lower()] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_convert_to_boolean'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Return a boolean value translating from other types if necessary. |
def import_discord_user(self, uid, user):
url = api_url+'users/social/'
payload = {
'user': int(user),
'provider': 'discord',
'uid': str(uid)
}
print(json.dumps(payload))
r = requests.put(url, data=json.dumps(payload), headers=self.headers)
print(request_status(r))
r.raise_for_status()
return D... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'import_discord_user'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [... | Add a discord user to the database if not already present, get if is present. |
def ensure_dir(directory: str) -> None:
if not os.path.isdir(directory):
LOG.debug(f"Directory {directory} does not exist, creating it.")
os.makedirs(directory) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ensure_dir'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6'... | Create a directory if it doesn't exist. |
def tag_remove(self, *tags):
return View({**self.spec, 'tag': list(set(self.tags) - set(tags))}) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tag_remove'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Return a view with the specified tags removed |
def _update_table_cache(self):
self._table_cache.clear()
for sel, tab, val in self:
try:
self._table_cache[tab].append((sel, val))
except KeyError:
self._table_cache[tab] = [(sel, val)]
assert len(self) == self._len_table_cache() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_table_cache'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Clears and updates the table cache to be in sync with self |
def croak(error, message_writer=message):
if isinstance(error, MaltegoException):
message_writer(MaltegoTransformExceptionMessage(exceptions=[error]))
else:
message_writer(MaltegoTransformExceptionMessage(exceptions=[MaltegoException(error)])) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'croak'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'error'... | Throw an exception in the Maltego GUI containing error_msg. |
def add_block_hash( self, block_hash ):
if len(self.block_hashes) > 2000:
raise Exception("A getheaders request cannot have over 2000 block hashes")
hash_num = int("0x" + block_hash, 16)
bh = BlockHash()
bh.block_hash = hash_num
self.block_hashes.append( bh )
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_block_hash'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Append up to 2000 block hashes for which to get headers. |
def copy(self, new_grab=None):
obj = self.__class__()
obj.process_grab(new_grab if new_grab else self.grab)
copy_keys = ('status', 'code', 'head', 'body', 'total_time',
'connect_time', 'name_lookup_time',
'url', 'charset', '_unicode_body',
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Clone the Response object. |
def verify_otp(request):
ctx = {}
if request.method == "POST":
verification_code = request.POST.get('verification_code')
if verification_code is None:
ctx['error_message'] = "Missing verification code."
else:
otp_ = UserOTP.objects.get(user=request.user)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'verify_otp'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'reques... | Verify a OTP request |
def _render_template(param, username):
env = Environment()
template = env.from_string(param)
variables = {'username': username}
return template.render(variables) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_render_template'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Render config template, substituting username where found. |
def deploy(self):
if not os.path.exists(self.path):
makedirs(self.path)
link(self.vault_path, self.real_path) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'deploy'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Creates a link at the original path of this target |
def _data_build(self, data, modname, path):
try:
node = _parse(data + "\n")
except (TypeError, ValueError, SyntaxError) as exc:
raise exceptions.AstroidSyntaxError(
"Parsing Python code failed:\n{error}",
source=data,
modname=modnam... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_data_build'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Build tree node from data and add some informations |
def verify_keys(self):
verify_keys_endpoint = Template("${rest_root}/site/${public_key}")
url = verify_keys_endpoint.substitute(rest_root=self._rest_root, public_key=self._public_key)
data = { "clientName": "mollom_python", "clientVersion": "1.0" }
self._client.headers["Content-Type"] = ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'verify_keys'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Verify that the public and private key combination is valid; raises MollomAuthenticationError otherwise |
def create_user(self, claims):
email = claims.get('email')
username = self.get_username(claims)
return self.UserModel.objects.create_user(username, email) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_user'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return object for a newly created user account. |
def _iterate(self, data, expanded_data):
for attribute in SOURCE_KEYS:
for k, v in data[attribute].items():
if k == None:
group = 'Sources'
else:
group = k
if group in data[attribute].keys():
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iterate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | _Iterate through all data, store the result expansion in extended dictionary |
def parse_lint_result(lint_result_path, manifest_path):
unused_string_pattern = re.compile('The resource `R\.string\.([^`]+)` appears to be unused')
mainfest_string_refs = get_manifest_string_refs(manifest_path)
root = etree.parse(lint_result_path).getroot()
issues = []
for issue_xml in root.findall... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_lint_result'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest |
def select_sources(cat_table, cuts):
nsrc = len(cat_table)
full_mask = np.ones((nsrc), bool)
for cut in cuts:
if cut == 'mask_extended':
full_mask *= mask_extended(cat_table)
elif cut == 'select_extended':
full_mask *= select_extended(cat_table)
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'select_sources'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Select only rows passing a set of cuts from catalog table |
def _convert_asset_timestamp_fields(dict_):
for key in _asset_timestamp_fields & viewkeys(dict_):
value = pd.Timestamp(dict_[key], tz='UTC')
dict_[key] = None if isnull(value) else value
return dict_ | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_convert_asset_timestamp_fields'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children':... | Takes in a dict of Asset init args and converts dates to pd.Timestamps |
def save_cache(dpath, fname, cfgstr, data, ext='.cPkl', verbose=None):
fpath = _args2_fpath(dpath, fname, cfgstr, ext)
util_io.save_data(fpath, data, verbose=verbose)
return fpath | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save_cache'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '11']}; {'id': '4', 'type': 'identifier', 'chil... | Saves data using util_io, but smartly constructs a filename |
def sentence_starts(self):
if not self.is_tagged(SENTENCES):
self.tokenize_sentences()
return self.starts(SENTENCES) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sentence_starts'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | The list of start positions representing ``sentences`` layer elements. |
def remove_child(self, child):
assert child in self.children
self.children.remove(child)
self.index.pop(child.tax_id)
if child.parent is self:
child.parent = None
if child.index is self.index:
child.index = None
for n in child:
if n is ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_child'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Remove a child from this node. |
def create_table_header(self):
self.table_header = QTableView()
self.table_header.verticalHeader().hide()
self.table_header.setEditTriggers(QTableWidget.NoEditTriggers)
self.table_header.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.table_header.setVerticalScrollB... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_table_header'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Create the QTableView that will hold the header model. |
def missing_requirements_command(missing_programs, installation_string,
filename, unused_lines):
verb = 'is'
if len(missing_programs) > 1:
verb = 'are'
return {
filename: {
'skipped': [
'%s %s not installed. %s' % (', '.join(missin... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'missing_requirements_command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier',... | Pseudo-command to be used when requirements are missing. |
def age(self):
if self.exists:
return datetime.utcnow() - datetime.utcfromtimestamp(os.path.getmtime(self.name))
return timedelta() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'age'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Age of the video |
def unsubscribe(self):
_LOGGER.info("PubNub unsubscribing")
self._pubnub.unsubscribe_all()
self._pubnub.stop()
self._pubnub = None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unsubscribe'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Completly stop all pubnub operations. |
def getContact(self, user=None, channel=None):
try:
return self.contacts[(channel, user)]
except KeyError:
new_contact = self.contactClass(self, user=user, channel=channel)
self.contacts[(channel, user)] = new_contact
new_contact.setServiceParent(self)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getContact'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | get a Contact instance for ``user`` on ``channel`` |
def _process_value(value):
if not value:
value = _("Not set")
elif value == "None":
value = _("Not set")
elif value == "0":
pass
elif api.is_uid(value):
value = _get_title_or_id_from_uid(value)
elif isinstance(value, (dict)):
value = json.dumps(sorted(value.it... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process_value'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'va... | Convert the value into a human readable diff string |
def _from_dict(cls, _dict):
args = {}
if 'total_minutes_of_audio' in _dict:
args['total_minutes_of_audio'] = _dict.get('total_minutes_of_audio')
else:
raise ValueError(
'Required property \'total_minutes_of_audio\' not present in AudioResources JSON'
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_from_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'c... | Initialize a AudioResources object from a json dictionary. |
def interface(self, iface_name):
    """Return the interface registered under ``iface_name``.

    Raises RpcException (ERR_INVALID_PARAMS) when no interface matches.
    """
    if not self.has_interface(iface_name):
        raise RpcException(ERR_INVALID_PARAMS, "Unknown interface: '%s'", iface_name)
    return self.interfaces[iface_name]
def decrypt_stream(mode, in_stream, out_stream, block_size = BLOCK_SIZE, padding = PADDING_DEFAULT):
    """Decrypt a stream of bytes from in_stream to out_stream using mode."""
    stream_decrypter = Decrypter(mode, padding = padding)
    _feed_stream(stream_decrypter, in_stream, out_stream, block_size)
def prevent_recursion(default):
def decorator(func):
name = '_calling_%s_' % func.__name__
def newfunc(self, *args, **kwds):
if getattr(self, name, False):
return default()
setattr(self, name, True)
try:
return func(self, *args, **k... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'prevent_recursion'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | A decorator that returns the return value of `default` in recursions |
def bft(self):
    """Yield each node of the tree in breadth-first order, starting at self."""
    pending = deque([self])
    while pending:
        current = pending.popleft()
        yield current
        # Leaf nodes may not define "childs" at all.
        if hasattr(current, "childs"):
            pending.extend(current.childs)
def vectorize(data, cloud=None, api_key=None, version=None, **kwargs):
batch = detect_batch(data)
data = data_preprocess(data, batch=batch)
url_params = {"batch": batch, "api_key": api_key, "version": version, "method": "vectorize"}
return api_handler(data, cloud=cloud, api="custom", url_params=url_para... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'vectorize'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', 'children'... | Support for raw features from the custom collections API |
def containers_running(get_container_name):
running = []
for n in ['web', 'postgres', 'solr', 'datapusher', 'redis']:
info = docker.inspect_container(get_container_name(n))
if info and not info['State']['Running']:
running.append(n + '(halted)')
elif info:
running... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'containers_running'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Return a list of containers tracked by this environment that are running |
def resolve_import(import_path, from_module):
if not import_path or not import_path.startswith('.'):
return import_path
from_module = from_module.split('.')
dots = 0
for c in import_path:
if c == '.':
dots += 1
else:
break
if dots:
from_module ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'resolve_import'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Resolves relative imports from a module. |
def _check_pointers(parser):
    """Check the pointer best-practice conditions for the parsed source."""
    from fortpy.stats.bp import check_pointers as run_pointer_check
    # `args` is a module-level argument mapping populated elsewhere in the file.
    run_pointer_check(parser, args["source"], args["filter"], args["recursive"])
def mk_tab_context_menu(callback_object):
callback_object.context_menu = Gtk.Menu()
menu = callback_object.context_menu
mi1 = Gtk.MenuItem(_("New Tab"))
mi1.connect("activate", callback_object.on_new_tab)
menu.add(mi1)
mi2 = Gtk.MenuItem(_("Rename"))
mi2.connect("activate", callback_object.o... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'mk_tab_context_menu'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Create the context menu for a notebook tab |
def types(cls, **kwargs):
    """Create an Arguments instance of the possible Types.

    Each factory is called with the positional index of its field in
    ``Arguments._fields`` and the field name.
    """
    named = {}
    for name, factory in six.iteritems(kwargs):
        named[name] = factory(Arguments._fields.index(name), name)
    return cls(**named)
def cluster_version(self):
    """Return the Elasticsearch cluster version as a list of ints, e.g. [7, 10, 2]."""
    number = self.client.info()['version']['number']
    return list(map(int, number.split('.')))
def _write_config(self, cfg, slot):
old_pgm_seq = self._status.pgm_seq
frame = cfg.to_frame(slot=slot)
self._debug("Writing %s frame :\n%s\n" % \
(yubikey_config.command2str(frame.command), cfg))
self._write(frame)
self._waitfor_clear(yubikey_defs.SLOT_WRI... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_write_config'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Write configuration to YubiKey. |
def zip_nested(arg0, *args, **kwargs):
dict_only = kwargs.pop("dict_only", False)
assert not kwargs
if isinstance(arg0, dict):
return {
k: zip_nested(*a, dict_only=dict_only) for k, a in zip_dict(arg0, *args)
}
elif not dict_only:
if isinstance(arg0, list):
return [zip_nested(*a, dict_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'zip_nested'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Zip data struct together and return a data struct with the same shape. |
def process_settings(self, settings):
    """Lazily feed Dharma with configuration by executing the settings file.

    ``settings`` is an open file-like object with ``read()`` and ``name``.
    """
    logging.debug("Using configuration from: %s", settings.name)
    code = compile(settings.read(), settings.name, 'exec')
    # NOTE(review): exec of a user-supplied file runs arbitrary code — only
    # safe for trusted configuration sources.
    exec(code, globals(), locals())
def default(self, obj):
if isinstance(obj, Sensor):
return {
'sensor_id': obj.sensor_id,
'children': obj.children,
'type': obj.type,
'sketch_name': obj.sketch_name,
'sketch_version': obj.sketch_version,
'... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'default'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Serialize obj into JSON. |
def _trig_auth_check(self, useriden):
if not self.user.admin and useriden != self.user.iden:
raise s_exc.AuthDeny(user=self.user.name, mesg='As non-admin, may only manipulate triggers created by you') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_trig_auth_check'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Check that, as a non-admin, may only manipulate resources created by you. |
def _run_command_in_extended_path(syslog_ng_sbin_dir, command, params):
orig_path = os.environ.get('PATH', '')
env = None
if syslog_ng_sbin_dir:
env = {
str('PATH'): salt.utils.stringutils.to_str(
os.pathsep.join(
salt.utils.data.decode(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_run_command_in_extended_path'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'ch... | Runs the specified command with the syslog_ng_sbin_dir in the PATH |
def gotoHome(self):
mode = QTextCursor.MoveAnchor
if QApplication.instance().keyboardModifiers() == Qt.ShiftModifier:
mode = QTextCursor.KeepAnchor
cursor = self.textCursor()
block = projex.text.nativestring(cursor.block().text())
cursor.movePosition( QTextCurs... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'gotoHome'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Navigates to the home position for the edit. |
def events(self, all_instances=None, instance_ids=None, filters=None):
params = {}
if filters:
params["filters"] = make_filters(filters)
if instance_ids:
params['InstanceIds'] = instance_ids
statuses = self.status(all_instances, **params)
event_list = []
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'events'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | a list of tuples containing instance Id's and event information |
def parse_seqres(self, pdb):
    """Parse the SEQRES entries of ``pdb`` into this chain -> residue-list mapping.

    Fixed-column PDB layout: chain id at column 11, residue names in 19:70.
    """
    is_seqres = re.compile("SEQRES").match
    for line in pdb.lines:
        if not is_seqres(line):
            continue
        chain = line[11]
        residues = line[19:70].strip().split()
        self.setdefault(chain, [])
        self[chain] += residues
def visit_exec(self, node, parent):
    """Visit an Exec node by returning a fresh instance of it."""
    newnode = nodes.Exec(node.lineno, node.col_offset, parent)
    body = self.visit(node.body, newnode)
    globs = _visit_or_none(node, "globals", self, newnode)
    locs = _visit_or_none(node, "locals", self, newnode)
    newnode.postinit(body, globs, locs)
    return newnode
def _download_ota_script(script_url):
    """Download the OTA script from the cloud service.

    Returns the raw response body as bytes on success, or False when the
    download fails (network error or non-2xx HTTP status).
    """
    try:
        # stream=True was dropped: .content reads the full body anyway,
        # so streaming bought nothing here.
        response = requests.get(script_url)
        # Treat HTTP error statuses (404, 500, ...) as failures instead of
        # returning the error page body as if it were the script.
        response.raise_for_status()
        return response.content
    except Exception as e:
        iprint("Failed to download OTA script")
        iprint(e)
        return False
def __edge_weight(edge_id, dfs_data):
graph = dfs_data['graph']
edge_lookup = dfs_data['edge_lookup']
edge = graph.get_edge(edge_id)
u, v = edge['vertices']
d_u = D(u, dfs_data)
d_v = D(v, dfs_data)
lp_1 = L1(v, dfs_data)
d_lp_1 = D(lp_1, dfs_data)
if edge_lookup[edge_id] == 'backedg... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__edge_weight'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Calculates the edge weight used to sort edges. |
def extract_domain(var_name, output):
    """Append the hostname of the URL held in env var ``var_name`` to ``output``.

    Does nothing when the variable is unset or empty.
    """
    url = getenv(var_name)
    if not url:
        return
    output.append(urlparse(url).hostname)
def _onMenuButton(self, evt):
x, y = self.GetPositionTuple()
w, h = self.GetSizeTuple()
self.PopupMenuXY(self._menu, x, y+h-4)
evt.Skip() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_onMenuButton'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Handle menu button pressed. |
def _load_custom(self, settings_name='localsettings.py'):
if settings_name[-3:] == '.py':
settings_name = settings_name[:-3]
new_settings = {}
try:
settings = importlib.import_module(settings_name)
new_settings = self._convert_to_dict(settings)
except ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_custom'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Load the user defined settings, overriding the defaults |
def _check_retcode(cmd):
    """Return True when ``cmd`` exits with status 0 (quiet cmdmod.retcode wrapper)."""
    status = salt.modules.cmdmod.retcode(cmd, output_loglevel='quiet', ignore_retcode=True)
    return status == 0
def mock_cmd(self, release, *cmd, **kwargs):
    """Run a mock command in the chroot for a given release.

    ``release`` supplies the ``mock_cmd`` and ``mock_dir`` format fields;
    pass ``new_chroot=True`` to add the --new-chroot flag.
    """
    parts = ['{mock_cmd}']
    if kwargs.get('new_chroot') is True:
        parts.append('--new-chroot')
    parts.append('--configdir={mock_dir}')
    argv = ' '.join(parts).format(**release).split() + list(cmd)
    return self.call(argv)
def to_dict(self):
    """Convert this dynstruct to a plain dictionary, skipping excluded keys."""
    return {key: val
            for (key, val) in six.iteritems(self.__dict__)
            if key not in self._printable_exclude}
def open_as_pillow(filename):
    """Load the image fully into memory so the file can be deleted immediately."""
    with __sys_open(filename, 'rb') as handle:
        buffered = BytesIO(handle.read())
    return Image.open(buffered)
def _init_eval_iter(self, eval_data):
if eval_data is None:
return eval_data
if isinstance(eval_data, (tuple, list)) and len(eval_data) == 2:
if eval_data[0] is not None:
if eval_data[1] is None and isinstance(eval_data[0], io.DataIter):
return... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_eval_iter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Initialize the iterator given eval_data. |
def _remap_fields(cls, kwargs):
mapped = {}
for key in kwargs:
if key in cls._remap:
mapped[cls._remap[key]] = kwargs[key]
else:
mapped[key] = kwargs[key]
return mapped | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_remap_fields'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Map fields from kwargs into dict acceptable by NIOS |
def prewalk_hook(self, dispatcher, node):
    """Prewalk hook for the Unparser: walk the node, finalize, return it unchanged."""
    self.walk(dispatcher, node)
    self.finalize()
    return node
def tag_details(tag, nodenames):
details = {}
details['type'] = tag.name
details['ordinal'] = tag_ordinal(tag)
if tag_details_sibling_ordinal(tag):
details['sibling_ordinal'] = tag_details_sibling_ordinal(tag)
if tag_details_asset(tag):
details['asset'] = tag_details_asset(tag)
o... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tag_details'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Used in media and graphics to extract data from their parent tags |
def do_shutdown (self):
with self.mutex:
unfinished = self.unfinished_tasks - len(self.queue)
self.queue.clear()
if unfinished <= 0:
if unfinished < 0:
raise ValueError('shutdown is in error')
self.all_tasks_done.notifyAll()... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'do_shutdown'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Shutdown the queue by not accepting any more URLs. |
def update_schema(self):
    """Propagate schema object changes to file records, committing before and after."""
    self.commit()
    schema = self.build_source_files.schema
    schema.objects_to_record()
    self.commit()
def end(self, *args):
    """End a nested log section, dedenting unless in verbose mode.

    With args, also writes an 'end' line before dedenting. Returns self
    for chaining.
    """
    if not self._is_verbose:
        if args:
            self.writeln('end', *args)
        self._indent -= 1
    return self
def queryset(self, request, queryset):
    """Filter queryset using the truthy params from the validated form.

    Returns the queryset unchanged when the form does not validate.
    """
    if not self.form.is_valid():
        return queryset
    params = {name: value
              for name, value in self.form.cleaned_data.items()
              if value}
    return queryset.filter(**params)
def check_scrapers():
d = {}
for scraperclass in _scraperclasses:
name = scraperclass.getName().lower()
if name in d:
name1 = scraperclass.getName()
name2 = d[name].getName()
raise ValueError('duplicate scrapers %s and %s found' % (name1, name2))
d[nam... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_scrapers'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9']}; {'id': '5',... | Check for duplicate scraper class names. |
def hash_data(obj):
    """Generate a SHA1 hex digest from a complex object.

    Text chunks yielded by bytes_iter are UTF-8 encoded before hashing.
    """
    digest = sha1()
    for chunk in bytes_iter(obj):
        if isinstance(chunk, six.text_type):
            chunk = chunk.encode('utf-8')
        digest.update(chunk)
    return digest.hexdigest()
def jx_type(column):
    """Return the jx_type for the given column."""
    es_col = column.es_column
    # Exists-typed columns map straight to EXISTS; everything else goes
    # through the es_type -> json type table.
    return EXISTS if es_col.endswith(EXISTS_TYPE) else es_type_to_json_type[column.es_type]
def __convert_key(expression):
if type(expression) is str and len(expression) > 2 and expression[1] == '!':
expression = eval(expression[2:-1])
return expression | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__convert_key'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'exp... | Converts keys in YAML that reference other keys. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.