code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def handle_command_def(self, line):
cmd, arg, line = self.parseline(line)
if not cmd:
return
if cmd == 'silent':
self.commands_silent[self.commands_bnum] = True
return
elif cmd == 'end':
self.cmdqueue = []
return 1
cmdli... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_command_def'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Handles one command line during command list definition. |
def configure_app(**kwargs):
sys_args = sys.argv
args, command, command_args = parse_args(sys_args[1:])
parser = OptionParser()
parser.add_option('--config', metavar='CONFIG')
(options, logan_args) = parser.parse_args(args)
config_path = options.config
logan_configure(config_path=config_path... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'configure_app'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'dictionary_splat_pattern', 'children': ['5... | Builds up the settings using the same method as logan |
def _eratosthenes():
d = {}
for q in count(2):
p = d.pop(q, None)
if p is None:
yield q
d[q * q] = q
else:
x = p + q
while x in d:
x += p
d[x] = p | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_eratosthenes'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9']}; {'id': '5', ... | Yields the sequence of prime numbers via the Sieve of Eratosthenes. |
def chmod(config):
output_dir = config["output_dir"]
for dirpath, dirnames, filenames in os.walk(output_dir):
for dirname in dirnames:
os.chmod(os.path.join(dirpath, dirname), 0755)
for filename in filenames:
os.chmod(os.path.join(dirpath, filename), 0644) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'chmod'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'config'}; {... | Set correct file permissions. |
def start_http_server(port, addr='', registry=REGISTRY):
CustomMetricsHandler = MetricsHandler.factory(registry)
httpd = _ThreadingSimpleServer((addr, port), CustomMetricsHandler)
t = threading.Thread(target=httpd.serve_forever)
t.daemon = True
t.start() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'start_http_server'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': []... | Starts an HTTP server for prometheus metrics as a daemon thread |
def _join_host_port(host, port):
template = "%s:%s"
host_requires_bracketing = ':' in host or '%' in host
if host_requires_bracketing:
template = "[%s]:%s"
return template % (host, port) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_join_host_port'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Adapted golang's net.JoinHostPort |
def convert_option_dict_to_dict(option_dict):
ret_dict = {}
for key, value in option_dict.items():
if is_null(value):
ret_dict[key] = None
elif isinstance(value, tuple):
ret_dict[key] = value[0]
else:
ret_dict[key] = value
return ret_dict | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_option_dict_to_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [],... | Convert a dictionary of options tuples to a simple key-value dictionary |
def sr1(x, promisc=None, filter=None, iface=None, nofilter=0, *args, **kargs):
s = conf.L3socket(promisc=promisc, filter=filter,
nofilter=nofilter, iface=iface)
ans, _ = sndrcv(s, x, *args, **kargs)
s.close()
if len(ans) > 0:
return ans[0][1]
else:
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '21']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sr1'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14', '17', '19']}; {'id': '4', 'type': 'identifier', 'chi... | Send packets at layer 3 and return only the first answer |
def convex_hull(features):
points = sorted([s.point() for s in features])
l = reduce(_keep_left, points, [])
u = reduce(_keep_left, reversed(points), [])
return l.extend(u[i] for i in xrange(1, len(u) - 1)) or l | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convex_hull'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'featu... | Returns points on convex hull of an array of points in CCW order. |
def _access_token_endpoint(self, grantType, extraParams={}):
response = requests.post(
self._format_url(OAUTH2_ROOT + 'access_token'),
data = _extend({
'grant_type': grantType,
'client_id': self.client.get('client_id', ''),
'client_secret':... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_access_token_endpoint'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children'... | Base exchange of data for an access_token. |
def pull_all(collector, image, **kwargs):
images = collector.configuration["images"]
for layer in Builder().layered(images, only_pushable=True):
for image_name, image in layer:
log.info("Pulling %s", image_name)
pull(collector, image, **kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pull_all'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Pull all the images |
def login(self, request, extra_context=None):
redirect_to = request.POST.get(REDIRECT_FIELD_NAME, request.GET.get(REDIRECT_FIELD_NAME))
if not redirect_to or not is_safe_url(url=redirect_to, allowed_hosts=[request.get_host()]):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'login'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Redirects to the site login page for the given HttpRequest. |
def translate(script):
tree = ast.parse(script)
ZiplineImportVisitor().visit(tree)
return astor.to_source(tree) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'translate'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'script'... | translate zipline script into pylivetrader script. |
def write_data(data, out_file):
with open(out_file, 'w') as handle_out:
handle_out.write(json.dumps([data], skipkeys=True, indent=2)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'd... | write json file from seqcluster cluster |
def getEdges(npArr):
edges = np.concatenate(([0], npArr[:,0] + npArr[:,2]))
return np.array([Decimal(str(i)) for i in edges]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getEdges'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'npArr'};... | get np array of bin edges |
def _indicator_table():
from xclim import temperature, precip
import inspect
inds = _get_indicators([temperature, precip])
table = []
for ind in inds:
args = {name: p.default for (name, p) in ind._sig.parameters.items() if p.default != inspect._empty}
table.append(ind.json(args))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_indicator_table'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '12', '15', '24'... | Return a sequence of dicts storing metadata about all available indices. |
def _on_trace(_loop, adapter, conn_id, trace):
conn_string = adapter._get_property(conn_id, 'connection_string')
if conn_string is None:
adapter._logger.debug("Dropping trace data with unknown conn_id=%s", conn_id)
return
adapter.notify_event_nowait(conn_string, 'trace', trace) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_on_trace'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Callback when tracing data is received. |
def palette():
for i in range(0, 16):
if i % 8 == 0:
print()
if i > 7:
i = "8;5;%s" % i
print("\033[4%sm%s\033[0m" % (i, " " * (80 // 20)), end="")
print("\n") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'palette'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '52']}; {'id': '5', 'type... | Generate a palette from the colors. |
def list_migration_choice(cls, datacenter):
datacenter_id = cls.usable_id(datacenter)
dc_list = cls.list()
available_dcs = [dc for dc in dc_list
if dc['id'] == datacenter_id][0]['can_migrate_to']
choices = [dc for dc in dc_list
if dc['id'] in a... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_migration_choice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | List available datacenters for migration from given datacenter. |
def _distance_correlation_sqr_naive(x, y, exponent=1):
return _distance_sqr_stats_naive_generic(
x, y,
matrix_centered=_distance_matrix,
product=mean_product,
exponent=exponent).correlation_xy | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_distance_correlation_sqr_naive'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', '... | Biased distance correlation estimator between two matrices. |
def controlprompt_cmd(self, cmd):
data = tags.string_tag('cmbe', cmd) + tags.uint8_tag('cmcc', 0)
return self.daap.post(_CTRL_PROMPT_CMD, data=data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'controlprompt_cmd'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Perform a "controlpromptentry" command. |
def no_spikes(tolerance):
def no_spikes(curve):
diff = np.abs(curve - curve.despike())
return np.count_nonzero(diff) < tolerance
return no_spikes | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'no_spikes'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'toleran... | Arg ``tolerance`` is the number of spiky samples allowed. |
def reset(self):
self.skip_function = None
self.skip_during_range = None
self.enabled = True
self.splay = None
self.opts['schedule'] = {} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reset'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Reset the scheduler to defaults |
def reversed_lines(path):
with open(path, 'r') as handle:
part = ''
for block in reversed_blocks(handle):
for c in reversed(block):
if c == '\n' and part:
yield part[::-1]
part = ''
part += c
if part: yield p... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reversed_lines'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'pa... | Generate the lines of file in reverse order. |
def _handle_tag_removeobject2(self):
obj = _make_object("RemoveObject2")
obj.Depth = unpack_ui16(self._src)
return obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_tag_removeobject2'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Handle the RemoveObject2 tag. |
def process_mathjax_script(mathjax_settings):
with open (os.path.dirname(os.path.realpath(__file__))
+ '/mathjax_script_template', 'r') as mathjax_script_template:
mathjax_template = mathjax_script_template.read()
return mathjax_template.format(**mathjax_settings) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_mathjax_script'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Load the mathjax script template from file, and render with the settings |
def get(self, object_path, interfaces_and_properties=None):
if not interfaces_and_properties:
interfaces_and_properties = self._objects.get(object_path)
if not interfaces_and_properties:
return None
property_hub = PropertyHub(interfaces_and_properties)
met... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Create a Device instance from object path. |
def focus_left(pymux):
" Move focus to the left. "
_move_focus(pymux,
lambda wp: wp.xpos - 2,
lambda wp: wp.ypos) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'focus_left'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'pymux'... | Move focus to the left. |
def create_args(line, namespace):
args = []
for arg in shlex.split(line):
if not arg:
continue
if arg[0] == '$':
var_name = arg[1:]
if var_name in namespace:
args.append((namespace[var_name]))
else:
raise Exception('Undefined variable referenced i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_args'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Expand any meta-variable references in the argument list. |
def disconnect(self):
if self._driver:
self._driver.disconnect()
self.axis_homed = {
'x': False, 'y': False, 'z': False, 'a': False, 'b': False} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'disconnect'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Disconnects from the robot. |
def process_results(self, paragraph):
if 'editorMode' in paragraph['config']:
mode = paragraph['config']['editorMode'].split('/')[-1]
if 'results' in paragraph and paragraph['results']['msg']:
msg = paragraph['results']['msg'][0]
if mode not in ('text', 'm... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_results'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Routes Zeppelin output types to corresponding handlers. |
def sizeHint(self):
w, h = self.get_width_height()
return QtCore.QSize(w, h) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sizeHint'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | gives qt a starting point for widget size during window resizing |
def returner(ret):
_options = _get_options(ret)
api_url = _options.get('api_url')
channel = _options.get('channel')
username = _options.get('username')
hook = _options.get('hook')
if not hook:
log.error('mattermost.hook not defined in salt config')
return
returns = ret.get('r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'returner'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ret'}; {... | Send an mattermost message with the data |
def delete_vlan(self, nexus_host, vlanid):
starttime = time.time()
path_snip = snipp.PATH_VLAN % vlanid
self.client.rest_delete(path_snip, nexus_host)
self.capture_and_print_timeshot(
starttime, "del_vlan",
switch=nexus_host) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_vlan'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Delete a VLAN on Nexus Switch given the VLAN ID. |
def dt2jd(dt):
a = (14 - dt.month)//12
y = dt.year + 4800 - a
m = dt.month + 12*a - 3
return dt.day + ((153*m + 2)//5) + 365*y + y//4 - y//100 + y//400 - 32045 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dt2jd'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'dt'}; {'id'... | Convert datetime to julian date |
def _generate_lambda(self):
self.tf_conf['resource']['aws_lambda_function']['lambda_func'] = {
'filename': 'webhook2lambda2sqs_func.zip',
'function_name': self.resource_name,
'role': '${aws_iam_role.lambda_role.arn}',
'handler': 'webhook2lambda2sqs_func.webhook2la... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_generate_lambda'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Generate the lambda function and its IAM role, and add to self.tf_conf |
def starter(cls):
url = 'http://www.hagardunor.net/comics.php'
data = cls.getPage(url)
pattern = compile(tagre("a", "href", cls.prevUrl))
for starturl in cls.fetchUrls(url, data, pattern):
pass
return starturl | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'starter'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; {'... | Return last gallery link. |
def tar_file(files, tarname):
if isinstance(files, basestring):
files = [files]
o = tarfile.open(tarname, 'w:gz')
for file in files:
o.add(file)
o.close() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tar_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'fil... | Compress a file or directory into a tar file. |
def sync_and_deploy_gateway(collector):
configuration = collector.configuration
aws_syncr = configuration['aws_syncr']
find_gateway(aws_syncr, configuration)
artifact = aws_syncr.artifact
aws_syncr.artifact = ""
sync(collector)
aws_syncr.artifact = artifact
deploy_gateway(collector) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sync_and_deploy_gateway'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Do a sync followed by deploying the gateway |
def update(self, **kwargs):
if self.condition is not None:
self.result = self.do_(self.model.table.update().where(self.condition).values(**kwargs))
else:
self.result = self.do_(self.model.table.update().values(**kwargs))
return self.result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Execute update table set field = field+1 like statement |
def overall_metrics(self, timestamp='one_month', metrics=None):
perfs = dict()
if metrics is None:
metrics = self.rolling_performances(timestamp=timestamp)
riskfree = np.mean(metrics['treasury_period_return'])
perfs['sharpe'] = qstk_get_sharpe_ratio(
metrics['algo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'overall_metrics'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Use zipline results to compute some performance indicators |
def _add_umis_with_fastp(read_fq, umi_fq, out_fq, cores):
with utils.open_gzipsafe(umi_fq) as in_handle:
in_handle.readline()
umi_size = len(in_handle.readline().strip())
cmd = ("fastp -Q -A -L -G -w 1 --in1 {read_fq} --in2 {umi_fq} "
"--umi --umi_prefix UMI --umi_loc read2 --umi_len ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_add_umis_with_fastp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childr... | Add UMIs to reads from separate UMI file using fastp. |
def iter_items(self, depth: int = 1):
if depth is not None and not isinstance(depth, int):
raise TypeError
def itor(root, d):
if d is not None:
d -= 1
if d < 0:
return
for name in os.listdir(root):
pa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'iter_items'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | get items from directory. |
def read_blast_xml(filename, **kwargs):
with open(filename, 'r') as f:
blast_record = NCBIXML.read(f)
data = {'accession': [],
'hit_def': [],
'hit_id': [],
'title': [],
'length': [],
'e_value': [],
'sequence': []}
for i, s in en... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_blast_xml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Read BLAST XML format. |
def set(self, id, translation, domain='messages'):
assert isinstance(id, (str, unicode))
assert isinstance(translation, (str, unicode))
assert isinstance(domain, (str, unicode))
self.add({id: translation}, domain) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'set'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Sets a message translation. |
def recursive_repr(fillvalue='...'):
'Decorator to make a repr function return fillvalue for a recursive call'
def decorating_function(user_function):
repr_running = set()
def wrapper(self):
key = id(self), get_ident()
if key in repr_running:
return fillva... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'recursive_repr'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6'... | Decorator to make a repr function return fillvalue for a recursive call |
def match_contains(self, el, contains):
match = True
content = None
for contain_list in contains:
if content is None:
content = self.get_text(el, no_iframe=self.is_html)
found = False
for text in contain_list.text:
if text in co... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'match_contains'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Match element if it contains text. |
def build_message(self, stat, value):
return ' '.join((self.prefix + str(stat), str(value), str(round(time())))) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Build a metric in Graphite format. |
def _has_x(self, kwargs):
return (('x' in kwargs) or (self._element_x in kwargs) or
(self._type == 3 and self._element_1mx in kwargs)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_has_x'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Returns True if x is explicitly defined in kwargs |
def addtoreadergroup(self, groupname):
hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
if 0 != hresult:
raise EstablishContextException(hresult)
try:
hresult = SCardIntroduceReader(hcontext, self.name, self.name)
if 0 != hresult and SCARD_E_DUPLICA... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'addtoreadergroup'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Add reader to a reader group. |
def _get_names(self, path: str) -> Iterator[str]:
for i in RequirementsFinder._get_names_cached(path):
yield i | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_names'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Load required packages from path to requirements file |
def token(self, adata, load):
try:
token = self.loadauth.get_tok(load['token'])
except Exception as exc:
log.error('Exception occurred when generating auth token: %s', exc)
yield {}
if not token:
log.warning('Authentication failure of type "token" ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'token'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Determine if token auth is valid and yield the adata |
def _subtoken_id_to_subtoken_string(self, subtoken):
if 0 <= subtoken < self.vocab_size:
return self._all_subtoken_strings[subtoken]
return u"" | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_subtoken_id_to_subtoken_string'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'child... | Converts a subtoken integer ID to a subtoken string. |
def internal_only(view_func):
@functools.wraps(view_func)
def wrapper(request, *args, **kwargs):
forwards = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")
if len(forwards) > 1:
raise PermissionDenied()
return view_func(request, *args, **kwargs)
return wrapper | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'internal_only'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'vie... | A view decorator which blocks access for requests coming through the load balancer. |
def makedir(self, tarinfo, targetpath):
try:
os.mkdir(targetpath, 0o700)
except EnvironmentError as e:
if e.errno != errno.EEXIST:
raise | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'makedir'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Make a directory called targetpath. |
def clear(self):
    """Close every item on this tab bar, reset the index and announce the change.

    Signals are blocked while items close so no per-item notifications fire;
    a single currentIndexChanged(-1) is emitted at the end.
    """
    self.blockSignals(True)
    for tab_item in list(self.items()):
        tab_item.close()
    self.blockSignals(False)
    self._currentIndex = -1
    self.currentIndexChanged.emit(self._currentIndex)
def _init_pval_obj(self):
if self.pval_fnc_name in self.options.keys():
try:
fisher_obj = self.options[self.pval_fnc_name](self.pval_fnc_name, self.log)
except ImportError:
print("fisher module not installed. Falling back on scipy.stats.fisher_exact")
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_pval_obj'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Returns a Fisher object based on user-input. |
def showSectionsHeaders(peInstance):
print "[+] Sections information:\n"
print "--> NumberOfSections: %d\n" % peInstance.ntHeaders.fileHeader.numberOfSections.value
for section in peInstance.sectionHeaders:
fields = section.getFields()
for field in fields:
if isinstance(fields[fi... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'showSectionsHeaders'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Prints IMAGE_SECTION_HEADER for every section present in the file. |
def _implementations(cls):
if cls.__implementations:
return cls.__implementations
cls.__implementations = {}
for implementation in all_subclasses(MetadataExtractor):
try:
feature_name = implementation.feature_name()
cls.__implementations[fe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_implementations'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Returns all the concrete subclasses of MetadataExtractor. |
def benchmark(self, func, gpu_args, instance, times, verbose):
logging.debug('benchmark ' + instance.name)
logging.debug('thread block dimensions x,y,z=%d,%d,%d', *instance.threads)
logging.debug('grid dimensions x,y,z=%d,%d,%d', *instance.grid)
time = None
try:
time ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'benchmark'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'childr... | benchmark the kernel instance |
def log_request(self, code='-', size='-'):
print_size = getattr(thread_local, 'size', -1)
if size != '-':
size_str = ' (%s)' % size
elif print_size >= 0:
size_str = self.log_size_string(print_size) + ' '
else:
size_str = ''
if not self.server.s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Logs the current request. |
def checkMultipleFiles(input):
    """Return True when the input resolves to more than one valid input file."""
    file_list, _, _, _ = buildFileList(input)
    return len(file_list) > 1
def student_visible(self):
    """Return a queryset of all groups whose properties mark them student-visible."""
    visible_ids = {
        group.id
        for group in Group.objects.all()
        if group.properties.student_visible
    }
    return Group.objects.filter(id__in=visible_ids)
def download_file(file_id, file_name):
    """Send *file_name* from the extraction directory for *file_id*.

    Both path components come from the client, so the resolved path is
    verified to remain inside UPLOAD_FOLDER; otherwise a request such as
    ``file_name='../../etc/passwd'`` could read arbitrary files
    (directory traversal).

    Raises:
        ValueError: if the resolved path escapes the upload folder.
    """
    base_dir = os.path.abspath(app.config['UPLOAD_FOLDER'])
    full_path = os.path.abspath(os.path.join(base_dir, file_id, file_name))
    # Containment check: the normalized absolute path must stay under base_dir.
    if not full_path.startswith(base_dir + os.sep):
        raise ValueError("attempted access outside of the upload folder")
    return send_file(full_path)
def delbr(self, name):
    """Bring the bridge interface down, then delete the bridge.

    ``getbr`` is called first — presumably to verify the bridge exists
    before any commands run (TODO confirm it raises for a missing bridge).
    The link must be set down before ``brctl delbr`` can remove it.

    Args:
        name: name of the bridge device to delete.
    """
    self.getbr(name)
    # Order matters: a bridge cannot be deleted while its link is up.
    _runshell([ipexe, 'link', 'set', 'dev', name, 'down'],
            "Could not set link down for %s." % name)
    _runshell([brctlexe, 'delbr', name],
            "Could not delete bridge %s." % name)
def clear_all():
_TABLES.clear()
_COLUMNS.clear()
_STEPS.clear()
_BROADCASTS.clear()
_INJECTABLES.clear()
_TABLE_CACHE.clear()
_COLUMN_CACHE.clear()
_INJECTABLE_CACHE.clear()
for m in _MEMOIZED.values():
m.value.clear_cached()
_MEMOIZED.clear()
logger.debug('pipeline ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clear_all'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11', '17', '23', '29',... | Clear any and all stored state from Orca. |
def example_reading_spec(self):
video_fields, video_decoders = (
video_utils.VideoProblem.example_reading_spec(self))
env_fields, env_decoders = env_problem.EnvProblem.example_reading_spec(self)
env_fields.pop(env_problem.OBSERVATION_FIELD)
env_decoders.pop(env_problem.OBSERVATION_FIELD)
env... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'example_reading_spec'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Return a mix of env and video data fields and decoders. |
def init_app(self, app):
self.__init__(aws_access_key_id=app.config.get("SES_AWS_ACCESS_KEY"),
aws_secret_access_key=app.config.get("SES_AWS_SECRET_KEY"),
region=app.config.get("SES_REGION", "us-east-1"),
sender=app.config.get("SES_SENDER", None)... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init_app'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | For Flask using the app config |
def save_intrinsic_alias(module):
    """Recursively register a singleton alias set for every value in *module*.

    Dict values are descended into; values of type Class additionally have
    their fields processed.
    """
    for entry in module.values():
        if isinstance(entry, dict):
            save_intrinsic_alias(entry)
            continue
        IntrinsicAliases[entry] = frozenset((entry,))
        if isinstance(entry, Class):
            save_intrinsic_alias(entry.fields)
def _define_helper(flag_name, default_value, docstring, flagtype, required):
    """Register 'flag_name' on the shared context parser.

    Required flags are added as positional arguments; optional ones get a
    '--' prefix.
    """
    if required:
        option_name = flag_name
    else:
        option_name = "--%s" % flag_name
    get_context_parser().add_argument(
        option_name, default=default_value, help=docstring, type=flagtype)
def _aha_request(self, cmd, ain=None, param=None, rf=str):
url = 'http://' + self._host + '/webservices/homeautoswitch.lua'
params = {
'switchcmd': cmd,
'sid': self._sid
}
if param:
params['param'] = param
if ain:
params['ain'] = ai... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_aha_request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'childre... | Send an AHA request. |
def download(name, options):
dire = os.path.dirname(name)
fName = os.path.basename(name)
fNameOnly, fExt = os.path.splitext(fName)
dwn = 0
if fileExists(fName, dire) and not fileExists((fNameOnly + '.srt'), dire):
if file_downloaded(download_file(fName, options.timeout, dire), fName, options... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'nam... | download a file or all files in a directory |
def CleanAff4Hunts(self):
hunts_ttl = config.CONFIG["DataRetention.hunts_ttl"]
if not hunts_ttl:
self.Log("TTL not set - nothing to do...")
return
exception_label = config.CONFIG["DataRetention.hunts_ttl_exception_label"]
hunts_root = aff4.FACTORY.Open("aff4:/hunts", token=self.token)
hu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'CleanAff4Hunts'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Cleans up old hunt data from aff4. |
def ds_IsEmpty(ds):
out = False
b = ds.GetRasterBand(1)
try:
mm = b.ComputeRasterMinMax()
if (mm[0] == mm[1]):
ndv = b.GetNoDataValue()
if ndv is None:
out = True
else:
if (mm[0] == ndv):
out = True
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ds_IsEmpty'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ds'}; ... | Check to see if dataset is empty after warp |
def convert_ids_to_tokens(self, ids):
    """Convert a sequence of IDs to wordpiece tokens using the vocab.

    Args:
        ids: iterable of integer token IDs.

    Returns:
        List of token strings in the same order; a KeyError propagates for
        an ID not present in the vocabulary, as before.
    """
    # Comprehension replaces the manual append loop (same semantics).
    return [self.ids_to_tokens[i] for i in ids]
def allows_not_principal(self):
    """Return the statements that pair effect "Allow" with a NotPrincipal element."""
    return [
        statement
        for statement in self.statements
        if statement.not_principal and statement.effect == "Allow"
    ]
def single_traj_from_n_files(file_list, top):
    """Load every file in *file_list* and join them into one trajectory.

    Returns None when *file_list* is empty, matching the previous behavior.
    """
    combined = None
    for filename in file_list:
        loaded = md.load(filename, top=top)
        combined = loaded if combined is None else combined.join(loaded)
    return combined
def visit_Boolean(self, node):
    """Visitor for a `Boolean` AST node: map 'true'/'false' to a Bool value.

    Any other node value falls through and returns None, as before.
    """
    literal_map = {'true': True, 'false': False}
    if node.value in literal_map:
        return Bool(literal_map[node.value])
def refresh_attributes(self, name):
url = DEVICES_ENDPOINT
response = self.query(url)
if not response or not isinstance(response, dict):
return None
for device in response.get('data'):
if device.get('deviceName') == name:
return device
retu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'refresh_attributes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Refresh attributes from a given Arlo object. |
def class_declaration(self, type_):
utils.loggers.queries_engine.debug(
"Container traits: searching class declaration for %s", type_)
cls_declaration = self.get_container_or_none(type_)
if not cls_declaration:
raise TypeError(
'Type "%s" is not instantiat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'class_declaration'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Returns reference to the class declaration. |
def ec2_image_table(images):
    """Build a left-aligned PrettyTable summarizing the given EC2 images."""
    headers = ['ID', 'State', 'Name', 'Owner', 'Root device', 'Is public', 'Description']
    table = prettytable.PrettyTable(headers)
    table.align = 'l'
    for image in images:
        table.add_row([image.id, image.state, image.name, image.ownerId,
                       image.root_device_type, image.is_public, image.description])
    return table
def extract_chunks(blob):
    """Split the blob's byte stream into chunks by reading until exhaustion."""
    data = blob.bytes
    stream = BytesIO(data)
    # The stream starts at position 0, so the total byte count is simply len(data)
    # (equivalent to the seek-to-end/seek-back dance).
    end = len(data)
    chunks = []
    while stream.tell() < end:
        chunks.append(read_chunk(stream))
    return chunks
def latinize(mapping, bind, values):
    """Yield each value, transliterating string values into the latin alphabet."""
    for value in values:
        if isinstance(value, six.string_types):
            yield transliterate(value)
        else:
            yield value
def map_sprinkler(self, sx, sy, watered_crop='^', watered_field='_', dry_field=' ', dry_crop='x'):
maplist = [list(s) for s in self.maplist]
for y, row in enumerate(maplist):
for x, cell in enumerate(row):
if sprinkler_reaches_cell(x, y, sx, sy, self.r):
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '19']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'map_sprinkler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13', '16']}; {'id': '4', 'type': 'identifi... | Return a version of the ASCII map showing reached crop cells. |
def reconcile_procs(self, running):
retset = set()
for tag in running:
proc = running[tag].get('proc')
if proc:
if not proc.is_alive():
ret_cache = os.path.join(
self.opts['cachedir'],
self.jid,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reconcile_procs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Check the running dict for processes and resolve them |
def retrieve_pool_stats(self, pool, **_params):
    """Fetch statistics for the load balancer pool identified by *pool*."""
    stats_path = self.pool_path_stats % (pool)
    return self.get(stats_path, params=_params)
def _parse_property_list(prop, value):
attributes = []
for v in value:
try:
attributes.append(
prop.prop.instance_class.from_api(**v),
)
except AttributeError:
attributes.append(v)
return attributes | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_property_list'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Parse a list property and return a list of the results. |
def instantiate_from_data(self, object_data):
if isinstance(object_data, dict) and 'name' in object_data:
name = object_data['name']
module = importlib.import_module(name)
return self.resolve_and_call(module.create, extra_env=object_data)
if isinstance(object_data, di... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'instantiate_from_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Instantiate object from the supplied data, additional args may come from the environment |
def insert(
self, M_c, T, X_L_list, X_D_list, new_rows=None, N_GRID=31,
CT_KERNEL=0):
if new_rows is None:
raise ValueError("new_row must exist")
if not isinstance(new_rows, list):
raise TypeError('new_rows must be list of lists')
if not isinst... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'insert'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '12', '15']}; {'id': '4', 'type': 'identifier'... | Insert mutates the data T. |
def _unique_names():
    """Yield an endless stream of random 10-character names over [a-z0-9]."""
    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
    # One-character slices (rather than indexing) to match the original's
    # bytes/str-compatible construction.
    pool = [alphabet[i:i + 1] for i in irange(len(alphabet))]
    rng = random.Random()
    while True:
        yield ''.join(rng.choice(pool) for _ in irange(10))
def log(self, timer_name, node):
    """Record a timing event for *node* under the attribute *timer_name*.

    The attribute is created as a one-element list on first use and
    appended to thereafter.
    """
    entry = {"node": node, "time": time.time()}
    if hasattr(self, timer_name):
        getattr(self, timer_name).append(entry)
    else:
        setattr(self, timer_name, [entry])
def write_str(self, s):
    """Append string *s* to the accumulated body, charging its length against room."""
    length = len(s)
    self.write(s)
    self.room -= length
def _write_expressiontool(step_dir, name, inputs, outputs, expression, parallel):
out_file = os.path.join(step_dir, "%s.cwl" % name)
out = {"class": "ExpressionTool",
"cwlVersion": "v1.0",
"requirements": [{"class": "InlineJavascriptRequirement"}],
"inputs": [],
"outp... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_write_expressiontool'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifi... | Create an ExpressionTool output for the given inputs |
def _initialize_from_dict(self, data):
self._json = data
self._validate()
for name, value in self._json.items():
if name in self._properties:
if '$ref' in self._properties[name]:
if 'decimal' in self._properties[name]['$ref']:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_initialize_from_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Loads serializer from a request object |
def to_python(self, value, context=None):
value = value.copy()
res = {}
errors = []
for field, schema in self._fields.items():
name = schema.get_attr('name', field)
if name in value:
try:
res[field] = schema.to_python(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_python'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Convert the value to a real python object |
def _bowtie2_args_from_config(config, curcl):
qual_format = config["algorithm"].get("quality_format", "")
if qual_format.lower() == "illumina":
qual_flags = ["--phred64-quals"]
else:
qual_flags = []
num_cores = config["algorithm"].get("num_cores", 1)
core_flags = ["-p", str(num_cores... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_bowtie2_args_from_config'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': ... | Configurable high level options for bowtie2. |
def splitterfields(data, commdct):
    """Extract the Connector:Splitter fields needed to diagram the splitters.

    Args:
        data: parsed IDF data (presumably — EnergyPlus "Connector:Splitter"
            objects; confirm against the callers).
        commdct: the accompanying IDD field-description structure.

    Returns:
        The extracted fields for every Connector:Splitter object.
    """
    objkey = "Connector:Splitter".upper()
    fieldlists = splittermixerfieldlists(data, commdct, objkey)
    return extractfields(data, commdct, objkey, fieldlists)
def edge_to_bel(u: BaseEntity, v: BaseEntity, edge_data: EdgeData, sep: Optional[str] = None) -> str:
    """Serialize a pair of nodes and their edge data as a BEL relation string.

    NOTE(review): as written this function calls itself, so it recurses
    without bound unless a different ``edge_to_bel`` is bound in this
    module's namespace after this definition.  The call also passes
    ``data=`` while this signature names the parameter ``edge_data`` —
    strong evidence it is meant to delegate to another function of the
    same name (likely in a canonicalization module).  Confirm the intended
    delegate before relying on this wrapper.
    """
    return edge_to_bel(u, v, data=edge_data, sep=sep)
def open_wordfile(app, filename):
    """Open *filename* in the Word application object, read-only and invisible."""
    open_options = dict(
        ReadOnly=True,
        AddToRecentFiles=False,
        Visible=False,
        NoEncodingDialog=True,
    )
    return app.Documents.Open(filename, **open_options)
def print_gpustat(json=False, debug=False, **kwargs):
try:
gpu_stats = GPUStatCollection.new_query()
except Exception as e:
sys.stderr.write('Error on querying NVIDIA devices.'
' Use --debug flag for details\n')
if debug:
try:
import t... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'print_gpustat'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '10']}; {'id': '4', 'type': 'default_parameter', 'children'... | Display the GPU query results into standard output. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.