id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
8,764
def verify_vlan_range(vlan_range):
    """Validate every tag in *vlan_range* and check the range ordering.

    Calls raise_invalid_tag() for any invalid tag, and raises
    NetworkVlanRangeError when the end tag precedes the start tag.
    """
    for tag in vlan_range:
        if not is_valid_vlan_tag(tag):
            raise_invalid_tag(str(tag), vlan_range)
    start, end = vlan_range[0], vlan_range[1]
    if end < start:
        raise n_exc.NetworkVlanRangeError(
            vlan_range=vlan_range,
            error=_('End of VLAN range is less than start of VLAN range'))
[ "def", "verify_vlan_range", "(", "vlan_range", ")", ":", "for", "vlan_tag", "in", "vlan_range", ":", "if", "(", "not", "is_valid_vlan_tag", "(", "vlan_tag", ")", ")", ":", "raise_invalid_tag", "(", "str", "(", "vlan_tag", ")", ",", "vlan_range", ")", "if", "(", "vlan_range", "[", "1", "]", "<", "vlan_range", "[", "0", "]", ")", ":", "raise", "n_exc", ".", "NetworkVlanRangeError", "(", "vlan_range", "=", "vlan_range", ",", "error", "=", "_", "(", "'End of VLAN range is less than start of VLAN range'", ")", ")" ]
raise an exception for invalid tags or malformed range .
train
false
8,765
def core_requirements():
    """Extract the core requirement strings from setup.py.

    Scrapes the ``REQUIRES = [...]`` literal with a regex instead of
    importing setup.py, and returns the quoted entries as a list.
    """
    with open('setup.py') as setup_file:
        contents = setup_file.read()
    requires_block = re.search('REQUIRES = \\[(.*?)\\]', contents, re.S).group(1)
    return re.findall("'(.*?)'", requires_block)
[ "def", "core_requirements", "(", ")", ":", "with", "open", "(", "'setup.py'", ")", "as", "inp", ":", "reqs_raw", "=", "re", ".", "search", "(", "'REQUIRES = \\\\[(.*?)\\\\]'", ",", "inp", ".", "read", "(", ")", ",", "re", ".", "S", ")", ".", "group", "(", "1", ")", "return", "re", ".", "findall", "(", "\"'(.*?)'\"", ",", "reqs_raw", ")" ]
gather core requirements out of setup .
train
false
8,766
def empty_list(lineno=None, col=None):
    """Build the ast.List node for an empty list literal at the given position."""
    node = ast.List(elts=[], lineno=lineno, col_offset=col, ctx=ast.Load())
    return node
[ "def", "empty_list", "(", "lineno", "=", "None", ",", "col", "=", "None", ")", ":", "return", "ast", ".", "List", "(", "elts", "=", "[", "]", ",", "ctx", "=", "ast", ".", "Load", "(", ")", ",", "lineno", "=", "lineno", ",", "col_offset", "=", "col", ")" ]
creates the ast node for an empty list .
train
false
8,769
def ch_config(cmd, *args, **kwargs):
    """Proxy a call to the vsphere execution module function named ``cmd``.

    Strips Salt's ``__pub_*`` bookkeeping kwargs, injects the cached
    connection details from ``DETAILS``, and dispatches to
    ``__salt__['vsphere.<cmd>']``.

    Returns the target function's result, or an error dict with
    ``retcode: -1`` when the function is not available.
    """
    # Iterate over a snapshot of the keys: popping from the dict while
    # iterating kwargs.keys() directly raises RuntimeError on Python 3.
    for k in list(kwargs):
        if k.startswith('__pub_'):
            kwargs.pop(k)
    # Connection details always come from the cached proxy configuration.
    kwargs['host'] = DETAILS['host']
    kwargs['username'] = DETAILS['username']
    kwargs['password'] = DETAILS['password']
    kwargs['port'] = DETAILS['port']
    kwargs['protocol'] = DETAILS['protocol']
    target = 'vsphere.' + cmd
    if target not in __salt__:
        return {'retcode': (-1), 'message': (target + ' is not available.')}
    return __salt__[target](*args, **kwargs)
[ "def", "ch_config", "(", "cmd", ",", "*", "args", ",", "**", "kwargs", ")", ":", "for", "k", "in", "kwargs", ".", "keys", "(", ")", ":", "if", "k", ".", "startswith", "(", "'__pub_'", ")", ":", "kwargs", ".", "pop", "(", "k", ")", "kwargs", "[", "'host'", "]", "=", "DETAILS", "[", "'host'", "]", "kwargs", "[", "'username'", "]", "=", "DETAILS", "[", "'username'", "]", "kwargs", "[", "'password'", "]", "=", "DETAILS", "[", "'password'", "]", "kwargs", "[", "'port'", "]", "=", "DETAILS", "[", "'port'", "]", "kwargs", "[", "'protocol'", "]", "=", "DETAILS", "[", "'protocol'", "]", "if", "(", "(", "'vsphere.'", "+", "cmd", ")", "not", "in", "__salt__", ")", ":", "return", "{", "'retcode'", ":", "(", "-", "1", ")", ",", "'message'", ":", "(", "(", "'vsphere.'", "+", "cmd", ")", "+", "' is not available.'", ")", "}", "else", ":", "return", "__salt__", "[", "(", "'vsphere.'", "+", "cmd", ")", "]", "(", "*", "args", ",", "**", "kwargs", ")" ]
proxy a command through to the salt vsphere execution module, injecting cached connection details .
train
true
8,770
def transaction_exists(pkglist):
    """Return ``name-arch`` labels from *pkglist* that are part of an
    unfinished yum transaction.

    Returns an empty list when the yum transaction helpers are missing.
    """
    conflicts = []
    if not transaction_helpers:
        return conflicts
    nvreas = [splitFilename(pkg) for pkg in pkglist]
    for trans in find_unfinished_transactions():
        for (action, step_spec) in find_ts_remaining(trans):
            (n, v, r, e, a) = splitFilename(step_spec)
            label = '%s-%s' % (n, a)
            for pkg in nvreas:
                # Match on package name and architecture only.
                if n == pkg[0] and a == pkg[4]:
                    if label not in conflicts:
                        conflicts.append(label)
                    break
    return conflicts
[ "def", "transaction_exists", "(", "pkglist", ")", ":", "conflicts", "=", "[", "]", "if", "(", "not", "transaction_helpers", ")", ":", "return", "conflicts", "pkglist_nvreas", "=", "[", "]", "for", "pkg", "in", "pkglist", ":", "pkglist_nvreas", ".", "append", "(", "splitFilename", "(", "pkg", ")", ")", "unfinished_transactions", "=", "find_unfinished_transactions", "(", ")", "for", "trans", "in", "unfinished_transactions", ":", "steps", "=", "find_ts_remaining", "(", "trans", ")", "for", "step", "in", "steps", ":", "(", "action", ",", "step_spec", ")", "=", "step", "(", "n", ",", "v", ",", "r", ",", "e", ",", "a", ")", "=", "splitFilename", "(", "step_spec", ")", "for", "pkg", "in", "pkglist_nvreas", ":", "label", "=", "(", "'%s-%s'", "%", "(", "n", ",", "a", ")", ")", "if", "(", "(", "n", "==", "pkg", "[", "0", "]", ")", "and", "(", "a", "==", "pkg", "[", "4", "]", ")", ")", ":", "if", "(", "label", "not", "in", "conflicts", ")", ":", "conflicts", ".", "append", "(", "(", "'%s-%s'", "%", "(", "n", ",", "a", ")", ")", ")", "break", "return", "conflicts" ]
checks the package list to see if any packages are involved in an incomplete transaction .
train
false
8,771
def get_plugin_keywords():
    # Return an iterator (not a list) over the keys of the module-level
    # ``plugins`` registry, so callers can loop without copying the mapping.
    return iter(plugins.keys())
[ "def", "get_plugin_keywords", "(", ")", ":", "return", "iter", "(", "plugins", ".", "keys", "(", ")", ")" ]
return iterator over all plugin keywords .
train
false
8,772
def comparison_negative(logical_line):
    """Yield (offset, message) pairs for negated comparisons.

    E713: ``not x in y`` should be ``x not in y``.
    E714: ``not x is y`` should be ``x is not y``.
    """
    match = COMPARE_NEGATIVE_REGEX.search(logical_line)
    if not match:
        return
    pos = match.start(1)
    if match.group(2) == 'in':
        yield pos, "E713 test for membership should be 'not in'"
    else:
        yield pos, "E714 test for object identity should be 'is not'"
[ "def", "comparison_negative", "(", "logical_line", ")", ":", "match", "=", "COMPARE_NEGATIVE_REGEX", ".", "search", "(", "logical_line", ")", "if", "match", ":", "pos", "=", "match", ".", "start", "(", "1", ")", "if", "(", "match", ".", "group", "(", "2", ")", "==", "'in'", ")", ":", "(", "yield", "(", "pos", ",", "\"E713 test for membership should be 'not in'\"", ")", ")", "else", ":", "(", "yield", "(", "pos", ",", "\"E714 test for object identity should be 'is not'\"", ")", ")" ]
negative comparison .
train
true
8,773
def scp(reactor, username, host, remote_path, local_path, direction, port=22, identity_file=None):
    """Copy files over scp between the local host and ``username@host``.

    ``direction`` must be ``runner.DOWNLOAD`` or ``runner.UPLOAD``;
    ``remote_path`` and ``local_path`` are FilePath-like objects (only
    their ``.path`` attribute is used).  Returns the result of
    ``DeferredContext.addActionFinish()``; known scp failures are
    translated into RemoteFileNotFound / SCPConnectionError.
    """
    if (direction not in (DOWNLOAD, UPLOAD)):
        raise ValueError('Invalid direction argument {!r}. Must be one of ``runner.DOWNLOAD`` or ``runner.UPLOAD``.'.format(direction))
    remote_host_path = ((((username + '@') + host) + ':') + remote_path.path)
    # -P is scp's (capital) port flag; bytes(port) — presumably because the
    # process runner expects byte arguments (Python 2 era) — confirm.
    scp_command = (['scp', '-r', '-P', bytes(port)] + SSH_OPTIONS)
    if (identity_file is not None):
        scp_command += ['-i', identity_file.path]
    if (direction is DOWNLOAD):
        scp_command += [remote_host_path, local_path.path]
    else:
        scp_command += [local_path.path, remote_host_path]
    action = SCP_ACTION(username=username, host=host, remote_path=remote_path, local_path=local_path, port=port, identity_file=identity_file)
    # Mutable cell: lets the stderr handler record a more specific failure
    # for the errback below to report.
    failed_reason = []

    def handle_stdout(line):
        # Log each stdout line under the enclosing eliot action.
        SCP_OUTPUT_MESSAGE(line=line).write(action=action)

    def handle_stderr(line):
        """Recognize scp's file-not-found and lost-connection messages and
        record a more meaningful exception for the errback to raise."""
        if ('No such file or directory' in line):
            failed_reason.append(RemoteFileNotFound(remote_path))
        if ('lost connection' in line):
            failed_reason.append(SCPConnectionError())
        SCP_ERROR_MESSAGE(line=line).write(action=action)

    def scp_failed(reason):
        """Turn a ProcessTerminated failure into the specific error noticed
        on stderr, if any; re-raise anything else unchanged."""
        reason.trap(ProcessTerminated)
        if failed_reason:
            # Most recent recognized stderr error wins.
            return Failure(failed_reason[(-1)])
        return reason

    with action.context():
        context = DeferredContext(run(reactor, scp_command, handle_stdout=handle_stdout, handle_stderr=handle_stderr))
        context.addErrback(scp_failed)
        return context.addActionFinish()
[ "def", "scp", "(", "reactor", ",", "username", ",", "host", ",", "remote_path", ",", "local_path", ",", "direction", ",", "port", "=", "22", ",", "identity_file", "=", "None", ")", ":", "if", "(", "direction", "not", "in", "(", "DOWNLOAD", ",", "UPLOAD", ")", ")", ":", "raise", "ValueError", "(", "'Invalid direction argument {!r}. Must be one of ``runner.DOWNLOAD`` or ``runner.UPLOAD``.'", ".", "format", "(", "direction", ")", ")", "remote_host_path", "=", "(", "(", "(", "(", "username", "+", "'@'", ")", "+", "host", ")", "+", "':'", ")", "+", "remote_path", ".", "path", ")", "scp_command", "=", "(", "[", "'scp'", ",", "'-r'", ",", "'-P'", ",", "bytes", "(", "port", ")", "]", "+", "SSH_OPTIONS", ")", "if", "(", "identity_file", "is", "not", "None", ")", ":", "scp_command", "+=", "[", "'-i'", ",", "identity_file", ".", "path", "]", "if", "(", "direction", "is", "DOWNLOAD", ")", ":", "scp_command", "+=", "[", "remote_host_path", ",", "local_path", ".", "path", "]", "else", ":", "scp_command", "+=", "[", "local_path", ".", "path", ",", "remote_host_path", "]", "action", "=", "SCP_ACTION", "(", "username", "=", "username", ",", "host", "=", "host", ",", "remote_path", "=", "remote_path", ",", "local_path", "=", "local_path", ",", "port", "=", "port", ",", "identity_file", "=", "identity_file", ")", "failed_reason", "=", "[", "]", "def", "handle_stdout", "(", "line", ")", ":", "SCP_OUTPUT_MESSAGE", "(", "line", "=", "line", ")", ".", "write", "(", "action", "=", "action", ")", "def", "handle_stderr", "(", "line", ")", ":", "if", "(", "'No such file or directory'", "in", "line", ")", ":", "failed_reason", ".", "append", "(", "RemoteFileNotFound", "(", "remote_path", ")", ")", "if", "(", "'lost connection'", "in", "line", ")", ":", "failed_reason", ".", "append", "(", "SCPConnectionError", "(", ")", ")", "SCP_ERROR_MESSAGE", "(", "line", "=", "line", ")", ".", "write", "(", "action", "=", "action", ")", "def", "scp_failed", "(", "reason", ")", ":", "reason", ".", 
"trap", "(", "ProcessTerminated", ")", "if", "failed_reason", ":", "return", "Failure", "(", "failed_reason", "[", "(", "-", "1", ")", "]", ")", "return", "reason", "with", "action", ".", "context", "(", ")", ":", "context", "=", "DeferredContext", "(", "run", "(", "reactor", ",", "scp_command", ",", "handle_stdout", "=", "handle_stdout", ",", "handle_stderr", "=", "handle_stderr", ")", ")", "context", ".", "addErrback", "(", "scp_failed", ")", "return", "context", ".", "addActionFinish", "(", ")" ]
upload or download files via scp between the local host and a remote host , translating known scp failures into specific exceptions .
train
false
8,776
def is_scheduler_filter_enabled(filter_name):
    """Check whether a compute scheduler filter is enabled in config.

    An empty filter list disables everything; the literal entry ``'all'``
    enables every filter.
    """
    enabled_filters = CONF.compute_feature_enabled.scheduler_available_filters
    if not enabled_filters:
        return False
    return 'all' in enabled_filters or filter_name in enabled_filters
[ "def", "is_scheduler_filter_enabled", "(", "filter_name", ")", ":", "filters", "=", "CONF", ".", "compute_feature_enabled", ".", "scheduler_available_filters", "if", "(", "len", "(", "filters", ")", "==", "0", ")", ":", "return", "False", "if", "(", "'all'", "in", "filters", ")", ":", "return", "True", "if", "(", "filter_name", "in", "filters", ")", ":", "return", "True", "return", "False" ]
check the list of enabled compute scheduler filters from config .
train
false
8,777
def test_rgb_to_hsl_part_18():
    """Test rgb_to_hsl along the magenta (hue 300) lightness ramp."""
    cases = [
        ((51, 0, 51), (300, 100, 10)),
        ((102, 0, 102), (300, 100, 20)),
        ((153, 0, 153), (300, 100, 30)),
        ((204, 0, 204), (300, 100, 40)),
        ((255, 0, 255), (300, 100, 50)),
        ((255, 51, 255), (300, 100, 60)),
        ((255, 102, 255), (300, 100, 70)),
        ((255, 153, 255), (300, 100, 80)),
        ((255, 204, 255), (300, 100, 90)),
    ]
    for rgb, expected_hsl in cases:
        assert rgb_to_hsl(*rgb) == expected_hsl
[ "def", "test_rgb_to_hsl_part_18", "(", ")", ":", "assert", "(", "rgb_to_hsl", "(", "51", ",", "0", ",", "51", ")", "==", "(", "300", ",", "100", ",", "10", ")", ")", "assert", "(", "rgb_to_hsl", "(", "102", ",", "0", ",", "102", ")", "==", "(", "300", ",", "100", ",", "20", ")", ")", "assert", "(", "rgb_to_hsl", "(", "153", ",", "0", ",", "153", ")", "==", "(", "300", ",", "100", ",", "30", ")", ")", "assert", "(", "rgb_to_hsl", "(", "204", ",", "0", ",", "204", ")", "==", "(", "300", ",", "100", ",", "40", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "0", ",", "255", ")", "==", "(", "300", ",", "100", ",", "50", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "51", ",", "255", ")", "==", "(", "300", ",", "100", ",", "60", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "102", ",", "255", ")", "==", "(", "300", ",", "100", ",", "70", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "153", ",", "255", ")", "==", "(", "300", ",", "100", ",", "80", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "204", ",", "255", ")", "==", "(", "300", ",", "100", ",", "90", ")", ")" ]
test rgb to hsl color function .
train
false
8,778
def get_default_cache():
    """Return the directory used for extracting zipped eggs.

    Uses the PYTHON_EGG_CACHE environment variable when set; otherwise
    ``~/.python-eggs`` on POSIX, or a per-user application-data directory
    on Windows.  Raises RuntimeError on Windows when no suitable location
    can be derived from the environment.
    """
    cache = os.environ.get('PYTHON_EGG_CACHE')
    if cache is not None:
        return cache
    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')
    app_data = 'Application Data'
    # (env-var combination, optional subdir) pairs, in preference order.
    candidates = [
        (('APPDATA',), None),
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),
    ]
    for keys, subdir in candidates:
        # Only use a candidate when every env var it needs is present.
        if not all(key in os.environ for key in keys):
            continue
        dirname = ''
        for key in keys:
            dirname = os.path.join(dirname, os.environ[key])
        if subdir:
            dirname = os.path.join(dirname, subdir)
        return os.path.join(dirname, 'Python-Eggs')
    raise RuntimeError('Please set the PYTHON_EGG_CACHE enviroment variable')
[ "def", "get_default_cache", "(", ")", ":", "try", ":", "return", "os", ".", "environ", "[", "'PYTHON_EGG_CACHE'", "]", "except", "KeyError", ":", "pass", "if", "(", "os", ".", "name", "!=", "'nt'", ")", ":", "return", "os", ".", "path", ".", "expanduser", "(", "'~/.python-eggs'", ")", "app_data", "=", "'Application Data'", "app_homes", "=", "[", "(", "(", "'APPDATA'", ",", ")", ",", "None", ")", ",", "(", "(", "'USERPROFILE'", ",", ")", ",", "app_data", ")", ",", "(", "(", "'HOMEDRIVE'", ",", "'HOMEPATH'", ")", ",", "app_data", ")", ",", "(", "(", "'HOMEPATH'", ",", ")", ",", "app_data", ")", ",", "(", "(", "'HOME'", ",", ")", ",", "None", ")", ",", "(", "(", "'WINDIR'", ",", ")", ",", "app_data", ")", "]", "for", "(", "keys", ",", "subdir", ")", "in", "app_homes", ":", "dirname", "=", "''", "for", "key", "in", "keys", ":", "if", "(", "key", "in", "os", ".", "environ", ")", ":", "dirname", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "os", ".", "environ", "[", "key", "]", ")", "else", ":", "break", "else", ":", "if", "subdir", ":", "dirname", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "subdir", ")", "return", "os", ".", "path", ".", "join", "(", "dirname", ",", "'Python-Eggs'", ")", "else", ":", "raise", "RuntimeError", "(", "'Please set the PYTHON_EGG_CACHE enviroment variable'", ")" ]
return the python_egg_cache environment variable or a platform-relevant user cache dir for an app named "python-eggs" .
train
true
8,779
def CDLENGULFING(barDs, count):
    # Thin wrapper: evaluate TA-Lib's Engulfing candlestick pattern
    # indicator over the OHLC series in ``barDs``.
    return call_talib_with_ohlc(barDs, count, talib.CDLENGULFING)
[ "def", "CDLENGULFING", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLENGULFING", ")" ]
engulfing pattern .
train
false
8,781
def set_config_file(plotly_domain=None, plotly_streaming_domain=None, plotly_api_domain=None, plotly_ssl_verification=None, plotly_proxy_authorization=None, world_readable=None, sharing=None, auto_open=None):
    """Validate the supplied settings and write them to the config file.

    Each keyword left as ``None`` is ignored; supplied values are
    type-checked, merged into the existing config and saved to
    ``CONFIG_FILE``.

    Raises:
        exceptions.PlotlyError: if the config file is not writable.
        TypeError: if a supplied value has an unexpected type.
    """
    if not check_file_permissions():
        raise exceptions.PlotlyError("You don't have proper file permissions to run this function.")
    ensure_local_plotly_files()
    utils.validate_world_readable_and_sharing_settings({'sharing': sharing, 'world_readable': world_readable})
    settings = get_config_file()

    def _assign(key, value, types, error_message):
        # Set ``settings[key]`` when ``value`` has an accepted type; reject
        # any other non-None value.  None means "leave unchanged".
        if isinstance(value, types):
            settings[key] = value
        elif value is not None:
            raise TypeError(error_message)

    # Validation order (and the exact error strings) matches the original
    # public behavior.
    _assign('plotly_domain', plotly_domain, six.string_types, 'plotly_domain should be a string')
    _assign('plotly_streaming_domain', plotly_streaming_domain, six.string_types, 'plotly_streaming_domain should be a string')
    _assign('plotly_api_domain', plotly_api_domain, six.string_types, 'plotly_api_domain should be a string')
    # NOTE(review): these two accept strings as well as bools, but the
    # historical error message says "boolean" — preserved as-is.
    _assign('plotly_ssl_verification', plotly_ssl_verification, (six.string_types, bool), 'plotly_ssl_verification should be a boolean')
    _assign('plotly_proxy_authorization', plotly_proxy_authorization, (six.string_types, bool), 'plotly_proxy_authorization should be a boolean')
    _assign('auto_open', auto_open, bool, 'auto_open should be a boolean')
    if isinstance(world_readable, bool):
        settings['world_readable'] = world_readable
        # world_readable and sharing are mutually derived; drop the stored
        # sharing value so it is recomputed by the utils call below.
        settings.pop('sharing')
    elif world_readable is not None:
        raise TypeError('Input should be a boolean')
    _assign('sharing', sharing, six.string_types, 'sharing should be a string')
    utils.set_sharing_and_world_readable(settings)
    utils.save_json_dict(CONFIG_FILE, settings)
    ensure_local_plotly_files()
[ "def", "set_config_file", "(", "plotly_domain", "=", "None", ",", "plotly_streaming_domain", "=", "None", ",", "plotly_api_domain", "=", "None", ",", "plotly_ssl_verification", "=", "None", ",", "plotly_proxy_authorization", "=", "None", ",", "world_readable", "=", "None", ",", "sharing", "=", "None", ",", "auto_open", "=", "None", ")", ":", "if", "(", "not", "check_file_permissions", "(", ")", ")", ":", "raise", "exceptions", ".", "PlotlyError", "(", "\"You don't have proper file permissions to run this function.\"", ")", "ensure_local_plotly_files", "(", ")", "utils", ".", "validate_world_readable_and_sharing_settings", "(", "{", "'sharing'", ":", "sharing", ",", "'world_readable'", ":", "world_readable", "}", ")", "settings", "=", "get_config_file", "(", ")", "if", "isinstance", "(", "plotly_domain", ",", "six", ".", "string_types", ")", ":", "settings", "[", "'plotly_domain'", "]", "=", "plotly_domain", "elif", "(", "plotly_domain", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'plotly_domain should be a string'", ")", "if", "isinstance", "(", "plotly_streaming_domain", ",", "six", ".", "string_types", ")", ":", "settings", "[", "'plotly_streaming_domain'", "]", "=", "plotly_streaming_domain", "elif", "(", "plotly_streaming_domain", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'plotly_streaming_domain should be a string'", ")", "if", "isinstance", "(", "plotly_api_domain", ",", "six", ".", "string_types", ")", ":", "settings", "[", "'plotly_api_domain'", "]", "=", "plotly_api_domain", "elif", "(", "plotly_api_domain", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'plotly_api_domain should be a string'", ")", "if", "isinstance", "(", "plotly_ssl_verification", ",", "(", "six", ".", "string_types", ",", "bool", ")", ")", ":", "settings", "[", "'plotly_ssl_verification'", "]", "=", "plotly_ssl_verification", "elif", "(", "plotly_ssl_verification", "is", "not", "None", ")", ":", "raise", "TypeError", "(", 
"'plotly_ssl_verification should be a boolean'", ")", "if", "isinstance", "(", "plotly_proxy_authorization", ",", "(", "six", ".", "string_types", ",", "bool", ")", ")", ":", "settings", "[", "'plotly_proxy_authorization'", "]", "=", "plotly_proxy_authorization", "elif", "(", "plotly_proxy_authorization", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'plotly_proxy_authorization should be a boolean'", ")", "if", "isinstance", "(", "auto_open", ",", "bool", ")", ":", "settings", "[", "'auto_open'", "]", "=", "auto_open", "elif", "(", "auto_open", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'auto_open should be a boolean'", ")", "if", "isinstance", "(", "world_readable", ",", "bool", ")", ":", "settings", "[", "'world_readable'", "]", "=", "world_readable", "settings", ".", "pop", "(", "'sharing'", ")", "elif", "(", "world_readable", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'Input should be a boolean'", ")", "if", "isinstance", "(", "sharing", ",", "six", ".", "string_types", ")", ":", "settings", "[", "'sharing'", "]", "=", "sharing", "elif", "(", "sharing", "is", "not", "None", ")", ":", "raise", "TypeError", "(", "'sharing should be a string'", ")", "utils", ".", "set_sharing_and_world_readable", "(", "settings", ")", "utils", ".", "save_json_dict", "(", "CONFIG_FILE", ",", "settings", ")", "ensure_local_plotly_files", "(", ")" ]
sets the configurations name .
train
false
8,782
def make_commit(**attrs):
    """Make a Commit object with default members, overridden by **attrs."""
    default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
    defaults = {
        'author': 'Test Author <test@nodomain.com>',
        'author_time': default_time,
        'author_timezone': 0,
        'committer': 'Test Committer <test@nodomain.com>',
        'commit_time': default_time,
        'commit_timezone': 0,
        'message': 'Test message.',
        'parents': [],
        # Placeholder tree SHA (40 zeros).
        'tree': '0' * 40,
    }
    defaults.update(attrs)
    return make_object(Commit, **defaults)
[ "def", "make_commit", "(", "**", "attrs", ")", ":", "default_time", "=", "int", "(", "time", ".", "mktime", "(", "datetime", ".", "datetime", "(", "2010", ",", "1", ",", "1", ")", ".", "timetuple", "(", ")", ")", ")", "all_attrs", "=", "{", "'author'", ":", "'Test Author <test@nodomain.com>'", ",", "'author_time'", ":", "default_time", ",", "'author_timezone'", ":", "0", ",", "'committer'", ":", "'Test Committer <test@nodomain.com>'", ",", "'commit_time'", ":", "default_time", ",", "'commit_timezone'", ":", "0", ",", "'message'", ":", "'Test message.'", ",", "'parents'", ":", "[", "]", ",", "'tree'", ":", "(", "'0'", "*", "40", ")", "}", "all_attrs", ".", "update", "(", "attrs", ")", "return", "make_object", "(", "Commit", ",", "**", "all_attrs", ")" ]
make a commit object with a default set of members .
train
false
8,783
def ensure_sys_path_contains(paths):
    """Append to sys.path every entry of *paths* not already present.

    Nested lists/tuples are processed recursively; ``None`` entries are
    skipped.
    """
    for item in paths:
        if isinstance(item, (list, tuple)):
            ensure_sys_path_contains(item)
            continue
        if item is not None and item not in sys.path:
            sys.path.append(item)
[ "def", "ensure_sys_path_contains", "(", "paths", ")", ":", "for", "entry", "in", "paths", ":", "if", "isinstance", "(", "entry", ",", "(", "list", ",", "tuple", ")", ")", ":", "ensure_sys_path_contains", "(", "entry", ")", "elif", "(", "(", "entry", "is", "not", "None", ")", "and", "(", "entry", "not", "in", "sys", ".", "path", ")", ")", ":", "sys", ".", "path", ".", "append", "(", "entry", ")" ]
ensure that sys.path contains every given path entry , recursing into nested lists and tuples .
train
false
8,784
def test_git_require_remote_url():
    """Test require.git.working_copy() with only a remote URL.

    The repo should be cloned into ./fabtools, track origin, and have
    master checked out.
    """
    from fabtools.require.git import working_copy
    try:
        working_copy(REMOTE_URL)
        assert is_dir('fabtools')
        assert is_dir('fabtools/.git')
        # NOTE(review): indentation was lost in transit; the remote/branch
        # checks are assumed to run inside the cd('fabtools') context —
        # confirm against the upstream test.
        with cd('fabtools'):
            remotes = run('git remote -v')
            assert (remotes == 'origin DCTB https://github.com/disko/fabtools.git (fetch)\r\norigin DCTB https://github.com/disko/fabtools.git (push)')
            assert (_current_branch() == 'master')
    finally:
        # Always remove the clone, even when an assertion fails.
        run('rm -rf fabtools')
[ "def", "test_git_require_remote_url", "(", ")", ":", "from", "fabtools", ".", "require", ".", "git", "import", "working_copy", "try", ":", "working_copy", "(", "REMOTE_URL", ")", "assert", "is_dir", "(", "'fabtools'", ")", "assert", "is_dir", "(", "'fabtools/.git'", ")", "with", "cd", "(", "'fabtools'", ")", ":", "remotes", "=", "run", "(", "'git remote -v'", ")", "assert", "(", "remotes", "==", "'origin DCTB https://github.com/disko/fabtools.git (fetch)\\r\\norigin DCTB https://github.com/disko/fabtools.git (push)'", ")", "assert", "(", "_current_branch", "(", ")", "==", "'master'", ")", "finally", ":", "run", "(", "'rm -rf fabtools'", ")" ]
test with remote url only .
train
false
8,786
def _isGlobbingExpression(segments=None):
    """Return True when the last segment contains glob wildcards.

    A segment is considered a glob when fnmatch translates it into a
    different regex than a plain literal substituted into the same
    ``_testTranslation`` template.
    """
    if not segments:
        return False
    candidate = segments[-1]
    literal_translation = _testTranslation.replace('TEST', candidate, 1)
    return fnmatch.translate(candidate) != literal_translation
[ "def", "_isGlobbingExpression", "(", "segments", "=", "None", ")", ":", "if", "(", "not", "segments", ")", ":", "return", "False", "globCandidate", "=", "segments", "[", "(", "-", "1", ")", "]", "globTranslations", "=", "fnmatch", ".", "translate", "(", "globCandidate", ")", "nonGlobTranslations", "=", "_testTranslation", ".", "replace", "(", "'TEST'", ",", "globCandidate", ",", "1", ")", "if", "(", "nonGlobTranslations", "==", "globTranslations", ")", ":", "return", "False", "else", ":", "return", "True" ]
helper for checking if a ftpshell segments contains a wildcard unix expression .
train
false
8,787
def input_function():
    """Read one line of user input, normalized across Python 2 and 3.

    On Python 2 the bytes from raw_input() are decoded as UTF-8; on
    Python 3, input() already returns text.
    """
    import sys
    if sys.version_info[0] >= 3:
        return input()
    user_input = str(raw_input())
    if user_input:
        user_input = user_input.decode('utf-8')
    return user_input
[ "def", "input_function", "(", ")", ":", "import", "sys", "if", "(", "sys", ".", "version_info", "[", "0", "]", "<", "3", ")", ":", "user_input", "=", "str", "(", "raw_input", "(", ")", ")", "if", "user_input", ":", "user_input", "=", "user_input", ".", "decode", "(", "'utf-8'", ")", "else", ":", "user_input", "=", "input", "(", ")", "return", "user_input" ]
normalizes reading input between python 2 and 3 .
train
false
8,788
def isSyncFile(filename):
    """Return True if *filename* looks like a sync-client artifact.

    Matches extensions listed in the SYNC_FILES config setting, plus
    Syncthing temp files (names starting with ``.syncthing``).
    """
    extension = filename.rpartition(u'.')[2].lower()
    configured = sickrage.srCore.srConfig.SYNC_FILES.split(u',')
    return extension in configured or filename.startswith(u'.syncthing')
[ "def", "isSyncFile", "(", "filename", ")", ":", "extension", "=", "filename", ".", "rpartition", "(", "u'.'", ")", "[", "2", "]", ".", "lower", "(", ")", "syncfiles", "=", "sickrage", ".", "srCore", ".", "srConfig", ".", "SYNC_FILES", "if", "(", "(", "extension", "in", "syncfiles", ".", "split", "(", "u','", ")", ")", "or", "filename", ".", "startswith", "(", "u'.syncthing'", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
returns true if filename is a syncfile .
train
false
8,789
def str_to_num(text):
    """Convert *text* to int, then float, falling back to the string itself."""
    for converter in (int, float):
        try:
            return converter(text)
        except ValueError:
            continue
    return text
[ "def", "str_to_num", "(", "text", ")", ":", "try", ":", "return", "int", "(", "text", ")", "except", "ValueError", ":", "try", ":", "return", "float", "(", "text", ")", "except", "ValueError", ":", "return", "text" ]
convert a string to a number .
train
false
8,790
def create_next_subsite(pelican_obj):
    """Generate the next queued i18n subsite, or finalize when done.

    When the queue is empty, update cross-site links across generators and
    reset the saved main settings; otherwise pop one language's override
    set and run a fresh Pelican instance for it under a temporary locale.
    """
    global _MAIN_SETTINGS
    if _SUBSITE_QUEUE:
        with temporary_locale():
            settings = _MAIN_SETTINGS.copy()
            lang, overrides = _SUBSITE_QUEUE.popitem()
            settings.update(overrides)
            settings = configure_settings(settings)
            cls = get_pelican_cls(settings)
            subsite = cls(settings)
            _LOGGER.debug("Generating i18n subsite for language '{}' using class {}".format(lang, cls))
            subsite.run()
    else:
        _LOGGER.debug('i18n: Updating cross-site links and context of all generators.')
        update_generators()
        _MAIN_SETTINGS = None
[ "def", "create_next_subsite", "(", "pelican_obj", ")", ":", "global", "_MAIN_SETTINGS", "if", "(", "len", "(", "_SUBSITE_QUEUE", ")", "==", "0", ")", ":", "_LOGGER", ".", "debug", "(", "'i18n: Updating cross-site links and context of all generators.'", ")", "update_generators", "(", ")", "_MAIN_SETTINGS", "=", "None", "else", ":", "with", "temporary_locale", "(", ")", ":", "settings", "=", "_MAIN_SETTINGS", ".", "copy", "(", ")", "(", "lang", ",", "overrides", ")", "=", "_SUBSITE_QUEUE", ".", "popitem", "(", ")", "settings", ".", "update", "(", "overrides", ")", "settings", "=", "configure_settings", "(", "settings", ")", "cls", "=", "get_pelican_cls", "(", "settings", ")", "new_pelican_obj", "=", "cls", "(", "settings", ")", "_LOGGER", ".", "debug", "(", "\"Generating i18n subsite for language '{}' using class {}\"", ".", "format", "(", "lang", ",", "cls", ")", ")", "new_pelican_obj", ".", "run", "(", ")" ]
create the next subsite using the lang-specific config if there are no more subsites in the generation queue .
train
true
8,792
def make_get_default_gcs_bucket_name_call(rpc):
    """Start an async GetDefaultGcsBucketName call on the given UserRPC.

    The RPC's get_result() will return the app's default Google Cloud
    Storage bucket name as a string, or None when no default bucket is
    configured (empty name in the response).
    """
    request = app_identity_service_pb.GetDefaultGcsBucketNameRequest()
    response = app_identity_service_pb.GetDefaultGcsBucketNameResponse()
    if (rpc.deadline is not None):
        # NOTE(review): the deadline is copied onto the request proto;
        # presumably the backend reads it from there — confirm.
        request.set_deadline(rpc.deadline)

    def get_default_gcs_bucket_name_result(rpc):
        """Check success, translate errors, and return the bucket name.

        Waits for the RPC if it has not yet finished and runs post-call
        hooks on the first invocation.
        """
        assert (rpc.service == _APP_IDENTITY_SERVICE_NAME), repr(rpc.service)
        assert (rpc.method == _GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME), repr(rpc.method)
        try:
            rpc.check_success()
        except apiproxy_errors.ApplicationError as err:
            # Convert low-level apiproxy errors into app_identity errors.
            raise _to_app_identity_error(err)
        # An empty bucket name means no default bucket is configured.
        return (response.default_gcs_bucket_name() or None)

    rpc.make_call(_GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME, request, response, get_default_gcs_bucket_name_result)
[ "def", "make_get_default_gcs_bucket_name_call", "(", "rpc", ")", ":", "request", "=", "app_identity_service_pb", ".", "GetDefaultGcsBucketNameRequest", "(", ")", "response", "=", "app_identity_service_pb", ".", "GetDefaultGcsBucketNameResponse", "(", ")", "if", "(", "rpc", ".", "deadline", "is", "not", "None", ")", ":", "request", ".", "set_deadline", "(", "rpc", ".", "deadline", ")", "def", "get_default_gcs_bucket_name_result", "(", "rpc", ")", ":", "assert", "(", "rpc", ".", "service", "==", "_APP_IDENTITY_SERVICE_NAME", ")", ",", "repr", "(", "rpc", ".", "service", ")", "assert", "(", "rpc", ".", "method", "==", "_GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME", ")", ",", "repr", "(", "rpc", ".", "method", ")", "try", ":", "rpc", ".", "check_success", "(", ")", "except", "apiproxy_errors", ".", "ApplicationError", "as", "err", ":", "raise", "_to_app_identity_error", "(", "err", ")", "return", "(", "response", ".", "default_gcs_bucket_name", "(", ")", "or", "None", ")", "rpc", ".", "make_call", "(", "_GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME", ",", "request", ",", "response", ",", "get_default_gcs_bucket_name_result", ")" ]
get default google storage bucket name for the app .
train
false
8,793
def _time_sort_key(d): container_id = (d.get('container_id') or '') attempt_parts = (d.get('attempt_id') or d.get('task_id') or d.get('job_id') or d.get('application_id') or _to_job_id(container_id) or '').split('_') container_parts = container_id.split('_') timestamp_and_step = '_'.join(attempt_parts[1:3]) task_type = '_'.join(attempt_parts[3:4]) task_num = ('_'.join(attempt_parts[4:5]) or '_'.join(container_parts[(-1):])) attempt_num = ('_'.join(attempt_parts[5:6]) or '_'.join(container_parts[(-2):(-1)])) return (timestamp_and_step, container_id, task_type, attempt_num, task_num)
[ "def", "_time_sort_key", "(", "d", ")", ":", "container_id", "=", "(", "d", ".", "get", "(", "'container_id'", ")", "or", "''", ")", "attempt_parts", "=", "(", "d", ".", "get", "(", "'attempt_id'", ")", "or", "d", ".", "get", "(", "'task_id'", ")", "or", "d", ".", "get", "(", "'job_id'", ")", "or", "d", ".", "get", "(", "'application_id'", ")", "or", "_to_job_id", "(", "container_id", ")", "or", "''", ")", ".", "split", "(", "'_'", ")", "container_parts", "=", "container_id", ".", "split", "(", "'_'", ")", "timestamp_and_step", "=", "'_'", ".", "join", "(", "attempt_parts", "[", "1", ":", "3", "]", ")", "task_type", "=", "'_'", ".", "join", "(", "attempt_parts", "[", "3", ":", "4", "]", ")", "task_num", "=", "(", "'_'", ".", "join", "(", "attempt_parts", "[", "4", ":", "5", "]", ")", "or", "'_'", ".", "join", "(", "container_parts", "[", "(", "-", "1", ")", ":", "]", ")", ")", "attempt_num", "=", "(", "'_'", ".", "join", "(", "attempt_parts", "[", "5", ":", "6", "]", ")", "or", "'_'", ".", "join", "(", "container_parts", "[", "(", "-", "2", ")", ":", "(", "-", "1", ")", "]", ")", ")", "return", "(", "timestamp_and_step", ",", "container_id", ",", "task_type", ",", "attempt_num", ",", "task_num", ")" ]
sort key to sort the dictionaries containing ids roughly by time .
train
false
8,794
def __execute_ret(command, host=None, admin_username=None, admin_password=None, module=None): if module: if (module == 'ALL'): modswitch = '-a ' else: modswitch = '-m {0}'.format(module) else: modswitch = '' if (not host): cmd = __salt__['cmd.run_all']('racadm {0} {1}'.format(command, modswitch)) else: cmd = __salt__['cmd.run_all']('racadm -r {0} -u {1} -p {2} {3} {4}'.format(host, admin_username, admin_password, command, modswitch), output_loglevel='quiet') if (cmd['retcode'] != 0): log.warning("racadm return an exit code '{0}'.".format(cmd['retcode'])) else: fmtlines = [] for l in cmd['stdout'].splitlines(): if l.startswith('Security Alert'): continue if l.startswith('RAC1168:'): break if l.startswith('RAC1169:'): break if l.startswith('Continuing execution'): continue if (len(l.strip()) == 0): continue fmtlines.append(l) if ('=' in l): continue cmd['stdout'] = '\n'.join(fmtlines) return cmd
[ "def", "__execute_ret", "(", "command", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ",", "module", "=", "None", ")", ":", "if", "module", ":", "if", "(", "module", "==", "'ALL'", ")", ":", "modswitch", "=", "'-a '", "else", ":", "modswitch", "=", "'-m {0}'", ".", "format", "(", "module", ")", "else", ":", "modswitch", "=", "''", "if", "(", "not", "host", ")", ":", "cmd", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'racadm {0} {1}'", ".", "format", "(", "command", ",", "modswitch", ")", ")", "else", ":", "cmd", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'racadm -r {0} -u {1} -p {2} {3} {4}'", ".", "format", "(", "host", ",", "admin_username", ",", "admin_password", ",", "command", ",", "modswitch", ")", ",", "output_loglevel", "=", "'quiet'", ")", "if", "(", "cmd", "[", "'retcode'", "]", "!=", "0", ")", ":", "log", ".", "warning", "(", "\"racadm return an exit code '{0}'.\"", ".", "format", "(", "cmd", "[", "'retcode'", "]", ")", ")", "else", ":", "fmtlines", "=", "[", "]", "for", "l", "in", "cmd", "[", "'stdout'", "]", ".", "splitlines", "(", ")", ":", "if", "l", ".", "startswith", "(", "'Security Alert'", ")", ":", "continue", "if", "l", ".", "startswith", "(", "'RAC1168:'", ")", ":", "break", "if", "l", ".", "startswith", "(", "'RAC1169:'", ")", ":", "break", "if", "l", ".", "startswith", "(", "'Continuing execution'", ")", ":", "continue", "if", "(", "len", "(", "l", ".", "strip", "(", ")", ")", "==", "0", ")", ":", "continue", "fmtlines", ".", "append", "(", "l", ")", "if", "(", "'='", "in", "l", ")", ":", "continue", "cmd", "[", "'stdout'", "]", "=", "'\\n'", ".", "join", "(", "fmtlines", ")", "return", "cmd" ]
execute rac commands .
train
true
8,796
def generate_boto3_response(operation): def _boto3_request(method): @wraps(method) def f(self, *args, **kwargs): rendered = method(self, *args, **kwargs) if (u'json' in self.headers.get(u'Content-Type', [])): self.response_headers.update({u'x-amzn-requestid': u'2690d7eb-ed86-11dd-9877-6fad448a8419', u'date': datetime.now(pytz.utc).strftime(u'%a, %d %b %Y %H:%M:%S %Z'), u'content-type': u'application/x-amz-json-1.1'}) resp = xml_to_json_response(self.aws_service_spec, operation, rendered) return (u'' if (resp is None) else json.dumps(resp)) return rendered return f return _boto3_request
[ "def", "generate_boto3_response", "(", "operation", ")", ":", "def", "_boto3_request", "(", "method", ")", ":", "@", "wraps", "(", "method", ")", "def", "f", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "rendered", "=", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "u'json'", "in", "self", ".", "headers", ".", "get", "(", "u'Content-Type'", ",", "[", "]", ")", ")", ":", "self", ".", "response_headers", ".", "update", "(", "{", "u'x-amzn-requestid'", ":", "u'2690d7eb-ed86-11dd-9877-6fad448a8419'", ",", "u'date'", ":", "datetime", ".", "now", "(", "pytz", ".", "utc", ")", ".", "strftime", "(", "u'%a, %d %b %Y %H:%M:%S %Z'", ")", ",", "u'content-type'", ":", "u'application/x-amz-json-1.1'", "}", ")", "resp", "=", "xml_to_json_response", "(", "self", ".", "aws_service_spec", ",", "operation", ",", "rendered", ")", "return", "(", "u''", "if", "(", "resp", "is", "None", ")", "else", "json", ".", "dumps", "(", "resp", ")", ")", "return", "rendered", "return", "f", "return", "_boto3_request" ]
the decorator to convert an xml response to json .
train
false
8,797
def process_open_sockets(attrs=None, where=None): return _osquery_cmd(table='process_open_sockets', attrs=attrs, where=where)
[ "def", "process_open_sockets", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'process_open_sockets'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return process_open_sockets information from osquery cli example: .
train
false
8,799
def _prefixlenToNetmask(prefixlen, version): if (prefixlen == 0): return 0 elif (prefixlen < 0): raise ValueError('Prefixlen must be > 0') return (((2 << (prefixlen - 1)) - 1) << (_ipVersionToLen(version) - prefixlen))
[ "def", "_prefixlenToNetmask", "(", "prefixlen", ",", "version", ")", ":", "if", "(", "prefixlen", "==", "0", ")", ":", "return", "0", "elif", "(", "prefixlen", "<", "0", ")", ":", "raise", "ValueError", "(", "'Prefixlen must be > 0'", ")", "return", "(", "(", "(", "2", "<<", "(", "prefixlen", "-", "1", ")", ")", "-", "1", ")", "<<", "(", "_ipVersionToLen", "(", "version", ")", "-", "prefixlen", ")", ")" ]
return a mask of n bits as a long integer .
train
false
8,801
def check_composed_tensor_operations(first_function_name, first_function_args, second_function_name, second_function_args, input_shape): val = (np.random.random(input_shape) - 0.5) xth = KTH.variable(val) xtf = KTF.variable(val) yth = getattr(KTH, first_function_name)(xth, **first_function_args) ytf = getattr(KTF, first_function_name)(xtf, **first_function_args) zth = KTH.eval(getattr(KTH, second_function_name)(yth, **second_function_args)) ztf = KTF.eval(getattr(KTF, second_function_name)(ytf, **second_function_args)) assert (zth.shape == ztf.shape) assert_allclose(zth, ztf, atol=1e-05)
[ "def", "check_composed_tensor_operations", "(", "first_function_name", ",", "first_function_args", ",", "second_function_name", ",", "second_function_args", ",", "input_shape", ")", ":", "val", "=", "(", "np", ".", "random", ".", "random", "(", "input_shape", ")", "-", "0.5", ")", "xth", "=", "KTH", ".", "variable", "(", "val", ")", "xtf", "=", "KTF", ".", "variable", "(", "val", ")", "yth", "=", "getattr", "(", "KTH", ",", "first_function_name", ")", "(", "xth", ",", "**", "first_function_args", ")", "ytf", "=", "getattr", "(", "KTF", ",", "first_function_name", ")", "(", "xtf", ",", "**", "first_function_args", ")", "zth", "=", "KTH", ".", "eval", "(", "getattr", "(", "KTH", ",", "second_function_name", ")", "(", "yth", ",", "**", "second_function_args", ")", ")", "ztf", "=", "KTF", ".", "eval", "(", "getattr", "(", "KTF", ",", "second_function_name", ")", "(", "ytf", ",", "**", "second_function_args", ")", ")", "assert", "(", "zth", ".", "shape", "==", "ztf", ".", "shape", ")", "assert_allclose", "(", "zth", ",", "ztf", ",", "atol", "=", "1e-05", ")" ]
creates a random tensor t0 with shape input_shape and compute t1 = first_function_name t2 = second_function_name with both theano and tensorflow backends and ensures the answers match .
train
false
8,802
def _normalize_equivalencies(equivalencies): if (equivalencies is None): return [] normalized = [] for (i, equiv) in enumerate(equivalencies): if (len(equiv) == 2): (funit, tunit) = equiv a = b = (lambda x: x) elif (len(equiv) == 3): (funit, tunit, a) = equiv b = a elif (len(equiv) == 4): (funit, tunit, a, b) = equiv else: raise ValueError(u'Invalid equivalence entry {0}: {1!r}'.format(i, equiv)) if (not ((funit is Unit(funit)) and ((tunit is None) or (tunit is Unit(tunit))) and six.callable(a) and six.callable(b))): raise ValueError(u'Invalid equivalence entry {0}: {1!r}'.format(i, equiv)) normalized.append((funit, tunit, a, b)) return normalized
[ "def", "_normalize_equivalencies", "(", "equivalencies", ")", ":", "if", "(", "equivalencies", "is", "None", ")", ":", "return", "[", "]", "normalized", "=", "[", "]", "for", "(", "i", ",", "equiv", ")", "in", "enumerate", "(", "equivalencies", ")", ":", "if", "(", "len", "(", "equiv", ")", "==", "2", ")", ":", "(", "funit", ",", "tunit", ")", "=", "equiv", "a", "=", "b", "=", "(", "lambda", "x", ":", "x", ")", "elif", "(", "len", "(", "equiv", ")", "==", "3", ")", ":", "(", "funit", ",", "tunit", ",", "a", ")", "=", "equiv", "b", "=", "a", "elif", "(", "len", "(", "equiv", ")", "==", "4", ")", ":", "(", "funit", ",", "tunit", ",", "a", ",", "b", ")", "=", "equiv", "else", ":", "raise", "ValueError", "(", "u'Invalid equivalence entry {0}: {1!r}'", ".", "format", "(", "i", ",", "equiv", ")", ")", "if", "(", "not", "(", "(", "funit", "is", "Unit", "(", "funit", ")", ")", "and", "(", "(", "tunit", "is", "None", ")", "or", "(", "tunit", "is", "Unit", "(", "tunit", ")", ")", ")", "and", "six", ".", "callable", "(", "a", ")", "and", "six", ".", "callable", "(", "b", ")", ")", ")", ":", "raise", "ValueError", "(", "u'Invalid equivalence entry {0}: {1!r}'", ".", "format", "(", "i", ",", "equiv", ")", ")", "normalized", ".", "append", "(", "(", "funit", ",", "tunit", ",", "a", ",", "b", ")", ")", "return", "normalized" ]
normalizes equivalencies .
train
false
8,804
def nest_toc_tokens(toc_list): ordered_list = [] if len(toc_list): last = toc_list.pop(0) last[u'children'] = [] levels = [last[u'level']] ordered_list.append(last) parents = [] while toc_list: t = toc_list.pop(0) current_level = t[u'level'] t[u'children'] = [] if (current_level < levels[(-1)]): levels.pop() to_pop = 0 for p in reversed(parents): if (current_level <= p[u'level']): to_pop += 1 else: break if to_pop: levels = levels[:(- to_pop)] parents = parents[:(- to_pop)] levels.append(current_level) if (current_level == levels[(-1)]): (parents[(-1)][u'children'] if parents else ordered_list).append(t) else: last[u'children'].append(t) parents.append(last) levels.append(current_level) last = t return ordered_list
[ "def", "nest_toc_tokens", "(", "toc_list", ")", ":", "ordered_list", "=", "[", "]", "if", "len", "(", "toc_list", ")", ":", "last", "=", "toc_list", ".", "pop", "(", "0", ")", "last", "[", "u'children'", "]", "=", "[", "]", "levels", "=", "[", "last", "[", "u'level'", "]", "]", "ordered_list", ".", "append", "(", "last", ")", "parents", "=", "[", "]", "while", "toc_list", ":", "t", "=", "toc_list", ".", "pop", "(", "0", ")", "current_level", "=", "t", "[", "u'level'", "]", "t", "[", "u'children'", "]", "=", "[", "]", "if", "(", "current_level", "<", "levels", "[", "(", "-", "1", ")", "]", ")", ":", "levels", ".", "pop", "(", ")", "to_pop", "=", "0", "for", "p", "in", "reversed", "(", "parents", ")", ":", "if", "(", "current_level", "<=", "p", "[", "u'level'", "]", ")", ":", "to_pop", "+=", "1", "else", ":", "break", "if", "to_pop", ":", "levels", "=", "levels", "[", ":", "(", "-", "to_pop", ")", "]", "parents", "=", "parents", "[", ":", "(", "-", "to_pop", ")", "]", "levels", ".", "append", "(", "current_level", ")", "if", "(", "current_level", "==", "levels", "[", "(", "-", "1", ")", "]", ")", ":", "(", "parents", "[", "(", "-", "1", ")", "]", "[", "u'children'", "]", "if", "parents", "else", "ordered_list", ")", ".", "append", "(", "t", ")", "else", ":", "last", "[", "u'children'", "]", ".", "append", "(", "t", ")", "parents", ".", "append", "(", "last", ")", "levels", ".", "append", "(", "current_level", ")", "last", "=", "t", "return", "ordered_list" ]
given an unsorted list with errors and skips .
train
false
8,805
def getLogRecordFactory(): return _logRecordFactory
[ "def", "getLogRecordFactory", "(", ")", ":", "return", "_logRecordFactory" ]
return the factory to be used when instantiating a log record .
train
false
8,806
def _tree_to_fs_path(root_path, tree_path): assert isinstance(tree_path, bytes) if (os_sep_bytes != '/'): sep_corrected_path = tree_path.replace('/', os_sep_bytes) else: sep_corrected_path = tree_path return os.path.join(root_path, sep_corrected_path)
[ "def", "_tree_to_fs_path", "(", "root_path", ",", "tree_path", ")", ":", "assert", "isinstance", "(", "tree_path", ",", "bytes", ")", "if", "(", "os_sep_bytes", "!=", "'/'", ")", ":", "sep_corrected_path", "=", "tree_path", ".", "replace", "(", "'/'", ",", "os_sep_bytes", ")", "else", ":", "sep_corrected_path", "=", "tree_path", "return", "os", ".", "path", ".", "join", "(", "root_path", ",", "sep_corrected_path", ")" ]
convert a git tree path to a file system path .
train
false
8,807
def _get_key_api(): if ('keyapi' not in __context__): __context__['keyapi'] = salt.key.Key(__opts__) return __context__['keyapi']
[ "def", "_get_key_api", "(", ")", ":", "if", "(", "'keyapi'", "not", "in", "__context__", ")", ":", "__context__", "[", "'keyapi'", "]", "=", "salt", ".", "key", ".", "Key", "(", "__opts__", ")", "return", "__context__", "[", "'keyapi'", "]" ]
return the key api hook .
train
false
8,808
def _parse_till_unescaped_char(stream, chars): rv = '' while True: escaped = False for char in chars: if EscapeCharToken.starts_here(stream, char): rv += (next(stream) + next(stream)) escaped = True if (not escaped): char = next(stream) if (char in chars): break rv += char return (rv, char)
[ "def", "_parse_till_unescaped_char", "(", "stream", ",", "chars", ")", ":", "rv", "=", "''", "while", "True", ":", "escaped", "=", "False", "for", "char", "in", "chars", ":", "if", "EscapeCharToken", ".", "starts_here", "(", "stream", ",", "char", ")", ":", "rv", "+=", "(", "next", "(", "stream", ")", "+", "next", "(", "stream", ")", ")", "escaped", "=", "True", "if", "(", "not", "escaped", ")", ":", "char", "=", "next", "(", "stream", ")", "if", "(", "char", "in", "chars", ")", ":", "break", "rv", "+=", "char", "return", "(", "rv", ",", "char", ")" ]
returns all chars till a non-escaped char is found .
train
false
8,810
def _handle_abuse_flagged_field(form_value, user, cc_content): if form_value: cc_content.flagAbuse(user, cc_content) else: cc_content.unFlagAbuse(user, cc_content, removeAll=False)
[ "def", "_handle_abuse_flagged_field", "(", "form_value", ",", "user", ",", "cc_content", ")", ":", "if", "form_value", ":", "cc_content", ".", "flagAbuse", "(", "user", ",", "cc_content", ")", "else", ":", "cc_content", ".", "unFlagAbuse", "(", "user", ",", "cc_content", ",", "removeAll", "=", "False", ")" ]
mark or unmark thread/comment as abused .
train
false
8,811
def libvlc_media_list_player_stop(p_mlp): f = (_Cfunctions.get('libvlc_media_list_player_stop', None) or _Cfunction('libvlc_media_list_player_stop', ((1,),), None, None, MediaListPlayer)) return f(p_mlp)
[ "def", "libvlc_media_list_player_stop", "(", "p_mlp", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_stop'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_stop'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaListPlayer", ")", ")", "return", "f", "(", "p_mlp", ")" ]
stop playing media list .
train
false
8,812
def DeepDependencyTargets(target_dicts, roots): dependencies = set() pending = set(roots) while pending: r = pending.pop() if (r in dependencies): continue dependencies.add(r) spec = target_dicts[r] pending.update(set(spec.get('dependencies', []))) pending.update(set(spec.get('dependencies_original', []))) return list((dependencies - set(roots)))
[ "def", "DeepDependencyTargets", "(", "target_dicts", ",", "roots", ")", ":", "dependencies", "=", "set", "(", ")", "pending", "=", "set", "(", "roots", ")", "while", "pending", ":", "r", "=", "pending", ".", "pop", "(", ")", "if", "(", "r", "in", "dependencies", ")", ":", "continue", "dependencies", ".", "add", "(", "r", ")", "spec", "=", "target_dicts", "[", "r", "]", "pending", ".", "update", "(", "set", "(", "spec", ".", "get", "(", "'dependencies'", ",", "[", "]", ")", ")", ")", "pending", ".", "update", "(", "set", "(", "spec", ".", "get", "(", "'dependencies_original'", ",", "[", "]", ")", ")", ")", "return", "list", "(", "(", "dependencies", "-", "set", "(", "roots", ")", ")", ")" ]
returns the recursive list of target dependencies .
train
false
8,813
def search_datastore_spec(client_factory, file_name): search_spec = client_factory.create('ns0:HostDatastoreBrowserSearchSpec') search_spec.matchPattern = [file_name] search_spec.details = client_factory.create('ns0:FileQueryFlags') search_spec.details.fileOwner = False search_spec.details.fileSize = True search_spec.details.fileType = False search_spec.details.modification = False return search_spec
[ "def", "search_datastore_spec", "(", "client_factory", ",", "file_name", ")", ":", "search_spec", "=", "client_factory", ".", "create", "(", "'ns0:HostDatastoreBrowserSearchSpec'", ")", "search_spec", ".", "matchPattern", "=", "[", "file_name", "]", "search_spec", ".", "details", "=", "client_factory", ".", "create", "(", "'ns0:FileQueryFlags'", ")", "search_spec", ".", "details", ".", "fileOwner", "=", "False", "search_spec", ".", "details", ".", "fileSize", "=", "True", "search_spec", ".", "details", ".", "fileType", "=", "False", "search_spec", ".", "details", ".", "modification", "=", "False", "return", "search_spec" ]
builds the datastore search spec .
train
false
8,815
def _parse_settings_bond_0(opts, iface, bond_def): bond = {'mode': '0'} valid = ['list of ips (up to 16)'] if ('arp_ip_target' in opts): if isinstance(opts['arp_ip_target'], list): if (1 <= len(opts['arp_ip_target']) <= 16): bond.update({'arp_ip_target': ''}) for ip in opts['arp_ip_target']: if (len(bond['arp_ip_target']) > 0): bond['arp_ip_target'] = ((bond['arp_ip_target'] + ',') + ip) else: bond['arp_ip_target'] = ip else: _raise_error_iface(iface, 'arp_ip_target', valid) else: _raise_error_iface(iface, 'arp_ip_target', valid) else: _raise_error_iface(iface, 'arp_ip_target', valid) if ('arp_interval' in opts): try: int(opts['arp_interval']) bond.update({'arp_interval': opts['arp_interval']}) except Exception: _raise_error_iface(iface, 'arp_interval', ['integer']) else: _log_default_iface(iface, 'arp_interval', bond_def['arp_interval']) bond.update({'arp_interval': bond_def['arp_interval']}) return bond
[ "def", "_parse_settings_bond_0", "(", "opts", ",", "iface", ",", "bond_def", ")", ":", "bond", "=", "{", "'mode'", ":", "'0'", "}", "valid", "=", "[", "'list of ips (up to 16)'", "]", "if", "(", "'arp_ip_target'", "in", "opts", ")", ":", "if", "isinstance", "(", "opts", "[", "'arp_ip_target'", "]", ",", "list", ")", ":", "if", "(", "1", "<=", "len", "(", "opts", "[", "'arp_ip_target'", "]", ")", "<=", "16", ")", ":", "bond", ".", "update", "(", "{", "'arp_ip_target'", ":", "''", "}", ")", "for", "ip", "in", "opts", "[", "'arp_ip_target'", "]", ":", "if", "(", "len", "(", "bond", "[", "'arp_ip_target'", "]", ")", ">", "0", ")", ":", "bond", "[", "'arp_ip_target'", "]", "=", "(", "(", "bond", "[", "'arp_ip_target'", "]", "+", "','", ")", "+", "ip", ")", "else", ":", "bond", "[", "'arp_ip_target'", "]", "=", "ip", "else", ":", "_raise_error_iface", "(", "iface", ",", "'arp_ip_target'", ",", "valid", ")", "else", ":", "_raise_error_iface", "(", "iface", ",", "'arp_ip_target'", ",", "valid", ")", "else", ":", "_raise_error_iface", "(", "iface", ",", "'arp_ip_target'", ",", "valid", ")", "if", "(", "'arp_interval'", "in", "opts", ")", ":", "try", ":", "int", "(", "opts", "[", "'arp_interval'", "]", ")", "bond", ".", "update", "(", "{", "'arp_interval'", ":", "opts", "[", "'arp_interval'", "]", "}", ")", "except", "Exception", ":", "_raise_error_iface", "(", "iface", ",", "'arp_interval'", ",", "[", "'integer'", "]", ")", "else", ":", "_log_default_iface", "(", "iface", ",", "'arp_interval'", ",", "bond_def", "[", "'arp_interval'", "]", ")", "bond", ".", "update", "(", "{", "'arp_interval'", ":", "bond_def", "[", "'arp_interval'", "]", "}", ")", "return", "bond" ]
filters given options and outputs valid settings for bond0 .
train
false
8,816
def _sysv_is_disabled(name): return (not bool(glob.glob('/etc/rc{0}.d/S*{1}'.format(_runlevel(), name))))
[ "def", "_sysv_is_disabled", "(", "name", ")", ":", "return", "(", "not", "bool", "(", "glob", ".", "glob", "(", "'/etc/rc{0}.d/S*{1}'", ".", "format", "(", "_runlevel", "(", ")", ",", "name", ")", ")", ")", ")" ]
a system-v style service is assumed disabled if there is no start-up link to its script in /etc/init .
train
false
8,817
def NormjoinPath(base_path, rel_path): if (rel_path.startswith('$') and (not rel_path.startswith('${configuration}'))): return rel_path return os.path.normpath(os.path.join(base_path, rel_path))
[ "def", "NormjoinPath", "(", "base_path", ",", "rel_path", ")", ":", "if", "(", "rel_path", ".", "startswith", "(", "'$'", ")", "and", "(", "not", "rel_path", ".", "startswith", "(", "'${configuration}'", ")", ")", ")", ":", "return", "rel_path", "return", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "base_path", ",", "rel_path", ")", ")" ]
resolves rel_path against base_path and returns the result .
train
false
8,818
def to_django_query(query, model_cls=None): return Q.from_modm_query(query, model_cls=model_cls).to_django_query()
[ "def", "to_django_query", "(", "query", ",", "model_cls", "=", "None", ")", ":", "return", "Q", ".", "from_modm_query", "(", "query", ",", "model_cls", "=", "model_cls", ")", ".", "to_django_query", "(", ")" ]
translate a modular-odm q or querygroup to a django query .
train
false
8,819
def get_review(app, id): sa_session = app.model.context.current return sa_session.query(app.model.RepositoryReview).get(app.security.decode_id(id))
[ "def", "get_review", "(", "app", ",", "id", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "RepositoryReview", ")", ".", "get", "(", "app", ".", "security", ".", "decode_id", "(", "id", ")", ")" ]
get a repository_review from the database via id .
train
false
8,820
@runs_last def code_cleanup(): fprint('Cleaning up local code') local('rm -f hg_revision.txt viewfinder.*.tar.gz')
[ "@", "runs_last", "def", "code_cleanup", "(", ")", ":", "fprint", "(", "'Cleaning up local code'", ")", "local", "(", "'rm -f hg_revision.txt viewfinder.*.tar.gz'", ")" ]
delete the generated tarball and revision file .
train
false
8,821
def is_memcache_running(): return Env.BOK_CHOY_CACHE.set('test', 'test')
[ "def", "is_memcache_running", "(", ")", ":", "return", "Env", ".", "BOK_CHOY_CACHE", ".", "set", "(", "'test'", ",", "'test'", ")" ]
returns true if memcache is running .
train
false
8,822
def host_from_uri(uri): default_ports = {u'HTTP': u'80', u'HTTPS': u'443'} (sch, netloc, path, par, query, fra) = urlparse.urlparse(uri) if (u':' in netloc): (netloc, port) = netloc.split(u':', 1) else: port = default_ports.get(sch.upper()) return (netloc, port)
[ "def", "host_from_uri", "(", "uri", ")", ":", "default_ports", "=", "{", "u'HTTP'", ":", "u'80'", ",", "u'HTTPS'", ":", "u'443'", "}", "(", "sch", ",", "netloc", ",", "path", ",", "par", ",", "query", ",", "fra", ")", "=", "urlparse", ".", "urlparse", "(", "uri", ")", "if", "(", "u':'", "in", "netloc", ")", ":", "(", "netloc", ",", "port", ")", "=", "netloc", ".", "split", "(", "u':'", ",", "1", ")", "else", ":", "port", "=", "default_ports", ".", "get", "(", "sch", ".", "upper", "(", ")", ")", "return", "(", "netloc", ",", "port", ")" ]
extract hostname and port from uri .
train
false
8,823
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None): if (not cwd): cwd = os.getcwd() if (not env): env = common_environment() cmd = list(cmd) escaped_cmd = ' '.join((pipes.quote(c) for c in cmd)) display.info(('Run command: %s' % escaped_cmd), verbosity=1) display.info(('Working directory: %s' % cwd), verbosity=2) program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning') if program: display.info(('Program found: %s' % program), verbosity=2) for key in sorted(env.keys()): display.info(('%s=%s' % (key, env[key])), verbosity=2) if explain: return (None, None) communicate = False if (stdin is not None): data = None communicate = True elif (data is not None): stdin = subprocess.PIPE communicate = True if stdout: communicate = True if capture: stdout = (stdout or subprocess.PIPE) stderr = subprocess.PIPE communicate = True else: stderr = None start = time.time() try: process = subprocess.Popen(cmd, env=env, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd) except OSError as ex: if (ex.errno == errno.ENOENT): raise ApplicationError(('Required program "%s" not found.' % cmd[0])) raise if communicate: (stdout, stderr) = process.communicate(data) else: process.wait() (stdout, stderr) = (None, None) status = process.returncode runtime = (time.time() - start) display.info(('Command exited with status %s after %s seconds.' % (status, runtime)), verbosity=4) if (status == 0): return (stdout, stderr) raise SubprocessError(cmd, status, stdout, stderr, runtime)
[ "def", "raw_command", "(", "cmd", ",", "capture", "=", "False", ",", "env", "=", "None", ",", "data", "=", "None", ",", "cwd", "=", "None", ",", "explain", "=", "False", ",", "stdin", "=", "None", ",", "stdout", "=", "None", ")", ":", "if", "(", "not", "cwd", ")", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "if", "(", "not", "env", ")", ":", "env", "=", "common_environment", "(", ")", "cmd", "=", "list", "(", "cmd", ")", "escaped_cmd", "=", "' '", ".", "join", "(", "(", "pipes", ".", "quote", "(", "c", ")", "for", "c", "in", "cmd", ")", ")", "display", ".", "info", "(", "(", "'Run command: %s'", "%", "escaped_cmd", ")", ",", "verbosity", "=", "1", ")", "display", ".", "info", "(", "(", "'Working directory: %s'", "%", "cwd", ")", ",", "verbosity", "=", "2", ")", "program", "=", "find_executable", "(", "cmd", "[", "0", "]", ",", "cwd", "=", "cwd", ",", "path", "=", "env", "[", "'PATH'", "]", ",", "required", "=", "'warning'", ")", "if", "program", ":", "display", ".", "info", "(", "(", "'Program found: %s'", "%", "program", ")", ",", "verbosity", "=", "2", ")", "for", "key", "in", "sorted", "(", "env", ".", "keys", "(", ")", ")", ":", "display", ".", "info", "(", "(", "'%s=%s'", "%", "(", "key", ",", "env", "[", "key", "]", ")", ")", ",", "verbosity", "=", "2", ")", "if", "explain", ":", "return", "(", "None", ",", "None", ")", "communicate", "=", "False", "if", "(", "stdin", "is", "not", "None", ")", ":", "data", "=", "None", "communicate", "=", "True", "elif", "(", "data", "is", "not", "None", ")", ":", "stdin", "=", "subprocess", ".", "PIPE", "communicate", "=", "True", "if", "stdout", ":", "communicate", "=", "True", "if", "capture", ":", "stdout", "=", "(", "stdout", "or", "subprocess", ".", "PIPE", ")", "stderr", "=", "subprocess", ".", "PIPE", "communicate", "=", "True", "else", ":", "stderr", "=", "None", "start", "=", "time", ".", "time", "(", ")", "try", ":", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "env", "=", "env", ",", "stdin", 
"=", "stdin", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "cwd", "=", "cwd", ")", "except", "OSError", "as", "ex", ":", "if", "(", "ex", ".", "errno", "==", "errno", ".", "ENOENT", ")", ":", "raise", "ApplicationError", "(", "(", "'Required program \"%s\" not found.'", "%", "cmd", "[", "0", "]", ")", ")", "raise", "if", "communicate", ":", "(", "stdout", ",", "stderr", ")", "=", "process", ".", "communicate", "(", "data", ")", "else", ":", "process", ".", "wait", "(", ")", "(", "stdout", ",", "stderr", ")", "=", "(", "None", ",", "None", ")", "status", "=", "process", ".", "returncode", "runtime", "=", "(", "time", ".", "time", "(", ")", "-", "start", ")", "display", ".", "info", "(", "(", "'Command exited with status %s after %s seconds.'", "%", "(", "status", ",", "runtime", ")", ")", ",", "verbosity", "=", "4", ")", "if", "(", "status", "==", "0", ")", ":", "return", "(", "stdout", ",", "stderr", ")", "raise", "SubprocessError", "(", "cmd", ",", "status", ",", "stdout", ",", "stderr", ",", "runtime", ")" ]
send raw ipmi command this allows arbitrary ipmi bytes to be issued .
train
false
8,824
def missing_whitespace_after_import_keyword(logical_line): line = logical_line indicator = ' import(' if line.startswith('from '): found = line.find(indicator) if ((-1) < found): pos = ((found + len(indicator)) - 1) (yield (pos, 'E275 missing whitespace after keyword'))
[ "def", "missing_whitespace_after_import_keyword", "(", "logical_line", ")", ":", "line", "=", "logical_line", "indicator", "=", "' import('", "if", "line", ".", "startswith", "(", "'from '", ")", ":", "found", "=", "line", ".", "find", "(", "indicator", ")", "if", "(", "(", "-", "1", ")", "<", "found", ")", ":", "pos", "=", "(", "(", "found", "+", "len", "(", "indicator", ")", ")", "-", "1", ")", "(", "yield", "(", "pos", ",", "'E275 missing whitespace after keyword'", ")", ")" ]
multiple imports in form from x import should have space between import statement and parenthesised name list .
train
true
8,825
def diff_string(old, new): diff = abs((old - new)) diff_str = ('%s%s' % (CMPS[cmp(old, new)], ((diff and ('%.2f' % diff)) or ''))) return diff_str
[ "def", "diff_string", "(", "old", ",", "new", ")", ":", "diff", "=", "abs", "(", "(", "old", "-", "new", ")", ")", "diff_str", "=", "(", "'%s%s'", "%", "(", "CMPS", "[", "cmp", "(", "old", ",", "new", ")", "]", ",", "(", "(", "diff", "and", "(", "'%.2f'", "%", "diff", ")", ")", "or", "''", ")", ")", ")", "return", "diff_str" ]
given a old and new int value .
train
false
8,826
def bitsToString(arr): s = array('c', ('.' * len(arr))) for i in xrange(len(arr)): if (arr[i] == 1): s[i] = '*' return s
[ "def", "bitsToString", "(", "arr", ")", ":", "s", "=", "array", "(", "'c'", ",", "(", "'.'", "*", "len", "(", "arr", ")", ")", ")", "for", "i", "in", "xrange", "(", "len", "(", "arr", ")", ")", ":", "if", "(", "arr", "[", "i", "]", "==", "1", ")", ":", "s", "[", "i", "]", "=", "'*'", "return", "s" ]
returns a string representing a numpy array of 0s and 1s .
train
true
8,827
def absolute_path(path): return os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', path))
[ "def", "absolute_path", "(", "path", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ",", "'..'", ",", "path", ")", ")" ]
returns the absolute path for a path specified as relative to the tests/ directory .
train
false
8,829
def is_possible_short_number(numobj): region_codes = region_codes_for_country_code(numobj.country_code) short_number = national_significant_number(numobj) for region in region_codes: metadata = PhoneMetadata.short_metadata_for_region(region) if (metadata is None): continue if _is_number_possible_for_desc(short_number, metadata.general_desc): return True return False
[ "def", "is_possible_short_number", "(", "numobj", ")", ":", "region_codes", "=", "region_codes_for_country_code", "(", "numobj", ".", "country_code", ")", "short_number", "=", "national_significant_number", "(", "numobj", ")", "for", "region", "in", "region_codes", ":", "metadata", "=", "PhoneMetadata", ".", "short_metadata_for_region", "(", "region", ")", "if", "(", "metadata", "is", "None", ")", ":", "continue", "if", "_is_number_possible_for_desc", "(", "short_number", ",", "metadata", ".", "general_desc", ")", ":", "return", "True", "return", "False" ]
check whether a short number is a possible number .
train
false
8,830
def _apply_service(service, service_func, *service_func_args): entity_ids = service.data.get('entity_id') if entity_ids: _devices = [device for device in DEVICES if (device.entity_id in entity_ids)] else: _devices = DEVICES for device in _devices: service_func(device, *service_func_args) device.update_ha_state(True)
[ "def", "_apply_service", "(", "service", ",", "service_func", ",", "*", "service_func_args", ")", ":", "entity_ids", "=", "service", ".", "data", ".", "get", "(", "'entity_id'", ")", "if", "entity_ids", ":", "_devices", "=", "[", "device", "for", "device", "in", "DEVICES", "if", "(", "device", ".", "entity_id", "in", "entity_ids", ")", "]", "else", ":", "_devices", "=", "DEVICES", "for", "device", "in", "_devices", ":", "service_func", "(", "device", ",", "*", "service_func_args", ")", "device", ".", "update_ha_state", "(", "True", ")" ]
internal func for applying a service .
train
false
8,831
@domain_constructor(loss_target=(-2)) def n_arms(N=2): rng = np.random.RandomState(123) x = hp.choice('x', [0, 1]) reward_mus = as_apply(([(-1)] + ([0] * (N - 1)))) reward_sigmas = as_apply(([1] * N)) return {'loss': scope.normal(reward_mus[x], reward_sigmas[x], rng=rng), 'loss_variance': 1.0, 'status': base.STATUS_OK}
[ "@", "domain_constructor", "(", "loss_target", "=", "(", "-", "2", ")", ")", "def", "n_arms", "(", "N", "=", "2", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "123", ")", "x", "=", "hp", ".", "choice", "(", "'x'", ",", "[", "0", ",", "1", "]", ")", "reward_mus", "=", "as_apply", "(", "(", "[", "(", "-", "1", ")", "]", "+", "(", "[", "0", "]", "*", "(", "N", "-", "1", ")", ")", ")", ")", "reward_sigmas", "=", "as_apply", "(", "(", "[", "1", "]", "*", "N", ")", ")", "return", "{", "'loss'", ":", "scope", ".", "normal", "(", "reward_mus", "[", "x", "]", ",", "reward_sigmas", "[", "x", "]", ",", "rng", "=", "rng", ")", ",", "'loss_variance'", ":", "1.0", ",", "'status'", ":", "base", ".", "STATUS_OK", "}" ]
each arm yields a reward from a different gaussian .
train
false
8,833
def organize_imports(): with RopeContext() as ctx: organizer = ImportOrganizer(ctx.project) changes = organizer.organize_imports(ctx.resource) if (changes is not None): progress = ProgressHandler('Organize imports') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes)
[ "def", "organize_imports", "(", ")", ":", "with", "RopeContext", "(", ")", "as", "ctx", ":", "organizer", "=", "ImportOrganizer", "(", "ctx", ".", "project", ")", "changes", "=", "organizer", ".", "organize_imports", "(", "ctx", ".", "resource", ")", "if", "(", "changes", "is", "not", "None", ")", ":", "progress", "=", "ProgressHandler", "(", "'Organize imports'", ")", "ctx", ".", "project", ".", "do", "(", "changes", ",", "task_handle", "=", "progress", ".", "handle", ")", "reload_changes", "(", "changes", ")" ]
organize imports in current file .
train
false
8,834
def helper_not_here(): html = u'<!DOCTYPE html>\n <html>\n <head>\n <title>Hello from Flask</title>\n </head>\n <body>Hello World, {{ h.nohere() }} no helper here</body>\n </html>' return render_template_string(html)
[ "def", "helper_not_here", "(", ")", ":", "html", "=", "u'<!DOCTYPE html>\\n <html>\\n <head>\\n <title>Hello from Flask</title>\\n </head>\\n <body>Hello World, {{ h.nohere() }} no helper here</body>\\n </html>'", "return", "render_template_string", "(", "html", ")" ]
a simple template with a helper that doesnt exist .
train
false
8,837
@get('/admin/<taskid>/flush') def task_flush(taskid): for key in list(DataStore.tasks): if (is_admin(taskid) or (DataStore.tasks[key].remote_addr == request.remote_addr)): DataStore.tasks[key].engine_kill() del DataStore.tasks[key] logger.debug(('[%s] Flushed task pool (%s)' % (taskid, ('admin' if is_admin(taskid) else request.remote_addr)))) return jsonize({'success': True})
[ "@", "get", "(", "'/admin/<taskid>/flush'", ")", "def", "task_flush", "(", "taskid", ")", ":", "for", "key", "in", "list", "(", "DataStore", ".", "tasks", ")", ":", "if", "(", "is_admin", "(", "taskid", ")", "or", "(", "DataStore", ".", "tasks", "[", "key", "]", ".", "remote_addr", "==", "request", ".", "remote_addr", ")", ")", ":", "DataStore", ".", "tasks", "[", "key", "]", ".", "engine_kill", "(", ")", "del", "DataStore", ".", "tasks", "[", "key", "]", "logger", ".", "debug", "(", "(", "'[%s] Flushed task pool (%s)'", "%", "(", "taskid", ",", "(", "'admin'", "if", "is_admin", "(", "taskid", ")", "else", "request", ".", "remote_addr", ")", ")", ")", ")", "return", "jsonize", "(", "{", "'success'", ":", "True", "}", ")" ]
flush task spool .
train
false
8,839
def unfinished_word(line): try: for (wordstart, word) in _quotesplit(line): pass except QuoteError: firstchar = line[wordstart] if (firstchar in [q, qq]): return (firstchar, word) else: return (None, word) else: return (None, '')
[ "def", "unfinished_word", "(", "line", ")", ":", "try", ":", "for", "(", "wordstart", ",", "word", ")", "in", "_quotesplit", "(", "line", ")", ":", "pass", "except", "QuoteError", ":", "firstchar", "=", "line", "[", "wordstart", "]", "if", "(", "firstchar", "in", "[", "q", ",", "qq", "]", ")", ":", "return", "(", "firstchar", ",", "word", ")", "else", ":", "return", "(", "None", ",", "word", ")", "else", ":", "return", "(", "None", ",", "''", ")" ]
returns the quotechar .
train
false
8,840
def is_memoryview_format(fmt): x = len(fmt) return (((x == 1) or ((x == 2) and (fmt[0] == '@'))) and (fmt[(x - 1)] in MEMORYVIEW))
[ "def", "is_memoryview_format", "(", "fmt", ")", ":", "x", "=", "len", "(", "fmt", ")", "return", "(", "(", "(", "x", "==", "1", ")", "or", "(", "(", "x", "==", "2", ")", "and", "(", "fmt", "[", "0", "]", "==", "'@'", ")", ")", ")", "and", "(", "fmt", "[", "(", "x", "-", "1", ")", "]", "in", "MEMORYVIEW", ")", ")" ]
format suitable for memoryview .
train
false
8,842
@RegisterWithArgChecks(name='evpn_prefix.add_local', req_args=[EVPN_ROUTE_TYPE, ROUTE_DISTINGUISHER, NEXT_HOP], opt_args=[EVPN_ESI, EVPN_ETHERNET_TAG_ID, REDUNDANCY_MODE, MAC_ADDR, IP_ADDR, IP_PREFIX, GW_IP_ADDR, EVPN_VNI, TUNNEL_TYPE, PMSI_TUNNEL_TYPE]) def add_evpn_local(route_type, route_dist, next_hop, **kwargs): if ((route_type in [EVPN_ETH_AUTO_DISCOVERY, EVPN_ETH_SEGMENT]) and (kwargs['esi'] == 0)): raise ConfigValueError(conf_name=EVPN_ESI, conf_value=kwargs['esi']) try: tm = CORE_MANAGER.get_core_service().table_manager label = tm.update_vrf_table(route_dist, next_hop=next_hop, route_family=VRF_RF_L2_EVPN, route_type=route_type, **kwargs) if label: label = label[0] return [{EVPN_ROUTE_TYPE: route_type, ROUTE_DISTINGUISHER: route_dist, VRF_RF: VRF_RF_L2_EVPN, VPN_LABEL: label}.update(kwargs)] except BgpCoreError as e: raise PrefixError(desc=e)
[ "@", "RegisterWithArgChecks", "(", "name", "=", "'evpn_prefix.add_local'", ",", "req_args", "=", "[", "EVPN_ROUTE_TYPE", ",", "ROUTE_DISTINGUISHER", ",", "NEXT_HOP", "]", ",", "opt_args", "=", "[", "EVPN_ESI", ",", "EVPN_ETHERNET_TAG_ID", ",", "REDUNDANCY_MODE", ",", "MAC_ADDR", ",", "IP_ADDR", ",", "IP_PREFIX", ",", "GW_IP_ADDR", ",", "EVPN_VNI", ",", "TUNNEL_TYPE", ",", "PMSI_TUNNEL_TYPE", "]", ")", "def", "add_evpn_local", "(", "route_type", ",", "route_dist", ",", "next_hop", ",", "**", "kwargs", ")", ":", "if", "(", "(", "route_type", "in", "[", "EVPN_ETH_AUTO_DISCOVERY", ",", "EVPN_ETH_SEGMENT", "]", ")", "and", "(", "kwargs", "[", "'esi'", "]", "==", "0", ")", ")", ":", "raise", "ConfigValueError", "(", "conf_name", "=", "EVPN_ESI", ",", "conf_value", "=", "kwargs", "[", "'esi'", "]", ")", "try", ":", "tm", "=", "CORE_MANAGER", ".", "get_core_service", "(", ")", ".", "table_manager", "label", "=", "tm", ".", "update_vrf_table", "(", "route_dist", ",", "next_hop", "=", "next_hop", ",", "route_family", "=", "VRF_RF_L2_EVPN", ",", "route_type", "=", "route_type", ",", "**", "kwargs", ")", "if", "label", ":", "label", "=", "label", "[", "0", "]", "return", "[", "{", "EVPN_ROUTE_TYPE", ":", "route_type", ",", "ROUTE_DISTINGUISHER", ":", "route_dist", ",", "VRF_RF", ":", "VRF_RF_L2_EVPN", ",", "VPN_LABEL", ":", "label", "}", ".", "update", "(", "kwargs", ")", "]", "except", "BgpCoreError", "as", "e", ":", "raise", "PrefixError", "(", "desc", "=", "e", ")" ]
adds evpn route from vrf identified by *route_dist* .
train
false
8,843
@utils.arg('size', metavar='<size>', type=int, help='Size of monitor in GB') @utils.arg('--snapshot-id', metavar='<snapshot-id>', default=None, help='Create monitor from snapshot id (Optional, Default=None)') @utils.arg('--snapshot_id', help=argparse.SUPPRESS) @utils.arg('--source-volid', metavar='<source-volid>', default=None, help='Create monitor from monitor id (Optional, Default=None)') @utils.arg('--source_volid', help=argparse.SUPPRESS) @utils.arg('--image-id', metavar='<image-id>', default=None, help='Create monitor from image id (Optional, Default=None)') @utils.arg('--image_id', help=argparse.SUPPRESS) @utils.arg('--display-name', metavar='<display-name>', default=None, help='ServiceManage name (Optional, Default=None)') @utils.arg('--display_name', help=argparse.SUPPRESS) @utils.arg('--display-description', metavar='<display-description>', default=None, help='ServiceManage description (Optional, Default=None)') @utils.arg('--display_description', help=argparse.SUPPRESS) @utils.arg('--monitor-type', metavar='<monitor-type>', default=None, help='ServiceManage type (Optional, Default=None)') @utils.arg('--monitor_type', help=argparse.SUPPRESS) @utils.arg('--availability-zone', metavar='<availability-zone>', default=None, help='Availability zone for monitor (Optional, Default=None)') @utils.arg('--availability_zone', help=argparse.SUPPRESS) @utils.arg('--metadata', type=str, nargs='*', metavar='<key=value>', help='Metadata key=value pairs (Optional, Default=None)', default=None) @utils.service_type('monitor') def do_create(cs, args): monitor_metadata = None if (args.metadata is not None): monitor_metadata = _extract_metadata(args) monitor = cs.monitors.create(args.size, args.snapshot_id, args.source_volid, args.display_name, args.display_description, args.monitor_type, availability_zone=args.availability_zone, imageRef=args.image_id, metadata=monitor_metadata) _print_monitor(monitor)
[ "@", "utils", ".", "arg", "(", "'size'", ",", "metavar", "=", "'<size>'", ",", "type", "=", "int", ",", "help", "=", "'Size of monitor in GB'", ")", "@", "utils", ".", "arg", "(", "'--snapshot-id'", ",", "metavar", "=", "'<snapshot-id>'", ",", "default", "=", "None", ",", "help", "=", "'Create monitor from snapshot id (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--snapshot_id'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--source-volid'", ",", "metavar", "=", "'<source-volid>'", ",", "default", "=", "None", ",", "help", "=", "'Create monitor from monitor id (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--source_volid'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--image-id'", ",", "metavar", "=", "'<image-id>'", ",", "default", "=", "None", ",", "help", "=", "'Create monitor from image id (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--image_id'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--display-name'", ",", "metavar", "=", "'<display-name>'", ",", "default", "=", "None", ",", "help", "=", "'ServiceManage name (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--display_name'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--display-description'", ",", "metavar", "=", "'<display-description>'", ",", "default", "=", "None", ",", "help", "=", "'ServiceManage description (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--display_description'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--monitor-type'", ",", "metavar", "=", "'<monitor-type>'", ",", "default", "=", "None", ",", "help", "=", "'ServiceManage type (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--monitor_type'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", 
"arg", "(", "'--availability-zone'", ",", "metavar", "=", "'<availability-zone>'", ",", "default", "=", "None", ",", "help", "=", "'Availability zone for monitor (Optional, Default=None)'", ")", "@", "utils", ".", "arg", "(", "'--availability_zone'", ",", "help", "=", "argparse", ".", "SUPPRESS", ")", "@", "utils", ".", "arg", "(", "'--metadata'", ",", "type", "=", "str", ",", "nargs", "=", "'*'", ",", "metavar", "=", "'<key=value>'", ",", "help", "=", "'Metadata key=value pairs (Optional, Default=None)'", ",", "default", "=", "None", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_create", "(", "cs", ",", "args", ")", ":", "monitor_metadata", "=", "None", "if", "(", "args", ".", "metadata", "is", "not", "None", ")", ":", "monitor_metadata", "=", "_extract_metadata", "(", "args", ")", "monitor", "=", "cs", ".", "monitors", ".", "create", "(", "args", ".", "size", ",", "args", ".", "snapshot_id", ",", "args", ".", "source_volid", ",", "args", ".", "display_name", ",", "args", ".", "display_description", ",", "args", ".", "monitor_type", ",", "availability_zone", "=", "args", ".", "availability_zone", ",", "imageRef", "=", "args", ".", "image_id", ",", "metadata", "=", "monitor_metadata", ")", "_print_monitor", "(", "monitor", ")" ]
create a installer script for each variant in bootstrap_dict .
train
false
8,844
def reformat_comment(data, limit, comment_header): lc = len(comment_header) data = '\n'.join((line[lc:] for line in data.split('\n'))) format_width = max((limit - len(comment_header)), 20) newdata = reformat_paragraph(data, format_width) newdata = newdata.split('\n') block_suffix = '' if (not newdata[(-1)]): block_suffix = '\n' newdata = newdata[:(-1)] return ('\n'.join(((comment_header + line) for line in newdata)) + block_suffix)
[ "def", "reformat_comment", "(", "data", ",", "limit", ",", "comment_header", ")", ":", "lc", "=", "len", "(", "comment_header", ")", "data", "=", "'\\n'", ".", "join", "(", "(", "line", "[", "lc", ":", "]", "for", "line", "in", "data", ".", "split", "(", "'\\n'", ")", ")", ")", "format_width", "=", "max", "(", "(", "limit", "-", "len", "(", "comment_header", ")", ")", ",", "20", ")", "newdata", "=", "reformat_paragraph", "(", "data", ",", "format_width", ")", "newdata", "=", "newdata", ".", "split", "(", "'\\n'", ")", "block_suffix", "=", "''", "if", "(", "not", "newdata", "[", "(", "-", "1", ")", "]", ")", ":", "block_suffix", "=", "'\\n'", "newdata", "=", "newdata", "[", ":", "(", "-", "1", ")", "]", "return", "(", "'\\n'", ".", "join", "(", "(", "(", "comment_header", "+", "line", ")", "for", "line", "in", "newdata", ")", ")", "+", "block_suffix", ")" ]
return data reformatted to specified width with comment header .
train
false
8,846
def append_plugin_urls(): global urlpatterns for plugin in settings.CABOT_PLUGINS_ENABLED_PARSED: try: _module = import_module(('%s.urls' % plugin)) except Exception as e: pass else: urlpatterns += patterns('', url(('^plugins/%s/' % plugin), include(('%s.urls' % plugin))))
[ "def", "append_plugin_urls", "(", ")", ":", "global", "urlpatterns", "for", "plugin", "in", "settings", ".", "CABOT_PLUGINS_ENABLED_PARSED", ":", "try", ":", "_module", "=", "import_module", "(", "(", "'%s.urls'", "%", "plugin", ")", ")", "except", "Exception", "as", "e", ":", "pass", "else", ":", "urlpatterns", "+=", "patterns", "(", "''", ",", "url", "(", "(", "'^plugins/%s/'", "%", "plugin", ")", ",", "include", "(", "(", "'%s.urls'", "%", "plugin", ")", ")", ")", ")" ]
appends plugin specific urls to the urlpatterns variable .
train
false
8,847
def del_token(token): token_path = os.path.join(__opts__['token_dir'], token) if os.path.exists(token_path): return (os.remove(token_path) is None) return False
[ "def", "del_token", "(", "token", ")", ":", "token_path", "=", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'token_dir'", "]", ",", "token", ")", "if", "os", ".", "path", ".", "exists", "(", "token_path", ")", ":", "return", "(", "os", ".", "remove", "(", "token_path", ")", "is", "None", ")", "return", "False" ]
delete an eauth token by name cli example: .
train
true
8,848
@contextmanager def compat_assert_produces_warning(w): if compat.PY3: (yield) else: with tm.assert_produces_warning(expected_warning=w, check_stacklevel=False): (yield)
[ "@", "contextmanager", "def", "compat_assert_produces_warning", "(", "w", ")", ":", "if", "compat", ".", "PY3", ":", "(", "yield", ")", "else", ":", "with", "tm", ".", "assert_produces_warning", "(", "expected_warning", "=", "w", ",", "check_stacklevel", "=", "False", ")", ":", "(", "yield", ")" ]
dont produce a warning under py3 .
train
false
8,849
def LOG_FILE(x): context.log_file = x
[ "def", "LOG_FILE", "(", "x", ")", ":", "context", ".", "log_file", "=", "x" ]
sets a log file to be used via context .
train
false
8,850
def test_callability_checking(): gotoutput = pretty.pretty(Dummy2()) expectedoutput = 'Dummy1(...)' nt.assert_equal(gotoutput, expectedoutput)
[ "def", "test_callability_checking", "(", ")", ":", "gotoutput", "=", "pretty", ".", "pretty", "(", "Dummy2", "(", ")", ")", "expectedoutput", "=", "'Dummy1(...)'", "nt", ".", "assert_equal", "(", "gotoutput", ",", "expectedoutput", ")" ]
test that the _repr_pretty_ method is tested for callability and skipped if not .
train
false
8,851
def log_post_trace(trace, model): return np.vstack(([obs.logp_elemwise(pt) for obs in model.observed_RVs] for pt in trace))
[ "def", "log_post_trace", "(", "trace", ",", "model", ")", ":", "return", "np", ".", "vstack", "(", "(", "[", "obs", ".", "logp_elemwise", "(", "pt", ")", "for", "obs", "in", "model", ".", "observed_RVs", "]", "for", "pt", "in", "trace", ")", ")" ]
calculate the elementwise log-posterior for the sampled trace .
train
false
8,853
def list_intersection(list1, list2): return [item for item in list1 if (item in list2)]
[ "def", "list_intersection", "(", "list1", ",", "list2", ")", ":", "return", "[", "item", "for", "item", "in", "list1", "if", "(", "item", "in", "list2", ")", "]" ]
take the not-in-place intersection of two lists .
train
false
8,855
def translate_js_with_compilation_plan(js, HEADER=DEFAULT_HEADER): (match_increaser_str, match_increaser_num, compilation_plan) = get_compilation_plan(js) cp_hash = hashlib.md5(compilation_plan.encode('utf-8')).digest() try: python_code = cache[cp_hash]['proto_python_code'] except: parser = pyjsparser.PyJsParser() parsed = parser.parse(compilation_plan) translating_nodes.clean_stacks() python_code = translating_nodes.trans(parsed) cache[cp_hash] = {'compilation_plan': compilation_plan, 'proto_python_code': python_code} python_code = match_increaser_str.wrap_up(python_code) python_code = match_increaser_num.wrap_up(python_code) return (HEADER + python_code)
[ "def", "translate_js_with_compilation_plan", "(", "js", ",", "HEADER", "=", "DEFAULT_HEADER", ")", ":", "(", "match_increaser_str", ",", "match_increaser_num", ",", "compilation_plan", ")", "=", "get_compilation_plan", "(", "js", ")", "cp_hash", "=", "hashlib", ".", "md5", "(", "compilation_plan", ".", "encode", "(", "'utf-8'", ")", ")", ".", "digest", "(", ")", "try", ":", "python_code", "=", "cache", "[", "cp_hash", "]", "[", "'proto_python_code'", "]", "except", ":", "parser", "=", "pyjsparser", ".", "PyJsParser", "(", ")", "parsed", "=", "parser", ".", "parse", "(", "compilation_plan", ")", "translating_nodes", ".", "clean_stacks", "(", ")", "python_code", "=", "translating_nodes", ".", "trans", "(", "parsed", ")", "cache", "[", "cp_hash", "]", "=", "{", "'compilation_plan'", ":", "compilation_plan", ",", "'proto_python_code'", ":", "python_code", "}", "python_code", "=", "match_increaser_str", ".", "wrap_up", "(", "python_code", ")", "python_code", "=", "match_increaser_num", ".", "wrap_up", "(", "python_code", ")", "return", "(", "HEADER", "+", "python_code", ")" ]
js has to be a javascript source code .
train
true
8,856
def fetch_necessary_packages(dest_dir, install_dir): names_to_check = sys.argv[1:] errors = [] fetched_packages = [] for package_class in external_packages.ExternalPackage.subclasses: package = package_class() if (names_to_check and (package.name.lower() not in names_to_check)): continue if (not package.is_needed(install_dir)): logging.info('A new %s is not needed on this system.', package.name) if INSTALL_ALL: logging.info('Installing anyways...') else: continue if (not package.fetch(dest_dir)): msg = ('Unable to download %s' % package.name) logging.error(msg) errors.append(msg) else: fetched_packages.append(package) return (fetched_packages, errors)
[ "def", "fetch_necessary_packages", "(", "dest_dir", ",", "install_dir", ")", ":", "names_to_check", "=", "sys", ".", "argv", "[", "1", ":", "]", "errors", "=", "[", "]", "fetched_packages", "=", "[", "]", "for", "package_class", "in", "external_packages", ".", "ExternalPackage", ".", "subclasses", ":", "package", "=", "package_class", "(", ")", "if", "(", "names_to_check", "and", "(", "package", ".", "name", ".", "lower", "(", ")", "not", "in", "names_to_check", ")", ")", ":", "continue", "if", "(", "not", "package", ".", "is_needed", "(", "install_dir", ")", ")", ":", "logging", ".", "info", "(", "'A new %s is not needed on this system.'", ",", "package", ".", "name", ")", "if", "INSTALL_ALL", ":", "logging", ".", "info", "(", "'Installing anyways...'", ")", "else", ":", "continue", "if", "(", "not", "package", ".", "fetch", "(", "dest_dir", ")", ")", ":", "msg", "=", "(", "'Unable to download %s'", "%", "package", ".", "name", ")", "logging", ".", "error", "(", "msg", ")", "errors", ".", "append", "(", "msg", ")", "else", ":", "fetched_packages", ".", "append", "(", "package", ")", "return", "(", "fetched_packages", ",", "errors", ")" ]
fetches all externalpackages into dest_dir .
train
false
8,857
def _generate_paginate_query(context, session, marker, limit, sort_keys, sort_dirs, filters, offset=None, paginate_type=models.Volume): (get_query, process_filters, get) = PAGINATION_HELPERS[paginate_type] (sort_keys, sort_dirs) = process_sort_params(sort_keys, sort_dirs, default_dir='desc') query = get_query(context, session=session) if filters: query = process_filters(query, filters) if (query is None): return None marker_object = None if (marker is not None): marker_object = get(context, marker, session) return sqlalchemyutils.paginate_query(query, paginate_type, limit, sort_keys, marker=marker_object, sort_dirs=sort_dirs, offset=offset)
[ "def", "_generate_paginate_query", "(", "context", ",", "session", ",", "marker", ",", "limit", ",", "sort_keys", ",", "sort_dirs", ",", "filters", ",", "offset", "=", "None", ",", "paginate_type", "=", "models", ".", "Volume", ")", ":", "(", "get_query", ",", "process_filters", ",", "get", ")", "=", "PAGINATION_HELPERS", "[", "paginate_type", "]", "(", "sort_keys", ",", "sort_dirs", ")", "=", "process_sort_params", "(", "sort_keys", ",", "sort_dirs", ",", "default_dir", "=", "'desc'", ")", "query", "=", "get_query", "(", "context", ",", "session", "=", "session", ")", "if", "filters", ":", "query", "=", "process_filters", "(", "query", ",", "filters", ")", "if", "(", "query", "is", "None", ")", ":", "return", "None", "marker_object", "=", "None", "if", "(", "marker", "is", "not", "None", ")", ":", "marker_object", "=", "get", "(", "context", ",", "marker", ",", "session", ")", "return", "sqlalchemyutils", ".", "paginate_query", "(", "query", ",", "paginate_type", ",", "limit", ",", "sort_keys", ",", "marker", "=", "marker_object", ",", "sort_dirs", "=", "sort_dirs", ",", "offset", "=", "offset", ")" ]
generate the query to include the filters and the paginate options .
train
false
8,858
def test_height_ratios(): with pytest.raises(ValueError): gridspec.GridSpec(1, 1, height_ratios=[2, 1, 3])
[ "def", "test_height_ratios", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "gridspec", ".", "GridSpec", "(", "1", ",", "1", ",", "height_ratios", "=", "[", "2", ",", "1", ",", "3", "]", ")" ]
addresses issue #5835 .
train
false
8,859
def build_url(self, *args, **kwargs): return github3.session.GitHubSession().build_url(*args, **kwargs)
[ "def", "build_url", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "github3", ".", "session", ".", "GitHubSession", "(", ")", ".", "build_url", "(", "*", "args", ",", "**", "kwargs", ")" ]
build a url from the label .
train
false
8,860
def getLayerThickness(elementNode): if (elementNode == None): return 0.4 preferences = skeinforge_craft.getCraftPreferences('carve') return getCascadeFloatWithoutSelf(skeinforge_craft.getCraftValue('Layer Thickness', preferences), elementNode, 'layerThickness')
[ "def", "getLayerThickness", "(", "elementNode", ")", ":", "if", "(", "elementNode", "==", "None", ")", ":", "return", "0.4", "preferences", "=", "skeinforge_craft", ".", "getCraftPreferences", "(", "'carve'", ")", "return", "getCascadeFloatWithoutSelf", "(", "skeinforge_craft", ".", "getCraftValue", "(", "'Layer Thickness'", ",", "preferences", ")", ",", "elementNode", ",", "'layerThickness'", ")" ]
get the layer thickness .
train
false
8,861
def get_last_header_before_line(context, filediff, interfilediff, target_line): f = get_file_from_filediff(context, filediff, interfilediff) return _get_last_header_in_chunks_before_line(f[u'chunks'], target_line)
[ "def", "get_last_header_before_line", "(", "context", ",", "filediff", ",", "interfilediff", ",", "target_line", ")", ":", "f", "=", "get_file_from_filediff", "(", "context", ",", "filediff", ",", "interfilediff", ")", "return", "_get_last_header_in_chunks_before_line", "(", "f", "[", "u'chunks'", "]", ",", "target_line", ")" ]
get the last header that occurs before the given line .
train
false
8,862
def _get_obj_attrs_map(obj, attrs): out = {} for attr in attrs: val = getattr(obj, attr, None) if (val is not None): if six.PY2: attr = str(attr) if isinstance(val, six.text_type): val = str(val) out[attr] = val return out
[ "def", "_get_obj_attrs_map", "(", "obj", ",", "attrs", ")", ":", "out", "=", "{", "}", "for", "attr", "in", "attrs", ":", "val", "=", "getattr", "(", "obj", ",", "attr", ",", "None", ")", "if", "(", "val", "is", "not", "None", ")", ":", "if", "six", ".", "PY2", ":", "attr", "=", "str", "(", "attr", ")", "if", "isinstance", "(", "val", ",", "six", ".", "text_type", ")", ":", "val", "=", "str", "(", "val", ")", "out", "[", "attr", "]", "=", "val", "return", "out" ]
get the values for object attrs and return as a dict .
train
false
8,863
@shared_constructor def tensor_constructor(value, name=None, strict=False, allow_downcast=None, borrow=False, broadcastable=None, target='cpu'): if (target != 'cpu'): raise TypeError('not for cpu') if (not isinstance(value, numpy.ndarray)): raise TypeError() if (broadcastable is None): broadcastable = ((False,) * len(value.shape)) type = TensorType(value.dtype, broadcastable=broadcastable) return TensorSharedVariable(type=type, value=numpy.array(value, copy=(not borrow)), name=name, strict=strict, allow_downcast=allow_downcast)
[ "@", "shared_constructor", "def", "tensor_constructor", "(", "value", ",", "name", "=", "None", ",", "strict", "=", "False", ",", "allow_downcast", "=", "None", ",", "borrow", "=", "False", ",", "broadcastable", "=", "None", ",", "target", "=", "'cpu'", ")", ":", "if", "(", "target", "!=", "'cpu'", ")", ":", "raise", "TypeError", "(", "'not for cpu'", ")", "if", "(", "not", "isinstance", "(", "value", ",", "numpy", ".", "ndarray", ")", ")", ":", "raise", "TypeError", "(", ")", "if", "(", "broadcastable", "is", "None", ")", ":", "broadcastable", "=", "(", "(", "False", ",", ")", "*", "len", "(", "value", ".", "shape", ")", ")", "type", "=", "TensorType", "(", "value", ".", "dtype", ",", "broadcastable", "=", "broadcastable", ")", "return", "TensorSharedVariable", "(", "type", "=", "type", ",", "value", "=", "numpy", ".", "array", "(", "value", ",", "copy", "=", "(", "not", "borrow", ")", ")", ",", "name", "=", "name", ",", "strict", "=", "strict", ",", "allow_downcast", "=", "allow_downcast", ")" ]
sharedvariable constructor for tensortype .
train
false
8,864
def getCraftPluginsDirectoryPath(subName=''): return getJoinedPath(getSkeinforgePluginsPath('craft_plugins'), subName)
[ "def", "getCraftPluginsDirectoryPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getSkeinforgePluginsPath", "(", "'craft_plugins'", ")", ",", "subName", ")" ]
get the craft plugins directory path .
train
false
8,865
@hug.get() def made_up_api(hug_my_directive=True): return hug_my_directive
[ "@", "hug", ".", "get", "(", ")", "def", "made_up_api", "(", "hug_my_directive", "=", "True", ")", ":", "return", "hug_my_directive" ]
for testing .
train
false
8,866
def python_to_workflow(as_python, galaxy_interface, workflow_directory): if (workflow_directory is None): workflow_directory = os.path.abspath('.') conversion_context = ConversionContext(galaxy_interface, workflow_directory) return _python_to_workflow(as_python, conversion_context)
[ "def", "python_to_workflow", "(", "as_python", ",", "galaxy_interface", ",", "workflow_directory", ")", ":", "if", "(", "workflow_directory", "is", "None", ")", ":", "workflow_directory", "=", "os", ".", "path", ".", "abspath", "(", "'.'", ")", "conversion_context", "=", "ConversionContext", "(", "galaxy_interface", ",", "workflow_directory", ")", "return", "_python_to_workflow", "(", "as_python", ",", "conversion_context", ")" ]
convert a format 2 workflow into standard galaxy format from supplied dictionary .
train
false
8,867
def MakeNormalPmf(mu, sigma, num_sigmas, n=201): pmf = Pmf() low = (mu - (num_sigmas * sigma)) high = (mu + (num_sigmas * sigma)) for x in np.linspace(low, high, n): p = EvalNormalPdf(x, mu, sigma) pmf.Set(x, p) pmf.Normalize() return pmf
[ "def", "MakeNormalPmf", "(", "mu", ",", "sigma", ",", "num_sigmas", ",", "n", "=", "201", ")", ":", "pmf", "=", "Pmf", "(", ")", "low", "=", "(", "mu", "-", "(", "num_sigmas", "*", "sigma", ")", ")", "high", "=", "(", "mu", "+", "(", "num_sigmas", "*", "sigma", ")", ")", "for", "x", "in", "np", ".", "linspace", "(", "low", ",", "high", ",", "n", ")", ":", "p", "=", "EvalNormalPdf", "(", "x", ",", "mu", ",", "sigma", ")", "pmf", ".", "Set", "(", "x", ",", "p", ")", "pmf", ".", "Normalize", "(", ")", "return", "pmf" ]
makes a pmf discrete approx to a normal distribution .
train
false
8,868
def _set_radio_button(idx, params): radio = params['fig_selection'].radio radio.circles[radio._active_idx].set_facecolor((1.0, 1.0, 1.0, 1.0)) radio.circles[idx].set_facecolor((0.0, 0.0, 1.0, 1.0)) _radio_clicked(radio.labels[idx]._text, params)
[ "def", "_set_radio_button", "(", "idx", ",", "params", ")", ":", "radio", "=", "params", "[", "'fig_selection'", "]", ".", "radio", "radio", ".", "circles", "[", "radio", ".", "_active_idx", "]", ".", "set_facecolor", "(", "(", "1.0", ",", "1.0", ",", "1.0", ",", "1.0", ")", ")", "radio", ".", "circles", "[", "idx", "]", ".", "set_facecolor", "(", "(", "0.0", ",", "0.0", ",", "1.0", ",", "1.0", ")", ")", "_radio_clicked", "(", "radio", ".", "labels", "[", "idx", "]", ".", "_text", ",", "params", ")" ]
helper for setting radio button .
train
false
8,869
def stopped(): if (not is_stopped()): stop('shorewall')
[ "def", "stopped", "(", ")", ":", "if", "(", "not", "is_stopped", "(", ")", ")", ":", "stop", "(", "'shorewall'", ")" ]
stops a vm by shutting it down nicely .
train
false
8,870
def _postprocess(config_string): flags = (re.IGNORECASE | re.MULTILINE) result = re.sub(u'^\\[__COMMENTS__\\](\\n|$)', u'', config_string, flags=flags) result = re.sub(u'\\n__INLINE\\d+__ =(.*)$', u' ;\\g<1>', result, flags=flags) result = re.sub(u'^__HASH\\d+__ =(.*)$', u'#\\g<1>', result, flags=flags) result = re.sub(u'^__SEMICOLON\\d+__ =(.*)$', u';\\g<1>', result, flags=flags) result = re.sub(u'\\n__SECTION\\d+__ =(.*)$', u'\\g<1>', result, flags=flags) result = re.sub(u'^__BLANK\\d+__ =$', u'', result, flags=flags) return result
[ "def", "_postprocess", "(", "config_string", ")", ":", "flags", "=", "(", "re", ".", "IGNORECASE", "|", "re", ".", "MULTILINE", ")", "result", "=", "re", ".", "sub", "(", "u'^\\\\[__COMMENTS__\\\\](\\\\n|$)'", ",", "u''", ",", "config_string", ",", "flags", "=", "flags", ")", "result", "=", "re", ".", "sub", "(", "u'\\\\n__INLINE\\\\d+__ =(.*)$'", ",", "u' ;\\\\g<1>'", ",", "result", ",", "flags", "=", "flags", ")", "result", "=", "re", ".", "sub", "(", "u'^__HASH\\\\d+__ =(.*)$'", ",", "u'#\\\\g<1>'", ",", "result", ",", "flags", "=", "flags", ")", "result", "=", "re", ".", "sub", "(", "u'^__SEMICOLON\\\\d+__ =(.*)$'", ",", "u';\\\\g<1>'", ",", "result", ",", "flags", "=", "flags", ")", "result", "=", "re", ".", "sub", "(", "u'\\\\n__SECTION\\\\d+__ =(.*)$'", ",", "u'\\\\g<1>'", ",", "result", ",", "flags", "=", "flags", ")", "result", "=", "re", ".", "sub", "(", "u'^__BLANK\\\\d+__ =$'", ",", "u''", ",", "result", ",", "flags", "=", "flags", ")", "return", "result" ]
converts a preprocessed config back to original form .
train
false
8,871
def onRequestAccountLogin(loginName, password, datas): INFO_MSG(('onRequestAccountLogin: registerName=%s' % loginName)) commitName = loginName realAccountName = commitName KBEngine.accountLoginResponse(commitName, realAccountName, datas, KBEngine.SERVER_ERR_LOCAL_PROCESSING)
[ "def", "onRequestAccountLogin", "(", "loginName", ",", "password", ",", "datas", ")", ":", "INFO_MSG", "(", "(", "'onRequestAccountLogin: registerName=%s'", "%", "loginName", ")", ")", "commitName", "=", "loginName", "realAccountName", "=", "commitName", "KBEngine", ".", "accountLoginResponse", "(", "commitName", ",", "realAccountName", ",", "datas", ",", "KBEngine", ".", "SERVER_ERR_LOCAL_PROCESSING", ")" ]
kbengine method .
train
false
8,872
def identifierScheme(identifier): if (identifier.startswith('xri://') or (identifier and (identifier[0] in XRI_AUTHORITIES))): return 'XRI' else: return 'URI'
[ "def", "identifierScheme", "(", "identifier", ")", ":", "if", "(", "identifier", ".", "startswith", "(", "'xri://'", ")", "or", "(", "identifier", "and", "(", "identifier", "[", "0", "]", "in", "XRI_AUTHORITIES", ")", ")", ")", ":", "return", "'XRI'", "else", ":", "return", "'URI'" ]
determine if this identifier is an xri or uri .
train
false
8,875
def ajax_content_response(request, course_key, content): user_info = cc.User.from_django_user(request.user).to_dict() annotated_content_info = get_annotated_content_info(course_key, content, request.user, user_info) return JsonResponse({'content': prepare_content(content, course_key), 'annotated_content_info': annotated_content_info})
[ "def", "ajax_content_response", "(", "request", ",", "course_key", ",", "content", ")", ":", "user_info", "=", "cc", ".", "User", ".", "from_django_user", "(", "request", ".", "user", ")", ".", "to_dict", "(", ")", "annotated_content_info", "=", "get_annotated_content_info", "(", "course_key", ",", "content", ",", "request", ".", "user", ",", "user_info", ")", "return", "JsonResponse", "(", "{", "'content'", ":", "prepare_content", "(", "content", ",", "course_key", ")", ",", "'annotated_content_info'", ":", "annotated_content_info", "}", ")" ]
standard ajax response returning the content hierarchy of the current thread .
train
false
8,876
def moebius_kantor_graph(create_using=None): G = LCF_graph(16, [5, (-5)], 8, create_using) G.name = 'Moebius-Kantor Graph' return G
[ "def", "moebius_kantor_graph", "(", "create_using", "=", "None", ")", ":", "G", "=", "LCF_graph", "(", "16", ",", "[", "5", ",", "(", "-", "5", ")", "]", ",", "8", ",", "create_using", ")", "G", ".", "name", "=", "'Moebius-Kantor Graph'", "return", "G" ]
return the moebius-kantor graph .
train
false
8,877
def fleiss_kappa(table): table = (1.0 * np.asarray(table)) (n_sub, n_cat) = table.shape n_total = table.sum() n_rater = table.sum(1) n_rat = n_rater.max() assert (n_total == (n_sub * n_rat)) p_cat = (table.sum(0) / n_total) table2 = (table * table) p_rat = ((table2.sum(1) - n_rat) / (n_rat * (n_rat - 1.0))) p_mean = p_rat.mean() p_mean_exp = (p_cat * p_cat).sum() kappa = ((p_mean - p_mean_exp) / (1 - p_mean_exp)) return kappa
[ "def", "fleiss_kappa", "(", "table", ")", ":", "table", "=", "(", "1.0", "*", "np", ".", "asarray", "(", "table", ")", ")", "(", "n_sub", ",", "n_cat", ")", "=", "table", ".", "shape", "n_total", "=", "table", ".", "sum", "(", ")", "n_rater", "=", "table", ".", "sum", "(", "1", ")", "n_rat", "=", "n_rater", ".", "max", "(", ")", "assert", "(", "n_total", "==", "(", "n_sub", "*", "n_rat", ")", ")", "p_cat", "=", "(", "table", ".", "sum", "(", "0", ")", "/", "n_total", ")", "table2", "=", "(", "table", "*", "table", ")", "p_rat", "=", "(", "(", "table2", ".", "sum", "(", "1", ")", "-", "n_rat", ")", "/", "(", "n_rat", "*", "(", "n_rat", "-", "1.0", ")", ")", ")", "p_mean", "=", "p_rat", ".", "mean", "(", ")", "p_mean_exp", "=", "(", "p_cat", "*", "p_cat", ")", ".", "sum", "(", ")", "kappa", "=", "(", "(", "p_mean", "-", "p_mean_exp", ")", "/", "(", "1", "-", "p_mean_exp", ")", ")", "return", "kappa" ]
returns the reliability of agreement as a number between -1 .
train
false
8,878
def updateFeature(font, name, value): featureRE = compileFeatureRE(name) if featureRE.search(font.features.text): font.features.text = featureRE.sub(value, font.features.text) else: font.features.text += ('\n' + value)
[ "def", "updateFeature", "(", "font", ",", "name", ",", "value", ")", ":", "featureRE", "=", "compileFeatureRE", "(", "name", ")", "if", "featureRE", ".", "search", "(", "font", ".", "features", ".", "text", ")", ":", "font", ".", "features", ".", "text", "=", "featureRE", ".", "sub", "(", "value", ",", "font", ".", "features", ".", "text", ")", "else", ":", "font", ".", "features", ".", "text", "+=", "(", "'\\n'", "+", "value", ")" ]
add a feature definition .
train
false
8,879
def do_nothing(): pass
[ "def", "do_nothing", "(", ")", ":", "pass" ]
dont do anything .
train
false
8,880
def get_tos_and_honor_code_url(): return get_url('TOS_AND_HONOR')
[ "def", "get_tos_and_honor_code_url", "(", ")", ":", "return", "get_url", "(", "'TOS_AND_HONOR'", ")" ]
lookup and return terms of services page url .
train
false
8,882
@box(types.NamedTuple) @box(types.NamedUniTuple) def box_namedtuple(typ, val, c): cls_obj = c.pyapi.unserialize(c.pyapi.serialize_object(typ.instance_class)) tuple_obj = box_tuple(typ, val, c) obj = c.pyapi.call(cls_obj, tuple_obj) c.pyapi.decref(cls_obj) c.pyapi.decref(tuple_obj) return obj
[ "@", "box", "(", "types", ".", "NamedTuple", ")", "@", "box", "(", "types", ".", "NamedUniTuple", ")", "def", "box_namedtuple", "(", "typ", ",", "val", ",", "c", ")", ":", "cls_obj", "=", "c", ".", "pyapi", ".", "unserialize", "(", "c", ".", "pyapi", ".", "serialize_object", "(", "typ", ".", "instance_class", ")", ")", "tuple_obj", "=", "box_tuple", "(", "typ", ",", "val", ",", "c", ")", "obj", "=", "c", ".", "pyapi", ".", "call", "(", "cls_obj", ",", "tuple_obj", ")", "c", ".", "pyapi", ".", "decref", "(", "cls_obj", ")", "c", ".", "pyapi", ".", "decref", "(", "tuple_obj", ")", "return", "obj" ]
convert native array or structure *val* to a namedtuple object .
train
false
8,883
def proto_library_config(append=None, **kwargs): path = kwargs.get('protobuf_include_path') if path: console.warning(('%s: proto_library_config: protobuf_include_path has been renamed to protobuf_incs, and become a list' % blade_config.current_file_name)) del kwargs['protobuf_include_path'] if (isinstance(path, basestring) and (' ' in path)): kwargs['protobuf_incs'] = path.split() else: kwargs['protobuf_incs'] = [path] blade_config.update_config('proto_library_config', append, kwargs)
[ "def", "proto_library_config", "(", "append", "=", "None", ",", "**", "kwargs", ")", ":", "path", "=", "kwargs", ".", "get", "(", "'protobuf_include_path'", ")", "if", "path", ":", "console", ".", "warning", "(", "(", "'%s: proto_library_config: protobuf_include_path has been renamed to protobuf_incs, and become a list'", "%", "blade_config", ".", "current_file_name", ")", ")", "del", "kwargs", "[", "'protobuf_include_path'", "]", "if", "(", "isinstance", "(", "path", ",", "basestring", ")", "and", "(", "' '", "in", "path", ")", ")", ":", "kwargs", "[", "'protobuf_incs'", "]", "=", "path", ".", "split", "(", ")", "else", ":", "kwargs", "[", "'protobuf_incs'", "]", "=", "[", "path", "]", "blade_config", ".", "update_config", "(", "'proto_library_config'", ",", "append", ",", "kwargs", ")" ]
protoc config .
train
false
8,884
@frappe.whitelist() def get_events(start, end, filters=None): from frappe.desk.calendar import get_event_conditions conditions = get_event_conditions(u'Sales Order', filters) data = frappe.db.sql(u"select name, customer_name, delivery_status, billing_status, delivery_date\n DCTB DCTB from `tabSales Order`\n DCTB DCTB where (ifnull(delivery_date, '0000-00-00')!= '0000-00-00') DCTB DCTB DCTB DCTB and (delivery_date between %(start)s and %(end)s)\n DCTB DCTB DCTB DCTB and docstatus < 2\n DCTB DCTB DCTB DCTB {conditions}\n DCTB DCTB ".format(conditions=conditions), {u'start': start, u'end': end}, as_dict=True, update={u'allDay': 0}) return data
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_events", "(", "start", ",", "end", ",", "filters", "=", "None", ")", ":", "from", "frappe", ".", "desk", ".", "calendar", "import", "get_event_conditions", "conditions", "=", "get_event_conditions", "(", "u'Sales Order'", ",", "filters", ")", "data", "=", "frappe", ".", "db", ".", "sql", "(", "u\"select name, customer_name, delivery_status, billing_status, delivery_date\\n DCTB DCTB from `tabSales Order`\\n DCTB DCTB where (ifnull(delivery_date, '0000-00-00')!= '0000-00-00') DCTB DCTB DCTB DCTB and (delivery_date between %(start)s and %(end)s)\\n DCTB DCTB DCTB DCTB and docstatus < 2\\n DCTB DCTB DCTB DCTB {conditions}\\n DCTB DCTB \"", ".", "format", "(", "conditions", "=", "conditions", ")", ",", "{", "u'start'", ":", "start", ",", "u'end'", ":", "end", "}", ",", "as_dict", "=", "True", ",", "update", "=", "{", "u'allDay'", ":", "0", "}", ")", "return", "data" ]
returns events for gantt / calendar view rendering .
train
false
8,885
def array(obj, dtype=None, copy=True, ndmin=0): return core.array(obj, dtype, copy, ndmin)
[ "def", "array", "(", "obj", ",", "dtype", "=", "None", ",", "copy", "=", "True", ",", "ndmin", "=", "0", ")", ":", "return", "core", ".", "array", "(", "obj", ",", "dtype", ",", "copy", ",", "ndmin", ")" ]
creates an array on the current device .
train
false
8,887
def format_unifrac_sample_mapping(sample_ids, otu_ids, otu_table_array): out = [] for (i, row) in enumerate(otu_table_array): for (j, val) in enumerate(row): if (val > 0): line = [otu_ids[i], sample_ids[j], str(val)] out.append(' DCTB '.join(line)) return out
[ "def", "format_unifrac_sample_mapping", "(", "sample_ids", ",", "otu_ids", ",", "otu_table_array", ")", ":", "out", "=", "[", "]", "for", "(", "i", ",", "row", ")", "in", "enumerate", "(", "otu_table_array", ")", ":", "for", "(", "j", ",", "val", ")", "in", "enumerate", "(", "row", ")", ":", "if", "(", "val", ">", "0", ")", ":", "line", "=", "[", "otu_ids", "[", "i", "]", ",", "sample_ids", "[", "j", "]", ",", "str", "(", "val", ")", "]", "out", ".", "append", "(", "' DCTB '", ".", "join", "(", "line", ")", ")", "return", "out" ]
returns a unifrac sample mapping file from output of parse_otu_table .
train
false
8,890
def get_parent(globals, level): orig_level = level if ((not level) or (not isinstance(globals, dict))): return (None, '') pkgname = globals.get('__package__', None) if (pkgname is not None): if (not hasattr(pkgname, 'rindex')): raise ValueError('__package__ set to non-string') if (len(pkgname) == 0): if (level > 0): raise ValueError('Attempted relative import in non-package') return (None, '') name = pkgname else: if ('__name__' not in globals): return (None, '') modname = globals['__name__'] if ('__path__' in globals): globals['__package__'] = name = modname else: lastdot = modname.rfind('.') if (lastdot < 0 < level): raise ValueError('Attempted relative import in non-package') if (lastdot < 0): globals['__package__'] = None return (None, '') globals['__package__'] = name = modname[:lastdot] dot = len(name) for x in range(level, 1, (-1)): try: dot = name.rindex('.', 0, dot) except ValueError: raise ValueError('attempted relative import beyond top-level package') name = name[:dot] try: parent = sys.modules[name] except: if (orig_level < 1): warn(("Parent module '%.200s' not found while handling absolute import" % name)) parent = None else: raise SystemError(("Parent module '%.200s' not loaded, cannot perform relative import" % name)) return (parent, name)
[ "def", "get_parent", "(", "globals", ",", "level", ")", ":", "orig_level", "=", "level", "if", "(", "(", "not", "level", ")", "or", "(", "not", "isinstance", "(", "globals", ",", "dict", ")", ")", ")", ":", "return", "(", "None", ",", "''", ")", "pkgname", "=", "globals", ".", "get", "(", "'__package__'", ",", "None", ")", "if", "(", "pkgname", "is", "not", "None", ")", ":", "if", "(", "not", "hasattr", "(", "pkgname", ",", "'rindex'", ")", ")", ":", "raise", "ValueError", "(", "'__package__ set to non-string'", ")", "if", "(", "len", "(", "pkgname", ")", "==", "0", ")", ":", "if", "(", "level", ">", "0", ")", ":", "raise", "ValueError", "(", "'Attempted relative import in non-package'", ")", "return", "(", "None", ",", "''", ")", "name", "=", "pkgname", "else", ":", "if", "(", "'__name__'", "not", "in", "globals", ")", ":", "return", "(", "None", ",", "''", ")", "modname", "=", "globals", "[", "'__name__'", "]", "if", "(", "'__path__'", "in", "globals", ")", ":", "globals", "[", "'__package__'", "]", "=", "name", "=", "modname", "else", ":", "lastdot", "=", "modname", ".", "rfind", "(", "'.'", ")", "if", "(", "lastdot", "<", "0", "<", "level", ")", ":", "raise", "ValueError", "(", "'Attempted relative import in non-package'", ")", "if", "(", "lastdot", "<", "0", ")", ":", "globals", "[", "'__package__'", "]", "=", "None", "return", "(", "None", ",", "''", ")", "globals", "[", "'__package__'", "]", "=", "name", "=", "modname", "[", ":", "lastdot", "]", "dot", "=", "len", "(", "name", ")", "for", "x", "in", "range", "(", "level", ",", "1", ",", "(", "-", "1", ")", ")", ":", "try", ":", "dot", "=", "name", ".", "rindex", "(", "'.'", ",", "0", ",", "dot", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'attempted relative import beyond top-level package'", ")", "name", "=", "name", "[", ":", "dot", "]", "try", ":", "parent", "=", "sys", ".", "modules", "[", "name", "]", "except", ":", "if", "(", "orig_level", "<", "1", ")", ":", "warn", "(", "(", 
"\"Parent module '%.200s' not found while handling absolute import\"", "%", "name", ")", ")", "parent", "=", "None", "else", ":", "raise", "SystemError", "(", "(", "\"Parent module '%.200s' not loaded, cannot perform relative import\"", "%", "name", ")", ")", "return", "(", "parent", ",", "name", ")" ]
retrieves a representation of the parent object .
train
true
8,891
def test_hosts_and_roles_together(): @roles('r1', 'r2') @hosts('d') def command(): pass eq_hosts(command, ['a', 'b', 'c', 'd'], env={'roledefs': fake_roles}) eq_effective_roles(command, ['r1', 'r2'], env={'roledefs': fake_roles})
[ "def", "test_hosts_and_roles_together", "(", ")", ":", "@", "roles", "(", "'r1'", ",", "'r2'", ")", "@", "hosts", "(", "'d'", ")", "def", "command", "(", ")", ":", "pass", "eq_hosts", "(", "command", ",", "[", "'a'", ",", "'b'", ",", "'c'", ",", "'d'", "]", ",", "env", "=", "{", "'roledefs'", ":", "fake_roles", "}", ")", "eq_effective_roles", "(", "command", ",", "[", "'r1'", ",", "'r2'", "]", ",", "env", "=", "{", "'roledefs'", ":", "fake_roles", "}", ")" ]
use of @roles and @hosts together results in union of both .
train
false