id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
37,453
@calculator(65792) def calculate_perf_counter_large_rawcount(previous, current, property_name): return current[property_name]
[ "@", "calculator", "(", "65792", ")", "def", "calculate_perf_counter_large_rawcount", "(", "previous", ",", "current", ",", "property_name", ")", ":", "return", "current", "[", "property_name", "]" ]
perf_counter_large_rawcount URL .
train
false
37,456
def _sws(s): if isinstance(s, str): return ''.join(s.split()) else: return b('').join(s.split())
[ "def", "_sws", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "str", ")", ":", "return", "''", ".", "join", "(", "s", ".", "split", "(", ")", ")", "else", ":", "return", "b", "(", "''", ")", ".", "join", "(", "s", ".", "split", "(", ")", ")" ]
remove whitespace from a text or byte string .
train
false
37,457
def ex_rvalue(name): return ast.Name(name, ast.Load())
[ "def", "ex_rvalue", "(", "name", ")", ":", "return", "ast", ".", "Name", "(", "name", ",", "ast", ".", "Load", "(", ")", ")" ]
a variable store expression .
train
false
37,458
def schunk(string, size): return [string[i:(i + size)] for i in range(0, len(string), size)]
[ "def", "schunk", "(", "string", ",", "size", ")", ":", "return", "[", "string", "[", "i", ":", "(", "i", "+", "size", ")", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "string", ")", ",", "size", ")", "]" ]
splits string into n sized chunks .
train
true
37,462
def preprocess_vars(a): if (a is None): return None elif (not isinstance(a, list)): data = [a] else: data = a for item in data: if (not isinstance(item, MutableMapping)): raise AnsibleError(('variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)' % (a, type(a)))) return data
[ "def", "preprocess_vars", "(", "a", ")", ":", "if", "(", "a", "is", "None", ")", ":", "return", "None", "elif", "(", "not", "isinstance", "(", "a", ",", "list", ")", ")", ":", "data", "=", "[", "a", "]", "else", ":", "data", "=", "a", "for", "item", "in", "data", ":", "if", "(", "not", "isinstance", "(", "item", ",", "MutableMapping", ")", ")", ":", "raise", "AnsibleError", "(", "(", "'variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)'", "%", "(", "a", ",", "type", "(", "a", ")", ")", ")", ")", "return", "data" ]
ensures that vars contained in the parameter passed in are returned as a list of dictionaries .
train
false
37,463
def daemon_options(parser, default_pidfile=None, default_logfile=None): group = parser.add_argument_group(u'Daemonization Options') (group.add_argument(u'-f', u'--logfile', default=default_logfile),) (group.add_argument(u'--pidfile', default=default_pidfile),) (group.add_argument(u'--uid', default=None),) (group.add_argument(u'--gid', default=None),) (group.add_argument(u'--umask', default=None),) (group.add_argument(u'--executable', default=None),)
[ "def", "daemon_options", "(", "parser", ",", "default_pidfile", "=", "None", ",", "default_logfile", "=", "None", ")", ":", "group", "=", "parser", ".", "add_argument_group", "(", "u'Daemonization Options'", ")", "(", "group", ".", "add_argument", "(", "u'-f'", ",", "u'--logfile'", ",", "default", "=", "default_logfile", ")", ",", ")", "(", "group", ".", "add_argument", "(", "u'--pidfile'", ",", "default", "=", "default_pidfile", ")", ",", ")", "(", "group", ".", "add_argument", "(", "u'--uid'", ",", "default", "=", "None", ")", ",", ")", "(", "group", ".", "add_argument", "(", "u'--gid'", ",", "default", "=", "None", ")", ",", ")", "(", "group", ".", "add_argument", "(", "u'--umask'", ",", "default", "=", "None", ")", ",", ")", "(", "group", ".", "add_argument", "(", "u'--executable'", ",", "default", "=", "None", ")", ",", ")" ]
add daemon options to argparse parser .
train
false
37,464
def dates_from_range(start, end=None, length=None): dates = date_range_str(start, end, length) return dates_from_str(dates)
[ "def", "dates_from_range", "(", "start", ",", "end", "=", "None", ",", "length", "=", "None", ")", ":", "dates", "=", "date_range_str", "(", "start", ",", "end", ",", "length", ")", "return", "dates_from_str", "(", "dates", ")" ]
turns a sequence of date strings and returns a list of datetime .
train
false
37,466
def ALL_RATINGS(): ALL_RATINGS = [] for rb in RATINGS_BODIES.values(): ALL_RATINGS.extend(rb.ratings) return ALL_RATINGS
[ "def", "ALL_RATINGS", "(", ")", ":", "ALL_RATINGS", "=", "[", "]", "for", "rb", "in", "RATINGS_BODIES", ".", "values", "(", ")", ":", "ALL_RATINGS", ".", "extend", "(", "rb", ".", "ratings", ")", "return", "ALL_RATINGS" ]
list of all ratings with waffled bodies .
train
false
37,467
def invert(d): out = {} for (k, v) in iteritems(d): try: out[v].add(k) except KeyError: out[v] = {k} return out
[ "def", "invert", "(", "d", ")", ":", "out", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "d", ")", ":", "try", ":", "out", "[", "v", "]", ".", "add", "(", "k", ")", "except", "KeyError", ":", "out", "[", "v", "]", "=", "{", "k", "}", "return", "out" ]
invert an image .
train
true
37,468
def _fastq_sanger_convert_fastq_solexa(in_handle, out_handle, alphabet=None): from Bio.SeqIO.QualityIO import solexa_quality_from_phred trunc_char = chr(1) mapping = ''.join(((([chr(0) for ascii in range(0, 33)] + [chr((64 + int(round(solexa_quality_from_phred(q))))) for q in range(0, (62 + 1))]) + [trunc_char for ascii in range(96, 127)]) + [chr(0) for ascii in range(127, 256)])) assert (len(mapping) == 256) return _fastq_generic2(in_handle, out_handle, mapping, trunc_char, 'Data loss - max Solexa quality 62 in Solexa FASTQ')
[ "def", "_fastq_sanger_convert_fastq_solexa", "(", "in_handle", ",", "out_handle", ",", "alphabet", "=", "None", ")", ":", "from", "Bio", ".", "SeqIO", ".", "QualityIO", "import", "solexa_quality_from_phred", "trunc_char", "=", "chr", "(", "1", ")", "mapping", "=", "''", ".", "join", "(", "(", "(", "(", "[", "chr", "(", "0", ")", "for", "ascii", "in", "range", "(", "0", ",", "33", ")", "]", "+", "[", "chr", "(", "(", "64", "+", "int", "(", "round", "(", "solexa_quality_from_phred", "(", "q", ")", ")", ")", ")", ")", "for", "q", "in", "range", "(", "0", ",", "(", "62", "+", "1", ")", ")", "]", ")", "+", "[", "trunc_char", "for", "ascii", "in", "range", "(", "96", ",", "127", ")", "]", ")", "+", "[", "chr", "(", "0", ")", "for", "ascii", "in", "range", "(", "127", ",", "256", ")", "]", ")", ")", "assert", "(", "len", "(", "mapping", ")", "==", "256", ")", "return", "_fastq_generic2", "(", "in_handle", ",", "out_handle", ",", "mapping", ",", "trunc_char", ",", "'Data loss - max Solexa quality 62 in Solexa FASTQ'", ")" ]
fast sanger fastq to solexa fastq conversion .
train
false
37,469
def _to_model_dict(resource_type_name, ns_res_type_dict): model_dict = {'name': resource_type_name, 'properties_target': ns_res_type_dict['properties_target'], 'prefix': ns_res_type_dict['prefix'], 'created_at': ns_res_type_dict['created_at'], 'updated_at': ns_res_type_dict['updated_at']} return model_dict
[ "def", "_to_model_dict", "(", "resource_type_name", ",", "ns_res_type_dict", ")", ":", "model_dict", "=", "{", "'name'", ":", "resource_type_name", ",", "'properties_target'", ":", "ns_res_type_dict", "[", "'properties_target'", "]", ",", "'prefix'", ":", "ns_res_type_dict", "[", "'prefix'", "]", ",", "'created_at'", ":", "ns_res_type_dict", "[", "'created_at'", "]", ",", "'updated_at'", ":", "ns_res_type_dict", "[", "'updated_at'", "]", "}", "return", "model_dict" ]
transform a metadef_namespace_resource_type dict to a model dict .
train
false
37,470
@utils.arg('network_id', metavar='<network_id>', help='ID of network') @shell.deprecated_network def do_tenant_network_delete(cs, args): cs.tenant_networks.delete(args.network_id)
[ "@", "utils", ".", "arg", "(", "'network_id'", ",", "metavar", "=", "'<network_id>'", ",", "help", "=", "'ID of network'", ")", "@", "shell", ".", "deprecated_network", "def", "do_tenant_network_delete", "(", "cs", ",", "args", ")", ":", "cs", ".", "tenant_networks", ".", "delete", "(", "args", ".", "network_id", ")" ]
delete a tenant network .
train
false
37,471
@pytest.mark.cmd def test_cmd_contributors_mutually_exclusive(capfd): with pytest.raises(CommandError) as e: call_command('contributors', '--include-anonymous', '--mailmerge') assert ('argument --mailmerge: not allowed with argument --include-anonymous' in str(e)) with pytest.raises(CommandError) as e: call_command('contributors', '--mailmerge', '--include-anonymous') assert ('argument --include-anonymous: not allowed with argument --mailmerge' in str(e))
[ "@", "pytest", ".", "mark", ".", "cmd", "def", "test_cmd_contributors_mutually_exclusive", "(", "capfd", ")", ":", "with", "pytest", ".", "raises", "(", "CommandError", ")", "as", "e", ":", "call_command", "(", "'contributors'", ",", "'--include-anonymous'", ",", "'--mailmerge'", ")", "assert", "(", "'argument --mailmerge: not allowed with argument --include-anonymous'", "in", "str", "(", "e", ")", ")", "with", "pytest", ".", "raises", "(", "CommandError", ")", "as", "e", ":", "call_command", "(", "'contributors'", ",", "'--mailmerge'", ",", "'--include-anonymous'", ")", "assert", "(", "'argument --include-anonymous: not allowed with argument --mailmerge'", "in", "str", "(", "e", ")", ")" ]
test mutually exclusive arguments are not accepted .
train
false
37,472
def get_steps_pip(distribution, package_source=PackageSource()): if (distribution not in PIP_DISTRIBUTIONS): raise UsageError(('Distribution %r not supported. Available distributions: %s' % (distribution, ', '.join(PIP_DISTRIBUTIONS)))) package_manager = DOCKER_IMAGES[distribution].package_manager virtualenv = 'flocker-client' steps = [ensure_minimal_setup(package_manager), task_cli_pip_prereqs(package_manager), task_cli_pip_install(virtualenv, package_source), cli_pip_test(virtualenv, package_source)] return steps
[ "def", "get_steps_pip", "(", "distribution", ",", "package_source", "=", "PackageSource", "(", ")", ")", ":", "if", "(", "distribution", "not", "in", "PIP_DISTRIBUTIONS", ")", ":", "raise", "UsageError", "(", "(", "'Distribution %r not supported. Available distributions: %s'", "%", "(", "distribution", ",", "', '", ".", "join", "(", "PIP_DISTRIBUTIONS", ")", ")", ")", ")", "package_manager", "=", "DOCKER_IMAGES", "[", "distribution", "]", ".", "package_manager", "virtualenv", "=", "'flocker-client'", "steps", "=", "[", "ensure_minimal_setup", "(", "package_manager", ")", ",", "task_cli_pip_prereqs", "(", "package_manager", ")", ",", "task_cli_pip_install", "(", "virtualenv", ",", "package_source", ")", ",", "cli_pip_test", "(", "virtualenv", ",", "package_source", ")", "]", "return", "steps" ]
get commands to run for testing client pip installation .
train
false
37,473
def make_digest(app, global_conf, realm, authfunc, **kw): from paste.util.import_string import eval_import import types authfunc = eval_import(authfunc) assert isinstance(authfunc, types.FunctionType), 'authfunc must resolve to a function' return AuthDigestHandler(app, realm, authfunc)
[ "def", "make_digest", "(", "app", ",", "global_conf", ",", "realm", ",", "authfunc", ",", "**", "kw", ")", ":", "from", "paste", ".", "util", ".", "import_string", "import", "eval_import", "import", "types", "authfunc", "=", "eval_import", "(", "authfunc", ")", "assert", "isinstance", "(", "authfunc", ",", "types", ".", "FunctionType", ")", ",", "'authfunc must resolve to a function'", "return", "AuthDigestHandler", "(", "app", ",", "realm", ",", "authfunc", ")" ]
grant access via digest authentication config looks like this:: [filter:grant] use = egg:paste#auth_digest realm=myrealm authfunc=somepackage .
train
false
37,475
def group_member_create(context, data_dict): _check_access('group_member_create', context, data_dict) return _group_or_org_member_create(context, data_dict)
[ "def", "group_member_create", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'group_member_create'", ",", "context", ",", "data_dict", ")", "return", "_group_or_org_member_create", "(", "context", ",", "data_dict", ")" ]
make a user a member of a group .
train
false
37,476
def win_fix_python3(tools_dir): try: print '[INFO] Running Python 3 path fix in Windows' python3_path = '' if (len(settings.PYTHON3_PATH) > 2): python3_path = settings.python3_path else: pathenv = os.environ['path'] if pathenv: paths = pathenv.split(';') for path in paths: if ('python3' in path.lower()): python3_path = path python3 = (('"' + os.path.join(python3_path, 'python')) + '"') dmy = os.path.join(tools_dir, 'enjarify/enjarify.tmp') org = os.path.join(tools_dir, 'enjarify/enjarify.bat') dat = '' with open(dmy, 'r') as file_pointer: dat = file_pointer.read().replace('[xxx]', python3) with open(org, 'w') as file_pointer: file_pointer.write(dat) except: PrintException('[ERROR] Running Python 3 path fix in Windows')
[ "def", "win_fix_python3", "(", "tools_dir", ")", ":", "try", ":", "print", "'[INFO] Running Python 3 path fix in Windows'", "python3_path", "=", "''", "if", "(", "len", "(", "settings", ".", "PYTHON3_PATH", ")", ">", "2", ")", ":", "python3_path", "=", "settings", ".", "python3_path", "else", ":", "pathenv", "=", "os", ".", "environ", "[", "'path'", "]", "if", "pathenv", ":", "paths", "=", "pathenv", ".", "split", "(", "';'", ")", "for", "path", "in", "paths", ":", "if", "(", "'python3'", "in", "path", ".", "lower", "(", ")", ")", ":", "python3_path", "=", "path", "python3", "=", "(", "(", "'\"'", "+", "os", ".", "path", ".", "join", "(", "python3_path", ",", "'python'", ")", ")", "+", "'\"'", ")", "dmy", "=", "os", ".", "path", ".", "join", "(", "tools_dir", ",", "'enjarify/enjarify.tmp'", ")", "org", "=", "os", ".", "path", ".", "join", "(", "tools_dir", ",", "'enjarify/enjarify.bat'", ")", "dat", "=", "''", "with", "open", "(", "dmy", ",", "'r'", ")", "as", "file_pointer", ":", "dat", "=", "file_pointer", ".", "read", "(", ")", ".", "replace", "(", "'[xxx]'", ",", "python3", ")", "with", "open", "(", "org", ",", "'w'", ")", "as", "file_pointer", ":", "file_pointer", ".", "write", "(", "dat", ")", "except", ":", "PrintException", "(", "'[ERROR] Running Python 3 path fix in Windows'", ")" ]
runn python 3 path fix in windows .
train
false
37,478
def default_order_handler(request, order_form, order): pass
[ "def", "default_order_handler", "(", "request", ",", "order_form", ",", "order", ")", ":", "pass" ]
default order handler - called when the order is complete and contains its final data .
train
false
37,479
def order(x, NoneIsLast=True, decreasing=False): omitNone = False if (NoneIsLast is None): NoneIsLast = True omitNone = True n = len(x) ix = range(n) if (None not in x): ix.sort(reverse=decreasing, key=(lambda j: x[j])) else: def key(i, x=x): elem = x[i] if (decreasing == NoneIsLast): return ((not (elem is None)), elem) else: return ((elem is None), elem) ix = range(n) ix.sort(key=key, reverse=decreasing) if omitNone: n = len(x) for i in range((n - 1), (-1), (-1)): if (x[ix[i]] is None): n -= 1 return ix[:n] return ix
[ "def", "order", "(", "x", ",", "NoneIsLast", "=", "True", ",", "decreasing", "=", "False", ")", ":", "omitNone", "=", "False", "if", "(", "NoneIsLast", "is", "None", ")", ":", "NoneIsLast", "=", "True", "omitNone", "=", "True", "n", "=", "len", "(", "x", ")", "ix", "=", "range", "(", "n", ")", "if", "(", "None", "not", "in", "x", ")", ":", "ix", ".", "sort", "(", "reverse", "=", "decreasing", ",", "key", "=", "(", "lambda", "j", ":", "x", "[", "j", "]", ")", ")", "else", ":", "def", "key", "(", "i", ",", "x", "=", "x", ")", ":", "elem", "=", "x", "[", "i", "]", "if", "(", "decreasing", "==", "NoneIsLast", ")", ":", "return", "(", "(", "not", "(", "elem", "is", "None", ")", ")", ",", "elem", ")", "else", ":", "return", "(", "(", "elem", "is", "None", ")", ",", "elem", ")", "ix", "=", "range", "(", "n", ")", "ix", ".", "sort", "(", "key", "=", "key", ",", "reverse", "=", "decreasing", ")", "if", "omitNone", ":", "n", "=", "len", "(", "x", ")", "for", "i", "in", "range", "(", "(", "n", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "if", "(", "x", "[", "ix", "[", "i", "]", "]", "is", "None", ")", ":", "n", "-=", "1", "return", "ix", "[", ":", "n", "]", "return", "ix" ]
returns a list of indices in the order as when the given list is sorted .
train
false
37,480
def all_hooks(): return _HOOKS
[ "def", "all_hooks", "(", ")", ":", "return", "_HOOKS" ]
return all registered hooks .
train
false
37,482
@curry def _format_url(instrument_type, instrument_ids, start_date, end_date, earliest_allowed_date): return 'http://www.bankofcanada.ca/stats/results/csv?lP=lookup_{instrument_type}_yields.php&sR={restrict}&se={instrument_ids}&dF={start}&dT={end}'.format(instrument_type=instrument_type, instrument_ids='-'.join(map(prepend('L_'), instrument_ids)), restrict=earliest_allowed_date.strftime('%Y-%m-%d'), start=start_date.strftime('%Y-%m-%d'), end=end_date.strftime('%Y-%m-%d'))
[ "@", "curry", "def", "_format_url", "(", "instrument_type", ",", "instrument_ids", ",", "start_date", ",", "end_date", ",", "earliest_allowed_date", ")", ":", "return", "'http://www.bankofcanada.ca/stats/results/csv?lP=lookup_{instrument_type}_yields.php&sR={restrict}&se={instrument_ids}&dF={start}&dT={end}'", ".", "format", "(", "instrument_type", "=", "instrument_type", ",", "instrument_ids", "=", "'-'", ".", "join", "(", "map", "(", "prepend", "(", "'L_'", ")", ",", "instrument_ids", ")", ")", ",", "restrict", "=", "earliest_allowed_date", ".", "strftime", "(", "'%Y-%m-%d'", ")", ",", "start", "=", "start_date", ".", "strftime", "(", "'%Y-%m-%d'", ")", ",", "end", "=", "end_date", ".", "strftime", "(", "'%Y-%m-%d'", ")", ")" ]
format a url for loading data from bank of canada .
train
true
37,483
def transform_tev(t, rho, x): def _check_args(rho, x): cond1 = (x > 0) cond2 = ((rho > 0) and (rho < 1)) return (cond1 and cond2) if (not np.all(_check_args(rho, x))): raise ValueError('invalid args') from scipy.stats import t as stats_t z = (np.sqrt((1.0 + x)) * (np.power((t / (1.0 - t)), (1.0 / x)) - rho)) z /= np.sqrt((1 - (rho * rho))) transf = (((1 - t) * stats_t._cdf(z, (x + 1))) + (t * stats_t._cdf(z, (x + 1)))) return transf
[ "def", "transform_tev", "(", "t", ",", "rho", ",", "x", ")", ":", "def", "_check_args", "(", "rho", ",", "x", ")", ":", "cond1", "=", "(", "x", ">", "0", ")", "cond2", "=", "(", "(", "rho", ">", "0", ")", "and", "(", "rho", "<", "1", ")", ")", "return", "(", "cond1", "and", "cond2", ")", "if", "(", "not", "np", ".", "all", "(", "_check_args", "(", "rho", ",", "x", ")", ")", ")", ":", "raise", "ValueError", "(", "'invalid args'", ")", "from", "scipy", ".", "stats", "import", "t", "as", "stats_t", "z", "=", "(", "np", ".", "sqrt", "(", "(", "1.0", "+", "x", ")", ")", "*", "(", "np", ".", "power", "(", "(", "t", "/", "(", "1.0", "-", "t", ")", ")", ",", "(", "1.0", "/", "x", ")", ")", "-", "rho", ")", ")", "z", "/=", "np", ".", "sqrt", "(", "(", "1", "-", "(", "rho", "*", "rho", ")", ")", ")", "transf", "=", "(", "(", "(", "1", "-", "t", ")", "*", "stats_t", ".", "_cdf", "(", "z", ",", "(", "x", "+", "1", ")", ")", ")", "+", "(", "t", "*", "stats_t", ".", "_cdf", "(", "z", ",", "(", "x", "+", "1", ")", ")", ")", ")", "return", "transf" ]
t-ev model of demarta and mcneil 2005 restrictions: - rho in - x > 0 .
train
false
37,484
def test_rast(h, f): if (h[:4] == 'Y\xa6j\x95'): return 'rast'
[ "def", "test_rast", "(", "h", ",", "f", ")", ":", "if", "(", "h", "[", ":", "4", "]", "==", "'Y\\xa6j\\x95'", ")", ":", "return", "'rast'" ]
sun raster file .
train
false
37,486
def wait_until_no_listen_pids_namespace(namespace, timeout=SIGTERM_WAITTIME): start = end = time.time() while ((end - start) < timeout): if (not find_listen_pids_namespace(namespace)): return time.sleep(1) end = time.time() raise PidsInNamespaceException
[ "def", "wait_until_no_listen_pids_namespace", "(", "namespace", ",", "timeout", "=", "SIGTERM_WAITTIME", ")", ":", "start", "=", "end", "=", "time", ".", "time", "(", ")", "while", "(", "(", "end", "-", "start", ")", "<", "timeout", ")", ":", "if", "(", "not", "find_listen_pids_namespace", "(", "namespace", ")", ")", ":", "return", "time", ".", "sleep", "(", "1", ")", "end", "=", "time", ".", "time", "(", ")", "raise", "PidsInNamespaceException" ]
poll listening processes within the given namespace .
train
false
37,487
@ioflo.base.deeding.deedify('SaltRaetWorkerFork', ioinits={'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr', 'worker_verify': '.salt.var.worker_verify', 'access_keys': '.salt.access_keys', 'mkey': '.salt.var.zmq.master_key', 'aes': '.salt.var.zmq.aes'}) def worker_fork(self): for index in range(int(self.opts.value['worker_threads'])): time.sleep(0.01) self.proc_mgr.value.add_process(Worker, args=(self.opts.value, (index + 1), self.worker_verify.value, self.access_keys.value, self.mkey.value, self.aes.value))
[ "@", "ioflo", ".", "base", ".", "deeding", ".", "deedify", "(", "'SaltRaetWorkerFork'", ",", "ioinits", "=", "{", "'opts'", ":", "'.salt.opts'", ",", "'proc_mgr'", ":", "'.salt.usr.proc_mgr'", ",", "'worker_verify'", ":", "'.salt.var.worker_verify'", ",", "'access_keys'", ":", "'.salt.access_keys'", ",", "'mkey'", ":", "'.salt.var.zmq.master_key'", ",", "'aes'", ":", "'.salt.var.zmq.aes'", "}", ")", "def", "worker_fork", "(", "self", ")", ":", "for", "index", "in", "range", "(", "int", "(", "self", ".", "opts", ".", "value", "[", "'worker_threads'", "]", ")", ")", ":", "time", ".", "sleep", "(", "0.01", ")", "self", ".", "proc_mgr", ".", "value", ".", "add_process", "(", "Worker", ",", "args", "=", "(", "self", ".", "opts", ".", "value", ",", "(", "index", "+", "1", ")", ",", "self", ".", "worker_verify", ".", "value", ",", "self", ".", "access_keys", ".", "value", ",", "self", ".", "mkey", ".", "value", ",", "self", ".", "aes", ".", "value", ")", ")" ]
fork off the worker procs floscript: do salt raet worker fork at enter .
train
false
37,489
def pd_split(df, p, random_state=None): p = list(p) index = pseudorandom(len(df), p, random_state) return [df.iloc[(index == i)] for i in range(len(p))]
[ "def", "pd_split", "(", "df", ",", "p", ",", "random_state", "=", "None", ")", ":", "p", "=", "list", "(", "p", ")", "index", "=", "pseudorandom", "(", "len", "(", "df", ")", ",", "p", ",", "random_state", ")", "return", "[", "df", ".", "iloc", "[", "(", "index", "==", "i", ")", "]", "for", "i", "in", "range", "(", "len", "(", "p", ")", ")", "]" ]
split dataframe into multiple pieces pseudorandomly .
train
false
37,491
@with_setup(step_runner_environ) def test_can_figure_out_why_has_failed(): f = Feature.from_string(FEATURE1) feature_result = f.run() scenario_result = feature_result.scenario_results[0] failed_step = scenario_result.steps_failed[0] assert_equals(failed_step.why.cause, 'It should fail') assert ('Traceback (most recent call last):' in failed_step.why.traceback) assert ('AssertionError: It should fail' in failed_step.why.traceback) assert_equals(type(failed_step.why.exception), AssertionError)
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_can_figure_out_why_has_failed", "(", ")", ":", "f", "=", "Feature", ".", "from_string", "(", "FEATURE1", ")", "feature_result", "=", "f", ".", "run", "(", ")", "scenario_result", "=", "feature_result", ".", "scenario_results", "[", "0", "]", "failed_step", "=", "scenario_result", ".", "steps_failed", "[", "0", "]", "assert_equals", "(", "failed_step", ".", "why", ".", "cause", ",", "'It should fail'", ")", "assert", "(", "'Traceback (most recent call last):'", "in", "failed_step", ".", "why", ".", "traceback", ")", "assert", "(", "'AssertionError: It should fail'", "in", "failed_step", ".", "why", ".", "traceback", ")", "assert_equals", "(", "type", "(", "failed_step", ".", "why", ".", "exception", ")", ",", "AssertionError", ")" ]
it can figure out why the test has failed .
train
false
37,492
def remove_course_content_user_milestones(course_key, content_key, user, relationship): if (not settings.FEATURES.get('MILESTONES_APP')): return [] course_content_milestones = milestones_api.get_course_content_milestones(course_key, content_key, relationship) for milestone in course_content_milestones: milestones_api.remove_user_milestone({'id': user.id}, milestone)
[ "def", "remove_course_content_user_milestones", "(", "course_key", ",", "content_key", ",", "user", ",", "relationship", ")", ":", "if", "(", "not", "settings", ".", "FEATURES", ".", "get", "(", "'MILESTONES_APP'", ")", ")", ":", "return", "[", "]", "course_content_milestones", "=", "milestones_api", ".", "get_course_content_milestones", "(", "course_key", ",", "content_key", ",", "relationship", ")", "for", "milestone", "in", "course_content_milestones", ":", "milestones_api", ".", "remove_user_milestone", "(", "{", "'id'", ":", "user", ".", "id", "}", ",", "milestone", ")" ]
removes the specified user-milestone link from the system for the specified course content module .
train
false
37,493
def fork_job(mod_name, func_name, args=(), kwargs={}, timeout=300, cwd=None, priority=u'normal', env={}, no_output=False, heartbeat=None, abort=None, module_is_source_code=False): ans = {u'result': None, u'stdout_stderr': None} (listener, w) = create_worker(env, priority, cwd) try: communicate(ans, w, listener, (mod_name, func_name, args, kwargs, module_is_source_code), timeout=timeout, heartbeat=heartbeat, abort=abort) finally: t = Thread(target=w.kill) t.daemon = True t.start() if no_output: try: os.remove(w.log_path) except: pass if (not no_output): ans[u'stdout_stderr'] = w.log_path return ans
[ "def", "fork_job", "(", "mod_name", ",", "func_name", ",", "args", "=", "(", ")", ",", "kwargs", "=", "{", "}", ",", "timeout", "=", "300", ",", "cwd", "=", "None", ",", "priority", "=", "u'normal'", ",", "env", "=", "{", "}", ",", "no_output", "=", "False", ",", "heartbeat", "=", "None", ",", "abort", "=", "None", ",", "module_is_source_code", "=", "False", ")", ":", "ans", "=", "{", "u'result'", ":", "None", ",", "u'stdout_stderr'", ":", "None", "}", "(", "listener", ",", "w", ")", "=", "create_worker", "(", "env", ",", "priority", ",", "cwd", ")", "try", ":", "communicate", "(", "ans", ",", "w", ",", "listener", ",", "(", "mod_name", ",", "func_name", ",", "args", ",", "kwargs", ",", "module_is_source_code", ")", ",", "timeout", "=", "timeout", ",", "heartbeat", "=", "heartbeat", ",", "abort", "=", "abort", ")", "finally", ":", "t", "=", "Thread", "(", "target", "=", "w", ".", "kill", ")", "t", ".", "daemon", "=", "True", "t", ".", "start", "(", ")", "if", "no_output", ":", "try", ":", "os", ".", "remove", "(", "w", ".", "log_path", ")", "except", ":", "pass", "if", "(", "not", "no_output", ")", ":", "ans", "[", "u'stdout_stderr'", "]", "=", "w", ".", "log_path", "return", "ans" ]
run a job in a worker process .
train
false
37,495
def execute_and_trace(task_name, *args, **kwargs): hostname = kwargs.get('hostname') platforms.set_mp_process_title('celeryd', task_name, hostname=hostname) try: return WorkerTaskTrace(task_name, *args, **kwargs).execute_safe() finally: platforms.set_mp_process_title('celeryd', '-idle-', hostname)
[ "def", "execute_and_trace", "(", "task_name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "hostname", "=", "kwargs", ".", "get", "(", "'hostname'", ")", "platforms", ".", "set_mp_process_title", "(", "'celeryd'", ",", "task_name", ",", "hostname", "=", "hostname", ")", "try", ":", "return", "WorkerTaskTrace", "(", "task_name", ",", "*", "args", ",", "**", "kwargs", ")", ".", "execute_safe", "(", ")", "finally", ":", "platforms", ".", "set_mp_process_title", "(", "'celeryd'", ",", "'-idle-'", ",", "hostname", ")" ]
this is a pickleable method used as a target when applying to pools .
train
false
37,496
def comparePointIndexDescending(self, other): if (self.pointIndex > other.pointIndex): return (-1) if (self.pointIndex < other.pointIndex): return 1 return 0
[ "def", "comparePointIndexDescending", "(", "self", ",", "other", ")", ":", "if", "(", "self", ".", "pointIndex", ">", "other", ".", "pointIndex", ")", ":", "return", "(", "-", "1", ")", "if", "(", "self", ".", "pointIndex", "<", "other", ".", "pointIndex", ")", ":", "return", "1", "return", "0" ]
get comparison in order to sort y intersections in descending order of point index .
train
false
37,497
def _have_socket_can(): try: s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) except (AttributeError, OSError): return False else: s.close() return True
[ "def", "_have_socket_can", "(", ")", ":", "try", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "PF_CAN", ",", "socket", ".", "SOCK_RAW", ",", "socket", ".", "CAN_RAW", ")", "except", "(", "AttributeError", ",", "OSError", ")", ":", "return", "False", "else", ":", "s", ".", "close", "(", ")", "return", "True" ]
check whether can sockets are supported on this host .
train
false
37,499
def host_update(hostid, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'host.update' params = {'hostid': hostid} params = _params_extend(params, _ignore_name=True, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result']['hostids'] else: raise KeyError except KeyError: return ret
[ "def", "host_update", "(", "hostid", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'host.update'", "params", "=", "{", "'hostid'", ":", "hostid", "}", "params", "=", "_params_extend", "(", "params", ",", "_ignore_name", "=", "True", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'hostids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
update existing hosts .
train
true
37,500
def recapitalize(text): text = text.lower() capsRE = re.compile('(?:^|(?<=[\\.\\?\\!] ))([a-z])') text = capsRE.sub((lambda x: x.group(1).upper()), text) return text
[ "def", "recapitalize", "(", "text", ")", ":", "text", "=", "text", ".", "lower", "(", ")", "capsRE", "=", "re", ".", "compile", "(", "'(?:^|(?<=[\\\\.\\\\?\\\\!] ))([a-z])'", ")", "text", "=", "capsRE", ".", "sub", "(", "(", "lambda", "x", ":", "x", ".", "group", "(", "1", ")", ".", "upper", "(", ")", ")", ",", "text", ")", "return", "text" ]
recapitalizes text .
train
false
37,501
def compute_inherited_metadata(descriptor): if descriptor.has_children: parent_metadata = descriptor.xblock_kvs.inherited_settings.copy() for field in InheritanceMixin.fields.values(): if field.is_set_on(descriptor): parent_metadata[field.name] = field.read_json(descriptor) for child in descriptor.get_children(): inherit_metadata(child, parent_metadata) compute_inherited_metadata(child)
[ "def", "compute_inherited_metadata", "(", "descriptor", ")", ":", "if", "descriptor", ".", "has_children", ":", "parent_metadata", "=", "descriptor", ".", "xblock_kvs", ".", "inherited_settings", ".", "copy", "(", ")", "for", "field", "in", "InheritanceMixin", ".", "fields", ".", "values", "(", ")", ":", "if", "field", ".", "is_set_on", "(", "descriptor", ")", ":", "parent_metadata", "[", "field", ".", "name", "]", "=", "field", ".", "read_json", "(", "descriptor", ")", "for", "child", "in", "descriptor", ".", "get_children", "(", ")", ":", "inherit_metadata", "(", "child", ",", "parent_metadata", ")", "compute_inherited_metadata", "(", "child", ")" ]
given a descriptor .
train
false
37,503
def get_comms(target_name): from ipykernel.comm import Comm return Comm(target_name=target_name, data={})
[ "def", "get_comms", "(", "target_name", ")", ":", "from", "ipykernel", ".", "comm", "import", "Comm", "return", "Comm", "(", "target_name", "=", "target_name", ",", "data", "=", "{", "}", ")" ]
create a jupyter comms object for a specific target .
train
false
37,504
def _node_object_id(node): node_object_id = '{}_{}'.format(slugify(_node_name(node)), node.node_id) return node_object_id
[ "def", "_node_object_id", "(", "node", ")", ":", "node_object_id", "=", "'{}_{}'", ".", "format", "(", "slugify", "(", "_node_name", "(", "node", ")", ")", ",", "node", ".", "node_id", ")", "return", "node_object_id" ]
return the object_id of the node .
train
false
37,506
def disassociate_route_table(association_id, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if conn.disassociate_route_table(association_id): log.info('Route table with association id {0} has been disassociated.'.format(association_id)) return {'disassociated': True} else: log.warning('Route table with association id {0} has not been disassociated.'.format(association_id)) return {'disassociated': False} except BotoServerError as e: return {'disassociated': False, 'error': salt.utils.boto.get_error(e)}
[ "def", "disassociate_route_table", "(", "association_id", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "conn", ".", "disassociate_route_table", "(", "association_id", ")", ":", "log", ".", "info", "(", "'Route table with association id {0} has been disassociated.'", ".", "format", "(", "association_id", ")", ")", "return", "{", "'disassociated'", ":", "True", "}", "else", ":", "log", ".", "warning", "(", "'Route table with association id {0} has not been disassociated.'", ".", "format", "(", "association_id", ")", ")", "return", "{", "'disassociated'", ":", "False", "}", "except", "BotoServerError", "as", "e", ":", "return", "{", "'disassociated'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto", ".", "get_error", "(", "e", ")", "}" ]
disassociates a route table .
train
false
37,507
def _lookup_by_attr_factory(attr, unique, iterator, element_name, doc): def lookup_by_attr(self, ref, before=None): u'\n Given a string *ref*, finds the first element in the iterator\n where the given attribute == *ref*. If *before* is provided,\n will stop searching at the object *before*. This is\n important, since "forward references" are not allowed in the\n VOTABLE format.\n ' for element in getattr(self, iterator)(): if (element is before): if (getattr(element, attr, None) == ref): vo_raise(u'{} references itself'.format(element_name), element._config, element._pos, KeyError) break if (getattr(element, attr, None) == ref): (yield element) def lookup_by_attr_unique(self, ref, before=None): for element in lookup_by_attr(self, ref, before=before): return element raise KeyError(u"No {} with {} '{}' found before the referencing {}".format(element_name, attr, ref, element_name)) if unique: lookup_by_attr_unique.__doc__ = doc return lookup_by_attr_unique else: lookup_by_attr.__doc__ = doc return lookup_by_attr
[ "def", "_lookup_by_attr_factory", "(", "attr", ",", "unique", ",", "iterator", ",", "element_name", ",", "doc", ")", ":", "def", "lookup_by_attr", "(", "self", ",", "ref", ",", "before", "=", "None", ")", ":", "for", "element", "in", "getattr", "(", "self", ",", "iterator", ")", "(", ")", ":", "if", "(", "element", "is", "before", ")", ":", "if", "(", "getattr", "(", "element", ",", "attr", ",", "None", ")", "==", "ref", ")", ":", "vo_raise", "(", "u'{} references itself'", ".", "format", "(", "element_name", ")", ",", "element", ".", "_config", ",", "element", ".", "_pos", ",", "KeyError", ")", "break", "if", "(", "getattr", "(", "element", ",", "attr", ",", "None", ")", "==", "ref", ")", ":", "(", "yield", "element", ")", "def", "lookup_by_attr_unique", "(", "self", ",", "ref", ",", "before", "=", "None", ")", ":", "for", "element", "in", "lookup_by_attr", "(", "self", ",", "ref", ",", "before", "=", "before", ")", ":", "return", "element", "raise", "KeyError", "(", "u\"No {} with {} '{}' found before the referencing {}\"", ".", "format", "(", "element_name", ",", "attr", ",", "ref", ",", "element_name", ")", ")", "if", "unique", ":", "lookup_by_attr_unique", ".", "__doc__", "=", "doc", "return", "lookup_by_attr_unique", "else", ":", "lookup_by_attr", ".", "__doc__", "=", "doc", "return", "lookup_by_attr" ]
creates a function useful for looking up an element by a given attribute .
train
false
37,508
def fsl_name(obj, fname): ext = Info.output_type_to_ext(obj.inputs.output_type) return (fname + ext)
[ "def", "fsl_name", "(", "obj", ",", "fname", ")", ":", "ext", "=", "Info", ".", "output_type_to_ext", "(", "obj", ".", "inputs", ".", "output_type", ")", "return", "(", "fname", "+", "ext", ")" ]
create valid fsl name .
train
false
37,509
def _count_pylint_violations(report_file): num_violations_report = 0 pylint_pattern = re.compile('.(\\d+):\\ \\[(\\D\\d+.+\\]).') for line in open(report_file): violation_list_for_line = pylint_pattern.split(line) if (len(violation_list_for_line) == 4): num_violations_report += 1 return num_violations_report
[ "def", "_count_pylint_violations", "(", "report_file", ")", ":", "num_violations_report", "=", "0", "pylint_pattern", "=", "re", ".", "compile", "(", "'.(\\\\d+):\\\\ \\\\[(\\\\D\\\\d+.+\\\\]).'", ")", "for", "line", "in", "open", "(", "report_file", ")", ":", "violation_list_for_line", "=", "pylint_pattern", ".", "split", "(", "line", ")", "if", "(", "len", "(", "violation_list_for_line", ")", "==", "4", ")", ":", "num_violations_report", "+=", "1", "return", "num_violations_report" ]
parses a pylint report line-by-line and determines the number of violations reported .
train
false
37,510
def get_model_meta(model): meta_def = {} for (kwd, defn) in meta_details.items(): try: meta_def[kwd] = get_value(model._meta, defn) except IsDefault: pass for base in model.__bases__: if (hasattr(base, '_meta') and issubclass(base, models.Model)): if (not base._meta.abstract): if ('_ormbases' not in meta_def): meta_def['_ormbases'] = [] meta_def['_ormbases'].append(('%s.%s' % (base._meta.app_label, base._meta.object_name))) return meta_def
[ "def", "get_model_meta", "(", "model", ")", ":", "meta_def", "=", "{", "}", "for", "(", "kwd", ",", "defn", ")", "in", "meta_details", ".", "items", "(", ")", ":", "try", ":", "meta_def", "[", "kwd", "]", "=", "get_value", "(", "model", ".", "_meta", ",", "defn", ")", "except", "IsDefault", ":", "pass", "for", "base", "in", "model", ".", "__bases__", ":", "if", "(", "hasattr", "(", "base", ",", "'_meta'", ")", "and", "issubclass", "(", "base", ",", "models", ".", "Model", ")", ")", ":", "if", "(", "not", "base", ".", "_meta", ".", "abstract", ")", ":", "if", "(", "'_ormbases'", "not", "in", "meta_def", ")", ":", "meta_def", "[", "'_ormbases'", "]", "=", "[", "]", "meta_def", "[", "'_ormbases'", "]", ".", "append", "(", "(", "'%s.%s'", "%", "(", "base", ".", "_meta", ".", "app_label", ",", "base", ".", "_meta", ".", "object_name", ")", ")", ")", "return", "meta_def" ]
given a model class .
train
false
37,511
def global_constant(builder_or_module, name, value, linkage='internal'): if isinstance(builder_or_module, ir.Module): module = builder_or_module else: module = builder_or_module.module data = module.add_global_variable(value.type, name=name) data.linkage = linkage data.global_constant = True data.initializer = value return data
[ "def", "global_constant", "(", "builder_or_module", ",", "name", ",", "value", ",", "linkage", "=", "'internal'", ")", ":", "if", "isinstance", "(", "builder_or_module", ",", "ir", ".", "Module", ")", ":", "module", "=", "builder_or_module", "else", ":", "module", "=", "builder_or_module", ".", "module", "data", "=", "module", ".", "add_global_variable", "(", "value", ".", "type", ",", "name", "=", "name", ")", "data", ".", "linkage", "=", "linkage", "data", ".", "global_constant", "=", "True", "data", ".", "initializer", "=", "value", "return", "data" ]
get or create a global constant with *name* or *value* .
train
false
37,512
def bracket_split(source, brackets=('()', '{}', '[]'), strip=False): starts = [e[0] for e in brackets] in_bracket = 0 n = 0 last = 0 while (n < len(source)): e = source[n] if ((not in_bracket) and (e in starts)): in_bracket = 1 start = n (b_start, b_end) = brackets[starts.index(e)] elif in_bracket: if (e == b_start): in_bracket += 1 elif (e == b_end): in_bracket -= 1 if (not in_bracket): if source[last:start]: (yield source[last:start]) last = (n + 1) (yield source[(start + strip):((n + 1) - strip)]) n += 1 if source[last:]: (yield source[last:])
[ "def", "bracket_split", "(", "source", ",", "brackets", "=", "(", "'()'", ",", "'{}'", ",", "'[]'", ")", ",", "strip", "=", "False", ")", ":", "starts", "=", "[", "e", "[", "0", "]", "for", "e", "in", "brackets", "]", "in_bracket", "=", "0", "n", "=", "0", "last", "=", "0", "while", "(", "n", "<", "len", "(", "source", ")", ")", ":", "e", "=", "source", "[", "n", "]", "if", "(", "(", "not", "in_bracket", ")", "and", "(", "e", "in", "starts", ")", ")", ":", "in_bracket", "=", "1", "start", "=", "n", "(", "b_start", ",", "b_end", ")", "=", "brackets", "[", "starts", ".", "index", "(", "e", ")", "]", "elif", "in_bracket", ":", "if", "(", "e", "==", "b_start", ")", ":", "in_bracket", "+=", "1", "elif", "(", "e", "==", "b_end", ")", ":", "in_bracket", "-=", "1", "if", "(", "not", "in_bracket", ")", ":", "if", "source", "[", "last", ":", "start", "]", ":", "(", "yield", "source", "[", "last", ":", "start", "]", ")", "last", "=", "(", "n", "+", "1", ")", "(", "yield", "source", "[", "(", "start", "+", "strip", ")", ":", "(", "(", "n", "+", "1", ")", "-", "strip", ")", "]", ")", "n", "+=", "1", "if", "source", "[", "last", ":", "]", ":", "(", "yield", "source", "[", "last", ":", "]", ")" ]
does not return empty strings .
train
true
37,513
def addDictOption(opts, choicesDict, default, name, helpStr=None): if (default not in choicesDict): raise Exception(('Invalid default %s for choices dict: %s' % (default, name))) if (not helpStr): helpStr = ('|'.join(sorted(choicesDict.keys())) + '[,param=value...]') opts.add_option(('--' + name), type='string', default=default, help=helpStr)
[ "def", "addDictOption", "(", "opts", ",", "choicesDict", ",", "default", ",", "name", ",", "helpStr", "=", "None", ")", ":", "if", "(", "default", "not", "in", "choicesDict", ")", ":", "raise", "Exception", "(", "(", "'Invalid default %s for choices dict: %s'", "%", "(", "default", ",", "name", ")", ")", ")", "if", "(", "not", "helpStr", ")", ":", "helpStr", "=", "(", "'|'", ".", "join", "(", "sorted", "(", "choicesDict", ".", "keys", "(", ")", ")", ")", "+", "'[,param=value...]'", ")", "opts", ".", "add_option", "(", "(", "'--'", "+", "name", ")", ",", "type", "=", "'string'", ",", "default", "=", "default", ",", "help", "=", "helpStr", ")" ]
convenience function to add choices dicts to optionparser .
train
false
37,516
def is_uri(uri): return (('://' in uri) and bool(urlparse(uri).scheme))
[ "def", "is_uri", "(", "uri", ")", ":", "return", "(", "(", "'://'", "in", "uri", ")", "and", "bool", "(", "urlparse", "(", "uri", ")", ".", "scheme", ")", ")" ]
return true if *uri* is a uri and contains :// .
train
false
37,517
def _get_rrd(server, vm_uuid): try: xml = urllib.urlopen(('%s://%s:%s@%s/vm_rrd?uuid=%s' % (server[0], CONF.xenapi_connection_username, CONF.xenapi_connection_password, server[1], vm_uuid))) return xml.read() except IOError: LOG.exception((_('Unable to obtain RRD XML for VM %(vm_uuid)s with server details: %(server)s.') % locals())) return None
[ "def", "_get_rrd", "(", "server", ",", "vm_uuid", ")", ":", "try", ":", "xml", "=", "urllib", ".", "urlopen", "(", "(", "'%s://%s:%s@%s/vm_rrd?uuid=%s'", "%", "(", "server", "[", "0", "]", ",", "CONF", ".", "xenapi_connection_username", ",", "CONF", ".", "xenapi_connection_password", ",", "server", "[", "1", "]", ",", "vm_uuid", ")", ")", ")", "return", "xml", ".", "read", "(", ")", "except", "IOError", ":", "LOG", ".", "exception", "(", "(", "_", "(", "'Unable to obtain RRD XML for VM %(vm_uuid)s with server details: %(server)s.'", ")", "%", "locals", "(", ")", ")", ")", "return", "None" ]
return the vm rrd xml as a string .
train
false
37,518
def make_spd_matrix(n_dim, random_state=None): generator = check_random_state(random_state) A = generator.rand(n_dim, n_dim) (U, s, V) = linalg.svd(np.dot(A.T, A)) X = np.dot(np.dot(U, (1.0 + np.diag(generator.rand(n_dim)))), V) return X
[ "def", "make_spd_matrix", "(", "n_dim", ",", "random_state", "=", "None", ")", ":", "generator", "=", "check_random_state", "(", "random_state", ")", "A", "=", "generator", ".", "rand", "(", "n_dim", ",", "n_dim", ")", "(", "U", ",", "s", ",", "V", ")", "=", "linalg", ".", "svd", "(", "np", ".", "dot", "(", "A", ".", "T", ",", "A", ")", ")", "X", "=", "np", ".", "dot", "(", "np", ".", "dot", "(", "U", ",", "(", "1.0", "+", "np", ".", "diag", "(", "generator", ".", "rand", "(", "n_dim", ")", ")", ")", ")", ",", "V", ")", "return", "X" ]
generate a random symmetric .
train
false
37,519
def hazard_point(): return s3_rest_controller()
[ "def", "hazard_point", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
hazard points .
train
false
37,521
def require_permission(level=None): def decorator(f): @gen.engine def wrapper(self, *args, **kwargs): assert (level in [None, 'root', 'support']) self._permissions = (yield gen.Task(self.QueryAdminPermissions)) if (level == 'root'): self.CheckIsRoot() elif (level == 'support'): self.CheckIsSupport() f(self, *args, **kwargs) return wrapper return decorator
[ "def", "require_permission", "(", "level", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "gen", ".", "engine", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "assert", "(", "level", "in", "[", "None", ",", "'root'", ",", "'support'", "]", ")", "self", ".", "_permissions", "=", "(", "yield", "gen", ".", "Task", "(", "self", ".", "QueryAdminPermissions", ")", ")", "if", "(", "level", "==", "'root'", ")", ":", "self", ".", "CheckIsRoot", "(", ")", "elif", "(", "level", "==", "'support'", ")", ":", "self", ".", "CheckIsSupport", "(", ")", "f", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
decorator to be used in admin get/post methods .
train
false
37,522
def check_for_prefix_and_key(function): @functools.wraps(function) def wrapper(*args, **kwargs): offset = 1 if ('prefix' in kwargs): prefix = name_or_value(kwargs['prefix']) else: prefix = name_or_value(args[offset]) offset += 1 if ('key' in kwargs): key = name_or_value(kwargs['key']) else: key = name_or_value(args[offset]) if ((prefix not in args[0]._stack) or (key not in args[0]._stack[prefix])): return False return function(*args, **kwargs) return wrapper
[ "def", "check_for_prefix_and_key", "(", "function", ")", ":", "@", "functools", ".", "wraps", "(", "function", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "offset", "=", "1", "if", "(", "'prefix'", "in", "kwargs", ")", ":", "prefix", "=", "name_or_value", "(", "kwargs", "[", "'prefix'", "]", ")", "else", ":", "prefix", "=", "name_or_value", "(", "args", "[", "offset", "]", ")", "offset", "+=", "1", "if", "(", "'key'", "in", "kwargs", ")", ":", "key", "=", "name_or_value", "(", "kwargs", "[", "'key'", "]", ")", "else", ":", "key", "=", "name_or_value", "(", "args", "[", "offset", "]", ")", "if", "(", "(", "prefix", "not", "in", "args", "[", "0", "]", ".", "_stack", ")", "or", "(", "key", "not", "in", "args", "[", "0", "]", ".", "_stack", "[", "prefix", "]", ")", ")", ":", "return", "False", "return", "function", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
automatically return false if the key or prefix is not in the callbacks for the instance .
train
false
37,525
@bdd.when('selection is supported') def selection_supported(qapp): if (not qapp.clipboard().supportsSelection()): pytest.skip("OS doesn't support primary selection!")
[ "@", "bdd", ".", "when", "(", "'selection is supported'", ")", "def", "selection_supported", "(", "qapp", ")", ":", "if", "(", "not", "qapp", ".", "clipboard", "(", ")", ".", "supportsSelection", "(", ")", ")", ":", "pytest", ".", "skip", "(", "\"OS doesn't support primary selection!\"", ")" ]
skip the test if selection isnt supported .
train
false
37,526
@bp.route('/signup', methods=['GET', 'POST']) def signup(): next_url = request.args.get('next', url_for('.setting')) token = request.args.get('token') if token: user = verify_auth_token(token, 1) if (not user): flash(_('Invalid or expired token.'), 'error') return redirect(next_url) user.role = 'user' user.save() login_user(user) flash(_('This account is verified.'), 'success') return redirect(next_url) form = SignupForm() if form.validate_on_submit(): verify_email = current_app.config.get('VERIFY_EMAIL', True) if (not verify_email): user = form.save('user') login_user(user) return redirect(next_url) user = form.save() login_user(user) msg = signup_mail(user) if current_app.debug: return msg.html flash(_('We have sent you an activate email, check your inbox.'), 'info') return redirect(next_url) return render_template('account/signup.html', form=form)
[ "@", "bp", ".", "route", "(", "'/signup'", ",", "methods", "=", "[", "'GET'", ",", "'POST'", "]", ")", "def", "signup", "(", ")", ":", "next_url", "=", "request", ".", "args", ".", "get", "(", "'next'", ",", "url_for", "(", "'.setting'", ")", ")", "token", "=", "request", ".", "args", ".", "get", "(", "'token'", ")", "if", "token", ":", "user", "=", "verify_auth_token", "(", "token", ",", "1", ")", "if", "(", "not", "user", ")", ":", "flash", "(", "_", "(", "'Invalid or expired token.'", ")", ",", "'error'", ")", "return", "redirect", "(", "next_url", ")", "user", ".", "role", "=", "'user'", "user", ".", "save", "(", ")", "login_user", "(", "user", ")", "flash", "(", "_", "(", "'This account is verified.'", ")", ",", "'success'", ")", "return", "redirect", "(", "next_url", ")", "form", "=", "SignupForm", "(", ")", "if", "form", ".", "validate_on_submit", "(", ")", ":", "verify_email", "=", "current_app", ".", "config", ".", "get", "(", "'VERIFY_EMAIL'", ",", "True", ")", "if", "(", "not", "verify_email", ")", ":", "user", "=", "form", ".", "save", "(", "'user'", ")", "login_user", "(", "user", ")", "return", "redirect", "(", "next_url", ")", "user", "=", "form", ".", "save", "(", ")", "login_user", "(", "user", ")", "msg", "=", "signup_mail", "(", "user", ")", "if", "current_app", ".", "debug", ":", "return", "msg", ".", "html", "flash", "(", "_", "(", "'We have sent you an activate email, check your inbox.'", ")", ",", "'info'", ")", "return", "redirect", "(", "next_url", ")", "return", "render_template", "(", "'account/signup.html'", ",", "form", "=", "form", ")" ]
signup form .
train
false
37,528
def has_reached_plugin_limit(placeholder, plugin_type, language, template=None): limits = get_placeholder_conf('limits', placeholder.slot, template) if limits: global_limit = limits.get('global') type_limit = limits.get(plugin_type) count = placeholder.get_plugins(language=language).count() if (global_limit and (count >= global_limit)): raise PluginLimitReached(_(('This placeholder already has the maximum number of plugins (%s).' % count))) elif type_limit: type_count = placeholder.get_plugins(language=language).filter(plugin_type=plugin_type).count() if (type_count >= type_limit): plugin_name = force_text(plugin_pool.get_plugin(plugin_type).name) raise PluginLimitReached((_('This placeholder already has the maximum number (%(limit)s) of allowed %(plugin_name)s plugins.') % {'limit': type_limit, 'plugin_name': plugin_name})) return False
[ "def", "has_reached_plugin_limit", "(", "placeholder", ",", "plugin_type", ",", "language", ",", "template", "=", "None", ")", ":", "limits", "=", "get_placeholder_conf", "(", "'limits'", ",", "placeholder", ".", "slot", ",", "template", ")", "if", "limits", ":", "global_limit", "=", "limits", ".", "get", "(", "'global'", ")", "type_limit", "=", "limits", ".", "get", "(", "plugin_type", ")", "count", "=", "placeholder", ".", "get_plugins", "(", "language", "=", "language", ")", ".", "count", "(", ")", "if", "(", "global_limit", "and", "(", "count", ">=", "global_limit", ")", ")", ":", "raise", "PluginLimitReached", "(", "_", "(", "(", "'This placeholder already has the maximum number of plugins (%s).'", "%", "count", ")", ")", ")", "elif", "type_limit", ":", "type_count", "=", "placeholder", ".", "get_plugins", "(", "language", "=", "language", ")", ".", "filter", "(", "plugin_type", "=", "plugin_type", ")", ".", "count", "(", ")", "if", "(", "type_count", ">=", "type_limit", ")", ":", "plugin_name", "=", "force_text", "(", "plugin_pool", ".", "get_plugin", "(", "plugin_type", ")", ".", "name", ")", "raise", "PluginLimitReached", "(", "(", "_", "(", "'This placeholder already has the maximum number (%(limit)s) of allowed %(plugin_name)s plugins.'", ")", "%", "{", "'limit'", ":", "type_limit", ",", "'plugin_name'", ":", "plugin_name", "}", ")", ")", "return", "False" ]
checks if placeholder has reached its global plugin limit .
train
false
37,529
def alias(selectable, name=None, flat=False): return _interpret_as_from(selectable).alias(name=name, flat=flat)
[ "def", "alias", "(", "selectable", ",", "name", "=", "None", ",", "flat", "=", "False", ")", ":", "return", "_interpret_as_from", "(", "selectable", ")", ".", "alias", "(", "name", "=", "name", ",", "flat", "=", "flat", ")" ]
decorating a class with @alias allows the class to be referenced by each of the names provided as arguments .
train
false
37,530
@not_implemented_for('undirected') def is_semiconnected(G): if (len(G) == 0): raise nx.NetworkXPointlessConcept('Connectivity is undefined for the null graph.') if (not nx.is_weakly_connected(G)): return False G = nx.condensation(G) path = nx.topological_sort(G) return all((G.has_edge(u, v) for (u, v) in pairwise(path)))
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "is_semiconnected", "(", "G", ")", ":", "if", "(", "len", "(", "G", ")", "==", "0", ")", ":", "raise", "nx", ".", "NetworkXPointlessConcept", "(", "'Connectivity is undefined for the null graph.'", ")", "if", "(", "not", "nx", ".", "is_weakly_connected", "(", "G", ")", ")", ":", "return", "False", "G", "=", "nx", ".", "condensation", "(", "G", ")", "path", "=", "nx", ".", "topological_sort", "(", "G", ")", "return", "all", "(", "(", "G", ".", "has_edge", "(", "u", ",", "v", ")", "for", "(", "u", ",", "v", ")", "in", "pairwise", "(", "path", ")", ")", ")" ]
return true if the graph is semiconnected .
train
false
37,531
def get_x(): cmd = 'setxkbmap -query | grep layout' out = __salt__['cmd.run'](cmd, python_shell=True).split(':') return out[1].strip()
[ "def", "get_x", "(", ")", ":", "cmd", "=", "'setxkbmap -query | grep layout'", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "True", ")", ".", "split", "(", "':'", ")", "return", "out", "[", "1", "]", ".", "strip", "(", ")" ]
get current x keyboard setting cli example: .
train
true
37,532
@register.inclusion_tag(u'admin/change_list_results.html') def result_list(cl): headers = list(result_headers(cl)) num_sorted_fields = 0 for h in headers: if (h[u'sortable'] and h[u'sorted']): num_sorted_fields += 1 return {u'cl': cl, u'result_hidden_fields': list(result_hidden_fields(cl)), u'result_headers': headers, u'num_sorted_fields': num_sorted_fields, u'results': list(results(cl))}
[ "@", "register", ".", "inclusion_tag", "(", "u'admin/change_list_results.html'", ")", "def", "result_list", "(", "cl", ")", ":", "headers", "=", "list", "(", "result_headers", "(", "cl", ")", ")", "num_sorted_fields", "=", "0", "for", "h", "in", "headers", ":", "if", "(", "h", "[", "u'sortable'", "]", "and", "h", "[", "u'sorted'", "]", ")", ":", "num_sorted_fields", "+=", "1", "return", "{", "u'cl'", ":", "cl", ",", "u'result_hidden_fields'", ":", "list", "(", "result_hidden_fields", "(", "cl", ")", ")", ",", "u'result_headers'", ":", "headers", ",", "u'num_sorted_fields'", ":", "num_sorted_fields", ",", "u'results'", ":", "list", "(", "results", "(", "cl", ")", ")", "}" ]
displays the headers and data list together .
train
false
37,534
def getLowerLeftCorner(nestedRings): lowerLeftCorner = Vector3() lowestRealPlusImaginary = 987654321.0 for nestedRing in nestedRings: for point in nestedRing.boundary: realPlusImaginary = (point.real + point.imag) if (realPlusImaginary < lowestRealPlusImaginary): lowestRealPlusImaginary = realPlusImaginary lowerLeftCorner.setToXYZ(point.real, point.imag, nestedRing.z) return lowerLeftCorner
[ "def", "getLowerLeftCorner", "(", "nestedRings", ")", ":", "lowerLeftCorner", "=", "Vector3", "(", ")", "lowestRealPlusImaginary", "=", "987654321.0", "for", "nestedRing", "in", "nestedRings", ":", "for", "point", "in", "nestedRing", ".", "boundary", ":", "realPlusImaginary", "=", "(", "point", ".", "real", "+", "point", ".", "imag", ")", "if", "(", "realPlusImaginary", "<", "lowestRealPlusImaginary", ")", ":", "lowestRealPlusImaginary", "=", "realPlusImaginary", "lowerLeftCorner", ".", "setToXYZ", "(", "point", ".", "real", ",", "point", ".", "imag", ",", "nestedRing", ".", "z", ")", "return", "lowerLeftCorner" ]
get the lower left corner from the nestedrings .
train
false
37,535
@pytest.mark.cmd @pytest.mark.django_db def test_changed_languages_noargs(capfd): revision = Revision.get() call_command('changed_languages') (out, err) = capfd.readouterr() assert (out == u'language0,language1,templates\n') assert (('Will show languages changed between revisions -1 (exclusive) and %d (inclusive)' % revision) in err)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_changed_languages_noargs", "(", "capfd", ")", ":", "revision", "=", "Revision", ".", "get", "(", ")", "call_command", "(", "'changed_languages'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "out", "==", "u'language0,language1,templates\\n'", ")", "assert", "(", "(", "'Will show languages changed between revisions -1 (exclusive) and %d (inclusive)'", "%", "revision", ")", "in", "err", ")" ]
get changed languages since last sync .
train
false
37,536
def show_cfg(resource_url, escape=u'##'): resource_url = normalize_resource_url(resource_url) resource_val = load(resource_url, format=u'text', cache=False) lines = resource_val.splitlines() for l in lines: if l.startswith(escape): continue if re.match(u'^$', l): continue print(l)
[ "def", "show_cfg", "(", "resource_url", ",", "escape", "=", "u'##'", ")", ":", "resource_url", "=", "normalize_resource_url", "(", "resource_url", ")", "resource_val", "=", "load", "(", "resource_url", ",", "format", "=", "u'text'", ",", "cache", "=", "False", ")", "lines", "=", "resource_val", ".", "splitlines", "(", ")", "for", "l", "in", "lines", ":", "if", "l", ".", "startswith", "(", "escape", ")", ":", "continue", "if", "re", ".", "match", "(", "u'^$'", ",", "l", ")", ":", "continue", "print", "(", "l", ")" ]
write out a grammar file .
train
false
37,539
def lbp_key(f): return (sig_key(Sign(f), Polyn(f).ring.order), (- Num(f)))
[ "def", "lbp_key", "(", "f", ")", ":", "return", "(", "sig_key", "(", "Sign", "(", "f", ")", ",", "Polyn", "(", "f", ")", ".", "ring", ".", "order", ")", ",", "(", "-", "Num", "(", "f", ")", ")", ")" ]
key for comparing two labeled polynomials .
train
false
37,540
def shortest_path(digr, s): nodes_explored = set([s]) nodes_unexplored = DFS(digr, s) nodes_unexplored.remove(s) dist = {s: 0} node_heap = [] for n in nodes_unexplored: min = compute_min_dist(digr, n, nodes_explored, dist) heapq.heappush(node_heap, (min, n)) while (len(node_heap) > 0): (min_dist, nearest_node) = heapq.heappop(node_heap) dist[nearest_node] = min_dist nodes_explored.add(nearest_node) nodes_unexplored.remove(nearest_node) for v in digr.neighbors(nearest_node): if (v in nodes_unexplored): for i in range(len(node_heap)): if (node_heap[i][1] == v): node_heap[i] = (compute_min_dist(digr, v, nodes_explored, dist), v) heapq.heapify(node_heap) return dist
[ "def", "shortest_path", "(", "digr", ",", "s", ")", ":", "nodes_explored", "=", "set", "(", "[", "s", "]", ")", "nodes_unexplored", "=", "DFS", "(", "digr", ",", "s", ")", "nodes_unexplored", ".", "remove", "(", "s", ")", "dist", "=", "{", "s", ":", "0", "}", "node_heap", "=", "[", "]", "for", "n", "in", "nodes_unexplored", ":", "min", "=", "compute_min_dist", "(", "digr", ",", "n", ",", "nodes_explored", ",", "dist", ")", "heapq", ".", "heappush", "(", "node_heap", ",", "(", "min", ",", "n", ")", ")", "while", "(", "len", "(", "node_heap", ")", ">", "0", ")", ":", "(", "min_dist", ",", "nearest_node", ")", "=", "heapq", ".", "heappop", "(", "node_heap", ")", "dist", "[", "nearest_node", "]", "=", "min_dist", "nodes_explored", ".", "add", "(", "nearest_node", ")", "nodes_unexplored", ".", "remove", "(", "nearest_node", ")", "for", "v", "in", "digr", ".", "neighbors", "(", "nearest_node", ")", ":", "if", "(", "v", "in", "nodes_unexplored", ")", ":", "for", "i", "in", "range", "(", "len", "(", "node_heap", ")", ")", ":", "if", "(", "node_heap", "[", "i", "]", "[", "1", "]", "==", "v", ")", ":", "node_heap", "[", "i", "]", "=", "(", "compute_min_dist", "(", "digr", ",", "v", ",", "nodes_explored", ",", "dist", ")", ",", "v", ")", "heapq", ".", "heapify", "(", "node_heap", ")", "return", "dist" ]
returns the shortest representation of the given locations .
train
false
37,542
def increment_assignment_versions(course_key, usage_key, user_id): problem_descriptor = modulestore().get_item(usage_key) assignments = outcomes.get_assignments_for_problem(problem_descriptor, user_id, course_key) for assignment in assignments: assignment.version_number += 1 assignment.save() return assignments
[ "def", "increment_assignment_versions", "(", "course_key", ",", "usage_key", ",", "user_id", ")", ":", "problem_descriptor", "=", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", "assignments", "=", "outcomes", ".", "get_assignments_for_problem", "(", "problem_descriptor", ",", "user_id", ",", "course_key", ")", "for", "assignment", "in", "assignments", ":", "assignment", ".", "version_number", "+=", "1", "assignment", ".", "save", "(", ")", "return", "assignments" ]
update the version numbers for all assignments that are affected by a score change event .
train
false
37,547
def _graded_assignments(course_key): grading_context = grading_context_for_course(course_key) graded_assignments_map = OrderedDict() for (assignment_type_name, subsection_infos) in grading_context['all_graded_subsections_by_type'].iteritems(): graded_subsections_map = OrderedDict() for (subsection_index, subsection_info) in enumerate(subsection_infos, start=1): subsection = subsection_info['subsection_block'] header_name = u'{assignment_type} {subsection_index}: {subsection_name}'.format(assignment_type=assignment_type_name, subsection_index=subsection_index, subsection_name=subsection.display_name) graded_subsections_map[subsection.location] = header_name average_header = u'{assignment_type}'.format(assignment_type=assignment_type_name) use_subsection_headers = (len(subsection_infos) > 1) if use_subsection_headers: average_header += u' (Avg)' graded_assignments_map[assignment_type_name] = {'subsection_headers': graded_subsections_map, 'average_header': average_header, 'use_subsection_headers': use_subsection_headers} return graded_assignments_map
[ "def", "_graded_assignments", "(", "course_key", ")", ":", "grading_context", "=", "grading_context_for_course", "(", "course_key", ")", "graded_assignments_map", "=", "OrderedDict", "(", ")", "for", "(", "assignment_type_name", ",", "subsection_infos", ")", "in", "grading_context", "[", "'all_graded_subsections_by_type'", "]", ".", "iteritems", "(", ")", ":", "graded_subsections_map", "=", "OrderedDict", "(", ")", "for", "(", "subsection_index", ",", "subsection_info", ")", "in", "enumerate", "(", "subsection_infos", ",", "start", "=", "1", ")", ":", "subsection", "=", "subsection_info", "[", "'subsection_block'", "]", "header_name", "=", "u'{assignment_type} {subsection_index}: {subsection_name}'", ".", "format", "(", "assignment_type", "=", "assignment_type_name", ",", "subsection_index", "=", "subsection_index", ",", "subsection_name", "=", "subsection", ".", "display_name", ")", "graded_subsections_map", "[", "subsection", ".", "location", "]", "=", "header_name", "average_header", "=", "u'{assignment_type}'", ".", "format", "(", "assignment_type", "=", "assignment_type_name", ")", "use_subsection_headers", "=", "(", "len", "(", "subsection_infos", ")", ">", "1", ")", "if", "use_subsection_headers", ":", "average_header", "+=", "u' (Avg)'", "graded_assignments_map", "[", "assignment_type_name", "]", "=", "{", "'subsection_headers'", ":", "graded_subsections_map", ",", "'average_header'", ":", "average_header", ",", "'use_subsection_headers'", ":", "use_subsection_headers", "}", "return", "graded_assignments_map" ]
returns an ordereddict that maps an assignment type to a dict of subsection-headers and average-header .
train
false
37,548
def set_var(name, value): return config(name, value)
[ "def", "set_var", "(", "name", ",", "value", ")", ":", "return", "config", "(", "name", ",", "value", ")" ]
set traffic server configuration variable values .
train
false
37,550
def cmd_condition(args): if (len(args) == 0): print ('condition is: %s' % mestate.settings.condition) return mestate.settings.condition = ' '.join(args) if ((len(mestate.settings.condition) == 0) or (mestate.settings.condition == 'clear')): mestate.settings.condition = None
[ "def", "cmd_condition", "(", "args", ")", ":", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "print", "(", "'condition is: %s'", "%", "mestate", ".", "settings", ".", "condition", ")", "return", "mestate", ".", "settings", ".", "condition", "=", "' '", ".", "join", "(", "args", ")", "if", "(", "(", "len", "(", "mestate", ".", "settings", ".", "condition", ")", "==", "0", ")", "or", "(", "mestate", ".", "settings", ".", "condition", "==", "'clear'", ")", ")", ":", "mestate", ".", "settings", ".", "condition", "=", "None" ]
control mavexporer conditions .
train
true
37,551
def outerjoin(left, right, onclause=None, join_to_left=None): return _ORMJoin(left, right, onclause, True)
[ "def", "outerjoin", "(", "left", ",", "right", ",", "onclause", "=", "None", ",", "join_to_left", "=", "None", ")", ":", "return", "_ORMJoin", "(", "left", ",", "right", ",", "onclause", ",", "True", ")" ]
produce a left outer join between left and right clauses .
train
false
37,553
def apply_to_binary_file(f): @wraps(f) def f_in_file(fname): with open(fname, 'rb') as inf: data = inf.read() data = f(data) with open(fname, 'wb+') as outf: outf.write(data) return f_in_file
[ "def", "apply_to_binary_file", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "f_in_file", "(", "fname", ")", ":", "with", "open", "(", "fname", ",", "'rb'", ")", "as", "inf", ":", "data", "=", "inf", ".", "read", "(", ")", "data", "=", "f", "(", "data", ")", "with", "open", "(", "fname", ",", "'wb+'", ")", "as", "outf", ":", "outf", ".", "write", "(", "data", ")", "return", "f_in_file" ]
apply a filter to a binary file .
train
false
37,554
def try_ipv6_socket(): if (not socket.has_ipv6): return False try: socket.socket(socket.AF_INET6).close() return True except IOError as error: logger.debug(u'Platform supports IPv6, but socket creation failed, disabling: %s', encoding.locale_decode(error)) return False
[ "def", "try_ipv6_socket", "(", ")", ":", "if", "(", "not", "socket", ".", "has_ipv6", ")", ":", "return", "False", "try", ":", "socket", ".", "socket", "(", "socket", ".", "AF_INET6", ")", ".", "close", "(", ")", "return", "True", "except", "IOError", "as", "error", ":", "logger", ".", "debug", "(", "u'Platform supports IPv6, but socket creation failed, disabling: %s'", ",", "encoding", ".", "locale_decode", "(", "error", ")", ")", "return", "False" ]
determine if system really supports ipv6 .
train
false
37,556
def control_ttyhup(port, disable_hup): if (platform.system() == 'Linux'): if disable_hup: os.system(('stty -F %s -hup' % port)) else: os.system(('stty -F %s hup' % port))
[ "def", "control_ttyhup", "(", "port", ",", "disable_hup", ")", ":", "if", "(", "platform", ".", "system", "(", ")", "==", "'Linux'", ")", ":", "if", "disable_hup", ":", "os", ".", "system", "(", "(", "'stty -F %s -hup'", "%", "port", ")", ")", "else", ":", "os", ".", "system", "(", "(", "'stty -F %s hup'", "%", "port", ")", ")" ]
controls the hupcl .
train
false
37,557
def make_lex(symbols): lex = [] header = u"\n##################################################################\n# Lexical rules automatically generated by running 'chat80.py -x'.\n##################################################################\n\n" lex.append(header) template = u"PropN[num=sg, sem=<\\P.(P %s)>] -> '%s'\n" for s in symbols: parts = s.split(u'_') caps = [p.capitalize() for p in parts] pname = u'_'.join(caps) rule = (template % (s, pname)) lex.append(rule) return lex
[ "def", "make_lex", "(", "symbols", ")", ":", "lex", "=", "[", "]", "header", "=", "u\"\\n##################################################################\\n# Lexical rules automatically generated by running 'chat80.py -x'.\\n##################################################################\\n\\n\"", "lex", ".", "append", "(", "header", ")", "template", "=", "u\"PropN[num=sg, sem=<\\\\P.(P %s)>] -> '%s'\\n\"", "for", "s", "in", "symbols", ":", "parts", "=", "s", ".", "split", "(", "u'_'", ")", "caps", "=", "[", "p", ".", "capitalize", "(", ")", "for", "p", "in", "parts", "]", "pname", "=", "u'_'", ".", "join", "(", "caps", ")", "rule", "=", "(", "template", "%", "(", "s", ",", "pname", ")", ")", "lex", ".", "append", "(", "rule", ")", "return", "lex" ]
create lexical cfg rules for each individual symbol .
train
false
37,558
def list_blobs(kwargs=None, storage_conn=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The list_blobs function must be called with -f or --function.') if (kwargs is None): kwargs = {} if ('container' not in kwargs): raise SaltCloudSystemExit('An storage container name must be specified as "container"') if (not storage_conn): storage_conn = get_storage_conn(conn_kwargs=kwargs) return salt.utils.msazure.list_blobs(storage_conn=storage_conn, **kwargs)
[ "def", "list_blobs", "(", "kwargs", "=", "None", ",", "storage_conn", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_blobs function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "if", "(", "'container'", "not", "in", "kwargs", ")", ":", "raise", "SaltCloudSystemExit", "(", "'An storage container name must be specified as \"container\"'", ")", "if", "(", "not", "storage_conn", ")", ":", "storage_conn", "=", "get_storage_conn", "(", "conn_kwargs", "=", "kwargs", ")", "return", "salt", ".", "utils", ".", "msazure", ".", "list_blobs", "(", "storage_conn", "=", "storage_conn", ",", "**", "kwargs", ")" ]
lists all the blobs in the bucket .
train
true
37,560
def get_profile_image_storage(): config = settings.PROFILE_IMAGE_BACKEND storage_class = get_storage_class(config['class']) return storage_class(**config['options'])
[ "def", "get_profile_image_storage", "(", ")", ":", "config", "=", "settings", ".", "PROFILE_IMAGE_BACKEND", "storage_class", "=", "get_storage_class", "(", "config", "[", "'class'", "]", ")", "return", "storage_class", "(", "**", "config", "[", "'options'", "]", ")" ]
configures and returns a django storage instance that can be used to physically locate .
train
false
37,561
def test_train_predict2(): import tempfile sp = SequencePattern() tempdir = tempfile.mkdtemp() ts2s = TFLearnSeq2Seq(sp, seq2seq_model='embedding_attention', data_dir=tempdir, name='attention') tf.reset_default_graph() ts2s.train(num_epochs=1, num_points=1000, weights_output_fn=1, weights_input_fn=0) assert os.path.exists(ts2s.weights_output_fn) tf.reset_default_graph() ts2s = TFLearnSeq2Seq(sp, seq2seq_model='embedding_attention', data_dir='DATA', name='attention', verbose=1) (prediction, y) = ts2s.predict(Xin=range(10), weights_input_fn=1) assert len((prediction == 10)) os.system(('rm -rf %s' % tempdir))
[ "def", "test_train_predict2", "(", ")", ":", "import", "tempfile", "sp", "=", "SequencePattern", "(", ")", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "ts2s", "=", "TFLearnSeq2Seq", "(", "sp", ",", "seq2seq_model", "=", "'embedding_attention'", ",", "data_dir", "=", "tempdir", ",", "name", "=", "'attention'", ")", "tf", ".", "reset_default_graph", "(", ")", "ts2s", ".", "train", "(", "num_epochs", "=", "1", ",", "num_points", "=", "1000", ",", "weights_output_fn", "=", "1", ",", "weights_input_fn", "=", "0", ")", "assert", "os", ".", "path", ".", "exists", "(", "ts2s", ".", "weights_output_fn", ")", "tf", ".", "reset_default_graph", "(", ")", "ts2s", "=", "TFLearnSeq2Seq", "(", "sp", ",", "seq2seq_model", "=", "'embedding_attention'", ",", "data_dir", "=", "'DATA'", ",", "name", "=", "'attention'", ",", "verbose", "=", "1", ")", "(", "prediction", ",", "y", ")", "=", "ts2s", ".", "predict", "(", "Xin", "=", "range", "(", "10", ")", ",", "weights_input_fn", "=", "1", ")", "assert", "len", "(", "(", "prediction", "==", "10", ")", ")", "os", ".", "system", "(", "(", "'rm -rf %s'", "%", "tempdir", ")", ")" ]
test that the embedding_attention model works .
train
false
37,562
def _calculate_delta_pos(adjacency_arr, pos, t, optimal): delta = (pos[:, np.newaxis, :] - pos) distance2 = (delta * delta).sum(axis=(-1)) distance2 = np.where((distance2 < 0.0001), 0.0001, distance2) distance = np.sqrt(distance2) displacement = np.zeros((len(delta), 2)) for ii in range(2): displacement[:, ii] = (delta[:, :, ii] * (((optimal * optimal) / (distance * distance)) - ((adjacency_arr * distance) / optimal))).sum(axis=1) length = np.sqrt((displacement ** 2).sum(axis=1)) length = np.where((length < 0.01), 0.1, length) delta_pos = ((displacement * t) / length[:, np.newaxis]) return delta_pos
[ "def", "_calculate_delta_pos", "(", "adjacency_arr", ",", "pos", ",", "t", ",", "optimal", ")", ":", "delta", "=", "(", "pos", "[", ":", ",", "np", ".", "newaxis", ",", ":", "]", "-", "pos", ")", "distance2", "=", "(", "delta", "*", "delta", ")", ".", "sum", "(", "axis", "=", "(", "-", "1", ")", ")", "distance2", "=", "np", ".", "where", "(", "(", "distance2", "<", "0.0001", ")", ",", "0.0001", ",", "distance2", ")", "distance", "=", "np", ".", "sqrt", "(", "distance2", ")", "displacement", "=", "np", ".", "zeros", "(", "(", "len", "(", "delta", ")", ",", "2", ")", ")", "for", "ii", "in", "range", "(", "2", ")", ":", "displacement", "[", ":", ",", "ii", "]", "=", "(", "delta", "[", ":", ",", ":", ",", "ii", "]", "*", "(", "(", "(", "optimal", "*", "optimal", ")", "/", "(", "distance", "*", "distance", ")", ")", "-", "(", "(", "adjacency_arr", "*", "distance", ")", "/", "optimal", ")", ")", ")", ".", "sum", "(", "axis", "=", "1", ")", "length", "=", "np", ".", "sqrt", "(", "(", "displacement", "**", "2", ")", ".", "sum", "(", "axis", "=", "1", ")", ")", "length", "=", "np", ".", "where", "(", "(", "length", "<", "0.01", ")", ",", "0.1", ",", "length", ")", "delta_pos", "=", "(", "(", "displacement", "*", "t", ")", "/", "length", "[", ":", ",", "np", ".", "newaxis", "]", ")", "return", "delta_pos" ]
helper to calculate the delta position .
train
true
37,563
def _raise_warnings(image_properties): ip = image_properties if ip.unsupported_dtype: warn('Non-standard image type; displaying image with stretched contrast.') if ip.low_data_range: warn('Low image data range; displaying image with stretched contrast.') if ip.out_of_range_float: warn('Float image out of standard range; displaying image with stretched contrast.')
[ "def", "_raise_warnings", "(", "image_properties", ")", ":", "ip", "=", "image_properties", "if", "ip", ".", "unsupported_dtype", ":", "warn", "(", "'Non-standard image type; displaying image with stretched contrast.'", ")", "if", "ip", ".", "low_data_range", ":", "warn", "(", "'Low image data range; displaying image with stretched contrast.'", ")", "if", "ip", ".", "out_of_range_float", ":", "warn", "(", "'Float image out of standard range; displaying image with stretched contrast.'", ")" ]
raise the appropriate warning for each nonstandard image type .
train
false
37,564
def assert_datasource_protocol(event): assert (event.type in DATASOURCE_TYPE) if (not (event.type == DATASOURCE_TYPE.DONE)): assert isinstance(event.dt, datetime) assert (event.dt.tzinfo == pytz.utc)
[ "def", "assert_datasource_protocol", "(", "event", ")", ":", "assert", "(", "event", ".", "type", "in", "DATASOURCE_TYPE", ")", "if", "(", "not", "(", "event", ".", "type", "==", "DATASOURCE_TYPE", ".", "DONE", ")", ")", ":", "assert", "isinstance", "(", "event", ".", "dt", ",", "datetime", ")", "assert", "(", "event", ".", "dt", ".", "tzinfo", "==", "pytz", ".", "utc", ")" ]
assert that an event meets the protocol for datasource outputs .
train
true
37,566
def filter_identity(x): return x
[ "def", "filter_identity", "(", "x", ")", ":", "return", "x" ]
return the same thing given .
train
false
37,567
def _offset_to_min(utc_offset): match = re.match('^([+-])?(\\d\\d)(\\d\\d)$', utc_offset) if (not match): raise SaltInvocationError('Invalid UTC offset') sign = ((-1) if (match.group(1) == '-') else 1) hours_offset = int(match.group(2)) minutes_offset = int(match.group(3)) total_offset = (sign * ((hours_offset * 60) + minutes_offset)) return total_offset
[ "def", "_offset_to_min", "(", "utc_offset", ")", ":", "match", "=", "re", ".", "match", "(", "'^([+-])?(\\\\d\\\\d)(\\\\d\\\\d)$'", ",", "utc_offset", ")", "if", "(", "not", "match", ")", ":", "raise", "SaltInvocationError", "(", "'Invalid UTC offset'", ")", "sign", "=", "(", "(", "-", "1", ")", "if", "(", "match", ".", "group", "(", "1", ")", "==", "'-'", ")", "else", "1", ")", "hours_offset", "=", "int", "(", "match", ".", "group", "(", "2", ")", ")", "minutes_offset", "=", "int", "(", "match", ".", "group", "(", "3", ")", ")", "total_offset", "=", "(", "sign", "*", "(", "(", "hours_offset", "*", "60", ")", "+", "minutes_offset", ")", ")", "return", "total_offset" ]
helper function that converts the utc offset string into number of minutes offset .
train
true
37,568
def linux_shutdown(): if (not HAVE_DBUS): os._exit(0) (proxy, interface) = _get_sessionproxy() if proxy: if proxy.CanShutdown(): proxy.Shutdown(dbus_interface=interface) else: (proxy, interface, _pinterface) = _get_systemproxy('ConsoleKit') if proxy: if proxy.CanStop(dbus_interface=interface): try: proxy.Stop(dbus_interface=interface) except dbus.exceptions.DBusException as msg: logging.info('Received a DBus exception %s', msg) else: logging.info('DBus does not support Stop (shutdown)') os._exit(0)
[ "def", "linux_shutdown", "(", ")", ":", "if", "(", "not", "HAVE_DBUS", ")", ":", "os", ".", "_exit", "(", "0", ")", "(", "proxy", ",", "interface", ")", "=", "_get_sessionproxy", "(", ")", "if", "proxy", ":", "if", "proxy", ".", "CanShutdown", "(", ")", ":", "proxy", ".", "Shutdown", "(", "dbus_interface", "=", "interface", ")", "else", ":", "(", "proxy", ",", "interface", ",", "_pinterface", ")", "=", "_get_systemproxy", "(", "'ConsoleKit'", ")", "if", "proxy", ":", "if", "proxy", ".", "CanStop", "(", "dbus_interface", "=", "interface", ")", ":", "try", ":", "proxy", ".", "Stop", "(", "dbus_interface", "=", "interface", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", "as", "msg", ":", "logging", ".", "info", "(", "'Received a DBus exception %s'", ",", "msg", ")", "else", ":", "logging", ".", "info", "(", "'DBus does not support Stop (shutdown)'", ")", "os", ".", "_exit", "(", "0", ")" ]
make linux system shutdown .
train
false
37,569
def build_and_run(args): cy_args = [] last_arg = None for (i, arg) in enumerate(args): if arg.startswith('-'): cy_args.append(arg) elif (last_arg in ('-X', '--directive')): cy_args.append(arg) else: input_file = arg args = args[(i + 1):] break last_arg = arg else: raise ValueError('no input file provided') program_name = build(input_file, cy_args) exec_file(program_name, args)
[ "def", "build_and_run", "(", "args", ")", ":", "cy_args", "=", "[", "]", "last_arg", "=", "None", "for", "(", "i", ",", "arg", ")", "in", "enumerate", "(", "args", ")", ":", "if", "arg", ".", "startswith", "(", "'-'", ")", ":", "cy_args", ".", "append", "(", "arg", ")", "elif", "(", "last_arg", "in", "(", "'-X'", ",", "'--directive'", ")", ")", ":", "cy_args", ".", "append", "(", "arg", ")", "else", ":", "input_file", "=", "arg", "args", "=", "args", "[", "(", "i", "+", "1", ")", ":", "]", "break", "last_arg", "=", "arg", "else", ":", "raise", "ValueError", "(", "'no input file provided'", ")", "program_name", "=", "build", "(", "input_file", ",", "cy_args", ")", "exec_file", "(", "program_name", ",", "args", ")" ]
build an executable program from a cython module and runs it .
train
false
37,570
def three_to_index(s): return d3_to_index[s]
[ "def", "three_to_index", "(", "s", ")", ":", "return", "d3_to_index", "[", "s", "]" ]
three letter code to index .
train
false
37,571
def enumerate(): with _active_limbo_lock: return (_active.values() + _limbo.values())
[ "def", "enumerate", "(", ")", ":", "with", "_active_limbo_lock", ":", "return", "(", "_active", ".", "values", "(", ")", "+", "_limbo", ".", "values", "(", ")", ")" ]
return a list of all thread objects currently alive .
train
false
37,573
def parsedate(data): t = parsedate_tz(data) if (t is None): return t return t[:9]
[ "def", "parsedate", "(", "data", ")", ":", "t", "=", "parsedate_tz", "(", "data", ")", "if", "(", "t", "is", "None", ")", ":", "return", "t", "return", "t", "[", ":", "9", "]" ]
convert a time string to a time tuple .
train
false
37,575
def display_graph(graphdef): mestate.console.write(('Expression: %s\n' % ' '.join(graphdef.expression.split()))) mg = grapher.MavGraph() mg.set_marker(mestate.settings.marker) mg.set_condition(mestate.settings.condition) mg.set_xaxis(mestate.settings.xaxis) mg.set_linestyle(mestate.settings.linestyle) mg.set_show_flightmode(mestate.settings.show_flightmode) mg.set_legend(mestate.settings.legend) mg.add_mav(mestate.mlog) for f in graphdef.expression.split(): mg.add_field(f) mg.process(mestate.flightmode_selections, mestate.mlog._flightmodes) lenmavlist = len(mg.mav_list) mg.mav_list = [] child = multiprocessing.Process(target=graph_process, args=[mg, lenmavlist]) child.start() mestate.mlog.rewind()
[ "def", "display_graph", "(", "graphdef", ")", ":", "mestate", ".", "console", ".", "write", "(", "(", "'Expression: %s\\n'", "%", "' '", ".", "join", "(", "graphdef", ".", "expression", ".", "split", "(", ")", ")", ")", ")", "mg", "=", "grapher", ".", "MavGraph", "(", ")", "mg", ".", "set_marker", "(", "mestate", ".", "settings", ".", "marker", ")", "mg", ".", "set_condition", "(", "mestate", ".", "settings", ".", "condition", ")", "mg", ".", "set_xaxis", "(", "mestate", ".", "settings", ".", "xaxis", ")", "mg", ".", "set_linestyle", "(", "mestate", ".", "settings", ".", "linestyle", ")", "mg", ".", "set_show_flightmode", "(", "mestate", ".", "settings", ".", "show_flightmode", ")", "mg", ".", "set_legend", "(", "mestate", ".", "settings", ".", "legend", ")", "mg", ".", "add_mav", "(", "mestate", ".", "mlog", ")", "for", "f", "in", "graphdef", ".", "expression", ".", "split", "(", ")", ":", "mg", ".", "add_field", "(", "f", ")", "mg", ".", "process", "(", "mestate", ".", "flightmode_selections", ",", "mestate", ".", "mlog", ".", "_flightmodes", ")", "lenmavlist", "=", "len", "(", "mg", ".", "mav_list", ")", "mg", ".", "mav_list", "=", "[", "]", "child", "=", "multiprocessing", ".", "Process", "(", "target", "=", "graph_process", ",", "args", "=", "[", "mg", ",", "lenmavlist", "]", ")", "child", ".", "start", "(", ")", "mestate", ".", "mlog", ".", "rewind", "(", ")" ]
display a termgraph interactively from within ipython .
train
false
37,577
def _nodes(e): from .basic import Basic if isinstance(e, Basic): return e.count(Basic) elif iterable(e): return (1 + sum((_nodes(ei) for ei in e))) elif isinstance(e, dict): return (1 + sum(((_nodes(k) + _nodes(v)) for (k, v) in e.items()))) else: return 1
[ "def", "_nodes", "(", "e", ")", ":", "from", ".", "basic", "import", "Basic", "if", "isinstance", "(", "e", ",", "Basic", ")", ":", "return", "e", ".", "count", "(", "Basic", ")", "elif", "iterable", "(", "e", ")", ":", "return", "(", "1", "+", "sum", "(", "(", "_nodes", "(", "ei", ")", "for", "ei", "in", "e", ")", ")", ")", "elif", "isinstance", "(", "e", ",", "dict", ")", ":", "return", "(", "1", "+", "sum", "(", "(", "(", "_nodes", "(", "k", ")", "+", "_nodes", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "e", ".", "items", "(", ")", ")", ")", ")", "else", ":", "return", "1" ]
a helper for ordered() which returns the node count of e which for basic objects is the number of basic nodes in the expression tree but for other objects is 1 .
train
true
37,580
def arcsec(val): return numpy.arccos((1.0 / val))
[ "def", "arcsec", "(", "val", ")", ":", "return", "numpy", ".", "arccos", "(", "(", "1.0", "/", "val", ")", ")" ]
inverse secant .
train
false
37,581
def survey_answerlist_dataTable_pre(): list_fields = ['created_on', 'series_id', 'location', 'modified_by'] current.s3db.configure('survey_complete', list_fields=list_fields)
[ "def", "survey_answerlist_dataTable_pre", "(", ")", ":", "list_fields", "=", "[", "'created_on'", ",", "'series_id'", ",", "'location'", ",", "'modified_by'", "]", "current", ".", "s3db", ".", "configure", "(", "'survey_complete'", ",", "list_fields", "=", "list_fields", ")" ]
the answer list has been removed for the moment .
train
false
37,582
def console_auth_token_create(context, values): return IMPL.console_auth_token_create(context, values)
[ "def", "console_auth_token_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "console_auth_token_create", "(", "context", ",", "values", ")" ]
create a console authorization .
train
false
37,583
@depends(_HAS_WIN32TS_DEPENDENCIES) def logoff_session(session_id): try: win32ts.WTSLogoffSession(win32ts.WTS_CURRENT_SERVER_HANDLE, session_id, True) except PyWinError as error: _LOG.error('Error calling WTSLogoffSession: %s', error) return False return True
[ "@", "depends", "(", "_HAS_WIN32TS_DEPENDENCIES", ")", "def", "logoff_session", "(", "session_id", ")", ":", "try", ":", "win32ts", ".", "WTSLogoffSession", "(", "win32ts", ".", "WTS_CURRENT_SERVER_HANDLE", ",", "session_id", ",", "True", ")", "except", "PyWinError", "as", "error", ":", "_LOG", ".", "error", "(", "'Error calling WTSLogoffSession: %s'", ",", "error", ")", "return", "False", "return", "True" ]
initiate the logoff of a session .
train
true
37,584
def need_join(model, table): return (table not in model._sa_class_manager.mapper.tables)
[ "def", "need_join", "(", "model", ",", "table", ")", ":", "return", "(", "table", "not", "in", "model", ".", "_sa_class_manager", ".", "mapper", ".", "tables", ")" ]
check if join to a table is necessary .
train
false
37,585
def isEven(x): return ((x % 2) == 0)
[ "def", "isEven", "(", "x", ")", ":", "return", "(", "(", "x", "%", "2", ")", "==", "0", ")" ]
test predicate .
train
false
37,588
def _property(methode, zone, key, value): ret = {'status': True} cfg_file = None if (methode not in ['set', 'clear']): ret['status'] = False ret['message'] = 'unkown methode {0}!'.format(methode) else: cfg_file = salt.utils.files.mkstemp() with salt.utils.fpopen(cfg_file, 'w+', mode=384) as fp_: if (methode == 'set'): fp_.write('{0} {1}={2}\n'.format(methode, key, value)) elif (methode == 'clear'): fp_.write('{0} {1}\n'.format(methode, key)) if cfg_file: res = __salt__['cmd.run_all']('zonecfg -z {zone} -f {path}'.format(zone=zone, path=cfg_file)) ret['status'] = (res['retcode'] == 0) ret['message'] = (res['stdout'] if ret['status'] else res['stderr']) ret['message'] = ret['message'].replace('zonecfg: ', '') if (ret['message'] == ''): del ret['message'] __salt__['file.remove'](cfg_file) return ret
[ "def", "_property", "(", "methode", ",", "zone", ",", "key", ",", "value", ")", ":", "ret", "=", "{", "'status'", ":", "True", "}", "cfg_file", "=", "None", "if", "(", "methode", "not", "in", "[", "'set'", ",", "'clear'", "]", ")", ":", "ret", "[", "'status'", "]", "=", "False", "ret", "[", "'message'", "]", "=", "'unkown methode {0}!'", ".", "format", "(", "methode", ")", "else", ":", "cfg_file", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "with", "salt", ".", "utils", ".", "fpopen", "(", "cfg_file", ",", "'w+'", ",", "mode", "=", "384", ")", "as", "fp_", ":", "if", "(", "methode", "==", "'set'", ")", ":", "fp_", ".", "write", "(", "'{0} {1}={2}\\n'", ".", "format", "(", "methode", ",", "key", ",", "value", ")", ")", "elif", "(", "methode", "==", "'clear'", ")", ":", "fp_", ".", "write", "(", "'{0} {1}\\n'", ".", "format", "(", "methode", ",", "key", ")", ")", "if", "cfg_file", ":", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'zonecfg -z {zone} -f {path}'", ".", "format", "(", "zone", "=", "zone", ",", "path", "=", "cfg_file", ")", ")", "ret", "[", "'status'", "]", "=", "(", "res", "[", "'retcode'", "]", "==", "0", ")", "ret", "[", "'message'", "]", "=", "(", "res", "[", "'stdout'", "]", "if", "ret", "[", "'status'", "]", "else", "res", "[", "'stderr'", "]", ")", "ret", "[", "'message'", "]", "=", "ret", "[", "'message'", "]", ".", "replace", "(", "'zonecfg: '", ",", "''", ")", "if", "(", "ret", "[", "'message'", "]", "==", "''", ")", ":", "del", "ret", "[", "'message'", "]", "__salt__", "[", "'file.remove'", "]", "(", "cfg_file", ")", "return", "ret" ]
internal handler for set and clear_property methode : string either set .
train
false
37,589
def status_charge(): data = status() if ('BCHARGE' in data): charge = data['BCHARGE'].split() if (charge[1].lower() == 'percent'): return float(charge[0]) return {'Error': 'Load not available.'}
[ "def", "status_charge", "(", ")", ":", "data", "=", "status", "(", ")", "if", "(", "'BCHARGE'", "in", "data", ")", ":", "charge", "=", "data", "[", "'BCHARGE'", "]", ".", "split", "(", ")", "if", "(", "charge", "[", "1", "]", ".", "lower", "(", ")", "==", "'percent'", ")", ":", "return", "float", "(", "charge", "[", "0", "]", ")", "return", "{", "'Error'", ":", "'Load not available.'", "}" ]
return battery charge cli example: .
train
true
37,591
def add_to_reconciler_queue(container_ring, account, container, obj, obj_policy_index, obj_timestamp, op, force=False, conn_timeout=5, response_timeout=15): container_name = get_reconciler_container_name(obj_timestamp) object_name = get_reconciler_obj_name(obj_policy_index, account, container, obj) if force: x_timestamp = Timestamp(time.time()).internal else: x_timestamp = obj_timestamp q_op_type = get_reconciler_content_type(op) headers = {'X-Size': 0, 'X-Etag': obj_timestamp, 'X-Timestamp': x_timestamp, 'X-Content-Type': q_op_type} def _check_success(*args, **kwargs): try: direct_put_container_object(*args, **kwargs) return 1 except (ClientException, Timeout, socket.error): return 0 pile = GreenPile() (part, nodes) = container_ring.get_nodes(MISPLACED_OBJECTS_ACCOUNT, container_name) for node in nodes: pile.spawn(_check_success, node, part, MISPLACED_OBJECTS_ACCOUNT, container_name, object_name, headers=headers, conn_timeout=conn_timeout, response_timeout=response_timeout) successes = sum(pile) if (successes >= majority_size(len(nodes))): return container_name else: return False
[ "def", "add_to_reconciler_queue", "(", "container_ring", ",", "account", ",", "container", ",", "obj", ",", "obj_policy_index", ",", "obj_timestamp", ",", "op", ",", "force", "=", "False", ",", "conn_timeout", "=", "5", ",", "response_timeout", "=", "15", ")", ":", "container_name", "=", "get_reconciler_container_name", "(", "obj_timestamp", ")", "object_name", "=", "get_reconciler_obj_name", "(", "obj_policy_index", ",", "account", ",", "container", ",", "obj", ")", "if", "force", ":", "x_timestamp", "=", "Timestamp", "(", "time", ".", "time", "(", ")", ")", ".", "internal", "else", ":", "x_timestamp", "=", "obj_timestamp", "q_op_type", "=", "get_reconciler_content_type", "(", "op", ")", "headers", "=", "{", "'X-Size'", ":", "0", ",", "'X-Etag'", ":", "obj_timestamp", ",", "'X-Timestamp'", ":", "x_timestamp", ",", "'X-Content-Type'", ":", "q_op_type", "}", "def", "_check_success", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "direct_put_container_object", "(", "*", "args", ",", "**", "kwargs", ")", "return", "1", "except", "(", "ClientException", ",", "Timeout", ",", "socket", ".", "error", ")", ":", "return", "0", "pile", "=", "GreenPile", "(", ")", "(", "part", ",", "nodes", ")", "=", "container_ring", ".", "get_nodes", "(", "MISPLACED_OBJECTS_ACCOUNT", ",", "container_name", ")", "for", "node", "in", "nodes", ":", "pile", ".", "spawn", "(", "_check_success", ",", "node", ",", "part", ",", "MISPLACED_OBJECTS_ACCOUNT", ",", "container_name", ",", "object_name", ",", "headers", "=", "headers", ",", "conn_timeout", "=", "conn_timeout", ",", "response_timeout", "=", "response_timeout", ")", "successes", "=", "sum", "(", "pile", ")", "if", "(", "successes", ">=", "majority_size", "(", "len", "(", "nodes", ")", ")", ")", ":", "return", "container_name", "else", ":", "return", "False" ]
add an object to the container reconcilers queue .
train
false
37,592
def _get_all(context, session, filters=None, marker=None, limit=None, sort_keys=None, sort_dirs=None, show_level=ga.Showlevel.NONE): filters = (filters or {}) query = _do_artifacts_query(context, session, show_level) (basic_conds, tag_conds, prop_conds) = _do_query_filters(filters) if basic_conds: for basic_condition in basic_conds: query = query.filter(and_(*basic_condition)) if tag_conds: for tag_condition in tag_conds: query = query.join(models.ArtifactTag, aliased=True).filter(and_(*tag_condition)) if prop_conds: for prop_condition in prop_conds: query = query.join(models.ArtifactProperty, aliased=True).filter(and_(*prop_condition)) marker_artifact = None if (marker is not None): marker_artifact = _get(context, marker, session, None, None) if (sort_keys is None): sort_keys = [('created_at', None), ('id', None)] sort_dirs = ['desc', 'desc'] else: for key in [('created_at', None), ('id', None)]: if (key not in sort_keys): sort_keys.append(key) sort_dirs.append('desc') if (('version', None) in sort_keys): i = sort_keys.index(('version', None)) version_sort_dir = sort_dirs[i] sort_keys[i:(i + 1)] = [('version_prefix', None), ('version_suffix', None), ('version_meta', None)] sort_dirs[i:(i + 1)] = ([version_sort_dir] * 3) query = _do_paginate_query(query=query, limit=limit, sort_keys=sort_keys, marker=marker_artifact, sort_dirs=sort_dirs) return query.all()
[ "def", "_get_all", "(", "context", ",", "session", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ",", "show_level", "=", "ga", ".", "Showlevel", ".", "NONE", ")", ":", "filters", "=", "(", "filters", "or", "{", "}", ")", "query", "=", "_do_artifacts_query", "(", "context", ",", "session", ",", "show_level", ")", "(", "basic_conds", ",", "tag_conds", ",", "prop_conds", ")", "=", "_do_query_filters", "(", "filters", ")", "if", "basic_conds", ":", "for", "basic_condition", "in", "basic_conds", ":", "query", "=", "query", ".", "filter", "(", "and_", "(", "*", "basic_condition", ")", ")", "if", "tag_conds", ":", "for", "tag_condition", "in", "tag_conds", ":", "query", "=", "query", ".", "join", "(", "models", ".", "ArtifactTag", ",", "aliased", "=", "True", ")", ".", "filter", "(", "and_", "(", "*", "tag_condition", ")", ")", "if", "prop_conds", ":", "for", "prop_condition", "in", "prop_conds", ":", "query", "=", "query", ".", "join", "(", "models", ".", "ArtifactProperty", ",", "aliased", "=", "True", ")", ".", "filter", "(", "and_", "(", "*", "prop_condition", ")", ")", "marker_artifact", "=", "None", "if", "(", "marker", "is", "not", "None", ")", ":", "marker_artifact", "=", "_get", "(", "context", ",", "marker", ",", "session", ",", "None", ",", "None", ")", "if", "(", "sort_keys", "is", "None", ")", ":", "sort_keys", "=", "[", "(", "'created_at'", ",", "None", ")", ",", "(", "'id'", ",", "None", ")", "]", "sort_dirs", "=", "[", "'desc'", ",", "'desc'", "]", "else", ":", "for", "key", "in", "[", "(", "'created_at'", ",", "None", ")", ",", "(", "'id'", ",", "None", ")", "]", ":", "if", "(", "key", "not", "in", "sort_keys", ")", ":", "sort_keys", ".", "append", "(", "key", ")", "sort_dirs", ".", "append", "(", "'desc'", ")", "if", "(", "(", "'version'", ",", "None", ")", "in", "sort_keys", ")", ":", "i", "=", "sort_keys", ".", "index", "(", "(", "'version'", ",", 
"None", ")", ")", "version_sort_dir", "=", "sort_dirs", "[", "i", "]", "sort_keys", "[", "i", ":", "(", "i", "+", "1", ")", "]", "=", "[", "(", "'version_prefix'", ",", "None", ")", ",", "(", "'version_suffix'", ",", "None", ")", ",", "(", "'version_meta'", ",", "None", ")", "]", "sort_dirs", "[", "i", ":", "(", "i", "+", "1", ")", "]", "=", "(", "[", "version_sort_dir", "]", "*", "3", ")", "query", "=", "_do_paginate_query", "(", "query", "=", "query", ",", "limit", "=", "limit", ",", "sort_keys", "=", "sort_keys", ",", "marker", "=", "marker_artifact", ",", "sort_dirs", "=", "sort_dirs", ")", "return", "query", ".", "all", "(", ")" ]
get all namespaces that match zero or more filters .
train
false
37,593
def record_registration_attributions(request, user): record_affiliate_registration_attribution(request, user) record_utm_registration_attribution(request, user)
[ "def", "record_registration_attributions", "(", "request", ",", "user", ")", ":", "record_affiliate_registration_attribution", "(", "request", ",", "user", ")", "record_utm_registration_attribution", "(", "request", ",", "user", ")" ]
attribute this users registration based on referrer cookies .
train
false
37,594
def print_profile_info(results): print('Profile Infos:') info = results.get('profileInfo') print(('Account Id = %s' % info.get('accountId'))) print(('Web Property Id = %s' % info.get('webPropertyId'))) print(('Profile Id = %s' % info.get('profileId'))) print(('Table Id = %s' % info.get('tableId'))) print(('Profile Name = %s' % info.get('profileName'))) print()
[ "def", "print_profile_info", "(", "results", ")", ":", "print", "(", "'Profile Infos:'", ")", "info", "=", "results", ".", "get", "(", "'profileInfo'", ")", "print", "(", "(", "'Account Id = %s'", "%", "info", ".", "get", "(", "'accountId'", ")", ")", ")", "print", "(", "(", "'Web Property Id = %s'", "%", "info", ".", "get", "(", "'webPropertyId'", ")", ")", ")", "print", "(", "(", "'Profile Id = %s'", "%", "info", ".", "get", "(", "'profileId'", ")", ")", ")", "print", "(", "(", "'Table Id = %s'", "%", "info", ".", "get", "(", "'tableId'", ")", ")", ")", "print", "(", "(", "'Profile Name = %s'", "%", "info", ".", "get", "(", "'profileName'", ")", ")", ")", "print", "(", ")" ]
prints information about the profile .
train
false
37,595
def admin_wrapper(request): return staff_member_required(site.index)(request)
[ "def", "admin_wrapper", "(", "request", ")", ":", "return", "staff_member_required", "(", "site", ".", "index", ")", "(", "request", ")" ]
wrapper that allows us to properly use the base django admin site .
train
false