id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
44,040
def configuration_model(aseq, bseq, create_using=None, seed=None): if (create_using is None): create_using = networkx.MultiGraph() elif create_using.is_directed(): raise networkx.NetworkXError('Directed Graph not supported') G = networkx.empty_graph(0, create_using) if (not (seed is None)): random.seed(seed) lena = len(aseq) lenb = len(bseq) suma = sum(aseq) sumb = sum(bseq) if (not (suma == sumb)): raise networkx.NetworkXError(('invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s' % (suma, sumb))) G = _add_nodes_with_bipartite_label(G, lena, lenb) if (max(aseq) == 0): return G stubs = [] stubs.extend([([v] * aseq[v]) for v in range(0, lena)]) astubs = [] astubs = [x for subseq in stubs for x in subseq] stubs = [] stubs.extend([([v] * bseq[(v - lena)]) for v in range(lena, (lena + lenb))]) bstubs = [] bstubs = [x for subseq in stubs for x in subseq] random.shuffle(astubs) random.shuffle(bstubs) G.add_edges_from([[astubs[i], bstubs[i]] for i in range(suma)]) G.name = 'bipartite_configuration_model' return G
[ "def", "configuration_model", "(", "aseq", ",", "bseq", ",", "create_using", "=", "None", ",", "seed", "=", "None", ")", ":", "if", "(", "create_using", "is", "None", ")", ":", "create_using", "=", "networkx", ".", "MultiGraph", "(", ")", "elif", "create_using", ".", "is_directed", "(", ")", ":", "raise", "networkx", ".", "NetworkXError", "(", "'Directed Graph not supported'", ")", "G", "=", "networkx", ".", "empty_graph", "(", "0", ",", "create_using", ")", "if", "(", "not", "(", "seed", "is", "None", ")", ")", ":", "random", ".", "seed", "(", "seed", ")", "lena", "=", "len", "(", "aseq", ")", "lenb", "=", "len", "(", "bseq", ")", "suma", "=", "sum", "(", "aseq", ")", "sumb", "=", "sum", "(", "bseq", ")", "if", "(", "not", "(", "suma", "==", "sumb", ")", ")", ":", "raise", "networkx", ".", "NetworkXError", "(", "(", "'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s'", "%", "(", "suma", ",", "sumb", ")", ")", ")", "G", "=", "_add_nodes_with_bipartite_label", "(", "G", ",", "lena", ",", "lenb", ")", "if", "(", "max", "(", "aseq", ")", "==", "0", ")", ":", "return", "G", "stubs", "=", "[", "]", "stubs", ".", "extend", "(", "[", "(", "[", "v", "]", "*", "aseq", "[", "v", "]", ")", "for", "v", "in", "range", "(", "0", ",", "lena", ")", "]", ")", "astubs", "=", "[", "]", "astubs", "=", "[", "x", "for", "subseq", "in", "stubs", "for", "x", "in", "subseq", "]", "stubs", "=", "[", "]", "stubs", ".", "extend", "(", "[", "(", "[", "v", "]", "*", "bseq", "[", "(", "v", "-", "lena", ")", "]", ")", "for", "v", "in", "range", "(", "lena", ",", "(", "lena", "+", "lenb", ")", ")", "]", ")", "bstubs", "=", "[", "]", "bstubs", "=", "[", "x", "for", "subseq", "in", "stubs", "for", "x", "in", "subseq", "]", "random", ".", "shuffle", "(", "astubs", ")", "random", ".", "shuffle", "(", "bstubs", ")", "G", ".", "add_edges_from", "(", "[", "[", "astubs", "[", "i", "]", ",", "bstubs", "[", "i", "]", "]", "for", "i", "in", "range", "(", "suma", ")", "]", ")", "G", ".", "name", "=", "'bipartite_configuration_model'", "return", "G" ]
return a random graph with the given degree sequence .
train
false
44,041
def getAwayPath(path, radius): if (len(path) < 2): return path lastPoint = path[(-1)] awayPath = getAwayPoints(path, radius) if (len(awayPath) == 0): return [lastPoint] if (abs((lastPoint - awayPath[(-1)])) > (0.001 * radius)): awayPath.append(lastPoint) return awayPath
[ "def", "getAwayPath", "(", "path", ",", "radius", ")", ":", "if", "(", "len", "(", "path", ")", "<", "2", ")", ":", "return", "path", "lastPoint", "=", "path", "[", "(", "-", "1", ")", "]", "awayPath", "=", "getAwayPoints", "(", "path", ",", "radius", ")", "if", "(", "len", "(", "awayPath", ")", "==", "0", ")", ":", "return", "[", "lastPoint", "]", "if", "(", "abs", "(", "(", "lastPoint", "-", "awayPath", "[", "(", "-", "1", ")", "]", ")", ")", ">", "(", "0.001", "*", "radius", ")", ")", ":", "awayPath", ".", "append", "(", "lastPoint", ")", "return", "awayPath" ]
get a path with only the points that are far enough away from each other .
train
false
44,044
def _evaluate_standard(op, op_str, a, b, raise_on_error=True, **eval_kwargs): if _TEST_MODE: _store_test_result(False) with np.errstate(all='ignore'): return op(a, b)
[ "def", "_evaluate_standard", "(", "op", ",", "op_str", ",", "a", ",", "b", ",", "raise_on_error", "=", "True", ",", "**", "eval_kwargs", ")", ":", "if", "_TEST_MODE", ":", "_store_test_result", "(", "False", ")", "with", "np", ".", "errstate", "(", "all", "=", "'ignore'", ")", ":", "return", "op", "(", "a", ",", "b", ")" ]
standard evaluation .
train
true
44,045
def _AddStrMethod(message_descriptor, cls): def __str__(self): return text_format.MessageToString(self) cls.__str__ = __str__
[ "def", "_AddStrMethod", "(", "message_descriptor", ",", "cls", ")", ":", "def", "__str__", "(", "self", ")", ":", "return", "text_format", ".", "MessageToString", "(", "self", ")", "cls", ".", "__str__", "=", "__str__" ]
helper for _addmessagemethods() .
train
true
44,046
def get_cycles_per_ms(): global _cycles_per_ms if (_cycles_per_ms is None): start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() torch.cuda._sleep(1000000) end.record() end.synchronize() _cycles_per_ms = (1000000 / start.elapsed_time(end)) return _cycles_per_ms
[ "def", "get_cycles_per_ms", "(", ")", ":", "global", "_cycles_per_ms", "if", "(", "_cycles_per_ms", "is", "None", ")", ":", "start", "=", "torch", ".", "cuda", ".", "Event", "(", "enable_timing", "=", "True", ")", "end", "=", "torch", ".", "cuda", ".", "Event", "(", "enable_timing", "=", "True", ")", "start", ".", "record", "(", ")", "torch", ".", "cuda", ".", "_sleep", "(", "1000000", ")", "end", ".", "record", "(", ")", "end", ".", "synchronize", "(", ")", "_cycles_per_ms", "=", "(", "1000000", "/", "start", ".", "elapsed_time", "(", "end", ")", ")", "return", "_cycles_per_ms" ]
approximate number of cycles per millisecond for torch .
train
false
44,047
def vehicle_type(): return s3_rest_controller()
[ "def", "vehicle_type", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
44,049
def _print_rate_limits(limits): columns = ['Verb', 'URI', 'Value', 'Remain', 'Unit', 'Next_Available'] utils.print_list(limits, columns)
[ "def", "_print_rate_limits", "(", "limits", ")", ":", "columns", "=", "[", "'Verb'", ",", "'URI'", ",", "'Value'", ",", "'Remain'", ",", "'Unit'", ",", "'Next_Available'", "]", "utils", ".", "print_list", "(", "limits", ",", "columns", ")" ]
print rate limits .
train
false
44,050
def flatten_choices_dict(choices): ret = OrderedDict() for (key, value) in choices.items(): if isinstance(value, dict): for (sub_key, sub_value) in value.items(): ret[sub_key] = sub_value else: ret[key] = value return ret
[ "def", "flatten_choices_dict", "(", "choices", ")", ":", "ret", "=", "OrderedDict", "(", ")", "for", "(", "key", ",", "value", ")", "in", "choices", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "for", "(", "sub_key", ",", "sub_value", ")", "in", "value", ".", "items", "(", ")", ":", "ret", "[", "sub_key", "]", "=", "sub_value", "else", ":", "ret", "[", "key", "]", "=", "value", "return", "ret" ]
convert a group choices dict into a flat dict of choices .
train
true
44,051
def cxESTwoPoint(ind1, ind2): size = min(len(ind1), len(ind2)) pt1 = random.randint(1, size) pt2 = random.randint(1, (size - 1)) if (pt2 >= pt1): pt2 += 1 else: (pt1, pt2) = (pt2, pt1) (ind1[pt1:pt2], ind2[pt1:pt2]) = (ind2[pt1:pt2], ind1[pt1:pt2]) (ind1.strategy[pt1:pt2], ind2.strategy[pt1:pt2]) = (ind2.strategy[pt1:pt2], ind1.strategy[pt1:pt2]) return (ind1, ind2)
[ "def", "cxESTwoPoint", "(", "ind1", ",", "ind2", ")", ":", "size", "=", "min", "(", "len", "(", "ind1", ")", ",", "len", "(", "ind2", ")", ")", "pt1", "=", "random", ".", "randint", "(", "1", ",", "size", ")", "pt2", "=", "random", ".", "randint", "(", "1", ",", "(", "size", "-", "1", ")", ")", "if", "(", "pt2", ">=", "pt1", ")", ":", "pt2", "+=", "1", "else", ":", "(", "pt1", ",", "pt2", ")", "=", "(", "pt2", ",", "pt1", ")", "(", "ind1", "[", "pt1", ":", "pt2", "]", ",", "ind2", "[", "pt1", ":", "pt2", "]", ")", "=", "(", "ind2", "[", "pt1", ":", "pt2", "]", ",", "ind1", "[", "pt1", ":", "pt2", "]", ")", "(", "ind1", ".", "strategy", "[", "pt1", ":", "pt2", "]", ",", "ind2", ".", "strategy", "[", "pt1", ":", "pt2", "]", ")", "=", "(", "ind2", ".", "strategy", "[", "pt1", ":", "pt2", "]", ",", "ind1", ".", "strategy", "[", "pt1", ":", "pt2", "]", ")", "return", "(", "ind1", ",", "ind2", ")" ]
executes a classical two points crossover on both the individuals and their strategy .
train
false
44,052
def get_asynchronous_eventlet_pool(size=1000): global ASYNC_EVENTLET_THREAD_POOL_LIST pool = eventlet.GreenPool(size=size) ASYNC_EVENTLET_THREAD_POOL_LIST.append(pool) return pool
[ "def", "get_asynchronous_eventlet_pool", "(", "size", "=", "1000", ")", ":", "global", "ASYNC_EVENTLET_THREAD_POOL_LIST", "pool", "=", "eventlet", ".", "GreenPool", "(", "size", "=", "size", ")", "ASYNC_EVENTLET_THREAD_POOL_LIST", ".", "append", "(", "pool", ")", "return", "pool" ]
return eventlet pool to caller .
train
false
44,053
def get_bootdev(**kwargs): with _IpmiCommand(**kwargs) as s: return s.get_bootdev()
[ "def", "get_bootdev", "(", "**", "kwargs", ")", ":", "with", "_IpmiCommand", "(", "**", "kwargs", ")", "as", "s", ":", "return", "s", ".", "get_bootdev", "(", ")" ]
get current boot device override information .
train
false
44,054
def _gpi11iterator(handle): for inline in handle: if (inline[0] == '!'): continue inrec = inline.rstrip('\n').split(' DCTB ') if (len(inrec) == 1): continue inrec[2] = inrec[2].split('|') inrec[3] = inrec[3].split('|') inrec[7] = inrec[7].split('|') inrec[8] = inrec[8].split('|') (yield dict(zip(GPI11FIELDS, inrec)))
[ "def", "_gpi11iterator", "(", "handle", ")", ":", "for", "inline", "in", "handle", ":", "if", "(", "inline", "[", "0", "]", "==", "'!'", ")", ":", "continue", "inrec", "=", "inline", ".", "rstrip", "(", "'\\n'", ")", ".", "split", "(", "' DCTB '", ")", "if", "(", "len", "(", "inrec", ")", "==", "1", ")", ":", "continue", "inrec", "[", "2", "]", "=", "inrec", "[", "2", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "3", "]", "=", "inrec", "[", "3", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "7", "]", "=", "inrec", "[", "7", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "8", "]", "=", "inrec", "[", "8", "]", ".", "split", "(", "'|'", ")", "(", "yield", "dict", "(", "zip", "(", "GPI11FIELDS", ",", "inrec", ")", ")", ")" ]
read gpi 1 .
train
false
44,055
def effect_mandelbrot(size, extent, quality): return Image()._new(core.effect_mandelbrot(size, extent, quality))
[ "def", "effect_mandelbrot", "(", "size", ",", "extent", ",", "quality", ")", ":", "return", "Image", "(", ")", ".", "_new", "(", "core", ".", "effect_mandelbrot", "(", "size", ",", "extent", ",", "quality", ")", ")" ]
generate a mandelbrot set covering the given extent .
train
false
44,056
def _create_version(): import os from subprocess import check_output, CalledProcessError, STDOUT version = 'Unknown' root = os.path.dirname(os.path.abspath(__file__)) try: with open(os.path.join(root, 'VERSION.txt'), 'r') as f: version = f.read().strip() except IOError as err: print(err) try: version = ('%s (rev %s)' % (version, check_output('git rev-parse --short HEAD', shell=True, cwd=root, stderr=STDOUT).strip())) except (IOError, CalledProcessError): pass return version
[ "def", "_create_version", "(", ")", ":", "import", "os", "from", "subprocess", "import", "check_output", ",", "CalledProcessError", ",", "STDOUT", "version", "=", "'Unknown'", "root", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "try", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "root", ",", "'VERSION.txt'", ")", ",", "'r'", ")", "as", "f", ":", "version", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "except", "IOError", "as", "err", ":", "print", "(", "err", ")", "try", ":", "version", "=", "(", "'%s (rev %s)'", "%", "(", "version", ",", "check_output", "(", "'git rev-parse --short HEAD'", ",", "shell", "=", "True", ",", "cwd", "=", "root", ",", "stderr", "=", "STDOUT", ")", ".", "strip", "(", ")", ")", ")", "except", "(", "IOError", ",", "CalledProcessError", ")", ":", "pass", "return", "version" ]
helper function for building the version string .
train
false
44,057
def is_valid_mpls_label(label): if ((not isinstance(label, numbers.Integral)) or (4 <= label <= 15) or ((label < 0) or (label > (2 ** 20)))): return False return True
[ "def", "is_valid_mpls_label", "(", "label", ")", ":", "if", "(", "(", "not", "isinstance", "(", "label", ",", "numbers", ".", "Integral", ")", ")", "or", "(", "4", "<=", "label", "<=", "15", ")", "or", "(", "(", "label", "<", "0", ")", "or", "(", "label", ">", "(", "2", "**", "20", ")", ")", ")", ")", ":", "return", "False", "return", "True" ]
validates label according to mpls label rules rfc says: this 20-bit field .
train
true
44,058
def babel_extract_qweb(fileobj, keywords, comment_tags, options): result = [] def handle_text(text, lineno): result.append((lineno, None, text, [])) tree = etree.parse(fileobj) _extract_translatable_qweb_terms(tree.getroot(), handle_text) return result
[ "def", "babel_extract_qweb", "(", "fileobj", ",", "keywords", ",", "comment_tags", ",", "options", ")", ":", "result", "=", "[", "]", "def", "handle_text", "(", "text", ",", "lineno", ")", ":", "result", ".", "append", "(", "(", "lineno", ",", "None", ",", "text", ",", "[", "]", ")", ")", "tree", "=", "etree", ".", "parse", "(", "fileobj", ")", "_extract_translatable_qweb_terms", "(", "tree", ".", "getroot", "(", ")", ",", "handle_text", ")", "return", "result" ]
babel message extractor for qweb template files .
train
false
44,059
@with_setup(prepare_stdout) def test_run_only_fast_tests(): from lettuce import step good_one = Mock() bad_one = Mock() @step('I wait for 0 seconds') def wait_for_0_seconds(step): good_one(step.sentence) @step('the time passed is 0 seconds') def time_passed_0_sec(step): good_one(step.sentence) @step('I wait for 60 seconds') def wait_for_60_seconds(step): bad_one(step.sentence) @step('the time passed is 1 minute') def time_passed_1_min(step): bad_one(step.sentence) filename = tag_feature_name('timebound') runner = Runner(filename, verbosity=1, tags=['fast-ish']) runner.run() assert_stdout_lines('.\n1 feature (1 passed)\n1 scenario (1 passed)\n2 steps (2 passed)\n')
[ "@", "with_setup", "(", "prepare_stdout", ")", "def", "test_run_only_fast_tests", "(", ")", ":", "from", "lettuce", "import", "step", "good_one", "=", "Mock", "(", ")", "bad_one", "=", "Mock", "(", ")", "@", "step", "(", "'I wait for 0 seconds'", ")", "def", "wait_for_0_seconds", "(", "step", ")", ":", "good_one", "(", "step", ".", "sentence", ")", "@", "step", "(", "'the time passed is 0 seconds'", ")", "def", "time_passed_0_sec", "(", "step", ")", ":", "good_one", "(", "step", ".", "sentence", ")", "@", "step", "(", "'I wait for 60 seconds'", ")", "def", "wait_for_60_seconds", "(", "step", ")", ":", "bad_one", "(", "step", ".", "sentence", ")", "@", "step", "(", "'the time passed is 1 minute'", ")", "def", "time_passed_1_min", "(", "step", ")", ":", "bad_one", "(", "step", ".", "sentence", ")", "filename", "=", "tag_feature_name", "(", "'timebound'", ")", "runner", "=", "Runner", "(", "filename", ",", "verbosity", "=", "1", ",", "tags", "=", "[", "'fast-ish'", "]", ")", "runner", ".", "run", "(", ")", "assert_stdout_lines", "(", "'.\\n1 feature (1 passed)\\n1 scenario (1 passed)\\n2 steps (2 passed)\\n'", ")" ]
runner can filter by tags .
train
false
44,060
def _user_cert_subject(user_id, project_id): return (CONF.crypto.user_cert_subject % (project_id, user_id, utils.isotime()))
[ "def", "_user_cert_subject", "(", "user_id", ",", "project_id", ")", ":", "return", "(", "CONF", ".", "crypto", ".", "user_cert_subject", "%", "(", "project_id", ",", "user_id", ",", "utils", ".", "isotime", "(", ")", ")", ")" ]
helper to generate user cert subject .
train
false
44,062
def nova_docstring_one_line(physical_line, previous_logical): line = physical_line.lstrip() if is_docstring(physical_line, previous_logical): pos = max([line.find(i) for i in START_DOCSTRING_TRIPLE]) end = max([(line[(-4):(-1)] == i) for i in END_DOCSTRING_TRIPLE]) if ((pos != (-1)) and end and (len(line) > (pos + 4))): if (line[(-5)] not in ['.', '?', '!']): return (pos, 'N402: one line docstring needs punctuation.')
[ "def", "nova_docstring_one_line", "(", "physical_line", ",", "previous_logical", ")", ":", "line", "=", "physical_line", ".", "lstrip", "(", ")", "if", "is_docstring", "(", "physical_line", ",", "previous_logical", ")", ":", "pos", "=", "max", "(", "[", "line", ".", "find", "(", "i", ")", "for", "i", "in", "START_DOCSTRING_TRIPLE", "]", ")", "end", "=", "max", "(", "[", "(", "line", "[", "(", "-", "4", ")", ":", "(", "-", "1", ")", "]", "==", "i", ")", "for", "i", "in", "END_DOCSTRING_TRIPLE", "]", ")", "if", "(", "(", "pos", "!=", "(", "-", "1", ")", ")", "and", "end", "and", "(", "len", "(", "line", ")", ">", "(", "pos", "+", "4", ")", ")", ")", ":", "if", "(", "line", "[", "(", "-", "5", ")", "]", "not", "in", "[", "'.'", ",", "'?'", ",", "'!'", "]", ")", ":", "return", "(", "pos", ",", "'N402: one line docstring needs punctuation.'", ")" ]
check one line docstring end .
train
false
44,063
def JSONTuple(*args, **kw): (values, end) = JSONArray(*args, **kw) return (tuple(values), end)
[ "def", "JSONTuple", "(", "*", "args", ",", "**", "kw", ")", ":", "(", "values", ",", "end", ")", "=", "JSONArray", "(", "*", "args", ",", "**", "kw", ")", "return", "(", "tuple", "(", "values", ")", ",", "end", ")" ]
parse a json array .
train
false
44,065
def backup_mode(backup=''): if backup: return backup return option('backup_mode')
[ "def", "backup_mode", "(", "backup", "=", "''", ")", ":", "if", "backup", ":", "return", "backup", "return", "option", "(", "'backup_mode'", ")" ]
return the backup mode cli example: .
train
false
44,066
def provider_info(provider_name): if (provider_name not in providers): raise NotSupportedError('Provider: {} not supported.'.format(provider_name)) return providers[provider_name]
[ "def", "provider_info", "(", "provider_name", ")", ":", "if", "(", "provider_name", "not", "in", "providers", ")", ":", "raise", "NotSupportedError", "(", "'Provider: {} not supported.'", ".", "format", "(", "provider_name", ")", ")", "return", "providers", "[", "provider_name", "]" ]
like providers[provider_name] except raises inbox .
train
false
44,068
def cut_normalized(labels, rag, thresh=0.001, num_cuts=10, in_place=True, max_edge=1.0): if (not in_place): rag = rag.copy() for node in rag.nodes_iter(): rag.add_edge(node, node, weight=max_edge) _ncut_relabel(rag, thresh, num_cuts) map_array = np.zeros((labels.max() + 1), dtype=labels.dtype) for (n, d) in rag.nodes_iter(data=True): map_array[d['labels']] = d['ncut label'] return map_array[labels]
[ "def", "cut_normalized", "(", "labels", ",", "rag", ",", "thresh", "=", "0.001", ",", "num_cuts", "=", "10", ",", "in_place", "=", "True", ",", "max_edge", "=", "1.0", ")", ":", "if", "(", "not", "in_place", ")", ":", "rag", "=", "rag", ".", "copy", "(", ")", "for", "node", "in", "rag", ".", "nodes_iter", "(", ")", ":", "rag", ".", "add_edge", "(", "node", ",", "node", ",", "weight", "=", "max_edge", ")", "_ncut_relabel", "(", "rag", ",", "thresh", ",", "num_cuts", ")", "map_array", "=", "np", ".", "zeros", "(", "(", "labels", ".", "max", "(", ")", "+", "1", ")", ",", "dtype", "=", "labels", ".", "dtype", ")", "for", "(", "n", ",", "d", ")", "in", "rag", ".", "nodes_iter", "(", "data", "=", "True", ")", ":", "map_array", "[", "d", "[", "'labels'", "]", "]", "=", "d", "[", "'ncut label'", "]", "return", "map_array", "[", "labels", "]" ]
perform normalized graph cut on the region adjacency graph .
train
false
44,069
def _open_terminal(): for x in 'pqrstuvwxyzPQRST': for y in '0123456789abcdef': pty_name = (('/dev/pty' + x) + y) try: fd = os.open(pty_name, os.O_RDWR) except OSError: continue return (fd, (('/dev/tty' + x) + y)) raise OSError('out of pty devices')
[ "def", "_open_terminal", "(", ")", ":", "for", "x", "in", "'pqrstuvwxyzPQRST'", ":", "for", "y", "in", "'0123456789abcdef'", ":", "pty_name", "=", "(", "(", "'/dev/pty'", "+", "x", ")", "+", "y", ")", "try", ":", "fd", "=", "os", ".", "open", "(", "pty_name", ",", "os", ".", "O_RDWR", ")", "except", "OSError", ":", "continue", "return", "(", "fd", ",", "(", "(", "'/dev/tty'", "+", "x", ")", "+", "y", ")", ")", "raise", "OSError", "(", "'out of pty devices'", ")" ]
open pty master and return .
train
true
44,070
def file_content(project_tree, f): return FileContent(f.path, project_tree.content(f.path))
[ "def", "file_content", "(", "project_tree", ",", "f", ")", ":", "return", "FileContent", "(", "f", ".", "path", ",", "project_tree", ".", "content", "(", "f", ".", "path", ")", ")" ]
return a filecontent for a known-existing file .
train
false
44,072
def _getset_factory_factory(column_name, string_getter): def getset_factory(underlying_type, instance): def getter(translations): if (translations is None): return None text = getattr(translations, column_name) if (text is None): return text session = object_session(translations) language = translations.local_language return string_getter(text, session, language) def setter(translations, value): raise AttributeError(('Cannot set %s' % column_name)) return (getter, setter) return getset_factory
[ "def", "_getset_factory_factory", "(", "column_name", ",", "string_getter", ")", ":", "def", "getset_factory", "(", "underlying_type", ",", "instance", ")", ":", "def", "getter", "(", "translations", ")", ":", "if", "(", "translations", "is", "None", ")", ":", "return", "None", "text", "=", "getattr", "(", "translations", ",", "column_name", ")", "if", "(", "text", "is", "None", ")", ":", "return", "text", "session", "=", "object_session", "(", "translations", ")", "language", "=", "translations", ".", "local_language", "return", "string_getter", "(", "text", ",", "session", ",", "language", ")", "def", "setter", "(", "translations", ",", "value", ")", ":", "raise", "AttributeError", "(", "(", "'Cannot set %s'", "%", "column_name", ")", ")", "return", "(", "getter", ",", "setter", ")", "return", "getset_factory" ]
hello! i am a factory for creating getset_factory functions for sqla .
train
false
44,073
def make_histograms(lengths, binwidth=10): min_len = min(lengths) max_len = max(lengths) floor = ((min_len / binwidth) * binwidth) ceil = (((max_len / binwidth) + 2) * binwidth) bins = np.arange(floor, ceil, binwidth) (hist, bin_edges) = np.histogram(lengths, bins) return (hist, bin_edges)
[ "def", "make_histograms", "(", "lengths", ",", "binwidth", "=", "10", ")", ":", "min_len", "=", "min", "(", "lengths", ")", "max_len", "=", "max", "(", "lengths", ")", "floor", "=", "(", "(", "min_len", "/", "binwidth", ")", "*", "binwidth", ")", "ceil", "=", "(", "(", "(", "max_len", "/", "binwidth", ")", "+", "2", ")", "*", "binwidth", ")", "bins", "=", "np", ".", "arange", "(", "floor", ",", "ceil", ",", "binwidth", ")", "(", "hist", ",", "bin_edges", ")", "=", "np", ".", "histogram", "(", "lengths", ",", "bins", ")", "return", "(", "hist", ",", "bin_edges", ")" ]
makes histogram data for pre and post lengths .
train
false
44,074
def full_query(query_type='list_nodes_full'): client = _get_client() info = client.full_query(query_type) return info
[ "def", "full_query", "(", "query_type", "=", "'list_nodes_full'", ")", ":", "client", "=", "_get_client", "(", ")", "info", "=", "client", ".", "full_query", "(", "query_type", ")", "return", "info" ]
list all available cloud provider data .
train
false
44,076
def isclass(object): return isinstance(object, (type, types.ClassType))
[ "def", "isclass", "(", "object", ")", ":", "return", "isinstance", "(", "object", ",", "(", "type", ",", "types", ".", "ClassType", ")", ")" ]
returns true if and only if the specified object is a type .
train
false
44,077
def _is_simple_type(value): return (isinstance(value, str) or isinstance(value, int) or isinstance(value, float) or isinstance(value, bool))
[ "def", "_is_simple_type", "(", "value", ")", ":", "return", "(", "isinstance", "(", "value", ",", "str", ")", "or", "isinstance", "(", "value", ",", "int", ")", "or", "isinstance", "(", "value", ",", "float", ")", "or", "isinstance", "(", "value", ",", "bool", ")", ")" ]
returns true .
train
false
44,078
def get_short_name(fq_name): return fq_name.split('.')[(-1):][0]
[ "def", "get_short_name", "(", "fq_name", ")", ":", "return", "fq_name", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", ":", "]", "[", "0", "]" ]
returns the last component of the name .
train
false
44,079
def MAXINDEX(ds, count, timeperiod=(- (2 ** 31))): return call_talib_with_ds(ds, count, talib.MAXINDEX, timeperiod)
[ "def", "MAXINDEX", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "MAXINDEX", ",", "timeperiod", ")" ]
index of highest value over a specified period .
train
false
44,080
def event_type(): return s3_rest_controller('event', 'event_type')
[ "def", "event_type", "(", ")", ":", "return", "s3_rest_controller", "(", "'event'", ",", "'event_type'", ")" ]
restful crud controller .
train
false
44,081
def get_cookie_dict(environ): header = environ.get('HTTP_COOKIE') if (not header): return {} if ('paste.cookies.dict' in environ): (cookies, check_header) = environ['paste.cookies.dict'] if (check_header == header): return cookies cookies = SimpleCookie() try: cookies.load(header) except CookieError: pass result = {} for name in cookies: result[name] = cookies[name].value environ['paste.cookies.dict'] = (result, header) return result
[ "def", "get_cookie_dict", "(", "environ", ")", ":", "header", "=", "environ", ".", "get", "(", "'HTTP_COOKIE'", ")", "if", "(", "not", "header", ")", ":", "return", "{", "}", "if", "(", "'paste.cookies.dict'", "in", "environ", ")", ":", "(", "cookies", ",", "check_header", ")", "=", "environ", "[", "'paste.cookies.dict'", "]", "if", "(", "check_header", "==", "header", ")", ":", "return", "cookies", "cookies", "=", "SimpleCookie", "(", ")", "try", ":", "cookies", ".", "load", "(", "header", ")", "except", "CookieError", ":", "pass", "result", "=", "{", "}", "for", "name", "in", "cookies", ":", "result", "[", "name", "]", "=", "cookies", "[", "name", "]", ".", "value", "environ", "[", "'paste.cookies.dict'", "]", "=", "(", "result", ",", "header", ")", "return", "result" ]
return a *plain* dictionary of cookies as found in the request .
train
false
44,082
def _parse_unquoted_string(data, start, stop_at_equals): value = u'' pos = start while (pos < len(data)): char = data[pos] if (char == u'\\'): if ((pos + 1) < len(data)): value += data[(pos + 1)] pos += 2 else: raise ParsingError(u'Unexpected end of data while escaping ({0})'.format(_format_position(data, pos))) elif char.isspace(): break elif ((char == u'=') and stop_at_equals): break elif ((char == u"'") or (char == u'"')): raise ParsingError(u'Unexpected quotation mark in unquoted string ({0})'.format(_format_position(data, pos))) else: value += char pos += 1 return (pos, value)
[ "def", "_parse_unquoted_string", "(", "data", ",", "start", ",", "stop_at_equals", ")", ":", "value", "=", "u''", "pos", "=", "start", "while", "(", "pos", "<", "len", "(", "data", ")", ")", ":", "char", "=", "data", "[", "pos", "]", "if", "(", "char", "==", "u'\\\\'", ")", ":", "if", "(", "(", "pos", "+", "1", ")", "<", "len", "(", "data", ")", ")", ":", "value", "+=", "data", "[", "(", "pos", "+", "1", ")", "]", "pos", "+=", "2", "else", ":", "raise", "ParsingError", "(", "u'Unexpected end of data while escaping ({0})'", ".", "format", "(", "_format_position", "(", "data", ",", "pos", ")", ")", ")", "elif", "char", ".", "isspace", "(", ")", ":", "break", "elif", "(", "(", "char", "==", "u'='", ")", "and", "stop_at_equals", ")", ":", "break", "elif", "(", "(", "char", "==", "u\"'\"", ")", "or", "(", "char", "==", "u'\"'", ")", ")", ":", "raise", "ParsingError", "(", "u'Unexpected quotation mark in unquoted string ({0})'", ".", "format", "(", "_format_position", "(", "data", ",", "pos", ")", ")", ")", "else", ":", "value", "+=", "char", "pos", "+=", "1", "return", "(", "pos", ",", "value", ")" ]
parse an unquoted string starting at position start in data .
train
false
44,083
@contextfunction def news_update_list(context, updates, skip_group=False): request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('news/tags/update_list', {'updates': updates}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "news_update_list", "(", "context", ",", "updates", ",", "skip_group", "=", "False", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_format", "=", "context", "[", "'response_format'", "]", "return", "Markup", "(", "render_to_string", "(", "'news/tags/update_list'", ",", "{", "'updates'", ":", "updates", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")", ")" ]
print a list of orders .
train
false
44,084
def JsonResponse(dump_object, xssi_protection=True): result = JsonDumpForScriptContext(dump_object) if xssi_protection: result = (')]}\n' + result) response = http.HttpResponse(result, content_type='application/json; charset=utf-8') response['Content-Disposition'] = 'attachment; filename=response.json' response['X-Content-Type-Options'] = 'nosniff' return response
[ "def", "JsonResponse", "(", "dump_object", ",", "xssi_protection", "=", "True", ")", ":", "result", "=", "JsonDumpForScriptContext", "(", "dump_object", ")", "if", "xssi_protection", ":", "result", "=", "(", "')]}\\n'", "+", "result", ")", "response", "=", "http", ".", "HttpResponse", "(", "result", ",", "content_type", "=", "'application/json; charset=utf-8'", ")", "response", "[", "'Content-Disposition'", "]", "=", "'attachment; filename=response.json'", "response", "[", "'X-Content-Type-Options'", "]", "=", "'nosniff'", "return", "response" ]
return a django json response object with correct headers .
train
false
44,085
def test_gt_grad(): floatX = config.floatX T = theano.tensor input_ = T.vector(dtype=floatX) random_values = numpy.random.RandomState(1234).uniform(low=(-1), high=1, size=(2, 2)) W_values = numpy.asarray(random_values, dtype=floatX) W = theano.shared(value=W_values, name='weights') correct_score = T.dot(input_, W) wrong_input = T.vector(dtype=floatX) wrong_score = theano.clone(correct_score, {input_: wrong_input}) scores = ((T.ones_like(correct_score) - correct_score) + wrong_score) cost = (scores * (scores > 0)).sum() T.grad(cost, input_)
[ "def", "test_gt_grad", "(", ")", ":", "floatX", "=", "config", ".", "floatX", "T", "=", "theano", ".", "tensor", "input_", "=", "T", ".", "vector", "(", "dtype", "=", "floatX", ")", "random_values", "=", "numpy", ".", "random", ".", "RandomState", "(", "1234", ")", ".", "uniform", "(", "low", "=", "(", "-", "1", ")", ",", "high", "=", "1", ",", "size", "=", "(", "2", ",", "2", ")", ")", "W_values", "=", "numpy", ".", "asarray", "(", "random_values", ",", "dtype", "=", "floatX", ")", "W", "=", "theano", ".", "shared", "(", "value", "=", "W_values", ",", "name", "=", "'weights'", ")", "correct_score", "=", "T", ".", "dot", "(", "input_", ",", "W", ")", "wrong_input", "=", "T", ".", "vector", "(", "dtype", "=", "floatX", ")", "wrong_score", "=", "theano", ".", "clone", "(", "correct_score", ",", "{", "input_", ":", "wrong_input", "}", ")", "scores", "=", "(", "(", "T", ".", "ones_like", "(", "correct_score", ")", "-", "correct_score", ")", "+", "wrong_score", ")", "cost", "=", "(", "scores", "*", "(", "scores", ">", "0", ")", ")", ".", "sum", "(", ")", "T", ".", "grad", "(", "cost", ",", "input_", ")" ]
a user test that failed .
train
false
44,086
@task def undrain(): ami = get_ami_metadata() instance_id = ami['instance-id'] fprint('Waiting for healthy backend') num_healthz_ok = 0 for i in range(60): if get_healthz(): num_healthz_ok += 1 if (num_healthz_ok >= 3): break else: num_healthz_ok = 0 time.sleep(2) if (num_healthz_ok < 3): raise Exception('healthz timeout') ec2_utils.AddELBInstance(env.region, instance_id, env.nodetype) fprint(('Added instance %s to %s load balancers' % (instance_id, env.nodetype))) for i in range(60): health = ec2_utils.GetELBInstanceHealth(env.region, instance_id, node_types=[env.nodetype]) if (health is None): fprint(('No load balancer health information for instance %s; waiting.' % instance_id)) elif (health == 'InService'): fprint(('Load balancer health for instance %s is InService.' % instance_id)) return else: fprint(('Load balancer health information for instance %s is %s; waiting.' % (instance_id, health))) time.sleep(2) raise Exception('timeout')
[ "@", "task", "def", "undrain", "(", ")", ":", "ami", "=", "get_ami_metadata", "(", ")", "instance_id", "=", "ami", "[", "'instance-id'", "]", "fprint", "(", "'Waiting for healthy backend'", ")", "num_healthz_ok", "=", "0", "for", "i", "in", "range", "(", "60", ")", ":", "if", "get_healthz", "(", ")", ":", "num_healthz_ok", "+=", "1", "if", "(", "num_healthz_ok", ">=", "3", ")", ":", "break", "else", ":", "num_healthz_ok", "=", "0", "time", ".", "sleep", "(", "2", ")", "if", "(", "num_healthz_ok", "<", "3", ")", ":", "raise", "Exception", "(", "'healthz timeout'", ")", "ec2_utils", ".", "AddELBInstance", "(", "env", ".", "region", ",", "instance_id", ",", "env", ".", "nodetype", ")", "fprint", "(", "(", "'Added instance %s to %s load balancers'", "%", "(", "instance_id", ",", "env", ".", "nodetype", ")", ")", ")", "for", "i", "in", "range", "(", "60", ")", ":", "health", "=", "ec2_utils", ".", "GetELBInstanceHealth", "(", "env", ".", "region", ",", "instance_id", ",", "node_types", "=", "[", "env", ".", "nodetype", "]", ")", "if", "(", "health", "is", "None", ")", ":", "fprint", "(", "(", "'No load balancer health information for instance %s; waiting.'", "%", "instance_id", ")", ")", "elif", "(", "health", "==", "'InService'", ")", ":", "fprint", "(", "(", "'Load balancer health for instance %s is InService.'", "%", "instance_id", ")", ")", "return", "else", ":", "fprint", "(", "(", "'Load balancer health information for instance %s is %s; waiting.'", "%", "(", "instance_id", ",", "health", ")", ")", ")", "time", ".", "sleep", "(", "2", ")", "raise", "Exception", "(", "'timeout'", ")" ]
undrain nodes of a given type .
train
false
44,088
def check_grad(func, grad, x0, *args, **kwargs): step = kwargs.pop('epsilon', _epsilon) if kwargs: raise ValueError(('Unknown keyword arguments: %r' % (list(kwargs.keys()),))) return sqrt(sum(((grad(x0, *args) - approx_fprime(x0, func, step, *args)) ** 2)))
[ "def", "check_grad", "(", "func", ",", "grad", ",", "x0", ",", "*", "args", ",", "**", "kwargs", ")", ":", "step", "=", "kwargs", ".", "pop", "(", "'epsilon'", ",", "_epsilon", ")", "if", "kwargs", ":", "raise", "ValueError", "(", "(", "'Unknown keyword arguments: %r'", "%", "(", "list", "(", "kwargs", ".", "keys", "(", ")", ")", ",", ")", ")", ")", "return", "sqrt", "(", "sum", "(", "(", "(", "grad", "(", "x0", ",", "*", "args", ")", "-", "approx_fprime", "(", "x0", ",", "func", ",", "step", ",", "*", "args", ")", ")", "**", "2", ")", ")", ")" ]
check the correctness of a gradient function by comparing it against a finite-difference approximation of the gradient .
train
false
44,089
def find_pure_symbol(symbols, unknown_clauses): for sym in symbols: (found_pos, found_neg) = (False, False) for c in unknown_clauses: if ((not found_pos) and (sym in disjuncts(c))): found_pos = True if ((not found_neg) and (Not(sym) in disjuncts(c))): found_neg = True if (found_pos != found_neg): return (sym, found_pos) return (None, None)
[ "def", "find_pure_symbol", "(", "symbols", ",", "unknown_clauses", ")", ":", "for", "sym", "in", "symbols", ":", "(", "found_pos", ",", "found_neg", ")", "=", "(", "False", ",", "False", ")", "for", "c", "in", "unknown_clauses", ":", "if", "(", "(", "not", "found_pos", ")", "and", "(", "sym", "in", "disjuncts", "(", "c", ")", ")", ")", ":", "found_pos", "=", "True", "if", "(", "(", "not", "found_neg", ")", "and", "(", "Not", "(", "sym", ")", "in", "disjuncts", "(", "c", ")", ")", ")", ":", "found_neg", "=", "True", "if", "(", "found_pos", "!=", "found_neg", ")", ":", "return", "(", "sym", ",", "found_pos", ")", "return", "(", "None", ",", "None", ")" ]
find a symbol and its value if it appears only as a positive literal in clauses .
train
false
44,092
def _parse_typed_parameter_typed_value(values): (type_, value) = _expand_one_key_dictionary(values) _current_parameter_value.type = type_ if _is_simple_type(value): arg = Argument(value) _current_parameter_value.add_argument(arg) elif isinstance(value, list): for idx in value: arg = Argument(idx) _current_parameter_value.add_argument(arg)
[ "def", "_parse_typed_parameter_typed_value", "(", "values", ")", ":", "(", "type_", ",", "value", ")", "=", "_expand_one_key_dictionary", "(", "values", ")", "_current_parameter_value", ".", "type", "=", "type_", "if", "_is_simple_type", "(", "value", ")", ":", "arg", "=", "Argument", "(", "value", ")", "_current_parameter_value", ".", "add_argument", "(", "arg", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "for", "idx", "in", "value", ":", "arg", "=", "Argument", "(", "idx", ")", "_current_parameter_value", ".", "add_argument", "(", "arg", ")" ]
creates arguments in a typedparametervalue .
train
true
44,093
def P_(x, y): return (x, y)
[ "def", "P_", "(", "x", ",", "y", ")", ":", "return", "(", "x", ",", "y", ")" ]
convenience method for handling pluralizations .
train
false
44,097
def allcombinations(A, B, ordered): if (ordered == 'commutative'): ordered = 11 if (ordered == 'associative'): ordered = None (sm, bg) = ((A, B) if (len(A) < len(B)) else (B, A)) for part in kbins(list(range(len(bg))), len(sm), ordered=ordered): if (bg == B): (yield (tuple(((a,) for a in A)), partition(B, part))) else: (yield (partition(A, part), tuple(((b,) for b in B))))
[ "def", "allcombinations", "(", "A", ",", "B", ",", "ordered", ")", ":", "if", "(", "ordered", "==", "'commutative'", ")", ":", "ordered", "=", "11", "if", "(", "ordered", "==", "'associative'", ")", ":", "ordered", "=", "None", "(", "sm", ",", "bg", ")", "=", "(", "(", "A", ",", "B", ")", "if", "(", "len", "(", "A", ")", "<", "len", "(", "B", ")", ")", "else", "(", "B", ",", "A", ")", ")", "for", "part", "in", "kbins", "(", "list", "(", "range", "(", "len", "(", "bg", ")", ")", ")", ",", "len", "(", "sm", ")", ",", "ordered", "=", "ordered", ")", ":", "if", "(", "bg", "==", "B", ")", ":", "(", "yield", "(", "tuple", "(", "(", "(", "a", ",", ")", "for", "a", "in", "A", ")", ")", ",", "partition", "(", "B", ",", "part", ")", ")", ")", "else", ":", "(", "yield", "(", "partition", "(", "A", ",", "part", ")", ",", "tuple", "(", "(", "(", "b", ",", ")", "for", "b", "in", "B", ")", ")", ")", ")" ]
restructure a and b to have the same number of elements ordered must be either commutative or associative a and b can be rearranged so that the larger of the two lists is reorganized into smaller sublists .
train
false
44,098
def _js_string_filter(value): string = json.dumps(value) replacements = [('\\', '\\\\'), ('"', '\\"'), ("'", "\\'"), ('\n', '\\n'), ('\r', '\\r'), ('\x08', '\\b'), ('<', '\\u003c'), ('>', '\\u003e'), ('&', '\\u0026')] for replacement in replacements: string = string.replace(replacement[0], replacement[1]) return jinja2.utils.Markup(string)
[ "def", "_js_string_filter", "(", "value", ")", ":", "string", "=", "json", ".", "dumps", "(", "value", ")", "replacements", "=", "[", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ",", "(", "'\"'", ",", "'\\\\\"'", ")", ",", "(", "\"'\"", ",", "\"\\\\'\"", ")", ",", "(", "'\\n'", ",", "'\\\\n'", ")", ",", "(", "'\\r'", ",", "'\\\\r'", ")", ",", "(", "'\\x08'", ",", "'\\\\b'", ")", ",", "(", "'<'", ",", "'\\\\u003c'", ")", ",", "(", "'>'", ",", "'\\\\u003e'", ")", ",", "(", "'&'", ",", "'\\\\u0026'", ")", "]", "for", "replacement", "in", "replacements", ":", "string", "=", "string", ".", "replace", "(", "replacement", "[", "0", "]", ",", "replacement", "[", "1", "]", ")", "return", "jinja2", ".", "utils", ".", "Markup", "(", "string", ")" ]
converts a value to a json string for use in javascript code .
train
false
44,100
def pr_get_ancestors(pe_id): s3db = current.s3db atable = s3db.pr_affiliation rtable = s3db.pr_role query = (((((atable.deleted != True) & (atable.role_id == rtable.id)) & (atable.pe_id == pe_id)) & (rtable.deleted != True)) & (rtable.role_type == OU)) roles = current.db(query).select(rtable.id, rtable.pe_id, rtable.path, rtable.role_type) paths = [] append = paths.append for role in roles: path = S3MultiPath([role.pe_id]) if (role.path is None): ppath = pr_role_rebuild_path(role) else: ppath = S3MultiPath(role.path) path.extend(role.pe_id, ppath, cut=pe_id) append(path) ancestors = S3MultiPath.all_nodes(paths) return ancestors
[ "def", "pr_get_ancestors", "(", "pe_id", ")", ":", "s3db", "=", "current", ".", "s3db", "atable", "=", "s3db", ".", "pr_affiliation", "rtable", "=", "s3db", ".", "pr_role", "query", "=", "(", "(", "(", "(", "(", "atable", ".", "deleted", "!=", "True", ")", "&", "(", "atable", ".", "role_id", "==", "rtable", ".", "id", ")", ")", "&", "(", "atable", ".", "pe_id", "==", "pe_id", ")", ")", "&", "(", "rtable", ".", "deleted", "!=", "True", ")", ")", "&", "(", "rtable", ".", "role_type", "==", "OU", ")", ")", "roles", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "rtable", ".", "id", ",", "rtable", ".", "pe_id", ",", "rtable", ".", "path", ",", "rtable", ".", "role_type", ")", "paths", "=", "[", "]", "append", "=", "paths", ".", "append", "for", "role", "in", "roles", ":", "path", "=", "S3MultiPath", "(", "[", "role", ".", "pe_id", "]", ")", "if", "(", "role", ".", "path", "is", "None", ")", ":", "ppath", "=", "pr_role_rebuild_path", "(", "role", ")", "else", ":", "ppath", "=", "S3MultiPath", "(", "role", ".", "path", ")", "path", ".", "extend", "(", "role", ".", "pe_id", ",", "ppath", ",", "cut", "=", "pe_id", ")", "append", "(", "path", ")", "ancestors", "=", "S3MultiPath", ".", "all_nodes", "(", "paths", ")", "return", "ancestors" ]
find all ancestor entities of a person entity in the ou hierarchy .
train
false
44,102
def MimeTrimFilename(header, extension): start = header.find('filename=') start = header.find('"', start) end = (header.find('"', (start + 1)) + 1) start = header.find((('.' + extension) + '"'), start, end) if ((start > 0) and (end > start)): headernew = (header[:start] + header[(end - 1):]) else: headernew = header[:] return headernew
[ "def", "MimeTrimFilename", "(", "header", ",", "extension", ")", ":", "start", "=", "header", ".", "find", "(", "'filename='", ")", "start", "=", "header", ".", "find", "(", "'\"'", ",", "start", ")", "end", "=", "(", "header", ".", "find", "(", "'\"'", ",", "(", "start", "+", "1", ")", ")", "+", "1", ")", "start", "=", "header", ".", "find", "(", "(", "(", "'.'", "+", "extension", ")", "+", "'\"'", ")", ",", "start", ",", "end", ")", "if", "(", "(", "start", ">", "0", ")", "and", "(", "end", ">", "start", ")", ")", ":", "headernew", "=", "(", "header", "[", ":", "start", "]", "+", "header", "[", "(", "end", "-", "1", ")", ":", "]", ")", "else", ":", "headernew", "=", "header", "[", ":", "]", "return", "headernew" ]
accepts a mime header containing filename=" .
train
false
44,104
def purge_deleted(): db_api.purge_deleted(CONF.command.age, CONF.command.granularity, CONF.command.project_id, CONF.command.batch_size)
[ "def", "purge_deleted", "(", ")", ":", "db_api", ".", "purge_deleted", "(", "CONF", ".", "command", ".", "age", ",", "CONF", ".", "command", ".", "granularity", ",", "CONF", ".", "command", ".", "project_id", ",", "CONF", ".", "command", ".", "batch_size", ")" ]
remove database records that have been previously soft deleted .
train
false
44,106
def notification_sample(sample): def wrap(cls): if (not getattr(cls, 'samples', None)): cls.samples = [sample] else: cls.samples.append(sample) return cls return wrap
[ "def", "notification_sample", "(", "sample", ")", ":", "def", "wrap", "(", "cls", ")", ":", "if", "(", "not", "getattr", "(", "cls", ",", "'samples'", ",", "None", ")", ")", ":", "cls", ".", "samples", "=", "[", "sample", "]", "else", ":", "cls", ".", "samples", ".", "append", "(", "sample", ")", "return", "cls", "return", "wrap" ]
class decorator to attach the notification sample information to the notification object for documentation generation purposes .
train
false
44,107
def subset_dict(dict_, keys): subset = partition_dict(dict_, keys)[0] return subset
[ "def", "subset_dict", "(", "dict_", ",", "keys", ")", ":", "subset", "=", "partition_dict", "(", "dict_", ",", "keys", ")", "[", "0", "]", "return", "subset" ]
return a dict that only contains a subset of keys .
train
false
44,109
def LoadAppInclude(app_include): builder = yaml_object.ObjectBuilder(AppInclude) handler = yaml_builder.BuilderHandler(builder) listener = yaml_listener.EventListener(handler) listener.Parse(app_include) includes = handler.GetResults() if (len(includes) < 1): raise appinfo_errors.EmptyConfigurationFile() if (len(includes) > 1): raise appinfo_errors.MultipleConfigurationFile() includeyaml = includes[0] if includeyaml.handlers: for handler in includeyaml.handlers: handler.FixSecureDefaults() handler.WarnReservedURLs() if includeyaml.builtins: BuiltinHandler.Validate(includeyaml.builtins) return includeyaml
[ "def", "LoadAppInclude", "(", "app_include", ")", ":", "builder", "=", "yaml_object", ".", "ObjectBuilder", "(", "AppInclude", ")", "handler", "=", "yaml_builder", ".", "BuilderHandler", "(", "builder", ")", "listener", "=", "yaml_listener", ".", "EventListener", "(", "handler", ")", "listener", ".", "Parse", "(", "app_include", ")", "includes", "=", "handler", ".", "GetResults", "(", ")", "if", "(", "len", "(", "includes", ")", "<", "1", ")", ":", "raise", "appinfo_errors", ".", "EmptyConfigurationFile", "(", ")", "if", "(", "len", "(", "includes", ")", ">", "1", ")", ":", "raise", "appinfo_errors", ".", "MultipleConfigurationFile", "(", ")", "includeyaml", "=", "includes", "[", "0", "]", "if", "includeyaml", ".", "handlers", ":", "for", "handler", "in", "includeyaml", ".", "handlers", ":", "handler", ".", "FixSecureDefaults", "(", ")", "handler", ".", "WarnReservedURLs", "(", ")", "if", "includeyaml", ".", "builtins", ":", "BuiltinHandler", ".", "Validate", "(", "includeyaml", ".", "builtins", ")", "return", "includeyaml" ]
load a single appinclude object where one and only one is expected .
train
false
44,110
def config_option_update(context, data_dict): return {'success': False}
[ "def", "config_option_update", "(", "context", ",", "data_dict", ")", ":", "return", "{", "'success'", ":", "False", "}" ]
update the runtime-editable configuration options only sysdmins can do it .
train
false
44,111
def test_aware_datetime_explicit_tz(): new_datetime = aware_datetime(2016, 1, 2, 21, 52, 25, tz=pytz.utc) assert timezone.is_aware(new_datetime) assert (new_datetime.tzinfo.zone == pytz.utc.zone)
[ "def", "test_aware_datetime_explicit_tz", "(", ")", ":", "new_datetime", "=", "aware_datetime", "(", "2016", ",", "1", ",", "2", ",", "21", ",", "52", ",", "25", ",", "tz", "=", "pytz", ".", "utc", ")", "assert", "timezone", ".", "is_aware", "(", "new_datetime", ")", "assert", "(", "new_datetime", ".", "tzinfo", ".", "zone", "==", "pytz", ".", "utc", ".", "zone", ")" ]
tests the creation of a explicitly provided timezone-aware datetime .
train
false
44,112
def _bytesChr(i): if _PY3: return bytes([i]) else: return chr(i)
[ "def", "_bytesChr", "(", "i", ")", ":", "if", "_PY3", ":", "return", "bytes", "(", "[", "i", "]", ")", "else", ":", "return", "chr", "(", "i", ")" ]
like l{chr} but always works on ascii .
train
false
44,114
def tk_window_focus(): if (rcParams['backend'] != 'TkAgg'): return False return rcParams['tk.window_focus']
[ "def", "tk_window_focus", "(", ")", ":", "if", "(", "rcParams", "[", "'backend'", "]", "!=", "'TkAgg'", ")", ":", "return", "False", "return", "rcParams", "[", "'tk.window_focus'", "]" ]
return true if focus maintenance under tkagg on win32 is on .
train
false
44,115
def _resolve_id(val): return (val if isinstance(val, six.string_types) else val.id)
[ "def", "_resolve_id", "(", "val", ")", ":", "return", "(", "val", "if", "isinstance", "(", "val", ",", "six", ".", "string_types", ")", "else", "val", ".", "id", ")" ]
takes an object or an id and returns the id .
train
false
44,116
def quote_plus(string, safe='', encoding=None, errors=None): if ((isinstance(string, str) and (' ' not in string)) or (isinstance(string, bytes) and (' ' not in string))): return quote(string, safe, encoding, errors) if isinstance(safe, str): space = ' ' else: space = ' ' string = quote(string, (safe + space), encoding, errors) return string.replace(' ', '+')
[ "def", "quote_plus", "(", "string", ",", "safe", "=", "''", ",", "encoding", "=", "None", ",", "errors", "=", "None", ")", ":", "if", "(", "(", "isinstance", "(", "string", ",", "str", ")", "and", "(", "' '", "not", "in", "string", ")", ")", "or", "(", "isinstance", "(", "string", ",", "bytes", ")", "and", "(", "' '", "not", "in", "string", ")", ")", ")", ":", "return", "quote", "(", "string", ",", "safe", ",", "encoding", ",", "errors", ")", "if", "isinstance", "(", "safe", ",", "str", ")", ":", "space", "=", "' '", "else", ":", "space", "=", "' '", "string", "=", "quote", "(", "string", ",", "(", "safe", "+", "space", ")", ",", "encoding", ",", "errors", ")", "return", "string", ".", "replace", "(", "' '", ",", "'+'", ")" ]
quote the query fragment of a url; replacing with + .
train
true
44,117
def _fail_neg(values, errmsg='negative value'): for x in values: if (x < 0): raise StatisticsError(errmsg) (yield x)
[ "def", "_fail_neg", "(", "values", ",", "errmsg", "=", "'negative value'", ")", ":", "for", "x", "in", "values", ":", "if", "(", "x", "<", "0", ")", ":", "raise", "StatisticsError", "(", "errmsg", ")", "(", "yield", "x", ")" ]
iterate over values .
train
false
44,118
def runfcgi(func): from flup.server.fcgi import WSGIServer if (len(sys.argv) > 2): args = sys.argv[:] if ('fastcgi' in args): args.remove('fastcgi') elif ('fcgi' in args): args.remove('fcgi') hostport = validip(args[1]) elif (len(sys.argv) > 1): hostport = ('localhost', 8000) else: hostport = None return makeserver(WSGIServer)(func, multiplexed=True, bindAddress=hostport).run()
[ "def", "runfcgi", "(", "func", ")", ":", "from", "flup", ".", "server", ".", "fcgi", "import", "WSGIServer", "if", "(", "len", "(", "sys", ".", "argv", ")", ">", "2", ")", ":", "args", "=", "sys", ".", "argv", "[", ":", "]", "if", "(", "'fastcgi'", "in", "args", ")", ":", "args", ".", "remove", "(", "'fastcgi'", ")", "elif", "(", "'fcgi'", "in", "args", ")", ":", "args", ".", "remove", "(", "'fcgi'", ")", "hostport", "=", "validip", "(", "args", "[", "1", "]", ")", "elif", "(", "len", "(", "sys", ".", "argv", ")", ">", "1", ")", ":", "hostport", "=", "(", "'localhost'", ",", "8000", ")", "else", ":", "hostport", "=", "None", "return", "makeserver", "(", "WSGIServer", ")", "(", "func", ",", "multiplexed", "=", "True", ",", "bindAddress", "=", "hostport", ")", ".", "run", "(", ")" ]
runs a wsgi-function with a fastcgi server .
train
false
44,119
def hostgroup_delete(hostgroupids, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'hostgroup.delete' if (not isinstance(hostgroupids, list)): params = [hostgroupids] else: params = hostgroupids ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result']['groupids'] else: raise KeyError except KeyError: return ret
[ "def", "hostgroup_delete", "(", "hostgroupids", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'hostgroup.delete'", "if", "(", "not", "isinstance", "(", "hostgroupids", ",", "list", ")", ")", ":", "params", "=", "[", "hostgroupids", "]", "else", ":", "params", "=", "hostgroupids", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'groupids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
delete the host group .
train
true
44,120
def getInterval(): return 1
[ "def", "getInterval", "(", ")", ":", "return", "1" ]
interval in seconds .
train
false
44,121
def runtime_hooks(): return [join(curdir, 'pyi_rth_kivy.py')]
[ "def", "runtime_hooks", "(", ")", ":", "return", "[", "join", "(", "curdir", ",", "'pyi_rth_kivy.py'", ")", "]" ]
returns a list with the runtime hooks for kivy .
train
false
44,122
def push_on_stack_section(args): if (len(args) == 0): return '' parts = ['self->deeplevel++;'] for idx in xrange(len(args)): parts.append(('self->arguments_stack[self->deeplevel, "arg%d"] = self->arg%d;\n DCTB self->arg%d = arg%d;' % (idx, idx, idx, idx))) return '\n DCTB '.join(parts)
[ "def", "push_on_stack_section", "(", "args", ")", ":", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "return", "''", "parts", "=", "[", "'self->deeplevel++;'", "]", "for", "idx", "in", "xrange", "(", "len", "(", "args", ")", ")", ":", "parts", ".", "append", "(", "(", "'self->arguments_stack[self->deeplevel, \"arg%d\"] = self->arg%d;\\n DCTB self->arg%d = arg%d;'", "%", "(", "idx", ",", "idx", ",", "idx", ",", "idx", ")", ")", ")", "return", "'\\n DCTB '", ".", "join", "(", "parts", ")" ]
composes a "push arguments on stack" section of an entry pid dtrace probe .
train
false
44,123
def show_triple(tokens, text, triple): (nsubj, verb, dobj) = triple nsubj_text = phrase_text_for_head(tokens, text, nsubj) verb_text = tokens[verb]['text']['content'] dobj_text = phrase_text_for_head(tokens, text, dobj) left = textwrap.wrap(nsubj_text, width=28) mid = textwrap.wrap(verb_text, width=10) right = textwrap.wrap(dobj_text, width=28) print (((((('+' + (30 * '-')) + '+') + (12 * '-')) + '+') + (30 * '-')) + '+') for (l, m, r) in zip(left, mid, right): print '| {:<28s} | {:<10s} | {:<28s} |'.format((l or ''), (m or ''), (r or ''))
[ "def", "show_triple", "(", "tokens", ",", "text", ",", "triple", ")", ":", "(", "nsubj", ",", "verb", ",", "dobj", ")", "=", "triple", "nsubj_text", "=", "phrase_text_for_head", "(", "tokens", ",", "text", ",", "nsubj", ")", "verb_text", "=", "tokens", "[", "verb", "]", "[", "'text'", "]", "[", "'content'", "]", "dobj_text", "=", "phrase_text_for_head", "(", "tokens", ",", "text", ",", "dobj", ")", "left", "=", "textwrap", ".", "wrap", "(", "nsubj_text", ",", "width", "=", "28", ")", "mid", "=", "textwrap", ".", "wrap", "(", "verb_text", ",", "width", "=", "10", ")", "right", "=", "textwrap", ".", "wrap", "(", "dobj_text", ",", "width", "=", "28", ")", "print", "(", "(", "(", "(", "(", "(", "'+'", "+", "(", "30", "*", "'-'", ")", ")", "+", "'+'", ")", "+", "(", "12", "*", "'-'", ")", ")", "+", "'+'", ")", "+", "(", "30", "*", "'-'", ")", ")", "+", "'+'", ")", "for", "(", "l", ",", "m", ",", "r", ")", "in", "zip", "(", "left", ",", "mid", ",", "right", ")", ":", "print", "'| {:<28s} | {:<10s} | {:<28s} |'", ".", "format", "(", "(", "l", "or", "''", ")", ",", "(", "m", "or", "''", ")", ",", "(", "r", "or", "''", ")", ")" ]
prints the given triple .
train
false
44,125
def get_default_page_content_type(): return ContentType.objects.get_for_model(Page)
[ "def", "get_default_page_content_type", "(", ")", ":", "return", "ContentType", ".", "objects", ".", "get_for_model", "(", "Page", ")" ]
returns the content type to use as a default for pages whose content type has been deleted .
train
false
44,126
def _release_subtask_lock(task_id): key = 'subtask-{}'.format(task_id) cache.delete(key)
[ "def", "_release_subtask_lock", "(", "task_id", ")", ":", "key", "=", "'subtask-{}'", ".", "format", "(", "task_id", ")", "cache", ".", "delete", "(", "key", ")" ]
unmark the specified task_id as being no longer in progress .
train
false
44,127
def seam_carve(img, energy_map, mode, num, border=1, force_copy=True): utils.assert_nD(img, (2, 3)) image = util.img_as_float(img, force_copy) energy_map = util.img_as_float(energy_map, force_copy) if (image.ndim == 2): image = image[..., np.newaxis] if (mode == 'horizontal'): image = np.transpose(image, (1, 0, 2)) image = np.ascontiguousarray(image) out = _seam_carve_v(image, energy_map, num, border) if (mode == 'horizontal'): out = np.transpose(out, (1, 0, 2)) return np.squeeze(out)
[ "def", "seam_carve", "(", "img", ",", "energy_map", ",", "mode", ",", "num", ",", "border", "=", "1", ",", "force_copy", "=", "True", ")", ":", "utils", ".", "assert_nD", "(", "img", ",", "(", "2", ",", "3", ")", ")", "image", "=", "util", ".", "img_as_float", "(", "img", ",", "force_copy", ")", "energy_map", "=", "util", ".", "img_as_float", "(", "energy_map", ",", "force_copy", ")", "if", "(", "image", ".", "ndim", "==", "2", ")", ":", "image", "=", "image", "[", "...", ",", "np", ".", "newaxis", "]", "if", "(", "mode", "==", "'horizontal'", ")", ":", "image", "=", "np", ".", "transpose", "(", "image", ",", "(", "1", ",", "0", ",", "2", ")", ")", "image", "=", "np", ".", "ascontiguousarray", "(", "image", ")", "out", "=", "_seam_carve_v", "(", "image", ",", "energy_map", ",", "num", ",", "border", ")", "if", "(", "mode", "==", "'horizontal'", ")", ":", "out", "=", "np", ".", "transpose", "(", "out", ",", "(", "1", ",", "0", ",", "2", ")", ")", "return", "np", ".", "squeeze", "(", "out", ")" ]
carve vertical or horizontal seams off an image .
train
false
44,128
def deprecated_property(attr, name, text, version): def warn(): warn_deprecation(('The attribute %s is deprecated: %s' % (attr, text)), version, 3) def fget(self): warn() return attr.__get__(self, type(self)) def fset(self, val): warn() attr.__set__(self, val) def fdel(self): warn() attr.__delete__(self) return property(fget, fset, fdel, ('<Deprecated attribute %s>' % attr))
[ "def", "deprecated_property", "(", "attr", ",", "name", ",", "text", ",", "version", ")", ":", "def", "warn", "(", ")", ":", "warn_deprecation", "(", "(", "'The attribute %s is deprecated: %s'", "%", "(", "attr", ",", "text", ")", ")", ",", "version", ",", "3", ")", "def", "fget", "(", "self", ")", ":", "warn", "(", ")", "return", "attr", ".", "__get__", "(", "self", ",", "type", "(", "self", ")", ")", "def", "fset", "(", "self", ",", "val", ")", ":", "warn", "(", ")", "attr", ".", "__set__", "(", "self", ",", "val", ")", "def", "fdel", "(", "self", ")", ":", "warn", "(", ")", "attr", ".", "__delete__", "(", "self", ")", "return", "property", "(", "fget", ",", "fset", ",", "fdel", ",", "(", "'<Deprecated attribute %s>'", "%", "attr", ")", ")" ]
wraps a descriptor .
train
false
44,129
def find_prototypes(code): prots = [] lines = code.split('\n') for line in lines: m = re.match(('\\s*' + re_func_prot), line) if (m is not None): (rtype, name, args) = m.groups()[:3] if ((args == 'void') or (args.strip() == '')): args = [] else: args = [tuple(arg.strip().split(' ')) for arg in args.split(',')] prots.append((name, args, rtype)) return prots
[ "def", "find_prototypes", "(", "code", ")", ":", "prots", "=", "[", "]", "lines", "=", "code", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "m", "=", "re", ".", "match", "(", "(", "'\\\\s*'", "+", "re_func_prot", ")", ",", "line", ")", "if", "(", "m", "is", "not", "None", ")", ":", "(", "rtype", ",", "name", ",", "args", ")", "=", "m", ".", "groups", "(", ")", "[", ":", "3", "]", "if", "(", "(", "args", "==", "'void'", ")", "or", "(", "args", ".", "strip", "(", ")", "==", "''", ")", ")", ":", "args", "=", "[", "]", "else", ":", "args", "=", "[", "tuple", "(", "arg", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", ")", "for", "arg", "in", "args", ".", "split", "(", "','", ")", "]", "prots", ".", "append", "(", "(", "name", ",", "args", ",", "rtype", ")", ")", "return", "prots" ]
return a list of signatures for each function prototype declared in *code* .
train
true
44,130
def utils(opts, whitelist=None, context=None): return LazyLoader(_module_dirs(opts, 'utils', ext_type_dirs='utils_dirs'), opts, tag='utils', whitelist=whitelist, pack={'__context__': context})
[ "def", "utils", "(", "opts", ",", "whitelist", "=", "None", ",", "context", "=", "None", ")", ":", "return", "LazyLoader", "(", "_module_dirs", "(", "opts", ",", "'utils'", ",", "ext_type_dirs", "=", "'utils_dirs'", ")", ",", "opts", ",", "tag", "=", "'utils'", ",", "whitelist", "=", "whitelist", ",", "pack", "=", "{", "'__context__'", ":", "context", "}", ")" ]
returns the utility modules .
train
true
44,131
def package_revision_list(context, data_dict): model = context['model'] id = _get_or_bust(data_dict, 'id') pkg = model.Package.get(id) if (pkg is None): raise NotFound _check_access('package_revision_list', context, data_dict) revision_dicts = [] for (revision, object_revisions) in pkg.all_related_revisions: revision_dicts.append(model.revision_as_dict(revision, include_packages=False, include_groups=False)) return revision_dicts
[ "def", "package_revision_list", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "id", "=", "_get_or_bust", "(", "data_dict", ",", "'id'", ")", "pkg", "=", "model", ".", "Package", ".", "get", "(", "id", ")", "if", "(", "pkg", "is", "None", ")", ":", "raise", "NotFound", "_check_access", "(", "'package_revision_list'", ",", "context", ",", "data_dict", ")", "revision_dicts", "=", "[", "]", "for", "(", "revision", ",", "object_revisions", ")", "in", "pkg", ".", "all_related_revisions", ":", "revision_dicts", ".", "append", "(", "model", ".", "revision_as_dict", "(", "revision", ",", "include_packages", "=", "False", ",", "include_groups", "=", "False", ")", ")", "return", "revision_dicts" ]
return a dataset s revisions as a list of dictionaries .
train
false
44,132
def storage_service_key(bucket, file_name): key_name = '{}/{}'.format(settings.VIDEO_UPLOAD_PIPELINE.get('ROOT_PATH', ''), file_name) return s3.key.Key(bucket, key_name)
[ "def", "storage_service_key", "(", "bucket", ",", "file_name", ")", ":", "key_name", "=", "'{}/{}'", ".", "format", "(", "settings", ".", "VIDEO_UPLOAD_PIPELINE", ".", "get", "(", "'ROOT_PATH'", ",", "''", ")", ",", "file_name", ")", "return", "s3", ".", "key", ".", "Key", "(", "bucket", ",", "key_name", ")" ]
returns an s3 key to the given file in the given bucket .
train
false
44,133
def make_NQueens_CSP(n): vars = list(range(n)) domains = list(range(n)) neighbors = {} for v in vars: neighbors[v] = vars[:] neighbors[v].remove(v) return NQueensCSP(vars=vars, domains=defaultdict((lambda : domains)), neighbors=neighbors, binary_constraint=queens_constraint)
[ "def", "make_NQueens_CSP", "(", "n", ")", ":", "vars", "=", "list", "(", "range", "(", "n", ")", ")", "domains", "=", "list", "(", "range", "(", "n", ")", ")", "neighbors", "=", "{", "}", "for", "v", "in", "vars", ":", "neighbors", "[", "v", "]", "=", "vars", "[", ":", "]", "neighbors", "[", "v", "]", ".", "remove", "(", "v", ")", "return", "NQueensCSP", "(", "vars", "=", "vars", ",", "domains", "=", "defaultdict", "(", "(", "lambda", ":", "domains", ")", ")", ",", "neighbors", "=", "neighbors", ",", "binary_constraint", "=", "queens_constraint", ")" ]
creates a n-queens csp problem for a given n .
train
false
44,134
def page_msg(page=0): if isinstance(g.content, PaginatedContent): page_count = g.content.numPages() else: page_count = math.ceil((g.result_count / getxy().max_results)) if (page_count > 1): pagemsg = '{}{}/{}{}' return pagemsg.format(('<' if (page > 0) else '['), ('%s%s%s' % (c.y, (page + 1), c.w)), page_count, ('>' if ((page + 1) < page_count) else ']')) return None
[ "def", "page_msg", "(", "page", "=", "0", ")", ":", "if", "isinstance", "(", "g", ".", "content", ",", "PaginatedContent", ")", ":", "page_count", "=", "g", ".", "content", ".", "numPages", "(", ")", "else", ":", "page_count", "=", "math", ".", "ceil", "(", "(", "g", ".", "result_count", "/", "getxy", "(", ")", ".", "max_results", ")", ")", "if", "(", "page_count", ">", "1", ")", ":", "pagemsg", "=", "'{}{}/{}{}'", "return", "pagemsg", ".", "format", "(", "(", "'<'", "if", "(", "page", ">", "0", ")", "else", "'['", ")", ",", "(", "'%s%s%s'", "%", "(", "c", ".", "y", ",", "(", "page", "+", "1", ")", ",", "c", ".", "w", ")", ")", ",", "page_count", ",", "(", "'>'", "if", "(", "(", "page", "+", "1", ")", "<", "page_count", ")", "else", "']'", ")", ")", "return", "None" ]
format information about currently displayed page to a string .
train
false
44,136
def infinibox_argument_spec(): return dict(system=dict(required=True), user=dict(), password=dict(no_log=True))
[ "def", "infinibox_argument_spec", "(", ")", ":", "return", "dict", "(", "system", "=", "dict", "(", "required", "=", "True", ")", ",", "user", "=", "dict", "(", ")", ",", "password", "=", "dict", "(", "no_log", "=", "True", ")", ")" ]
return standard base dictionary used for the argument_spec argument in ansiblemodule .
train
false
44,139
def getSelectedRadioPlugin(names, radioPlugins): for radioPlugin in radioPlugins: if radioPlugin.value: return radioPlugin for name in names: for radioPlugin in radioPlugins: if (radioPlugin.name == name): radioPlugin.value = True return radioPlugin print 'this should never happen, no getSelectedRadioPlugin in settings' print names return radioPlugin[0]
[ "def", "getSelectedRadioPlugin", "(", "names", ",", "radioPlugins", ")", ":", "for", "radioPlugin", "in", "radioPlugins", ":", "if", "radioPlugin", ".", "value", ":", "return", "radioPlugin", "for", "name", "in", "names", ":", "for", "radioPlugin", "in", "radioPlugins", ":", "if", "(", "radioPlugin", ".", "name", "==", "name", ")", ":", "radioPlugin", ".", "value", "=", "True", "return", "radioPlugin", "print", "'this should never happen, no getSelectedRadioPlugin in settings'", "print", "names", "return", "radioPlugin", "[", "0", "]" ]
get the selected radio button if it exists .
train
false
44,140
def gray_to_bin(bin_list): b = [bin_list[0]] for i in range(1, len(bin_list)): b += str(int((b[(i - 1)] != bin_list[i]))) return ''.join(b)
[ "def", "gray_to_bin", "(", "bin_list", ")", ":", "b", "=", "[", "bin_list", "[", "0", "]", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "bin_list", ")", ")", ":", "b", "+=", "str", "(", "int", "(", "(", "b", "[", "(", "i", "-", "1", ")", "]", "!=", "bin_list", "[", "i", "]", ")", ")", ")", "return", "''", ".", "join", "(", "b", ")" ]
convert from gray coding to binary coding .
train
false
44,141
@facebook_required_lazy def lazy_decorator_example(request, graph): if graph: if (request.method == 'POST'): friends = graph.get('me/friends') return HttpResponse('authorized') else: return HttpResponse('user denied or error')
[ "@", "facebook_required_lazy", "def", "lazy_decorator_example", "(", "request", ",", "graph", ")", ":", "if", "graph", ":", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "friends", "=", "graph", ".", "get", "(", "'me/friends'", ")", "return", "HttpResponse", "(", "'authorized'", ")", "else", ":", "return", "HttpResponse", "(", "'user denied or error'", ")" ]
the lazy decorator is faster .
train
false
44,142
def crc32(seq): try: return _crc32(_as_bytes(str(seq))) except AttributeError: return _crc32(_as_bytes(seq))
[ "def", "crc32", "(", "seq", ")", ":", "try", ":", "return", "_crc32", "(", "_as_bytes", "(", "str", "(", "seq", ")", ")", ")", "except", "AttributeError", ":", "return", "_crc32", "(", "_as_bytes", "(", "seq", ")", ")" ]
return a crc32 checksum of a file .
train
false
44,144
def get_user_project_permissions(user, project, cache='user'): membership = _get_user_project_membership(user, project, cache=cache) is_member = (membership is not None) is_admin = (is_member and membership.is_admin) return calculate_permissions(is_authenticated=user.is_authenticated(), is_superuser=user.is_superuser, is_member=is_member, is_admin=is_admin, role_permissions=_get_membership_permissions(membership), anon_permissions=project.anon_permissions, public_permissions=project.public_permissions)
[ "def", "get_user_project_permissions", "(", "user", ",", "project", ",", "cache", "=", "'user'", ")", ":", "membership", "=", "_get_user_project_membership", "(", "user", ",", "project", ",", "cache", "=", "cache", ")", "is_member", "=", "(", "membership", "is", "not", "None", ")", "is_admin", "=", "(", "is_member", "and", "membership", ".", "is_admin", ")", "return", "calculate_permissions", "(", "is_authenticated", "=", "user", ".", "is_authenticated", "(", ")", ",", "is_superuser", "=", "user", ".", "is_superuser", ",", "is_member", "=", "is_member", ",", "is_admin", "=", "is_admin", ",", "role_permissions", "=", "_get_membership_permissions", "(", "membership", ")", ",", "anon_permissions", "=", "project", ".", "anon_permissions", ",", "public_permissions", "=", "project", ".", "public_permissions", ")" ]
cache param determines how memberships are calculated trying to reuse the existing data in cache .
train
false
44,149
def resource_url(resource, request, *elements, **kw): return request.resource_url(resource, *elements, **kw)
[ "def", "resource_url", "(", "resource", ",", "request", ",", "*", "elements", ",", "**", "kw", ")", ":", "return", "request", ".", "resource_url", "(", "resource", ",", "*", "elements", ",", "**", "kw", ")" ]
this is a backwards compatibility function .
train
false
44,150
@decorators.memoize def _check_zfs(): return salt.utils.which('zfs')
[ "@", "decorators", ".", "memoize", "def", "_check_zfs", "(", ")", ":", "return", "salt", ".", "utils", ".", "which", "(", "'zfs'", ")" ]
looks to see if zfs is present on the system .
train
false
44,151
def get_reboot_type(task_state, current_power_state): if (current_power_state != power_state.RUNNING): return 'HARD' soft_types = [task_states.REBOOT_STARTED, task_states.REBOOT_PENDING, task_states.REBOOTING] reboot_type = ('SOFT' if (task_state in soft_types) else 'HARD') return reboot_type
[ "def", "get_reboot_type", "(", "task_state", ",", "current_power_state", ")", ":", "if", "(", "current_power_state", "!=", "power_state", ".", "RUNNING", ")", ":", "return", "'HARD'", "soft_types", "=", "[", "task_states", ".", "REBOOT_STARTED", ",", "task_states", ".", "REBOOT_PENDING", ",", "task_states", ".", "REBOOTING", "]", "reboot_type", "=", "(", "'SOFT'", "if", "(", "task_state", "in", "soft_types", ")", "else", "'HARD'", ")", "return", "reboot_type" ]
checks if the current instance state requires a hard reboot .
train
false
44,152
def TranslateSystemErrors(method): def WrappedMethod(self, *args, **kwargs): try: return method(self, *args, **kwargs) except socket.gaierror as e: raise apiproxy_errors.ApplicationError(remote_socket_service_pb.RemoteSocketServiceError.GAI_ERROR, ('system_error:%u error_detail:"%s"' % (e.errno, e.strerror))) except socket.timeout as e: raise apiproxy_errors.ApplicationError(remote_socket_service_pb.RemoteSocketServiceError.SYSTEM_ERROR, ('system_error:%u error_detail:"%s"' % (errno.EAGAIN, os.strerror(errno.EAGAIN)))) except socket.error as e: raise apiproxy_errors.ApplicationError(remote_socket_service_pb.RemoteSocketServiceError.SYSTEM_ERROR, ('system_error:%u error_detail:"%s"' % (e.errno, e.strerror))) return WrappedMethod
[ "def", "TranslateSystemErrors", "(", "method", ")", ":", "def", "WrappedMethod", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "except", "socket", ".", "gaierror", "as", "e", ":", "raise", "apiproxy_errors", ".", "ApplicationError", "(", "remote_socket_service_pb", ".", "RemoteSocketServiceError", ".", "GAI_ERROR", ",", "(", "'system_error:%u error_detail:\"%s\"'", "%", "(", "e", ".", "errno", ",", "e", ".", "strerror", ")", ")", ")", "except", "socket", ".", "timeout", "as", "e", ":", "raise", "apiproxy_errors", ".", "ApplicationError", "(", "remote_socket_service_pb", ".", "RemoteSocketServiceError", ".", "SYSTEM_ERROR", ",", "(", "'system_error:%u error_detail:\"%s\"'", "%", "(", "errno", ".", "EAGAIN", ",", "os", ".", "strerror", "(", "errno", ".", "EAGAIN", ")", ")", ")", ")", "except", "socket", ".", "error", "as", "e", ":", "raise", "apiproxy_errors", ".", "ApplicationError", "(", "remote_socket_service_pb", ".", "RemoteSocketServiceError", ".", "SYSTEM_ERROR", ",", "(", "'system_error:%u error_detail:\"%s\"'", "%", "(", "e", ".", "errno", ",", "e", ".", "strerror", ")", ")", ")", "return", "WrappedMethod" ]
decorator to catch and translate socket .
train
false
44,153
@domain_constructor(loss_target=(-2.5)) def gauss_wave2(): rng = np.random.RandomState(123) var = 0.1 x = hp.uniform('x', (-20), 20) amp = hp.uniform('amp', 0, 1) t = (scope.normal(0, var, rng=rng) + (2 * scope.exp((- (old_div(x, 5.0) ** 2))))) return {'loss': (- hp.choice('hf', [t, (t + (scope.sin(x) * amp))])), 'loss_variance': var, 'status': base.STATUS_OK}
[ "@", "domain_constructor", "(", "loss_target", "=", "(", "-", "2.5", ")", ")", "def", "gauss_wave2", "(", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "123", ")", "var", "=", "0.1", "x", "=", "hp", ".", "uniform", "(", "'x'", ",", "(", "-", "20", ")", ",", "20", ")", "amp", "=", "hp", ".", "uniform", "(", "'amp'", ",", "0", ",", "1", ")", "t", "=", "(", "scope", ".", "normal", "(", "0", ",", "var", ",", "rng", "=", "rng", ")", "+", "(", "2", "*", "scope", ".", "exp", "(", "(", "-", "(", "old_div", "(", "x", ",", "5.0", ")", "**", "2", ")", ")", ")", ")", ")", "return", "{", "'loss'", ":", "(", "-", "hp", ".", "choice", "(", "'hf'", ",", "[", "t", ",", "(", "t", "+", "(", "scope", ".", "sin", "(", "x", ")", "*", "amp", ")", ")", "]", ")", ")", ",", "'loss_variance'", ":", "var", ",", "'status'", ":", "base", ".", "STATUS_OK", "}" ]
variant of the gausswave problem in which noise is added to the score function .
train
false
44,154
def FakeURandom(n): bytes = '' for _ in range(n): bytes += chr(random.randint(0, 255)) return bytes
[ "def", "FakeURandom", "(", "n", ")", ":", "bytes", "=", "''", "for", "_", "in", "range", "(", "n", ")", ":", "bytes", "+=", "chr", "(", "random", ".", "randint", "(", "0", ",", "255", ")", ")", "return", "bytes" ]
fake version of os .
train
false
44,155
def web2py_uuid(ctokens=UNPACKED_CTOKENS): rand_longs = (random.getrandbits(64), random.getrandbits(64)) if HAVE_URANDOM: urand_longs = _struct_2_long_long.unpack(fast_urandom16()) byte_s = _struct_2_long_long.pack(((rand_longs[0] ^ urand_longs[0]) ^ ctokens[0]), ((rand_longs[1] ^ urand_longs[1]) ^ ctokens[1])) else: byte_s = _struct_2_long_long.pack((rand_longs[0] ^ ctokens[0]), (rand_longs[1] ^ ctokens[1])) return str(uuid.UUID(bytes=byte_s, version=4))
[ "def", "web2py_uuid", "(", "ctokens", "=", "UNPACKED_CTOKENS", ")", ":", "rand_longs", "=", "(", "random", ".", "getrandbits", "(", "64", ")", ",", "random", ".", "getrandbits", "(", "64", ")", ")", "if", "HAVE_URANDOM", ":", "urand_longs", "=", "_struct_2_long_long", ".", "unpack", "(", "fast_urandom16", "(", ")", ")", "byte_s", "=", "_struct_2_long_long", ".", "pack", "(", "(", "(", "rand_longs", "[", "0", "]", "^", "urand_longs", "[", "0", "]", ")", "^", "ctokens", "[", "0", "]", ")", ",", "(", "(", "rand_longs", "[", "1", "]", "^", "urand_longs", "[", "1", "]", ")", "^", "ctokens", "[", "1", "]", ")", ")", "else", ":", "byte_s", "=", "_struct_2_long_long", ".", "pack", "(", "(", "rand_longs", "[", "0", "]", "^", "ctokens", "[", "0", "]", ")", ",", "(", "rand_longs", "[", "1", "]", "^", "ctokens", "[", "1", "]", ")", ")", "return", "str", "(", "uuid", ".", "UUID", "(", "bytes", "=", "byte_s", ",", "version", "=", "4", ")", ")" ]
this function follows from the following discussion: URL it works like uuid .
train
false
44,156
def changelog_as_markdown(): rst = get_latest_changelog() rst = re.sub(':doc:`/plugins/(\\w+)`', '``\\1``', rst) rst = re.sub(':ref:`([^<]+)(<[^>]+>)`', '\\1', rst) rst = re.sub('(\\s)`([^`]+)`([^_])', '\\1``\\2``\\3', rst) rst = re.sub(':ref:`(\\w+)-cmd`', '``\\1``', rst) rst = re.sub(':bug:`(\\d+)`', '#\\1', rst) rst = re.sub(':user:`(\\w+)`', '@\\1', rst) md = rst2md(rst) md = re.sub('\\\\#(\\d+)\\b', '#\\1', md) return md
[ "def", "changelog_as_markdown", "(", ")", ":", "rst", "=", "get_latest_changelog", "(", ")", "rst", "=", "re", ".", "sub", "(", "':doc:`/plugins/(\\\\w+)`'", ",", "'``\\\\1``'", ",", "rst", ")", "rst", "=", "re", ".", "sub", "(", "':ref:`([^<]+)(<[^>]+>)`'", ",", "'\\\\1'", ",", "rst", ")", "rst", "=", "re", ".", "sub", "(", "'(\\\\s)`([^`]+)`([^_])'", ",", "'\\\\1``\\\\2``\\\\3'", ",", "rst", ")", "rst", "=", "re", ".", "sub", "(", "':ref:`(\\\\w+)-cmd`'", ",", "'``\\\\1``'", ",", "rst", ")", "rst", "=", "re", ".", "sub", "(", "':bug:`(\\\\d+)`'", ",", "'#\\\\1'", ",", "rst", ")", "rst", "=", "re", ".", "sub", "(", "':user:`(\\\\w+)`'", ",", "'@\\\\1'", ",", "rst", ")", "md", "=", "rst2md", "(", "rst", ")", "md", "=", "re", ".", "sub", "(", "'\\\\\\\\#(\\\\d+)\\\\b'", ",", "'#\\\\1'", ",", "md", ")", "return", "md" ]
get the latest changelog entry as hacked up markdown .
train
false
44,157
def send_formatting(message): message = message.replace('\r\n', '\n') if ('\n' in message): return ('+%s\r\n.\r\n' % message.replace('\n', '\r\n')) else: return (message + '\r\n')
[ "def", "send_formatting", "(", "message", ")", ":", "message", "=", "message", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", "if", "(", "'\\n'", "in", "message", ")", ":", "return", "(", "'+%s\\r\\n.\\r\\n'", "%", "message", ".", "replace", "(", "'\\n'", ",", "'\\r\\n'", ")", ")", "else", ":", "return", "(", "message", "+", "'\\r\\n'", ")" ]
performs the formatting expected from sent control messages .
train
false
44,158
def dtype_for(t): if (t in dtype_dict): return dtype_dict[t] return np.typeDict.get(t, t)
[ "def", "dtype_for", "(", "t", ")", ":", "if", "(", "t", "in", "dtype_dict", ")", ":", "return", "dtype_dict", "[", "t", "]", "return", "np", ".", "typeDict", ".", "get", "(", "t", ",", "t", ")" ]
return my dtype mapping .
train
true
44,159
def copytimes(source, dest): st = os.stat(source) mode = stat.S_IMODE(st.st_mode) if hasattr(os, 'utime'): os.utime(dest, (st.st_atime, st.st_mtime))
[ "def", "copytimes", "(", "source", ",", "dest", ")", ":", "st", "=", "os", ".", "stat", "(", "source", ")", "mode", "=", "stat", ".", "S_IMODE", "(", "st", ".", "st_mode", ")", "if", "hasattr", "(", "os", ",", "'utime'", ")", ":", "os", ".", "utime", "(", "dest", ",", "(", "st", ".", "st_atime", ",", "st", ".", "st_mtime", ")", ")" ]
copy a files modification times .
train
false
44,160
def mem_fence(*args, **kargs): raise _stub_error
[ "def", "mem_fence", "(", "*", "args", ",", "**", "kargs", ")", ":", "raise", "_stub_error" ]
opencl mem_fence() example: # local memory fence hsa .
train
false
44,161
def mpf2float(x): return float(mpmath.nstr(x, 17, min_fixed=0, max_fixed=0))
[ "def", "mpf2float", "(", "x", ")", ":", "return", "float", "(", "mpmath", ".", "nstr", "(", "x", ",", "17", ",", "min_fixed", "=", "0", ",", "max_fixed", "=", "0", ")", ")" ]
convert an mpf to the nearest floating point number .
train
false
44,162
def _groupname(): if grp: groupname = grp.getgrgid(os.getgid()).gr_name else: groupname = '' return groupname
[ "def", "_groupname", "(", ")", ":", "if", "grp", ":", "groupname", "=", "grp", ".", "getgrgid", "(", "os", ".", "getgid", "(", ")", ")", ".", "gr_name", "else", ":", "groupname", "=", "''", "return", "groupname" ]
grain for the minion groupname .
train
true
44,164
def gf_ddf_zassenhaus(f, p, K): (i, g, factors) = (1, [K.one, K.zero], []) b = gf_frobenius_monomial_base(f, p, K) while ((2 * i) <= gf_degree(f)): g = gf_frobenius_map(g, f, b, p, K) h = gf_gcd(f, gf_sub(g, [K.one, K.zero], p, K), p, K) if (h != [K.one]): factors.append((h, i)) f = gf_quo(f, h, p, K) g = gf_rem(g, f, p, K) b = gf_frobenius_monomial_base(f, p, K) i += 1 if (f != [K.one]): return (factors + [(f, gf_degree(f))]) else: return factors
[ "def", "gf_ddf_zassenhaus", "(", "f", ",", "p", ",", "K", ")", ":", "(", "i", ",", "g", ",", "factors", ")", "=", "(", "1", ",", "[", "K", ".", "one", ",", "K", ".", "zero", "]", ",", "[", "]", ")", "b", "=", "gf_frobenius_monomial_base", "(", "f", ",", "p", ",", "K", ")", "while", "(", "(", "2", "*", "i", ")", "<=", "gf_degree", "(", "f", ")", ")", ":", "g", "=", "gf_frobenius_map", "(", "g", ",", "f", ",", "b", ",", "p", ",", "K", ")", "h", "=", "gf_gcd", "(", "f", ",", "gf_sub", "(", "g", ",", "[", "K", ".", "one", ",", "K", ".", "zero", "]", ",", "p", ",", "K", ")", ",", "p", ",", "K", ")", "if", "(", "h", "!=", "[", "K", ".", "one", "]", ")", ":", "factors", ".", "append", "(", "(", "h", ",", "i", ")", ")", "f", "=", "gf_quo", "(", "f", ",", "h", ",", "p", ",", "K", ")", "g", "=", "gf_rem", "(", "g", ",", "f", ",", "p", ",", "K", ")", "b", "=", "gf_frobenius_monomial_base", "(", "f", ",", "p", ",", "K", ")", "i", "+=", "1", "if", "(", "f", "!=", "[", "K", ".", "one", "]", ")", ":", "return", "(", "factors", "+", "[", "(", "f", ",", "gf_degree", "(", "f", ")", ")", "]", ")", "else", ":", "return", "factors" ]
cantor-zassenhaus: deterministic distinct degree factorization given a monic square-free polynomial f in gf(p)[x] .
train
false
44,165
def test_longer_than(): assert (hug.types.longer_than(10)('quite a bit of text here should be') == 'quite a bit of text here should be') assert (hug.types.longer_than(10)(12345678910) == '12345678910') assert (hug.types.longer_than(10)(100123456789100) == '100123456789100') assert ('10' in hug.types.longer_than(10).__doc__) with pytest.raises(ValueError): assert hug.types.longer_than(10)('short')
[ "def", "test_longer_than", "(", ")", ":", "assert", "(", "hug", ".", "types", ".", "longer_than", "(", "10", ")", "(", "'quite a bit of text here should be'", ")", "==", "'quite a bit of text here should be'", ")", "assert", "(", "hug", ".", "types", ".", "longer_than", "(", "10", ")", "(", "12345678910", ")", "==", "'12345678910'", ")", "assert", "(", "hug", ".", "types", ".", "longer_than", "(", "10", ")", "(", "100123456789100", ")", "==", "'100123456789100'", ")", "assert", "(", "'10'", "in", "hug", ".", "types", ".", "longer_than", "(", "10", ")", ".", "__doc__", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "assert", "hug", ".", "types", ".", "longer_than", "(", "10", ")", "(", "'short'", ")" ]
tests that hugs greater than type succefully limis the values passed in .
train
false
44,167
def parseStringList(s): assert isinstance(s, basestring) return [int(i) for i in s.split()]
[ "def", "parseStringList", "(", "s", ")", ":", "assert", "isinstance", "(", "s", ",", "basestring", ")", "return", "[", "int", "(", "i", ")", "for", "i", "in", "s", ".", "split", "(", ")", "]" ]
parse a string of space-separated numbers .
train
true
44,168
def _encode_decimal128(name, value, dummy0, dummy1): return (('\x13' + name) + value.bid)
[ "def", "_encode_decimal128", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "return", "(", "(", "'\\x13'", "+", "name", ")", "+", "value", ".", "bid", ")" ]
encode bson .
train
false
44,169
def get_if_list(): try: fd = os.popen(('%s -a' % conf.prog.ifconfig)) except OSError as msg: raise Scapy_Exception(('Failed to execute ifconfig: (%s)' % msg)) interfaces = [line[:line.find(':')] for line in fd.readlines() if (': flags' in line.lower())] return interfaces
[ "def", "get_if_list", "(", ")", ":", "try", ":", "fd", "=", "os", ".", "popen", "(", "(", "'%s -a'", "%", "conf", ".", "prog", ".", "ifconfig", ")", ")", "except", "OSError", "as", "msg", ":", "raise", "Scapy_Exception", "(", "(", "'Failed to execute ifconfig: (%s)'", "%", "msg", ")", ")", "interfaces", "=", "[", "line", "[", ":", "line", ".", "find", "(", "':'", ")", "]", "for", "line", "in", "fd", ".", "readlines", "(", ")", "if", "(", "': flags'", "in", "line", ".", "lower", "(", ")", ")", "]", "return", "interfaces" ]
returns a list containing all network interfaces .
train
true
44,172
def parse_routing_rules(routing_config, hostname): rules = [] for (prefix, relative_redirects) in routing_config.items(): for (postfix, destination) in relative_redirects.items(): destination.setdefault('http_redirect_code', '302') destination['protocol'] = 'https' if ('hostname' not in destination.keys()): destination['hostname'] = hostname for key in ('replace_key', 'replace_key_prefix'): if (key in destination): destination[key] = (prefix + destination[key]) rules.append(RoutingRule.when(key_prefix=(prefix + postfix)).then_redirect(**destination)) return RoutingRules(sorted(rules, key=(lambda rule: rule.condition.key_prefix), reverse=True))
[ "def", "parse_routing_rules", "(", "routing_config", ",", "hostname", ")", ":", "rules", "=", "[", "]", "for", "(", "prefix", ",", "relative_redirects", ")", "in", "routing_config", ".", "items", "(", ")", ":", "for", "(", "postfix", ",", "destination", ")", "in", "relative_redirects", ".", "items", "(", ")", ":", "destination", ".", "setdefault", "(", "'http_redirect_code'", ",", "'302'", ")", "destination", "[", "'protocol'", "]", "=", "'https'", "if", "(", "'hostname'", "not", "in", "destination", ".", "keys", "(", ")", ")", ":", "destination", "[", "'hostname'", "]", "=", "hostname", "for", "key", "in", "(", "'replace_key'", ",", "'replace_key_prefix'", ")", ":", "if", "(", "key", "in", "destination", ")", ":", "destination", "[", "key", "]", "=", "(", "prefix", "+", "destination", "[", "key", "]", ")", "rules", ".", "append", "(", "RoutingRule", ".", "when", "(", "key_prefix", "=", "(", "prefix", "+", "postfix", ")", ")", ".", "then_redirect", "(", "**", "destination", ")", ")", "return", "RoutingRules", "(", "sorted", "(", "rules", ",", "key", "=", "(", "lambda", "rule", ":", "rule", ".", "condition", ".", "key_prefix", ")", ",", "reverse", "=", "True", ")", ")" ]
parse routing rule description .
train
false