Dataset schema (column, type, range):

  id_within_dataset      int64    values 1 to 55.5k
  snippet                string   lengths 19 to 14.2k
  tokens                 list     lengths 6 to 1.63k
  nl                     string   lengths 6 to 352
  split_within_dataset   string   1 value
  is_duplicated          bool     2 classes
54,550
def setConfigOptions(**opts):
    for (k, v) in opts.items():
        setConfigOption(k, v)
[ "def", "setConfigOptions", "(", "**", "opts", ")", ":", "for", "(", "k", ",", "v", ")", "in", "opts", ".", "items", "(", ")", ":", "setConfigOption", "(", "k", ",", "v", ")" ]
set global configuration options .
train
false
54,551
def _getReportItem(itemName, results):
    subKeys = itemName.split(':')
    subResults = results
    for subKey in subKeys:
        subResults = subResults[subKey]
    return subResults
[ "def", "_getReportItem", "(", "itemName", ",", "results", ")", ":", "subKeys", "=", "itemName", ".", "split", "(", "':'", ")", "subResults", "=", "results", "for", "subKey", "in", "subKeys", ":", "subResults", "=", "subResults", "[", "subKey", "]", "return", "subResults" ]
get a specific item by name out of the results dict .
train
true
54,552
def getInnerText(node):
    inner_text = []
    for child in node.childNodes:
        if ((child.nodeType == child.TEXT_NODE) or (child.nodeType == child.CDATA_SECTION_NODE)):
            inner_text.append(child.data)
        elif (child.nodeType == child.ELEMENT_NODE):
            inner_text.extend(getInnerText(child))
        else:
            pass
    return ''.join(inner_text)
[ "def", "getInnerText", "(", "node", ")", ":", "inner_text", "=", "[", "]", "for", "child", "in", "node", ".", "childNodes", ":", "if", "(", "(", "child", ".", "nodeType", "==", "child", ".", "TEXT_NODE", ")", "or", "(", "child", ".", "nodeType", "==", "child", ".", "CDATA_SECTION_NODE", ")", ")", ":", "inner_text", ".", "append", "(", "child", ".", "data", ")", "elif", "(", "child", ".", "nodeType", "==", "child", ".", "ELEMENT_NODE", ")", ":", "inner_text", ".", "extend", "(", "getInnerText", "(", "child", ")", ")", "else", ":", "pass", "return", "''", ".", "join", "(", "inner_text", ")" ]
get all the inner text of a dom node .
train
false
54,553
def separate_qtranslate_content(text):
    qt_start = u'<!--:'
    qt_end = u'-->'
    qt_end_with_lang_len = 5
    qt_chunks = text.split(qt_start)
    content_by_lang = {}
    common_txt_list = []
    for c in qt_chunks:
        if (not c.strip()):
            continue
        if c.startswith(qt_end):
            lang = u''
            c = c.lstrip(qt_end)
            if (not c):
                continue
        elif c[2:].startswith(qt_end):
            lang = c[:2]
            c = c[qt_end_with_lang_len:]
        else:
            lang = u''
        if (not lang):
            common_txt_list.append(c)
            for l in content_by_lang.keys():
                content_by_lang[l].append(c)
        else:
            content_by_lang[lang] = (content_by_lang.get(lang, common_txt_list) + [c])
    if (common_txt_list and (not content_by_lang)):
        content_by_lang[u''] = common_txt_list
    for l in content_by_lang.keys():
        content_by_lang[l] = u' '.join(content_by_lang[l])
    return content_by_lang
[ "def", "separate_qtranslate_content", "(", "text", ")", ":", "qt_start", "=", "u'<!--:'", "qt_end", "=", "u'-->'", "qt_end_with_lang_len", "=", "5", "qt_chunks", "=", "text", ".", "split", "(", "qt_start", ")", "content_by_lang", "=", "{", "}", "common_txt_list", "=", "[", "]", "for", "c", "in", "qt_chunks", ":", "if", "(", "not", "c", ".", "strip", "(", ")", ")", ":", "continue", "if", "c", ".", "startswith", "(", "qt_end", ")", ":", "lang", "=", "u''", "c", "=", "c", ".", "lstrip", "(", "qt_end", ")", "if", "(", "not", "c", ")", ":", "continue", "elif", "c", "[", "2", ":", "]", ".", "startswith", "(", "qt_end", ")", ":", "lang", "=", "c", "[", ":", "2", "]", "c", "=", "c", "[", "qt_end_with_lang_len", ":", "]", "else", ":", "lang", "=", "u''", "if", "(", "not", "lang", ")", ":", "common_txt_list", ".", "append", "(", "c", ")", "for", "l", "in", "content_by_lang", ".", "keys", "(", ")", ":", "content_by_lang", "[", "l", "]", ".", "append", "(", "c", ")", "else", ":", "content_by_lang", "[", "lang", "]", "=", "(", "content_by_lang", ".", "get", "(", "lang", ",", "common_txt_list", ")", "+", "[", "c", "]", ")", "if", "(", "common_txt_list", "and", "(", "not", "content_by_lang", ")", ")", ":", "content_by_lang", "[", "u''", "]", "=", "common_txt_list", "for", "l", "in", "content_by_lang", ".", "keys", "(", ")", ":", "content_by_lang", "[", "l", "]", "=", "u' '", ".", "join", "(", "content_by_lang", "[", "l", "]", ")", "return", "content_by_lang" ]
parse the content of a wordpress post or page and separate qtranslate languages .
train
false
54,555
def detect_paragraph_type(txt):
    txt = txt.replace('\r\n', '\n')
    txt = txt.replace('\r', '\n')
    txt_line_count = len(re.findall('(?mu)^\\s*.+$', txt))
    docanalysis = DocAnalysis('txt', txt)
    hardbreaks = docanalysis.line_histogram(0.55)
    if hardbreaks:
        tab_line_count = len(re.findall('(?mu)^(\t|\\s{2,}).+$', txt))
        print_percent = (tab_line_count / float(txt_line_count))
        empty_line_count = len(re.findall('(?mu)^\\s*$', txt))
        block_percent = (empty_line_count / float(txt_line_count))
        if (print_percent >= block_percent):
            if (0.15 <= print_percent <= 0.75):
                return 'print'
        elif (0.15 <= block_percent <= 0.75):
            return 'block'
        return 'unformatted'
    return 'single'
[ "def", "detect_paragraph_type", "(", "txt", ")", ":", "txt", "=", "txt", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", "txt", "=", "txt", ".", "replace", "(", "'\\r'", ",", "'\\n'", ")", "txt_line_count", "=", "len", "(", "re", ".", "findall", "(", "'(?mu)^\\\\s*.+$'", ",", "txt", ")", ")", "docanalysis", "=", "DocAnalysis", "(", "'txt'", ",", "txt", ")", "hardbreaks", "=", "docanalysis", ".", "line_histogram", "(", "0.55", ")", "if", "hardbreaks", ":", "tab_line_count", "=", "len", "(", "re", ".", "findall", "(", "'(?mu)^( DCTB |\\\\s{2,}).+$'", ",", "txt", ")", ")", "print_percent", "=", "(", "tab_line_count", "/", "float", "(", "txt_line_count", ")", ")", "empty_line_count", "=", "len", "(", "re", ".", "findall", "(", "'(?mu)^\\\\s*$'", ",", "txt", ")", ")", "block_percent", "=", "(", "empty_line_count", "/", "float", "(", "txt_line_count", ")", ")", "if", "(", "print_percent", ">=", "block_percent", ")", ":", "if", "(", "0.15", "<=", "print_percent", "<=", "0.75", ")", ":", "return", "'print'", "elif", "(", "0.15", "<=", "block_percent", "<=", "0.75", ")", ":", "return", "'block'", "return", "'unformatted'", "return", "'single'" ]
tries to determine the paragraph type of the document .
train
false
54,556
def _traverse_generic(start_node, get_parents, get_children, filter_func=None, yield_descendants_of_unyielded=False):
    filter_func = (filter_func or (lambda __: True))
    stack = deque([start_node])
    yield_results = {}
    while stack:
        current_node = stack.pop()
        if (get_parents and (current_node != start_node)):
            parents = get_parents(current_node)
            if (not all(((parent in yield_results) for parent in parents))):
                continue
            elif ((not yield_descendants_of_unyielded) and (not any((yield_results[parent] for parent in parents)))):
                continue
        if (current_node not in yield_results):
            if get_parents:
                unvisited_children = list(get_children(current_node))
            else:
                unvisited_children = list((child for child in get_children(current_node) if (child not in yield_results)))
            unvisited_children.reverse()
            stack.extend(unvisited_children)
            should_yield_node = filter_func(current_node)
            if should_yield_node:
                (yield current_node)
            yield_results[current_node] = should_yield_node
[ "def", "_traverse_generic", "(", "start_node", ",", "get_parents", ",", "get_children", ",", "filter_func", "=", "None", ",", "yield_descendants_of_unyielded", "=", "False", ")", ":", "filter_func", "=", "(", "filter_func", "or", "(", "lambda", "__", ":", "True", ")", ")", "stack", "=", "deque", "(", "[", "start_node", "]", ")", "yield_results", "=", "{", "}", "while", "stack", ":", "current_node", "=", "stack", ".", "pop", "(", ")", "if", "(", "get_parents", "and", "(", "current_node", "!=", "start_node", ")", ")", ":", "parents", "=", "get_parents", "(", "current_node", ")", "if", "(", "not", "all", "(", "(", "(", "parent", "in", "yield_results", ")", "for", "parent", "in", "parents", ")", ")", ")", ":", "continue", "elif", "(", "(", "not", "yield_descendants_of_unyielded", ")", "and", "(", "not", "any", "(", "(", "yield_results", "[", "parent", "]", "for", "parent", "in", "parents", ")", ")", ")", ")", ":", "continue", "if", "(", "current_node", "not", "in", "yield_results", ")", ":", "if", "get_parents", ":", "unvisited_children", "=", "list", "(", "get_children", "(", "current_node", ")", ")", "else", ":", "unvisited_children", "=", "list", "(", "(", "child", "for", "child", "in", "get_children", "(", "current_node", ")", "if", "(", "child", "not", "in", "yield_results", ")", ")", ")", "unvisited_children", ".", "reverse", "(", ")", "stack", ".", "extend", "(", "unvisited_children", ")", "should_yield_node", "=", "filter_func", "(", "current_node", ")", "if", "should_yield_node", ":", "(", "yield", "current_node", ")", "yield_results", "[", "current_node", "]", "=", "should_yield_node" ]
helper function to avoid duplicating functionality between traverse_depth_first and traverse_topologically .
train
false
54,558
def _modified_weiszfeld_step(X, x_old):
    diff = (X - x_old)
    diff_norm = np.sqrt(np.sum((diff ** 2), axis=1))
    mask = (diff_norm >= _EPSILON)
    is_x_old_in_X = int((mask.sum() < X.shape[0]))
    diff = diff[mask]
    diff_norm = diff_norm[mask][:, np.newaxis]
    quotient_norm = linalg.norm(np.sum((diff / diff_norm), axis=0))
    if (quotient_norm > _EPSILON):
        new_direction = (np.sum((X[mask, :] / diff_norm), axis=0) / np.sum((1 / diff_norm), axis=0))
    else:
        new_direction = 1.0
        quotient_norm = 1.0
    return ((max(0.0, (1.0 - (is_x_old_in_X / quotient_norm))) * new_direction) + (min(1.0, (is_x_old_in_X / quotient_norm)) * x_old))
[ "def", "_modified_weiszfeld_step", "(", "X", ",", "x_old", ")", ":", "diff", "=", "(", "X", "-", "x_old", ")", "diff_norm", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "diff", "**", "2", ")", ",", "axis", "=", "1", ")", ")", "mask", "=", "(", "diff_norm", ">=", "_EPSILON", ")", "is_x_old_in_X", "=", "int", "(", "(", "mask", ".", "sum", "(", ")", "<", "X", ".", "shape", "[", "0", "]", ")", ")", "diff", "=", "diff", "[", "mask", "]", "diff_norm", "=", "diff_norm", "[", "mask", "]", "[", ":", ",", "np", ".", "newaxis", "]", "quotient_norm", "=", "linalg", ".", "norm", "(", "np", ".", "sum", "(", "(", "diff", "/", "diff_norm", ")", ",", "axis", "=", "0", ")", ")", "if", "(", "quotient_norm", ">", "_EPSILON", ")", ":", "new_direction", "=", "(", "np", ".", "sum", "(", "(", "X", "[", "mask", ",", ":", "]", "/", "diff_norm", ")", ",", "axis", "=", "0", ")", "/", "np", ".", "sum", "(", "(", "1", "/", "diff_norm", ")", ",", "axis", "=", "0", ")", ")", "else", ":", "new_direction", "=", "1.0", "quotient_norm", "=", "1.0", "return", "(", "(", "max", "(", "0.0", ",", "(", "1.0", "-", "(", "is_x_old_in_X", "/", "quotient_norm", ")", ")", ")", "*", "new_direction", ")", "+", "(", "min", "(", "1.0", ",", "(", "is_x_old_in_X", "/", "quotient_norm", ")", ")", "*", "x_old", ")", ")" ]
modified weiszfeld step .
train
false
54,559
def rand_uuid_hex(): return uuid.uuid4().hex
[ "def", "rand_uuid_hex", "(", ")", ":", "return", "uuid", ".", "uuid4", "(", ")", ".", "hex" ]
generate a random uuid hex string :return: a random uuid :rtype: string .
train
false
54,560
def check_packages(module, xbps_path, packages, state):
    would_be_changed = []
    for package in packages:
        (installed, updated) = query_package(module, xbps_path, package)
        if (((state in ['present', 'latest']) and (not installed)) or ((state == 'absent') and installed) or ((state == 'latest') and (not updated))):
            would_be_changed.append(package)
    if would_be_changed:
        if (state == 'absent'):
            state = 'removed'
        module.exit_json(changed=True, msg=('%s package(s) would be %s' % (len(would_be_changed), state)), packages=would_be_changed)
    else:
        module.exit_json(changed=False, msg=('package(s) already %s' % state), packages=[])
[ "def", "check_packages", "(", "module", ",", "xbps_path", ",", "packages", ",", "state", ")", ":", "would_be_changed", "=", "[", "]", "for", "package", "in", "packages", ":", "(", "installed", ",", "updated", ")", "=", "query_package", "(", "module", ",", "xbps_path", ",", "package", ")", "if", "(", "(", "(", "state", "in", "[", "'present'", ",", "'latest'", "]", ")", "and", "(", "not", "installed", ")", ")", "or", "(", "(", "state", "==", "'absent'", ")", "and", "installed", ")", "or", "(", "(", "state", "==", "'latest'", ")", "and", "(", "not", "updated", ")", ")", ")", ":", "would_be_changed", ".", "append", "(", "package", ")", "if", "would_be_changed", ":", "if", "(", "state", "==", "'absent'", ")", ":", "state", "=", "'removed'", "module", ".", "exit_json", "(", "changed", "=", "True", ",", "msg", "=", "(", "'%s package(s) would be %s'", "%", "(", "len", "(", "would_be_changed", ")", ",", "state", ")", ")", ",", "packages", "=", "would_be_changed", ")", "else", ":", "module", ".", "exit_json", "(", "changed", "=", "False", ",", "msg", "=", "(", "'package(s) already %s'", "%", "state", ")", ",", "packages", "=", "[", "]", ")" ]
returns change status of command .
train
false
54,561
def set_driver(drivers, provider, module, klass):
    if (provider in drivers):
        raise AttributeError(('Provider %s already registered' % provider))
    drivers[provider] = (module, klass)
    try:
        driver = get_driver(drivers, provider)
    except (ImportError, AttributeError):
        exp = sys.exc_info()[1]
        drivers.pop(provider)
        raise exp
    return driver
[ "def", "set_driver", "(", "drivers", ",", "provider", ",", "module", ",", "klass", ")", ":", "if", "(", "provider", "in", "drivers", ")", ":", "raise", "AttributeError", "(", "(", "'Provider %s already registered'", "%", "provider", ")", ")", "drivers", "[", "provider", "]", "=", "(", "module", ",", "klass", ")", "try", ":", "driver", "=", "get_driver", "(", "drivers", ",", "provider", ")", "except", "(", "ImportError", ",", "AttributeError", ")", ":", "exp", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "drivers", ".", "pop", "(", "provider", ")", "raise", "exp", "return", "driver" ]
sets a driver .
train
false
54,563
def memcache_set(request):
    keys = []
    for i in request['item']:
        if ('MemcacheSetRequest_Item' in i):
            key = i['MemcacheSetRequest_Item']['key']
        else:
            key = i['Item']['key']
        keys.append(truncate(key))
    return '\n'.join(keys)
[ "def", "memcache_set", "(", "request", ")", ":", "keys", "=", "[", "]", "for", "i", "in", "request", "[", "'item'", "]", ":", "if", "(", "'MemcacheSetRequest_Item'", "in", "i", ")", ":", "key", "=", "i", "[", "'MemcacheSetRequest_Item'", "]", "[", "'key'", "]", "else", ":", "key", "=", "i", "[", "'Item'", "]", "[", "'key'", "]", "keys", ".", "append", "(", "truncate", "(", "key", ")", ")", "return", "'\\n'", ".", "join", "(", "keys", ")" ]
pretty-format a memcache .
train
false
54,564
@contextfunction
def core_generic_list(context, objects, skip_group=False, tag=None):
    if tag:
        return tag(context, objects)
    request = context['request']
    response_format = 'html'
    if ('response_format' in context):
        response_format = context['response_format']
    return Markup(render_to_string('core/tags/generic_list', {'objects': objects, 'skip_group': skip_group}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "core_generic_list", "(", "context", ",", "objects", ",", "skip_group", "=", "False", ",", "tag", "=", "None", ")", ":", "if", "tag", ":", "return", "tag", "(", "context", ",", "objects", ")", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_format", "=", "context", "[", "'response_format'", "]", "return", "Markup", "(", "render_to_string", "(", "'core/tags/generic_list'", ",", "{", "'objects'", ":", "objects", ",", "'skip_group'", ":", "skip_group", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")", ")" ]
print a list of objects .
train
false
54,565
def _get_sysfs_netdev_path(pci_addr, pf_interface):
    if pf_interface:
        return ('/sys/bus/pci/devices/%s/physfn/net' % pci_addr)
    return ('/sys/bus/pci/devices/%s/net' % pci_addr)
[ "def", "_get_sysfs_netdev_path", "(", "pci_addr", ",", "pf_interface", ")", ":", "if", "pf_interface", ":", "return", "(", "'/sys/bus/pci/devices/%s/physfn/net'", "%", "pci_addr", ")", "return", "(", "'/sys/bus/pci/devices/%s/net'", "%", "pci_addr", ")" ]
get the sysfs path based on the pci address of the device .
train
false
54,566
def convert_labels(labels, transl): return [transl[l] for l in labels]
[ "def", "convert_labels", "(", "labels", ",", "transl", ")", ":", "return", "[", "transl", "[", "l", "]", "for", "l", "in", "labels", "]" ]
convert between strings and numbers .
train
false
54,567
@blueprint.route('/users')
def list_all_users():
    return _list_users()
[ "@", "blueprint", ".", "route", "(", "'/users'", ")", "def", "list_all_users", "(", ")", ":", "return", "_list_users", "(", ")" ]
return a list of all known user names .
train
false
54,568
def dmp_ground_TC(f, u, K):
    while u:
        f = dmp_TC(f, K)
        u -= 1
    return dup_TC(f, K)
[ "def", "dmp_ground_TC", "(", "f", ",", "u", ",", "K", ")", ":", "while", "u", ":", "f", "=", "dmp_TC", "(", "f", ",", "K", ")", "u", "-=", "1", "return", "dup_TC", "(", "f", ",", "K", ")" ]
return the ground trailing coefficient .
train
false
54,569
def _selective_search_IJCV_top_k(split, year, top_k):
    imdb = datasets.pascal_voc(split, year)
    imdb.roidb_handler = imdb.selective_search_IJCV_roidb
    imdb.config['top_k'] = top_k
    return imdb
[ "def", "_selective_search_IJCV_top_k", "(", "split", ",", "year", ",", "top_k", ")", ":", "imdb", "=", "datasets", ".", "pascal_voc", "(", "split", ",", "year", ")", "imdb", ".", "roidb_handler", "=", "imdb", ".", "selective_search_IJCV_roidb", "imdb", ".", "config", "[", "'top_k'", "]", "=", "top_k", "return", "imdb" ]
return an imdb that uses the top k proposals from the selective search ijcv code .
train
false
54,570
def is_file_ignored(opts, fname):
    if opts['file_ignore_regex']:
        for regex in opts['file_ignore_regex']:
            if re.search(regex, fname):
                log.debug('File matching file_ignore_regex. Skipping: {0}'.format(fname))
                return True
    if opts['file_ignore_glob']:
        for glob in opts['file_ignore_glob']:
            if fnmatch.fnmatch(fname, glob):
                log.debug('File matching file_ignore_glob. Skipping: {0}'.format(fname))
                return True
    return False
[ "def", "is_file_ignored", "(", "opts", ",", "fname", ")", ":", "if", "opts", "[", "'file_ignore_regex'", "]", ":", "for", "regex", "in", "opts", "[", "'file_ignore_regex'", "]", ":", "if", "re", ".", "search", "(", "regex", ",", "fname", ")", ":", "log", ".", "debug", "(", "'File matching file_ignore_regex. Skipping: {0}'", ".", "format", "(", "fname", ")", ")", "return", "True", "if", "opts", "[", "'file_ignore_glob'", "]", ":", "for", "glob", "in", "opts", "[", "'file_ignore_glob'", "]", ":", "if", "fnmatch", ".", "fnmatch", "(", "fname", ",", "glob", ")", ":", "log", ".", "debug", "(", "'File matching file_ignore_glob. Skipping: {0}'", ".", "format", "(", "fname", ")", ")", "return", "True", "return", "False" ]
if file_ignore_regex or file_ignore_glob were given in config .
train
true
54,572
def rs_diff(p, x):
    R = p.ring
    n = R.gens.index(x)
    p1 = R.zero
    mn = ([0] * R.ngens)
    mn[n] = 1
    mn = tuple(mn)
    for expv in p:
        if expv[n]:
            e = monomial_ldiv(expv, mn)
            p1[e] = (p[expv] * expv[n])
    return p1
[ "def", "rs_diff", "(", "p", ",", "x", ")", ":", "R", "=", "p", ".", "ring", "n", "=", "R", ".", "gens", ".", "index", "(", "x", ")", "p1", "=", "R", ".", "zero", "mn", "=", "(", "[", "0", "]", "*", "R", ".", "ngens", ")", "mn", "[", "n", "]", "=", "1", "mn", "=", "tuple", "(", "mn", ")", "for", "expv", "in", "p", ":", "if", "expv", "[", "n", "]", ":", "e", "=", "monomial_ldiv", "(", "expv", ",", "mn", ")", "p1", "[", "e", "]", "=", "(", "p", "[", "expv", "]", "*", "expv", "[", "n", "]", ")", "return", "p1" ]
return partial derivative of p with respect to x .
train
false
54,573
def follow_files(follow_paths, outstream, lastlines_dirpath=None, waitsecs=5):
    (procs, pipes) = launch_tails(follow_paths, lastlines_dirpath)
    while pipes:
        (lines, bad_pipes) = poll_tail_pipes(pipes, lastlines_dirpath, waitsecs)
        for bad in bad_pipes:
            pipes.pop(bad)
        try:
            outstream.writelines((['\n'] + lines))
            outstream.flush()
        except (IOError, OSError) as e:
            break
    snuff(procs.values())
[ "def", "follow_files", "(", "follow_paths", ",", "outstream", ",", "lastlines_dirpath", "=", "None", ",", "waitsecs", "=", "5", ")", ":", "(", "procs", ",", "pipes", ")", "=", "launch_tails", "(", "follow_paths", ",", "lastlines_dirpath", ")", "while", "pipes", ":", "(", "lines", ",", "bad_pipes", ")", "=", "poll_tail_pipes", "(", "pipes", ",", "lastlines_dirpath", ",", "waitsecs", ")", "for", "bad", "in", "bad_pipes", ":", "pipes", ".", "pop", "(", "bad", ")", "try", ":", "outstream", ".", "writelines", "(", "(", "[", "'\\n'", "]", "+", "lines", ")", ")", "outstream", ".", "flush", "(", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "break", "snuff", "(", "procs", ".", "values", "(", ")", ")" ]
launch tail on a set of files and merge their output into outstream .
train
false
54,574
def oo_select_keys_from_list(data, keys):
    if (not isinstance(data, list)):
        raise errors.AnsibleFilterError('|failed expects to filter on a list')
    if (not isinstance(keys, list)):
        raise errors.AnsibleFilterError('|failed expects first param is a list')
    retval = [oo_select_keys(item, keys) for item in data]
    return oo_flatten(retval)
[ "def", "oo_select_keys_from_list", "(", "data", ",", "keys", ")", ":", "if", "(", "not", "isinstance", "(", "data", ",", "list", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects to filter on a list'", ")", "if", "(", "not", "isinstance", "(", "keys", ",", "list", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects first param is a list'", ")", "retval", "=", "[", "oo_select_keys", "(", "item", ",", "keys", ")", "for", "item", "in", "data", "]", "return", "oo_flatten", "(", "retval", ")" ]
this returns a list .
train
false
54,576
def produce(url, ident):
    ctx = zmq.Context.instance()
    s = ctx.socket(zmq.PUSH)
    s.connect(url)
    print(('Producing %s' % ident))
    for i in range(MSGS):
        s.send((u'%s: %i' % (ident, time.time())).encode('utf8'))
        time.sleep(1)
    print(('Producer %s done' % ident))
    s.close()
[ "def", "produce", "(", "url", ",", "ident", ")", ":", "ctx", "=", "zmq", ".", "Context", ".", "instance", "(", ")", "s", "=", "ctx", ".", "socket", "(", "zmq", ".", "PUSH", ")", "s", ".", "connect", "(", "url", ")", "print", "(", "(", "'Producing %s'", "%", "ident", ")", ")", "for", "i", "in", "range", "(", "MSGS", ")", ":", "s", ".", "send", "(", "(", "u'%s: %i'", "%", "(", "ident", ",", "time", ".", "time", "(", ")", ")", ")", ".", "encode", "(", "'utf8'", ")", ")", "time", ".", "sleep", "(", "1", ")", "print", "(", "(", "'Producer %s done'", "%", "ident", ")", ")", "s", ".", "close", "(", ")" ]
produce messages .
train
false
54,577
def getLoopStartingClosest(extrusionHalfWidth, location, loop):
    closestIndex = getClosestDistanceIndexToLine(location, loop).index
    loop = getAroundLoop(closestIndex, closestIndex, loop)
    closestPoint = getClosestPointOnSegment(loop[0], loop[1], location)
    if ((abs((closestPoint - loop[0])) > extrusionHalfWidth) and (abs((closestPoint - loop[1])) > extrusionHalfWidth)):
        loop = (([closestPoint] + loop[1:]) + [loop[0]])
    elif (abs((closestPoint - loop[0])) > abs((closestPoint - loop[1]))):
        loop = (loop[1:] + [loop[0]])
    return loop
[ "def", "getLoopStartingClosest", "(", "extrusionHalfWidth", ",", "location", ",", "loop", ")", ":", "closestIndex", "=", "getClosestDistanceIndexToLine", "(", "location", ",", "loop", ")", ".", "index", "loop", "=", "getAroundLoop", "(", "closestIndex", ",", "closestIndex", ",", "loop", ")", "closestPoint", "=", "getClosestPointOnSegment", "(", "loop", "[", "0", "]", ",", "loop", "[", "1", "]", ",", "location", ")", "if", "(", "(", "abs", "(", "(", "closestPoint", "-", "loop", "[", "0", "]", ")", ")", ">", "extrusionHalfWidth", ")", "and", "(", "abs", "(", "(", "closestPoint", "-", "loop", "[", "1", "]", ")", ")", ">", "extrusionHalfWidth", ")", ")", ":", "loop", "=", "(", "(", "[", "closestPoint", "]", "+", "loop", "[", "1", ":", "]", ")", "+", "[", "loop", "[", "0", "]", "]", ")", "elif", "(", "abs", "(", "(", "closestPoint", "-", "loop", "[", "0", "]", ")", ")", ">", "abs", "(", "(", "closestPoint", "-", "loop", "[", "1", "]", ")", ")", ")", ":", "loop", "=", "(", "loop", "[", "1", ":", "]", "+", "[", "loop", "[", "0", "]", "]", ")", "return", "loop" ]
add to threads from the last location from loop .
train
false
54,578
def var_count_error(is_independent, is_plotting):
    if is_plotting:
        v = 'Plotting'
    else:
        v = 'Registering plot modes'
    if is_independent:
        (n, s) = (PlotMode._i_var_max, 'independent')
    else:
        (n, s) = (PlotMode._d_var_max, 'dependent')
    return ('%s with more than %i %s variables is not supported.' % (v, n, s))
[ "def", "var_count_error", "(", "is_independent", ",", "is_plotting", ")", ":", "if", "is_plotting", ":", "v", "=", "'Plotting'", "else", ":", "v", "=", "'Registering plot modes'", "if", "is_independent", ":", "(", "n", ",", "s", ")", "=", "(", "PlotMode", ".", "_i_var_max", ",", "'independent'", ")", "else", ":", "(", "n", ",", "s", ")", "=", "(", "PlotMode", ".", "_d_var_max", ",", "'dependent'", ")", "return", "(", "'%s with more than %i %s variables is not supported.'", "%", "(", "v", ",", "n", ",", "s", ")", ")" ]
used to format an error message which differs slightly in 4 places .
train
false
54,580
def test_scharr_v_vertical():
    (i, j) = np.mgrid[(-5):6, (-5):6]
    image = (j >= 0).astype(float)
    result = filters.scharr_v(image)
    j[(np.abs(i) == 5)] = 10000
    assert np.all((result[(j == 0)] == 1))
    assert np.all((result[(np.abs(j) > 1)] == 0))
[ "def", "test_scharr_v_vertical", "(", ")", ":", "(", "i", ",", "j", ")", "=", "np", ".", "mgrid", "[", "(", "-", "5", ")", ":", "6", ",", "(", "-", "5", ")", ":", "6", "]", "image", "=", "(", "j", ">=", "0", ")", ".", "astype", "(", "float", ")", "result", "=", "filters", ".", "scharr_v", "(", "image", ")", "j", "[", "(", "np", ".", "abs", "(", "i", ")", "==", "5", ")", "]", "=", "10000", "assert", "np", ".", "all", "(", "(", "result", "[", "(", "j", "==", "0", ")", "]", "==", "1", ")", ")", "assert", "np", ".", "all", "(", "(", "result", "[", "(", "np", ".", "abs", "(", "j", ")", ">", "1", ")", "]", "==", "0", ")", ")" ]
vertical scharr on an edge should be a vertical line .
train
false
54,581
def _get_lines_from_file(filename, lineno, context_lines):
    try:
        source = open(filename).readlines()
        lower_bound = max(0, (lineno - context_lines))
        upper_bound = (lineno + context_lines)
        pre_context = [line.strip('\n') for line in source[lower_bound:lineno]]
        context_line = source[lineno].strip('\n')
        post_context = [line.strip('\n') for line in source[(lineno + 1):upper_bound]]
        return (lower_bound, pre_context, context_line, post_context)
    except (OSError, IOError):
        return (None, [], None, [])
[ "def", "_get_lines_from_file", "(", "filename", ",", "lineno", ",", "context_lines", ")", ":", "try", ":", "source", "=", "open", "(", "filename", ")", ".", "readlines", "(", ")", "lower_bound", "=", "max", "(", "0", ",", "(", "lineno", "-", "context_lines", ")", ")", "upper_bound", "=", "(", "lineno", "+", "context_lines", ")", "pre_context", "=", "[", "line", ".", "strip", "(", "'\\n'", ")", "for", "line", "in", "source", "[", "lower_bound", ":", "lineno", "]", "]", "context_line", "=", "source", "[", "lineno", "]", ".", "strip", "(", "'\\n'", ")", "post_context", "=", "[", "line", ".", "strip", "(", "'\\n'", ")", "for", "line", "in", "source", "[", "(", "lineno", "+", "1", ")", ":", "upper_bound", "]", "]", "return", "(", "lower_bound", ",", "pre_context", ",", "context_line", ",", "post_context", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "(", "None", ",", "[", "]", ",", "None", ",", "[", "]", ")" ]
returns context_lines before and after lineno from file .
train
false
54,582
def download_libxml2(dest_dir, version=None):
    version_re = re.compile('^LATEST_LIBXML2_IS_(.*)$')
    filename = 'libxml2-%s.tar.gz'
    return download_library(dest_dir, LIBXML2_LOCATION, 'libxml2', version_re, filename, version=version)
[ "def", "download_libxml2", "(", "dest_dir", ",", "version", "=", "None", ")", ":", "version_re", "=", "re", ".", "compile", "(", "'^LATEST_LIBXML2_IS_(.*)$'", ")", "filename", "=", "'libxml2-%s.tar.gz'", "return", "download_library", "(", "dest_dir", ",", "LIBXML2_LOCATION", ",", "'libxml2'", ",", "version_re", ",", "filename", ",", "version", "=", "version", ")" ]
downloads libxml2 .
train
false
54,583
def enqueue_push_course_update(update, course_key):
    if (push_notification_enabled() and update.get('push_notification_selected')):
        course = modulestore().get_course(course_key)
        if course:
            push_course_update_task.delay(unicode(course_key), course.clean_id(padding_char='_'), course.display_name)
[ "def", "enqueue_push_course_update", "(", "update", ",", "course_key", ")", ":", "if", "(", "push_notification_enabled", "(", ")", "and", "update", ".", "get", "(", "'push_notification_selected'", ")", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ")", "if", "course", ":", "push_course_update_task", ".", "delay", "(", "unicode", "(", "course_key", ")", ",", "course", ".", "clean_id", "(", "padding_char", "=", "'_'", ")", ",", "course", ".", "display_name", ")" ]
enqueues a task for push notification for the given update for the given course if (1) the feature is enabled and (2) push_notification is selected for the update .
train
false
54,584
def fake_os_walk(paths):
    paths_dict = dict(paths)

    def os_walk(top, topdown=True):
        (dirs, nondirs) = paths_dict[top]
        (yield (top, dirs, nondirs))
        for name in dirs:
            new_path = '/'.join([top, name])
            for x in os_walk(new_path, topdown):
                (yield x)
    return os_walk
[ "def", "fake_os_walk", "(", "paths", ")", ":", "paths_dict", "=", "dict", "(", "paths", ")", "def", "os_walk", "(", "top", ",", "topdown", "=", "True", ")", ":", "(", "dirs", ",", "nondirs", ")", "=", "paths_dict", "[", "top", "]", "(", "yield", "(", "top", ",", "dirs", ",", "nondirs", ")", ")", "for", "name", "in", "dirs", ":", "new_path", "=", "'/'", ".", "join", "(", "[", "top", ",", "name", "]", ")", "for", "x", "in", "os_walk", "(", "new_path", ",", "topdown", ")", ":", "(", "yield", "x", ")", "return", "os_walk" ]
helper function for mocking os .
train
false
54,587
def random_selection(a, start, end, i):
    if (start < end):
        p = choosePivot(start, end)
        (a[start], a[p]) = (a[p], a[start])
        j = partition(a, start, end)
        if (j == i):
            return a[i]
        if (j < i):
            return random_selection(a, (j + 1), end, i)
        else:
            return random_selection(a, start, (j - 1), i)
    else:
        return a[start]
[ "def", "random_selection", "(", "a", ",", "start", ",", "end", ",", "i", ")", ":", "if", "(", "start", "<", "end", ")", ":", "p", "=", "choosePivot", "(", "start", ",", "end", ")", "(", "a", "[", "start", "]", ",", "a", "[", "p", "]", ")", "=", "(", "a", "[", "p", "]", ",", "a", "[", "start", "]", ")", "j", "=", "partition", "(", "a", ",", "start", ",", "end", ")", "if", "(", "j", "==", "i", ")", ":", "return", "a", "[", "i", "]", "if", "(", "j", "<", "i", ")", ":", "return", "random_selection", "(", "a", ",", "(", "j", "+", "1", ")", ",", "end", ",", "i", ")", "else", ":", "return", "random_selection", "(", "a", ",", "start", ",", "(", "j", "-", "1", ")", ",", "i", ")", "else", ":", "return", "a", "[", "start", "]" ]
returns the ith order statistic in the array a in linear time .
train
false
54,588
def encrypt(plaintext):
    salt = _make_salt()
    return _encrypt(salt, plaintext, g.tracking_secret)
[ "def", "encrypt", "(", "plaintext", ")", ":", "salt", "=", "_make_salt", "(", ")", "return", "_encrypt", "(", "salt", ",", "plaintext", ",", "g", ".", "tracking_secret", ")" ]
return the message plaintext encrypted .
train
false
54,589
def popen_nonblock(*args, **kwargs):
    proc = popen_sp(*args, **kwargs)
    if proc.stdin:
        proc.stdin = pipebuf.NonBlockBufferedWriter(proc.stdin)
    if proc.stdout:
        proc.stdout = pipebuf.NonBlockBufferedReader(proc.stdout)
    if proc.stderr:
        proc.stderr = pipebuf.NonBlockBufferedReader(proc.stderr)
    return proc
[ "def", "popen_nonblock", "(", "*", "args", ",", "**", "kwargs", ")", ":", "proc", "=", "popen_sp", "(", "*", "args", ",", "**", "kwargs", ")", "if", "proc", ".", "stdin", ":", "proc", ".", "stdin", "=", "pipebuf", ".", "NonBlockBufferedWriter", "(", "proc", ".", "stdin", ")", "if", "proc", ".", "stdout", ":", "proc", ".", "stdout", "=", "pipebuf", ".", "NonBlockBufferedReader", "(", "proc", ".", "stdout", ")", "if", "proc", ".", "stderr", ":", "proc", ".", "stderr", "=", "pipebuf", ".", "NonBlockBufferedReader", "(", "proc", ".", "stderr", ")", "return", "proc" ]
create a process in the same way as popen_sp .
train
true
54,591
def service_status(hostname=None, service=None, **kwargs):
    if (not hostname):
        raise CommandExecutionError('Missing hostname parameter')
    if (not service):
        raise CommandExecutionError('Missing service parameter')
    target = 'service'
    numeric = kwargs.get('numeric')
    data = _status_query(target, hostname, service=service, enumerate=numeric)
    ret = {'result': data['result']}
    if ret['result']:
        ret['status'] = data.get('json_data', {}).get('data', {}).get(target, {}).get('status', (((not numeric) and 'Unknown') or 2))
    else:
        ret['error'] = data['error']
    return ret
[ "def", "service_status", "(", "hostname", "=", "None", ",", "service", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "not", "hostname", ")", ":", "raise", "CommandExecutionError", "(", "'Missing hostname parameter'", ")", "if", "(", "not", "service", ")", ":", "raise", "CommandExecutionError", "(", "'Missing service parameter'", ")", "target", "=", "'service'", "numeric", "=", "kwargs", ".", "get", "(", "'numeric'", ")", "data", "=", "_status_query", "(", "target", ",", "hostname", ",", "service", "=", "service", ",", "enumerate", "=", "numeric", ")", "ret", "=", "{", "'result'", ":", "data", "[", "'result'", "]", "}", "if", "ret", "[", "'result'", "]", ":", "ret", "[", "'status'", "]", "=", "data", ".", "get", "(", "'json_data'", ",", "{", "}", ")", ".", "get", "(", "'data'", ",", "{", "}", ")", ".", "get", "(", "target", ",", "{", "}", ")", ".", "get", "(", "'status'", ",", "(", "(", "(", "not", "numeric", ")", "and", "'Unknown'", ")", "or", "2", ")", ")", "else", ":", "ret", "[", "'error'", "]", "=", "data", "[", "'error'", "]", "return", "ret" ]
check status of a particular service on a host on it in nagios .
train
true
54,592
def command_show(problem): print problem.get_html()
[ "def", "command_show", "(", "problem", ")", ":", "print", "problem", ".", "get_html", "(", ")" ]
display the text for this problem .
train
false
54,593
def obfuscatePowershellScript(code):
    import re
    newCode = code
    newCode = remove_comments(newCode)
    if ('function Invoke-ReflectivePEInjection' in newCode):
        newCode = newCode.replace("$TypeBuilder.DefineLiteral('IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE', [UInt16] 0x0040) | Out-Null", "$TypeBuilder.DefineLiteral('IMAGE_DLL_CHARACTERIS'+'TICS_DYNAMIC_BASE', [UInt16] 0x0040) | Out-Null")
    return newCode
[ "def", "obfuscatePowershellScript", "(", "code", ")", ":", "import", "re", "newCode", "=", "code", "newCode", "=", "remove_comments", "(", "newCode", ")", "if", "(", "'function Invoke-ReflectivePEInjection'", "in", "newCode", ")", ":", "newCode", "=", "newCode", ".", "replace", "(", "\"$TypeBuilder.DefineLiteral('IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE', [UInt16] 0x0040) | Out-Null\"", ",", "\"$TypeBuilder.DefineLiteral('IMAGE_DLL_CHARACTERIS'+'TICS_DYNAMIC_BASE', [UInt16] 0x0040) | Out-Null\"", ")", "return", "newCode" ]
try to clean powershell script .
train
false
54,594
def factory(type): return ArrowFactory(type)
[ "def", "factory", "(", "type", ")", ":", "return", "ArrowFactory", "(", "type", ")" ]
returns an :class: .
train
false
54,595
def errors_response(status, errors):
    document = {'errors': errors, 'jsonapi': {'version': JSONAPI_VERSION}}
    return (jsonpify(document), status)
[ "def", "errors_response", "(", "status", ",", "errors", ")", ":", "document", "=", "{", "'errors'", ":", "errors", ",", "'jsonapi'", ":", "{", "'version'", ":", "JSONAPI_VERSION", "}", "}", "return", "(", "jsonpify", "(", "document", ")", ",", "status", ")" ]
return an error response with multiple errors .
train
false
54,596
def dmp_integrate_in(f, m, j, u, K):
    if ((j < 0) or (j > u)):
        raise IndexError(('0 <= j <= %s expected, got %s' % (u, j)))
    return _rec_integrate_in(f, m, u, 0, j, K)
[ "def", "dmp_integrate_in", "(", "f", ",", "m", ",", "j", ",", "u", ",", "K", ")", ":", "if", "(", "(", "j", "<", "0", ")", "or", "(", "j", ">", "u", ")", ")", ":", "raise", "IndexError", "(", "(", "'0 <= j <= u expected, got %s'", "%", "(", "u", ",", "j", ")", ")", ")", "return", "_rec_integrate_in", "(", "f", ",", "m", ",", "u", ",", "0", ",", "j", ",", "K", ")" ]
computes the indefinite integral of f in x_j in k[x] .
train
false
54,597
def dump_thread_stack():
    threads = threading.enumerate()
    output_file = (PROFILING_OUTPUT_FMT % get_filename_fmt())
    data = {}
    for (thread, frame) in sys._current_frames().items():
        trace = traceback.format_stack(frame)
        data[('%x' % thread)] = {'traceback': trace, 'name': get_thread_name(threads, thread)}
    json.dump(data, file(output_file, 'w'), indent=4)
[ "def", "dump_thread_stack", "(", ")", ":", "threads", "=", "threading", ".", "enumerate", "(", ")", "output_file", "=", "(", "PROFILING_OUTPUT_FMT", "%", "get_filename_fmt", "(", ")", ")", "data", "=", "{", "}", "for", "(", "thread", ",", "frame", ")", "in", "sys", ".", "_current_frames", "(", ")", ".", "items", "(", ")", ":", "trace", "=", "traceback", ".", "format_stack", "(", "frame", ")", "data", "[", "(", "'%x'", "%", "thread", ")", "]", "=", "{", "'traceback'", ":", "trace", ",", "'name'", ":", "get_thread_name", "(", "threads", ",", "thread", ")", "}", "json", ".", "dump", "(", "data", ",", "file", "(", "output_file", ",", "'w'", ")", ",", "indent", "=", "4", ")" ]
dumps all thread stacks to a file .
train
false
54,598
def get_injected_network_template(network_info, use_ipv6=CONF.use_ipv6, template=CONF.injected_network_template):
    if (network_info is None):
        return None
    if hasattr(network_info, 'legacy'):
        network_info = network_info.legacy()
    nets = []
    ifc_num = (-1)
    have_injected_networks = False
    for (network_ref, mapping) in network_info:
        ifc_num += 1
        if (not network_ref['injected']):
            continue
        have_injected_networks = True
        address = mapping['ips'][0]['ip']
        netmask = mapping['ips'][0]['netmask']
        address_v6 = None
        gateway_v6 = None
        netmask_v6 = None
        if use_ipv6:
            address_v6 = mapping['ip6s'][0]['ip']
            netmask_v6 = mapping['ip6s'][0]['netmask']
            gateway_v6 = mapping['gateway_v6']
        net_info = {'name': ('eth%d' % ifc_num), 'address': address, 'netmask': netmask, 'gateway': mapping['gateway'], 'broadcast': mapping['broadcast'], 'dns': ' '.join(mapping['dns']), 'address_v6': address_v6, 'gateway_v6': gateway_v6, 'netmask_v6': netmask_v6}
        nets.append(net_info)
    if (have_injected_networks is False):
        return None
    if (not template):
        return None
    _late_load_cheetah()
    ifc_template = open(template).read()
    return str(Template(ifc_template, searchList=[{'interfaces': nets, 'use_ipv6': use_ipv6}]))
[ "def", "get_injected_network_template", "(", "network_info", ",", "use_ipv6", "=", "CONF", ".", "use_ipv6", ",", "template", "=", "CONF", ".", "injected_network_template", ")", ":", "if", "(", "network_info", "is", "None", ")", ":", "return", "None", "if", "hasattr", "(", "network_info", ",", "'legacy'", ")", ":", "network_info", "=", "network_info", ".", "legacy", "(", ")", "nets", "=", "[", "]", "ifc_num", "=", "(", "-", "1", ")", "have_injected_networks", "=", "False", "for", "(", "network_ref", ",", "mapping", ")", "in", "network_info", ":", "ifc_num", "+=", "1", "if", "(", "not", "network_ref", "[", "'injected'", "]", ")", ":", "continue", "have_injected_networks", "=", "True", "address", "=", "mapping", "[", "'ips'", "]", "[", "0", "]", "[", "'ip'", "]", "netmask", "=", "mapping", "[", "'ips'", "]", "[", "0", "]", "[", "'netmask'", "]", "address_v6", "=", "None", "gateway_v6", "=", "None", "netmask_v6", "=", "None", "if", "use_ipv6", ":", "address_v6", "=", "mapping", "[", "'ip6s'", "]", "[", "0", "]", "[", "'ip'", "]", "netmask_v6", "=", "mapping", "[", "'ip6s'", "]", "[", "0", "]", "[", "'netmask'", "]", "gateway_v6", "=", "mapping", "[", "'gateway_v6'", "]", "net_info", "=", "{", "'name'", ":", "(", "'eth%d'", "%", "ifc_num", ")", ",", "'address'", ":", "address", ",", "'netmask'", ":", "netmask", ",", "'gateway'", ":", "mapping", "[", "'gateway'", "]", ",", "'broadcast'", ":", "mapping", "[", "'broadcast'", "]", ",", "'dns'", ":", "' '", ".", "join", "(", "mapping", "[", "'dns'", "]", ")", ",", "'address_v6'", ":", "address_v6", ",", "'gateway_v6'", ":", "gateway_v6", ",", "'netmask_v6'", ":", "netmask_v6", "}", "nets", ".", "append", "(", "net_info", ")", "if", "(", "have_injected_networks", "is", "False", ")", ":", "return", "None", "if", "(", "not", "template", ")", ":", "return", "None", "_late_load_cheetah", "(", ")", "ifc_template", "=", "open", "(", "template", ")", ".", "read", "(", ")", "return", "str", "(", "Template", "(", "ifc_template", ",", "searchList", "=", "[", "{", "'interfaces'", ":", "nets", ",", "'use_ipv6'", ":", "use_ipv6", "}", "]", ")", ")" ]
return a rendered network template for the given network_info .
train
false
54,599
@pytest.fixture
def reset_standarddir(no_cachedir_tag):
    standarddir.init(None)
    (yield)
    standarddir.init(None)
[ "@", "pytest", ".", "fixture", "def", "reset_standarddir", "(", "no_cachedir_tag", ")", ":", "standarddir", ".", "init", "(", "None", ")", "(", "yield", ")", "standarddir", ".", "init", "(", "None", ")" ]
clean up standarddir arguments before and after each test .
train
false
54,600
def from_current_timezone(value):
    if (settings.USE_TZ and (value is not None) and timezone.is_naive(value)):
        current_timezone = timezone.get_current_timezone()
        try:
            return timezone.make_aware(value, current_timezone)
        except Exception:
            raise ValidationError((_(u"%(datetime)s couldn't be interpreted in time zone %(current_timezone)s; it may be ambiguous or it may not exist.") % {u'datetime': value, u'current_timezone': current_timezone}))
    return value
[ "def", "from_current_timezone", "(", "value", ")", ":", "if", "(", "settings", ".", "USE_TZ", "and", "(", "value", "is", "not", "None", ")", "and", "timezone", ".", "is_naive", "(", "value", ")", ")", ":", "current_timezone", "=", "timezone", ".", "get_current_timezone", "(", ")", "try", ":", "return", "timezone", ".", "make_aware", "(", "value", ",", "current_timezone", ")", "except", "Exception", ":", "raise", "ValidationError", "(", "(", "_", "(", "u\"%(datetime)s couldn't be interpreted in time zone %(current_timezone)s; it may be ambiguous or it may not exist.\"", ")", "%", "{", "u'datetime'", ":", "value", ",", "u'current_timezone'", ":", "current_timezone", "}", ")", ")", "return", "value" ]
when time zone support is enabled .
train
false
54,602
def load_lang_conf():
    if osp.isfile(LANG_FILE):
        with open(LANG_FILE, 'r') as f:
            lang = f.read()
    else:
        lang = get_interface_language()
        save_lang_conf(lang)
    if (lang.strip('\n') in DISABLED_LANGUAGES):
        lang = DEFAULT_LANGUAGE
        save_lang_conf(lang)
    return lang
[ "def", "load_lang_conf", "(", ")", ":", "if", "osp", ".", "isfile", "(", "LANG_FILE", ")", ":", "with", "open", "(", "LANG_FILE", ",", "'r'", ")", "as", "f", ":", "lang", "=", "f", ".", "read", "(", ")", "else", ":", "lang", "=", "get_interface_language", "(", ")", "save_lang_conf", "(", "lang", ")", "if", "(", "lang", ".", "strip", "(", "'\\n'", ")", "in", "DISABLED_LANGUAGES", ")", ":", "lang", "=", "DEFAULT_LANGUAGE", "save_lang_conf", "(", "lang", ")", "return", "lang" ]
load language setting from language config file if it exists .
train
true
54,603
def badDecorator(fn):
    def nameCollision(*args, **kwargs):
        return fn(*args, **kwargs)
    return nameCollision
[ "def", "badDecorator", "(", "fn", ")", ":", "def", "nameCollision", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "fn", "(", "*", "args", ",", "**", "kwargs", ")", "return", "nameCollision" ]
decorate a function without preserving the name of the original function .
train
false
54,605
def http_date_to_dt(http_date, obs_date=False):
    if (not obs_date):
        return strptime(http_date, '%a, %d %b %Y %H:%M:%S %Z')
    time_formats = ('%a, %d %b %Y %H:%M:%S %Z', '%a, %d-%b-%Y %H:%M:%S %Z', '%A, %d-%b-%y %H:%M:%S %Z', '%a %b %d %H:%M:%S %Y')
    for time_format in time_formats:
        try:
            return strptime(http_date, time_format)
        except ValueError:
            continue
    raise ValueError(('time data %r does not match known formats' % http_date))
[ "def", "http_date_to_dt", "(", "http_date", ",", "obs_date", "=", "False", ")", ":", "if", "(", "not", "obs_date", ")", ":", "return", "strptime", "(", "http_date", ",", "'%a, %d %b %Y %H:%M:%S %Z'", ")", "time_formats", "=", "(", "'%a, %d %b %Y %H:%M:%S %Z'", ",", "'%a, %d-%b-%Y %H:%M:%S %Z'", ",", "'%A, %d-%b-%y %H:%M:%S %Z'", ",", "'%a %b %d %H:%M:%S %Y'", ")", "for", "time_format", "in", "time_formats", ":", "try", ":", "return", "strptime", "(", "http_date", ",", "time_format", ")", "except", "ValueError", ":", "continue", "raise", "ValueError", "(", "(", "'time data %r does not match known formats'", "%", "http_date", ")", ")" ]
converts an http date string to a datetime instance .
train
false
54,606
def task_enable_flocker_control(distribution, action='start'):
    validate_start_action(action)
    if is_systemd_distribution(distribution):
        return sequence([run_from_args(['systemctl', 'enable', 'flocker-control']), run_from_args(['systemctl', action.lower(), 'flocker-control'])])
    elif is_ubuntu(distribution):
        return sequence([put(path='/etc/init/flocker-control.override', content=dedent(' start on runlevel [2345]\n stop on runlevel [016]\n ')), run("echo 'flocker-control-api\t4523/tcp\t\t\t# Flocker Control API port' >> /etc/services"), run("echo 'flocker-control-agent\t4524/tcp\t\t\t# Flocker Control Agent port' >> /etc/services"), run_from_args(['service', 'flocker-control', action.lower()])])
    else:
        raise DistributionNotSupported(distribution=distribution)
[ "def", "task_enable_flocker_control", "(", "distribution", ",", "action", "=", "'start'", ")", ":", "validate_start_action", "(", "action", ")", "if", "is_systemd_distribution", "(", "distribution", ")", ":", "return", "sequence", "(", "[", "run_from_args", "(", "[", "'systemctl'", ",", "'enable'", ",", "'flocker-control'", "]", ")", ",", "run_from_args", "(", "[", "'systemctl'", ",", "action", ".", "lower", "(", ")", ",", "'flocker-control'", "]", ")", "]", ")", "elif", "is_ubuntu", "(", "distribution", ")", ":", "return", "sequence", "(", "[", "put", "(", "path", "=", "'/etc/init/flocker-control.override'", ",", "content", "=", "dedent", "(", "' start on runlevel [2345]\\n stop on runlevel [016]\\n '", ")", ")", ",", "run", "(", "\"echo 'flocker-control-api DCTB 4523/tcp DCTB DCTB DCTB # Flocker Control API port' >> /etc/services\"", ")", ",", "run", "(", "\"echo 'flocker-control-agent DCTB 4524/tcp DCTB DCTB DCTB # Flocker Control Agent port' >> /etc/services\"", ")", ",", "run_from_args", "(", "[", "'service'", ",", "'flocker-control'", ",", "action", ".", "lower", "(", ")", "]", ")", "]", ")", "else", ":", "raise", "DistributionNotSupported", "(", "distribution", "=", "distribution", ")" ]
enable flocker-control service .
train
false
54,607
def autocorr(s, axis=(-1)):
    N = s.shape[axis]
    S = np.fft.fft(s, n=((2 * N) - 1), axis=axis)
    sxx = np.fft.ifft((S * S.conjugate()), axis=axis).real[:N]
    return (sxx / N)
[ "def", "autocorr", "(", "s", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "N", "=", "s", ".", "shape", "[", "axis", "]", "S", "=", "np", ".", "fft", ".", "fft", "(", "s", ",", "n", "=", "(", "(", "2", "*", "N", ")", "-", "1", ")", ",", "axis", "=", "axis", ")", "sxx", "=", "np", ".", "fft", ".", "ifft", "(", "(", "S", "*", "S", ".", "conjugate", "(", ")", ")", ",", "axis", "=", "axis", ")", ".", "real", "[", ":", "N", "]", "return", "(", "sxx", "/", "N", ")" ]
returns the autocorrelation of signal s at all lags .
train
false
54,608
def test_cnn_sample_wt_fit():
    cnn = CondensedNearestNeighbour(random_state=RND_SEED)
    assert_raises(RuntimeError, cnn.sample, X, Y)
[ "def", "test_cnn_sample_wt_fit", "(", ")", ":", "cnn", "=", "CondensedNearestNeighbour", "(", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "cnn", ".", "sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised when sample is called before fitting .
train
false
54,610
def delete_rax_scaling_group(args):
    print ("--- Cleaning Autoscale Groups matching '%s'" % args.match_re)
    for region in pyrax.identity.services.autoscale.regions:
        asg = pyrax.connect_to_autoscale(region=region)
        for group in rax_list_iterator(asg):
            if re.search(args.match_re, group.name):
                group.manager._delete = _force_delete_rax_scaling_group(group.manager)
                prompt_and_delete(group, ('Delete matching %s? [y/n]: ' % group), args.assumeyes)
[ "def", "delete_rax_scaling_group", "(", "args", ")", ":", "print", "(", "\"--- Cleaning Autoscale Groups matching '%s'\"", "%", "args", ".", "match_re", ")", "for", "region", "in", "pyrax", ".", "identity", ".", "services", ".", "autoscale", ".", "regions", ":", "asg", "=", "pyrax", ".", "connect_to_autoscale", "(", "region", "=", "region", ")", "for", "group", "in", "rax_list_iterator", "(", "asg", ")", ":", "if", "re", ".", "search", "(", "args", ".", "match_re", ",", "group", ".", "name", ")", ":", "group", ".", "manager", ".", "_delete", "=", "_force_delete_rax_scaling_group", "(", "group", ".", "manager", ")", "prompt_and_delete", "(", "group", ",", "(", "'Delete matching %s? [y/n]: '", "%", "group", ")", ",", "args", ".", "assumeyes", ")" ]
function for deleting autoscale groups .
train
false
54,612
def _qualNameWalker(qualName):
    (yield (qualName, []))
    qualParts = qualName.split('.')
    for index in range(1, len(qualParts)):
        (yield ('.'.join(qualParts[:(- index)]), qualParts[(- index):]))
[ "def", "_qualNameWalker", "(", "qualName", ")", ":", "(", "yield", "(", "qualName", ",", "[", "]", ")", ")", "qualParts", "=", "qualName", ".", "split", "(", "'.'", ")", "for", "index", "in", "range", "(", "1", ",", "len", "(", "qualParts", ")", ")", ":", "(", "yield", "(", "'.'", ".", "join", "(", "qualParts", "[", ":", "(", "-", "index", ")", "]", ")", ",", "qualParts", "[", "(", "-", "index", ")", ":", "]", ")", ")" ]
given a python qualified name .
train
false
54,613
@pytest.mark.parametrize('specialchars, count_char', [(' abcde ', ' '), (' aaaaaaaaaa', 'a'), ('\xc4\x81\xc3\xa9\xc4\xa9\xc3\xb8\xc3\xb8\xc3\xb8\xc3\xb8\xc3\xb8\xc3\xb8\xc3\xbc', u'\xf8')])
@pytest.mark.django_db
def test_clean_specialchars_unique(specialchars, count_char):
    form_data = {'code': 'foo', 'fullname': 'Foo', 'checkstyle': 'foo', 'nplurals': '2', 'specialchars': specialchars}
    form = LanguageForm(form_data)
    assert form.is_valid()
    assert (form.cleaned_data['specialchars'].count(count_char) == 1)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'specialchars, count_char'", ",", "[", "(", "' abcde '", ",", "' '", ")", ",", "(", "' aaaaaaaaaa'", ",", "'a'", ")", ",", "(", "'\\xc4\\x81\\xc3\\xa9\\xc4\\xa9\\xc3\\xb8\\xc3\\xb8\\xc3\\xb8\\xc3\\xb8\\xc3\\xb8\\xc3\\xb8\\xc3\\xbc'", ",", "u'\\xf8'", ")", "]", ")", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_clean_specialchars_unique", "(", "specialchars", ",", "count_char", ")", ":", "form_data", "=", "{", "'code'", ":", "'foo'", ",", "'fullname'", ":", "'Foo'", ",", "'checkstyle'", ":", "'foo'", ",", "'nplurals'", ":", "'2'", ",", "'specialchars'", ":", "specialchars", "}", "form", "=", "LanguageForm", "(", "form_data", ")", "assert", "form", ".", "is_valid", "(", ")", "assert", "(", "form", ".", "cleaned_data", "[", "'specialchars'", "]", ".", "count", "(", "count_char", ")", "==", "1", ")" ]
tests special characters are unique .
train
false
54,614
@treeio_login_required
@handle_response_format
def event_edit(request, event_id, response_format='html'):
    event = get_object_or_404(Event, pk=event_id)
    if (not request.user.profile.has_permission(event, mode='w')):
        return user_denied(request, message="You don't have access to this Event")
    if request.POST:
        if ('cancel' not in request.POST):
            form = EventForm(request.user.profile, None, None, request.POST, instance=event)
            if form.is_valid():
                event = form.save()
                return HttpResponseRedirect(reverse('events_event_view', args=[event.id]))
        else:
            return HttpResponseRedirect(reverse('events'))
    else:
        form = EventForm(request.user.profile, instance=event)
    return render_to_response('events/event_edit', {'event': event, 'form': form}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "event_edit", "(", "request", ",", "event_id", ",", "response_format", "=", "'html'", ")", ":", "event", "=", "get_object_or_404", "(", "Event", ",", "pk", "=", "event_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "event", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Event\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "EventForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "None", ",", "request", ".", "POST", ",", "instance", "=", "event", ")", "if", "form", ".", "is_valid", "(", ")", ":", "event", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events_event_view'", ",", "args", "=", "[", "event", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events'", ")", ")", "else", ":", "form", "=", "EventForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "event", ")", "return", "render_to_response", "(", "'events/event_edit'", ",", "{", "'event'", ":", "event", ",", "'form'", ":", "form", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
event edit .
train
false
54,615
def is_media_request(request):
    parsed_media_url = urlparse(settings.MEDIA_URL)
    if request.path_info.startswith(parsed_media_url.path):
        if parsed_media_url.netloc:
            if (request.get_host() == parsed_media_url.netloc):
                return True
        else:
            return True
    return False
[ "def", "is_media_request", "(", "request", ")", ":", "parsed_media_url", "=", "urlparse", "(", "settings", ".", "MEDIA_URL", ")", "if", "request", ".", "path_info", ".", "startswith", "(", "parsed_media_url", ".", "path", ")", ":", "if", "parsed_media_url", ".", "netloc", ":", "if", "(", "request", ".", "get_host", "(", ")", "==", "parsed_media_url", ".", "netloc", ")", ":", "return", "True", "else", ":", "return", "True", "return", "False" ]
check if a request is a media request .
train
false
54,616
def test_read_bin_lush_matrix_ubyte_scalar():
    path = (example_bin_lush_path + 'ubyte_scalar.lushbin')
    result = read_bin_lush_matrix(path)
    assert (str(result.dtype) == 'uint8')
    assert (len(result.shape) == 3)
    assert (result.shape[0] == 1)
    assert (result.shape[1] == 1)
    assert (result.shape[2] == 1)
    assert (result[(0, 0)] == 12)
[ "def", "test_read_bin_lush_matrix_ubyte_scalar", "(", ")", ":", "path", "=", "(", "example_bin_lush_path", "+", "'ubyte_scalar.lushbin'", ")", "result", "=", "read_bin_lush_matrix", "(", "path", ")", "assert", "(", "str", "(", "result", ".", "dtype", ")", "==", "'uint8'", ")", "assert", "(", "len", "(", "result", ".", "shape", ")", "==", "3", ")", "assert", "(", "result", ".", "shape", "[", "0", "]", "==", "1", ")", "assert", "(", "result", ".", "shape", "[", "1", "]", "==", "1", ")", "assert", "(", "result", ".", "shape", "[", "1", "]", "==", "1", ")", "assert", "(", "result", "[", "(", "0", ",", "0", ")", "]", "==", "12", ")" ]
read data from a lush file with uint8 data .
train
false
54,617
def finite_diff_kauers(sum):
    function = sum.function
    for l in sum.limits:
        function = function.subs(l[0], (l[(-1)] + 1))
    return function
[ "def", "finite_diff_kauers", "(", "sum", ")", ":", "function", "=", "sum", ".", "function", "for", "l", "in", "sum", ".", "limits", ":", "function", "=", "function", ".", "subs", "(", "l", "[", "0", "]", ",", "(", "l", "[", "(", "-", "1", ")", "]", "+", "1", ")", ")", "return", "function" ]
takes as input a sum instance and returns the difference between the sum with the upper index incremented by 1 and the original sum .
train
false
54,618
def no_afni():
    if (Info.version() is None):
        return True
    return False
[ "def", "no_afni", "(", ")", ":", "if", "(", "Info", ".", "version", "(", ")", "is", "None", ")", ":", "return", "True", "return", "False" ]
checks whether afni is unavailable , returning true when no version is found .
train
false
54,620
def seed(seed=None): get_random_state().seed(seed)
[ "def", "seed", "(", "seed", "=", "None", ")", ":", "get_random_state", "(", ")", ".", "seed", "(", "seed", ")" ]
resets the state of the random number generator with a seed .
train
false
54,623
def _check_bem_size(surfs): if (surfs[0]['np'] > 10000): msg = ('The bem surface has %s data points. 5120 (ico grade=4) should be enough.' % surfs[0]['np']) if (len(surfs) == 3): msg += ' Dense 3-layer bems may not save properly.' warn(msg)
[ "def", "_check_bem_size", "(", "surfs", ")", ":", "if", "(", "surfs", "[", "0", "]", "[", "'np'", "]", ">", "10000", ")", ":", "msg", "=", "(", "'The bem surface has %s data points. 5120 (ico grade=4) should be enough.'", "%", "surfs", "[", "0", "]", "[", "'np'", "]", ")", "if", "(", "len", "(", "surfs", ")", "==", "3", ")", ":", "msg", "+=", "' Dense 3-layer bems may not save properly.'", "warn", "(", "msg", ")" ]
helper for checking bem surface sizes .
train
false
54,624
def url_name_for_block(block): return block.location.name
[ "def", "url_name_for_block", "(", "block", ")", ":", "return", "block", ".", "location", ".", "name" ]
given a block , return the url_name of its location .
train
false
54,626
def generate_reset_password_token(user): password_hash = (md5(user.password) if user.password else None) data = [str(user.id), password_hash] return _security.reset_serializer.dumps(data)
[ "def", "generate_reset_password_token", "(", "user", ")", ":", "password_hash", "=", "(", "md5", "(", "user", ".", "password", ")", "if", "user", ".", "password", "else", "None", ")", "data", "=", "[", "str", "(", "user", ".", "id", ")", ",", "password_hash", "]", "return", "_security", ".", "reset_serializer", ".", "dumps", "(", "data", ")" ]
generates a unique reset password token for the specified user .
train
true
54,627
def rrggbb_to_triplet(color): rgbtuple = _namedict.get(color) if (rgbtuple is None): if (color[0] != '#'): raise BadColor(color) red = color[1:3] green = color[3:5] blue = color[5:7] rgbtuple = (int(red, 16), int(green, 16), int(blue, 16)) _namedict[color] = rgbtuple return rgbtuple
[ "def", "rrggbb_to_triplet", "(", "color", ")", ":", "rgbtuple", "=", "_namedict", ".", "get", "(", "color", ")", "if", "(", "rgbtuple", "is", "None", ")", ":", "if", "(", "color", "[", "0", "]", "!=", "'#'", ")", ":", "raise", "BadColor", "(", "color", ")", "red", "=", "color", "[", "1", ":", "3", "]", "green", "=", "color", "[", "3", ":", "5", "]", "blue", "=", "color", "[", "5", ":", "7", "]", "rgbtuple", "=", "(", "int", "(", "red", ",", "16", ")", ",", "int", "(", "green", ",", "16", ")", ",", "int", "(", "blue", ",", "16", ")", ")", "_namedict", "[", "color", "]", "=", "rgbtuple", "return", "rgbtuple" ]
converts a #rrggbb color string to an (r , g , b) integer triplet .
train
false
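A usage sketch, assuming the module-level _namedict cache and the BadColor exception from the same module are in scope:

rrggbb_to_triplet('#ff8000')  # -> (255, 128, 0), and the result is cached in _namedict
rrggbb_to_triplet('ff8000')   # raises BadColor because the leading '#' is missing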
54,628
def assign_role_for_exploration(committer_id, exploration_id, assignee_id, new_role): _assign_role(committer_id, assignee_id, new_role, exploration_id, feconf.ACTIVITY_TYPE_EXPLORATION) if (new_role in [ROLE_OWNER, ROLE_EDITOR]): subscription_services.subscribe_to_exploration(assignee_id, exploration_id)
[ "def", "assign_role_for_exploration", "(", "committer_id", ",", "exploration_id", ",", "assignee_id", ",", "new_role", ")", ":", "_assign_role", "(", "committer_id", ",", "assignee_id", ",", "new_role", ",", "exploration_id", ",", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ")", "if", "(", "new_role", "in", "[", "ROLE_OWNER", ",", "ROLE_EDITOR", "]", ")", ":", "subscription_services", ".", "subscribe_to_exploration", "(", "assignee_id", ",", "exploration_id", ")" ]
assigns the given role to assignee_id and subscribes the assignee to future exploration updates .
train
false
54,630
def perform_check(prerelease=current_version.is_prerelease): pypi = current_version try: pypi = available_on_pypi(prerelease) except Exception: log.warning('An issue occurred while checking PyPI') best = max(pypi, current_version) where = None command = None cache = cache_file() if cache: os.utime(cache, None) if (best == current_version): log.info(('You have the latest version of Pwntools (%s)' % best)) return command = ['pip', 'install', '-U'] if (best == pypi): where = 'pypi' pypi_package = package_name if best.is_prerelease: pypi_package += ('==%s' % best) command += [pypi_package] command_str = ' '.join(command) log.info((('A newer version of %s is available on %s (%s --> %s).\n' % (package_name, where, current_version, best)) + ('Update with: $ %s' % command_str))) return command
[ "def", "perform_check", "(", "prerelease", "=", "current_version", ".", "is_prerelease", ")", ":", "pypi", "=", "current_version", "try", ":", "pypi", "=", "available_on_pypi", "(", "prerelease", ")", "except", "Exception", ":", "log", ".", "warning", "(", "'An issue occurred while checking PyPI'", ")", "best", "=", "max", "(", "pypi", ",", "current_version", ")", "where", "=", "None", "command", "=", "None", "cache", "=", "cache_file", "(", ")", "if", "cache", ":", "os", ".", "utime", "(", "cache", ",", "None", ")", "if", "(", "best", "==", "current_version", ")", ":", "log", ".", "info", "(", "(", "'You have the latest version of Pwntools (%s)'", "%", "best", ")", ")", "return", "command", "=", "[", "'pip'", ",", "'install'", ",", "'-U'", "]", "if", "(", "best", "==", "pypi", ")", ":", "where", "=", "'pypi'", "pypi_package", "=", "package_name", "if", "best", ".", "is_prerelease", ":", "pypi_package", "+=", "(", "'==%s'", "%", "best", ")", "command", "+=", "[", "pypi_package", "]", "command_str", "=", "' '", ".", "join", "(", "command", ")", "log", ".", "info", "(", "(", "(", "'A newer version of %s is available on %s (%s --> %s).\\n'", "%", "(", "package_name", ",", "where", ",", "current_version", ",", "best", ")", ")", "+", "(", "'Update with: $ %s'", "%", "command_str", ")", ")", ")", "return", "command" ]
perform the update check .
train
false
54,631
def virtual_root(resource, request): try: reg = request.registry except AttributeError: reg = get_current_registry() url_adapter = reg.queryMultiAdapter((resource, request), IResourceURL) if (url_adapter is None): url_adapter = ResourceURL(resource, request) (vpath, rpath) = (url_adapter.virtual_path, url_adapter.physical_path) if ((rpath != vpath) and rpath.endswith(vpath)): vroot_path = rpath[:(- len(vpath))] return find_resource(resource, vroot_path) try: return request.root except AttributeError: return find_root(resource)
[ "def", "virtual_root", "(", "resource", ",", "request", ")", ":", "try", ":", "reg", "=", "request", ".", "registry", "except", "AttributeError", ":", "reg", "=", "get_current_registry", "(", ")", "url_adapter", "=", "reg", ".", "queryMultiAdapter", "(", "(", "resource", ",", "request", ")", ",", "IResourceURL", ")", "if", "(", "url_adapter", "is", "None", ")", ":", "url_adapter", "=", "ResourceURL", "(", "resource", ",", "request", ")", "(", "vpath", ",", "rpath", ")", "=", "(", "url_adapter", ".", "virtual_path", ",", "url_adapter", ".", "physical_path", ")", "if", "(", "(", "rpath", "!=", "vpath", ")", "and", "rpath", ".", "endswith", "(", "vpath", ")", ")", ":", "vroot_path", "=", "rpath", "[", ":", "(", "-", "len", "(", "vpath", ")", ")", "]", "return", "find_resource", "(", "resource", ",", "vroot_path", ")", "try", ":", "return", "request", ".", "root", "except", "AttributeError", ":", "return", "find_root", "(", "resource", ")" ]
provided any :term:resource and a :term:request object , return the virtual root of the resource tree .
train
false
54,632
def intTime(scale=1): return int((time.time() * scale))
[ "def", "intTime", "(", "scale", "=", "1", ")", ":", "return", "int", "(", "(", "time", ".", "time", "(", ")", "*", "scale", ")", ")" ]
the time in integer seconds .
train
false
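A brief sketch of the scale parameter; the timestamp in the comment is illustrative only.

intTime()      # e.g. 1499990400 -- the current time in whole seconds
intTime(1000)  # the same moment expressed as integer milliseconds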
54,633
def _make_compound_key(table, key): if (not isinstance(key, (list, tuple))): key = [key] return [table.columns[name] for name in key]
[ "def", "_make_compound_key", "(", "table", ",", "key", ")", ":", "if", "(", "not", "isinstance", "(", "key", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "key", "=", "[", "key", "]", "return", "[", "table", ".", "columns", "[", "name", "]", "for", "name", "in", "key", "]" ]
returns a list of columns from table for the given key , which may represent a compound key .
train
false
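A minimal SQLAlchemy sketch; the table definition here is hypothetical.

from sqlalchemy import Column, Integer, MetaData, Table

table = Table('point', MetaData(), Column('x', Integer), Column('y', Integer))
_make_compound_key(table, 'x')         # -> [table.c.x]  (a single key is wrapped in a list)
_make_compound_key(table, ['x', 'y'])  # -> [table.c.x, table.c.y]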
54,634
def DNSServiceResolve(flags=0, interfaceIndex=_NO_DEFAULT, name=_NO_DEFAULT, regtype=_NO_DEFAULT, domain=_NO_DEFAULT, callBack=None): _NO_DEFAULT.check(interfaceIndex) _NO_DEFAULT.check(name) _NO_DEFAULT.check(regtype) _NO_DEFAULT.check(domain) @_DNSServiceResolveReply def _callback(sdRef, flags, interfaceIndex, errorCode, fullname, hosttarget, port, txtLen, txtRecord, context): if (callBack is not None): port = socket.ntohs(port) txtRecord = _length_and_void_p_to_string(txtLen, txtRecord) callBack(sdRef, flags, interfaceIndex, errorCode, fullname.decode(), hosttarget.decode(), port, txtRecord) _global_lock.acquire() try: sdRef = _DNSServiceResolve(flags, interfaceIndex, name, regtype, domain, _callback, None) finally: _global_lock.release() sdRef._add_callback(_callback) return sdRef
[ "def", "DNSServiceResolve", "(", "flags", "=", "0", ",", "interfaceIndex", "=", "_NO_DEFAULT", ",", "name", "=", "_NO_DEFAULT", ",", "regtype", "=", "_NO_DEFAULT", ",", "domain", "=", "_NO_DEFAULT", ",", "callBack", "=", "None", ")", ":", "_NO_DEFAULT", ".", "check", "(", "interfaceIndex", ")", "_NO_DEFAULT", ".", "check", "(", "name", ")", "_NO_DEFAULT", ".", "check", "(", "regtype", ")", "_NO_DEFAULT", ".", "check", "(", "domain", ")", "@", "_DNSServiceResolveReply", "def", "_callback", "(", "sdRef", ",", "flags", ",", "interfaceIndex", ",", "errorCode", ",", "fullname", ",", "hosttarget", ",", "port", ",", "txtLen", ",", "txtRecord", ",", "context", ")", ":", "if", "(", "callBack", "is", "not", "None", ")", ":", "port", "=", "socket", ".", "ntohs", "(", "port", ")", "txtRecord", "=", "_length_and_void_p_to_string", "(", "txtLen", ",", "txtRecord", ")", "callBack", "(", "sdRef", ",", "flags", ",", "interfaceIndex", ",", "errorCode", ",", "fullname", ".", "decode", "(", ")", ",", "hosttarget", ".", "decode", "(", ")", ",", "port", ",", "txtRecord", ")", "_global_lock", ".", "acquire", "(", ")", "try", ":", "sdRef", "=", "_DNSServiceResolve", "(", "flags", ",", "interfaceIndex", ",", "name", ",", "regtype", ",", "domain", ",", "_callback", ",", "None", ")", "finally", ":", "_global_lock", ".", "release", "(", ")", "sdRef", ".", "_add_callback", "(", "_callback", ")", "return", "sdRef" ]
resolve a service name discovered via dnsservicebrowse() to a target host name .
train
false
54,636
def kernel_info(attrs=None, where=None): return _osquery_cmd(table='kernel_info', attrs=attrs, where=where)
[ "def", "kernel_info", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'kernel_info'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return kernel_info information from osquery .
train
false
54,639
def encode_quopri(msg): orig = msg.get_payload(decode=True) encdata = _qencode(orig) msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'quoted-printable'
[ "def", "encode_quopri", "(", "msg", ")", ":", "orig", "=", "msg", ".", "get_payload", "(", "decode", "=", "True", ")", "encdata", "=", "_qencode", "(", "orig", ")", "msg", ".", "set_payload", "(", "encdata", ")", "msg", "[", "'Content-Transfer-Encoding'", "]", "=", "'quoted-printable'" ]
encode the messages payload in quoted-printable .
train
false
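A Python 2-era sketch (matching the snippet's stdlib vintage) showing the header and payload rewrite; the encoded output in the comment is illustrative.

from email.message import Message

msg = Message()
msg.set_payload('a = b')
encode_quopri(msg)
print(msg['Content-Transfer-Encoding'])  # quoted-printable
print(msg.get_payload())                 # e.g. 'a=20=3D=20b' (spaces and '=' escaped)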
54,641
def get_value_from_user(message, default_value='', hidden=False): return _validate_user_input(InputDialog(message, default_value, is_truthy(hidden)))
[ "def", "get_value_from_user", "(", "message", ",", "default_value", "=", "''", ",", "hidden", "=", "False", ")", ":", "return", "_validate_user_input", "(", "InputDialog", "(", "message", ",", "default_value", ",", "is_truthy", "(", "hidden", ")", ")", ")" ]
pauses test execution and asks user to input a value .
train
false
54,642
def is_private_subnet(ip): priv_subnets = [{'subnet': '10.0.0.0', 'mask': '255.0.0.0'}, {'subnet': '172.16.0.0', 'mask': '255.240.0.0'}, {'subnet': '192.168.0.0', 'mask': '255.255.0.0'}] ip = struct.unpack('I', socket.inet_aton(ip))[0] for network in priv_subnets: subnet = struct.unpack('I', socket.inet_aton(network['subnet']))[0] mask = struct.unpack('I', socket.inet_aton(network['mask']))[0] if ((ip & mask) == (subnet & mask)): return True return False
[ "def", "is_private_subnet", "(", "ip", ")", ":", "priv_subnets", "=", "[", "{", "'subnet'", ":", "'10.0.0.0'", ",", "'mask'", ":", "'255.0.0.0'", "}", ",", "{", "'subnet'", ":", "'172.16.0.0'", ",", "'mask'", ":", "'255.240.0.0'", "}", ",", "{", "'subnet'", ":", "'192.168.0.0'", ",", "'mask'", ":", "'255.255.0.0'", "}", "]", "ip", "=", "struct", ".", "unpack", "(", "'I'", ",", "socket", ".", "inet_aton", "(", "ip", ")", ")", "[", "0", "]", "for", "network", "in", "priv_subnets", ":", "subnet", "=", "struct", ".", "unpack", "(", "'I'", ",", "socket", ".", "inet_aton", "(", "network", "[", "'subnet'", "]", ")", ")", "[", "0", "]", "mask", "=", "struct", ".", "unpack", "(", "'I'", ",", "socket", ".", "inet_aton", "(", "network", "[", "'mask'", "]", ")", ")", "[", "0", "]", "if", "(", "(", "ip", "&", "mask", ")", "==", "(", "subnet", "&", "mask", ")", ")", ":", "return", "True", "return", "False" ]
utility function to check if an ip address is inside a private subnet .
train
false
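Assuming struct and socket are imported at module level, the function is self-contained:

is_private_subnet('192.168.1.10')  # True  (inside 192.168.0.0/16)
is_private_subnet('172.31.0.1')    # True  (inside 172.16.0.0/12)
is_private_subnet('8.8.8.8')       # False (public address)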
54,643
def emit_certificate_event(event_name, user, course_id, course=None, event_data=None): event_name = '.'.join(['edx', 'certificate', event_name]) if (course is None): course = modulestore().get_course(course_id, depth=0) context = {'org_id': course.org, 'course_id': unicode(course_id)} data = {'user_id': user.id, 'course_id': unicode(course_id), 'certificate_url': get_certificate_url(user.id, course_id)} event_data = (event_data or {}) event_data.update(data) with tracker.get_tracker().context(event_name, context): tracker.emit(event_name, event_data)
[ "def", "emit_certificate_event", "(", "event_name", ",", "user", ",", "course_id", ",", "course", "=", "None", ",", "event_data", "=", "None", ")", ":", "event_name", "=", "'.'", ".", "join", "(", "[", "'edx'", ",", "'certificate'", ",", "event_name", "]", ")", "if", "(", "course", "is", "None", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_id", ",", "depth", "=", "0", ")", "context", "=", "{", "'org_id'", ":", "course", ".", "org", ",", "'course_id'", ":", "unicode", "(", "course_id", ")", "}", "data", "=", "{", "'user_id'", ":", "user", ".", "id", ",", "'course_id'", ":", "unicode", "(", "course_id", ")", ",", "'certificate_url'", ":", "get_certificate_url", "(", "user", ".", "id", ",", "course_id", ")", "}", "event_data", "=", "(", "event_data", "or", "{", "}", ")", "event_data", ".", "update", "(", "data", ")", "with", "tracker", ".", "get_tracker", "(", ")", ".", "context", "(", "event_name", ",", "context", ")", ":", "tracker", ".", "emit", "(", "event_name", ",", "event_data", ")" ]
emits a certificate tracking event with course and user context .
train
false
54,644
def get_nexusport_binding(port_id, vlan_id, switch_ip, instance_id): LOG.debug(_('get_nexusport_binding() called')) session = db.get_session() try: binding = session.query(nexus_models_v2.NexusPortBinding).filter_by(vlan_id=vlan_id).filter_by(switch_ip=switch_ip).filter_by(port_id=port_id).filter_by(instance_id=instance_id).all() return binding except exc.NoResultFound: raise c_exc.NexusPortBindingNotFound(vlan_id=vlan_id)
[ "def", "get_nexusport_binding", "(", "port_id", ",", "vlan_id", ",", "switch_ip", ",", "instance_id", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'get_nexusport_binding() called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "binding", "=", "session", ".", "query", "(", "nexus_models_v2", ".", "NexusPortBinding", ")", ".", "filter_by", "(", "vlan_id", "=", "vlan_id", ")", ".", "filter_by", "(", "switch_ip", "=", "switch_ip", ")", ".", "filter_by", "(", "port_id", "=", "port_id", ")", ".", "filter_by", "(", "instance_id", "=", "instance_id", ")", ".", "all", "(", ")", "return", "binding", "except", "exc", ".", "NoResultFound", ":", "raise", "c_exc", ".", "NexusPortBindingNotFound", "(", "vlan_id", "=", "vlan_id", ")" ]
lists nexusport bindings matching the given port , vlan , switch and instance .
train
false
54,645
def filter_user(user_ref): if user_ref: user_ref = user_ref.copy() user_ref.pop('password', None) user_ref.pop('tenants', None) user_ref.pop('groups', None) user_ref.pop('domains', None) try: user_ref['extra'].pop('password', None) user_ref['extra'].pop('tenants', None) except KeyError: pass if ('password_expires_at' not in user_ref): user_ref['password_expires_at'] = None return user_ref
[ "def", "filter_user", "(", "user_ref", ")", ":", "if", "user_ref", ":", "user_ref", "=", "user_ref", ".", "copy", "(", ")", "user_ref", ".", "pop", "(", "'password'", ",", "None", ")", "user_ref", ".", "pop", "(", "'tenants'", ",", "None", ")", "user_ref", ".", "pop", "(", "'groups'", ",", "None", ")", "user_ref", ".", "pop", "(", "'domains'", ",", "None", ")", "try", ":", "user_ref", "[", "'extra'", "]", ".", "pop", "(", "'password'", ",", "None", ")", "user_ref", "[", "'extra'", "]", ".", "pop", "(", "'tenants'", ",", "None", ")", "except", "KeyError", ":", "pass", "if", "(", "'password_expires_at'", "not", "in", "user_ref", ")", ":", "user_ref", "[", "'password_expires_at'", "]", "=", "None", "return", "user_ref" ]
filter out private items in a user dict .
train
false
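A usage sketch; note that the copy is shallow, so the nested 'extra' dict of the input is modified in place.

user = {'id': 'u1', 'name': 'alice', 'password': 'hunter2',
        'extra': {'password': 'hunter2', 'email': 'a@example.com'}}
safe = filter_user(user)
# safe drops 'password' and gains 'password_expires_at': None; the shared
# 'extra' dict also loses its 'password' key in the original user dict.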
54,646
def BdbQuit_excepthook(et, ev, tb, excepthook=None): warnings.warn('`BdbQuit_excepthook` is deprecated since version 5.1', DeprecationWarning, stacklevel=2) if (et == bdb.BdbQuit): print 'Exiting Debugger.' elif (excepthook is not None): excepthook(et, ev, tb) else: BdbQuit_excepthook.excepthook_ori(et, ev, tb)
[ "def", "BdbQuit_excepthook", "(", "et", ",", "ev", ",", "tb", ",", "excepthook", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'`BdbQuit_excepthook` is deprecated since version 5.1'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "(", "et", "==", "bdb", ".", "BdbQuit", ")", ":", "print", "'Exiting Debugger.'", "elif", "(", "excepthook", "is", "not", "None", ")", ":", "excepthook", "(", "et", ",", "ev", ",", "tb", ")", "else", ":", "BdbQuit_excepthook", ".", "excepthook_ori", "(", "et", ",", "ev", ",", "tb", ")" ]
exception hook which handles bdbquit exceptions .
train
false
54,647
def social_auth_login_redirect(request): redirect_value = request.REQUEST.get(REDIRECT_FIELD_NAME) if redirect_value: redirect_querystring = ((REDIRECT_FIELD_NAME + '=') + redirect_value) else: redirect_querystring = '' return {'REDIRECT_FIELD_NAME': REDIRECT_FIELD_NAME, 'REDIRECT_FIELD_VALUE': redirect_value, 'redirect_querystring': redirect_querystring}
[ "def", "social_auth_login_redirect", "(", "request", ")", ":", "redirect_value", "=", "request", ".", "REQUEST", ".", "get", "(", "REDIRECT_FIELD_NAME", ")", "if", "redirect_value", ":", "redirect_querystring", "=", "(", "(", "REDIRECT_FIELD_NAME", "+", "'='", ")", "+", "redirect_value", ")", "else", ":", "redirect_querystring", "=", "''", "return", "{", "'REDIRECT_FIELD_NAME'", ":", "REDIRECT_FIELD_NAME", ",", "'REDIRECT_FIELD_VALUE'", ":", "redirect_value", ",", "'redirect_querystring'", ":", "redirect_querystring", "}" ]
loads the current redirect value into the template context .
train
false
54,648
def get_user_model(): try: model = get_model(AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME) except LookupError: raise ImproperlyConfigured(("AUTH_USER_MODEL refers to model '%s' that has not been installed" % settings.AUTH_USER_MODEL)) core_fields = set([f.name for f in User._meta.fields]) model_fields = set([f.name for f in model._meta.fields]) new_fields = model_fields.difference(core_fields) model._meta.has_additional_fields = (len(new_fields) > 0) model._meta.additional_fields = new_fields return model
[ "def", "get_user_model", "(", ")", ":", "try", ":", "model", "=", "get_model", "(", "AUTH_USER_APP_LABEL", ",", "AUTH_USER_MODEL_NAME", ")", "except", "LookupError", ":", "raise", "ImproperlyConfigured", "(", "(", "\"AUTH_USER_MODEL refers to model '%s' that has not been installed\"", "%", "settings", ".", "AUTH_USER_MODEL", ")", ")", "core_fields", "=", "set", "(", "[", "f", ".", "name", "for", "f", "in", "User", ".", "_meta", ".", "fields", "]", ")", "model_fields", "=", "set", "(", "[", "f", ".", "name", "for", "f", "in", "model", ".", "_meta", ".", "fields", "]", ")", "new_fields", "=", "model_fields", ".", "difference", "(", "core_fields", ")", "model", ".", "_meta", ".", "has_additional_fields", "=", "(", "len", "(", "new_fields", ")", ">", "0", ")", "model", ".", "_meta", ".", "additional_fields", "=", "new_fields", "return", "model" ]
returns the user model that is active in this project .
train
false
54,649
def remove_useless_meta(meta): if meta: for name in USELESS_META: if (name in meta): del meta[name] return meta
[ "def", "remove_useless_meta", "(", "meta", ")", ":", "if", "meta", ":", "for", "name", "in", "USELESS_META", ":", "if", "(", "name", "in", "meta", ")", ":", "del", "meta", "[", "name", "]", "return", "meta" ]
removes useless attributes from the table's meta .
train
false
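A sketch with a hypothetical USELESS_META value; the real constant lives in the same module.

USELESS_META = ('connection', 'cursor')  # assumed value for illustration
meta = {'db_table': 'books', 'connection': object(), 'cursor': object()}
remove_useless_meta(meta)  # -> {'db_table': 'books'}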
54,650
def test_scenario_may_own_outlines(): scenario = Scenario.from_string(OUTLINED_SCENARIO) assert_equals(len(scenario.steps), 4) expected_sentences = ['Given I have entered <input_1> into the calculator', 'And I have entered <input_2> into the calculator', 'When I press <button>', 'Then the result should be <output> on the screen'] for (step, expected_sentence) in zip(scenario.steps, expected_sentences): assert_equals(type(step), Step) assert_equals(step.sentence, expected_sentence) assert_equals(scenario.name, 'Add two numbers') assert_equals(scenario.outlines, [{'input_1': '20', 'input_2': '30', 'button': 'add', 'output': '50'}, {'input_1': '2', 'input_2': '5', 'button': 'add', 'output': '7'}, {'input_1': '0', 'input_2': '40', 'button': 'add', 'output': '40'}])
[ "def", "test_scenario_may_own_outlines", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "OUTLINED_SCENARIO", ")", "assert_equals", "(", "len", "(", "scenario", ".", "steps", ")", ",", "4", ")", "expected_sentences", "=", "[", "'Given I have entered <input_1> into the calculator'", ",", "'And I have entered <input_2> into the calculator'", ",", "'When I press <button>'", ",", "'Then the result should be <output> on the screen'", "]", "for", "(", "step", ",", "expected_sentence", ")", "in", "zip", "(", "scenario", ".", "steps", ",", "expected_sentences", ")", ":", "assert_equals", "(", "type", "(", "step", ")", ",", "Step", ")", "assert_equals", "(", "step", ".", "sentence", ",", "expected_sentence", ")", "assert_equals", "(", "scenario", ".", "name", ",", "'Add two numbers'", ")", "assert_equals", "(", "scenario", ".", "outlines", ",", "[", "{", "'input_1'", ":", "'20'", ",", "'input_2'", ":", "'30'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'50'", "}", ",", "{", "'input_1'", ":", "'2'", ",", "'input_2'", ":", "'5'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'7'", "}", ",", "{", "'input_1'", ":", "'0'", ",", "'input_2'", ":", "'40'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'40'", "}", "]", ")" ]
a scenario may own outlines .
train
false
54,651
def xml_decode(string): string = string.replace('&amp;', '&') string = string.replace('&lt;', '<') string = string.replace('&gt;', '>') string = string.replace('&quot;', '"') string = string.replace('/', SLASH) return string
[ "def", "xml_decode", "(", "string", ")", ":", "string", "=", "string", ".", "replace", "(", "'&amp;'", ",", "'&'", ")", "string", "=", "string", ".", "replace", "(", "'&lt;'", ",", "'<'", ")", "string", "=", "string", ".", "replace", "(", "'&gt;'", ",", "'>'", ")", "string", "=", "string", ".", "replace", "(", "'&quot;'", ",", "'\"'", ")", "string", "=", "string", ".", "replace", "(", "'/'", ",", "SLASH", ")", "return", "string" ]
returns the string with special characters decoded .
train
true
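A sketch assuming SLASH is the module's slash replacement constant; '%2F' is a guessed value.

SLASH = '%2F'  # assumed; the real constant is defined alongside the function
xml_decode('&lt;a href=&quot;x&quot;&gt; &amp; more')  # -> '<a href="x"> & more'
xml_decode('a/b')                                      # -> 'a%2Fb' with this SLASH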
54,652
def name_for_collection_relationship(base, local_cls, referred_cls, constraint): return (referred_cls.__name__.lower() + '_collection')
[ "def", "name_for_collection_relationship", "(", "base", ",", "local_cls", ",", "referred_cls", ",", "constraint", ")", ":", "return", "(", "referred_cls", ".", "__name__", ".", "lower", "(", ")", "+", "'_collection'", ")" ]
return the attribute name that should be used to refer from one class to another .
train
false
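This reproduces SQLAlchemy automap's documented default collection naming, so it mainly serves as a template for customization; a sketch of wiring it in, with a placeholder database URL:

from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base

Base = automap_base()
engine = create_engine('sqlite:///example.db')  # placeholder URL
Base.prepare(engine, reflect=True,
             name_for_collection_relationship=name_for_collection_relationship)
# A mapped Parent/Child pair now exposes Parent.child_collection.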
54,653
def demo_str_rule_format(): postag(ruleformat='str')
[ "def", "demo_str_rule_format", "(", ")", ":", "postag", "(", "ruleformat", "=", "'str'", ")" ]
exemplify the str rule format (see also the repr and verbose rule formats) .
train
false
54,655
def test_unicode_column(tmpdir): t = Table([np.array([u'a', u'b', u'cd'])]) t.write(str(tmpdir.join('test.fits')), overwrite=True) with fits.open(str(tmpdir.join('test.fits'))) as hdul: assert np.all((hdul[1].data['col0'] == ['a', 'b', 'cd'])) assert (hdul[1].header['TFORM1'] == '2A') t2 = Table([np.array([u'\u2603'])]) with pytest.raises(UnicodeEncodeError): t2.write(str(tmpdir.join('test.fits')), overwrite=True)
[ "def", "test_unicode_column", "(", "tmpdir", ")", ":", "t", "=", "Table", "(", "[", "np", ".", "array", "(", "[", "u'a'", ",", "u'b'", ",", "u'cd'", "]", ")", "]", ")", "t", ".", "write", "(", "str", "(", "tmpdir", ".", "join", "(", "'test.fits'", ")", ")", ",", "overwrite", "=", "True", ")", "with", "fits", ".", "open", "(", "str", "(", "tmpdir", ".", "join", "(", "'test.fits'", ")", ")", ")", "as", "hdul", ":", "assert", "np", ".", "all", "(", "(", "hdul", "[", "1", "]", ".", "data", "[", "'col0'", "]", "==", "[", "'a'", ",", "'b'", ",", "'cd'", "]", ")", ")", "assert", "(", "hdul", "[", "1", "]", ".", "header", "[", "'TFORM1'", "]", "==", "'2A'", ")", "t2", "=", "Table", "(", "[", "np", ".", "array", "(", "[", "u'\\u2603'", "]", ")", "]", ")", "with", "pytest", ".", "raises", "(", "UnicodeEncodeError", ")", ":", "t2", ".", "write", "(", "str", "(", "tmpdir", ".", "join", "(", "'test.fits'", ")", ")", ",", "overwrite", "=", "True", ")" ]
test that a column of unicode strings is still written as one byte-per-character in the fits table .
train
false
54,656
@skip_if_not_win32 @with_environment def test_get_home_dir_1(): unfrozen = path.get_home_dir() sys.frozen = True IPython.__file__ = abspath(join(HOME_TEST_DIR, 'Lib/IPython/__init__.py')) home_dir = path.get_home_dir() nt.assert_equal(home_dir, unfrozen)
[ "@", "skip_if_not_win32", "@", "with_environment", "def", "test_get_home_dir_1", "(", ")", ":", "unfrozen", "=", "path", ".", "get_home_dir", "(", ")", "sys", ".", "frozen", "=", "True", "IPython", ".", "__file__", "=", "abspath", "(", "join", "(", "HOME_TEST_DIR", ",", "'Lib/IPython/__init__.py'", ")", ")", "home_dir", "=", "path", ".", "get_home_dir", "(", ")", "nt", ".", "assert_equal", "(", "home_dir", ",", "unfrozen", ")" ]
testcase for py2exe logic .
train
false
54,657
def pct_to_int(value, num_items, min_value=1): if (isinstance(value, string_types) and value.endswith('%')): value_pct = int(value.replace('%', '')) return (int(((value_pct / 100.0) * num_items)) or min_value) else: return int(value)
[ "def", "pct_to_int", "(", "value", ",", "num_items", ",", "min_value", "=", "1", ")", ":", "if", "(", "isinstance", "(", "value", ",", "string_types", ")", "and", "value", ".", "endswith", "(", "'%'", ")", ")", ":", "value_pct", "=", "int", "(", "value", ".", "replace", "(", "'%'", ",", "''", ")", ")", "return", "(", "int", "(", "(", "(", "value_pct", "/", "100.0", ")", "*", "num_items", ")", ")", "or", "min_value", ")", "else", ":", "return", "int", "(", "value", ")" ]
converts a given value to an int , treating a value specified as "x%" as a percentage of num_items .
train
false
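string_types comes from a compatibility helper (six or similar); otherwise these examples are self-contained.

pct_to_int('30%', 10)  # -> 3  (30% of 10 items)
pct_to_int('1%', 10)   # -> 1  (0.1 truncates to 0, so min_value takes over)
pct_to_int(5, 10)      # -> 5  (non-percentage values pass through int())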
54,659
def validate_positive_integer_or_none(option, value): if (value is None): return value return validate_positive_integer(option, value)
[ "def", "validate_positive_integer_or_none", "(", "option", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "value", "return", "validate_positive_integer", "(", "option", ",", "value", ")" ]
validate that value is a positive integer or none .
train
false
54,660
def test_example2(): vert_template = Function('\n void main(void)\n {\n gl_Position = $position;\n }\n ') transformScale = Function('\n vec4 transform_scale(vec4 pos)\n {\n pos.xyz *= $scale;\n return pos;\n }\n ') class Transform(object, ): def __init__(self): self.func = Function(transformScale) self.func['scale'] = 'uniform float' def set_scale(self, scale): self.func['scale'].value = scale transforms = [Transform(), Transform(), Transform()] code = Function(vert_template) ob = Variable('attribute vec3 a_position') for trans in transforms: ob = trans.func(ob) code['position'] = ob print code
[ "def", "test_example2", "(", ")", ":", "vert_template", "=", "Function", "(", "'\\n void main(void)\\n {\\n gl_Position = $position;\\n }\\n '", ")", "transformScale", "=", "Function", "(", "'\\n vec4 transform_scale(vec4 pos)\\n {\\n pos.xyz *= $scale;\\n return pos;\\n }\\n '", ")", "class", "Transform", "(", "object", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "func", "=", "Function", "(", "transformScale", ")", "self", ".", "func", "[", "'scale'", "]", "=", "'uniform float'", "def", "set_scale", "(", "self", ",", "scale", ")", ":", "self", ".", "func", "[", "'scale'", "]", ".", "value", "=", "scale", "transforms", "=", "[", "Transform", "(", ")", ",", "Transform", "(", ")", ",", "Transform", "(", ")", "]", "code", "=", "Function", "(", "vert_template", ")", "ob", "=", "Variable", "(", "'attribute vec3 a_position'", ")", "for", "trans", "in", "transforms", ":", "ob", "=", "trans", ".", "func", "(", "ob", ")", "code", "[", "'position'", "]", "=", "ob", "print", "code" ]
demonstrate how a transform would work .
train
false
54,662
def getEvaluatedIntDefault(defaultInt, key, xmlElement=None): evaluatedInt = getEvaluatedInt(key, xmlElement) if (evaluatedInt == None): return defaultInt return evaluatedInt
[ "def", "getEvaluatedIntDefault", "(", "defaultInt", ",", "key", ",", "xmlElement", "=", "None", ")", ":", "evaluatedInt", "=", "getEvaluatedInt", "(", "key", ",", "xmlElement", ")", "if", "(", "evaluatedInt", "==", "None", ")", ":", "return", "defaultInt", "return", "evaluatedInt" ]
get the evaluated value as an int .
train
false
54,663
def _ToBlobstoreError(error): error_map = {blobstore_service_pb.BlobstoreServiceError.INTERNAL_ERROR: InternalError, blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND: BlobNotFoundError, blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE: DataIndexOutOfRangeError, blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE: BlobFetchSizeTooLargeError, blobstore_service_pb.BlobstoreServiceError.PERMISSION_DENIED: PermissionDeniedError} desired_exc = error_map.get(error.application_error) return (desired_exc(error.error_detail) if desired_exc else error)
[ "def", "_ToBlobstoreError", "(", "error", ")", ":", "error_map", "=", "{", "blobstore_service_pb", ".", "BlobstoreServiceError", ".", "INTERNAL_ERROR", ":", "InternalError", ",", "blobstore_service_pb", ".", "BlobstoreServiceError", ".", "BLOB_NOT_FOUND", ":", "BlobNotFoundError", ",", "blobstore_service_pb", ".", "BlobstoreServiceError", ".", "DATA_INDEX_OUT_OF_RANGE", ":", "DataIndexOutOfRangeError", ",", "blobstore_service_pb", ".", "BlobstoreServiceError", ".", "BLOB_FETCH_SIZE_TOO_LARGE", ":", "BlobFetchSizeTooLargeError", ",", "blobstore_service_pb", ".", "BlobstoreServiceError", ".", "PERMISSION_DENIED", ":", "PermissionDeniedError", "}", "desired_exc", "=", "error_map", ".", "get", "(", "error", ".", "application_error", ")", "return", "(", "desired_exc", "(", "error", ".", "error_detail", ")", "if", "desired_exc", "else", "error", ")" ]
translate an application error to a blobstore error .
train
false
54,664
def csolve_prime(f, p, e=1): from sympy.polys.domains import ZZ X1 = [i for i in range(p) if (gf_eval(f, i, p, ZZ) == 0)] if (e == 1): return X1 X = [] S = list(zip(X1, ([1] * len(X1)))) while S: (x, s) = S.pop() if (s == e): X.append(x) else: s1 = (s + 1) ps = (p ** s) S.extend([((x + (v * ps)), s1) for v in _raise_mod_power(x, s, p, f)]) return sorted(X)
[ "def", "csolve_prime", "(", "f", ",", "p", ",", "e", "=", "1", ")", ":", "from", "sympy", ".", "polys", ".", "domains", "import", "ZZ", "X1", "=", "[", "i", "for", "i", "in", "range", "(", "p", ")", "if", "(", "gf_eval", "(", "f", ",", "i", ",", "p", ",", "ZZ", ")", "==", "0", ")", "]", "if", "(", "e", "==", "1", ")", ":", "return", "X1", "X", "=", "[", "]", "S", "=", "list", "(", "zip", "(", "X1", ",", "(", "[", "1", "]", "*", "len", "(", "X1", ")", ")", ")", ")", "while", "S", ":", "(", "x", ",", "s", ")", "=", "S", ".", "pop", "(", ")", "if", "(", "s", "==", "e", ")", ":", "X", ".", "append", "(", "x", ")", "else", ":", "s1", "=", "(", "s", "+", "1", ")", "ps", "=", "(", "p", "**", "s", ")", "S", ".", "extend", "(", "[", "(", "(", "x", "+", "(", "v", "*", "ps", ")", ")", ",", "s1", ")", "for", "v", "in", "_raise_mod_power", "(", "x", ",", "s", ",", "p", ",", "f", ")", "]", ")", "return", "sorted", "(", "X", ")" ]
solutions of f(x) congruent to 0 mod(p**e) .
train
false
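A sketch assuming the companion helpers gf_eval and _raise_mod_power from the same SymPy module are available; f is a dense coefficient list, highest degree first.

f = [1, 0, 1]          # represents x**2 + 1
csolve_prime(f, 5)     # -> [2, 3]   since 2**2 + 1 and 3**2 + 1 are divisible by 5
csolve_prime(f, 5, 2)  # -> [7, 18]  the roots lifted to solutions mod 5**2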
54,665
def _get_nets(vif, subnet, version, net_num, link_id): if (subnet.get_meta('dhcp_server') is not None): net_info = {'id': ('network%d' % net_num), 'type': ('ipv%d_dhcp' % version), 'link': link_id, 'network_id': vif['network']['id']} return net_info ip = subnet['ips'][0] address = ip['address'] if (version == 4): netmask = model.get_netmask(ip, subnet) elif (version == 6): netmask = str(subnet.as_netaddr().netmask) net_info = {'id': ('network%d' % net_num), 'type': ('ipv%d' % version), 'link': link_id, 'ip_address': address, 'netmask': netmask, 'routes': _get_default_route(version, subnet), 'network_id': vif['network']['id']} for route in subnet['routes']: route_addr = netaddr.IPNetwork(route['cidr']) new_route = {'network': str(route_addr.network), 'netmask': str(route_addr.netmask), 'gateway': route['gateway']['address']} net_info['routes'].append(new_route) return net_info
[ "def", "_get_nets", "(", "vif", ",", "subnet", ",", "version", ",", "net_num", ",", "link_id", ")", ":", "if", "(", "subnet", ".", "get_meta", "(", "'dhcp_server'", ")", "is", "not", "None", ")", ":", "net_info", "=", "{", "'id'", ":", "(", "'network%d'", "%", "net_num", ")", ",", "'type'", ":", "(", "'ipv%d_dhcp'", "%", "version", ")", ",", "'link'", ":", "link_id", ",", "'network_id'", ":", "vif", "[", "'network'", "]", "[", "'id'", "]", "}", "return", "net_info", "ip", "=", "subnet", "[", "'ips'", "]", "[", "0", "]", "address", "=", "ip", "[", "'address'", "]", "if", "(", "version", "==", "4", ")", ":", "netmask", "=", "model", ".", "get_netmask", "(", "ip", ",", "subnet", ")", "elif", "(", "version", "==", "6", ")", ":", "netmask", "=", "str", "(", "subnet", ".", "as_netaddr", "(", ")", ".", "netmask", ")", "net_info", "=", "{", "'id'", ":", "(", "'network%d'", "%", "net_num", ")", ",", "'type'", ":", "(", "'ipv%d'", "%", "version", ")", ",", "'link'", ":", "link_id", ",", "'ip_address'", ":", "address", ",", "'netmask'", ":", "netmask", ",", "'routes'", ":", "_get_default_route", "(", "version", ",", "subnet", ")", ",", "'network_id'", ":", "vif", "[", "'network'", "]", "[", "'id'", "]", "}", "for", "route", "in", "subnet", "[", "'routes'", "]", ":", "route_addr", "=", "netaddr", ".", "IPNetwork", "(", "route", "[", "'cidr'", "]", ")", "new_route", "=", "{", "'network'", ":", "str", "(", "route_addr", ".", "network", ")", ",", "'netmask'", ":", "str", "(", "route_addr", ".", "netmask", ")", ",", "'gateway'", ":", "route", "[", "'gateway'", "]", "[", "'address'", "]", "}", "net_info", "[", "'routes'", "]", ".", "append", "(", "new_route", ")", "return", "net_info" ]
get networks for the given vif and subnet .
train
false
54,667
def logger(_modem, message_, type_): pass
[ "def", "logger", "(", "_modem", ",", "message_", ",", "type_", ")", ":", "pass" ]
suppress all output from pyserial and gsmmodem .
train
false
54,668
def read_double(fid): return _unpack_simple(fid, '>f8', np.float64)
[ "def", "read_double", "(", "fid", ")", ":", "return", "_unpack_simple", "(", "fid", ",", "'>f8'", ",", "np", ".", "float64", ")" ]
read a 64-bit float from a bti file .
train
false
54,669
def getToothProfileCylinder(derivation, pitchRadius, teeth): toothProfileHalfCylinder = getToothProfileHalfCylinder(derivation, pitchRadius) toothProfileHalfCylinder = getThicknessMultipliedPath(toothProfileHalfCylinder, derivation.toothThicknessMultiplier) toothProfileHalf = [] innerRadius = (pitchRadius - derivation.dedendum) for point in toothProfileHalfCylinder: if (abs(point) >= innerRadius): toothProfileHalf.append(point) return getToothProfileCylinderByProfile(derivation, pitchRadius, teeth, toothProfileHalf)
[ "def", "getToothProfileCylinder", "(", "derivation", ",", "pitchRadius", ",", "teeth", ")", ":", "toothProfileHalfCylinder", "=", "getToothProfileHalfCylinder", "(", "derivation", ",", "pitchRadius", ")", "toothProfileHalfCylinder", "=", "getThicknessMultipliedPath", "(", "toothProfileHalfCylinder", ",", "derivation", ".", "toothThicknessMultiplier", ")", "toothProfileHalf", "=", "[", "]", "innerRadius", "=", "(", "pitchRadius", "-", "derivation", ".", "dedendum", ")", "for", "point", "in", "toothProfileHalfCylinder", ":", "if", "(", "abs", "(", "point", ")", ">=", "innerRadius", ")", ":", "toothProfileHalf", ".", "append", "(", "point", ")", "return", "getToothProfileCylinderByProfile", "(", "derivation", ",", "pitchRadius", ",", "teeth", ",", "toothProfileHalf", ")" ]
get profile for one tooth of a cylindrical gear .
train
false
54,670
def get_receptive_field(layers, img_size): receptive_field = np.zeros((len(layers), 2)) conv_mode = True first_conv_layer = True expon = np.ones((1, 2)) for (i, layer) in enumerate(layers[1:]): j = (i + 1) if (not conv_mode): receptive_field[j] = img_size continue if is_conv2d(layer): if (not first_conv_layer): last_field = receptive_field[i] new_field = (last_field + (expon * (np.array(layer.filter_size) - 1))) receptive_field[j] = new_field else: receptive_field[j] = layer.filter_size first_conv_layer = False elif is_maxpool2d(layer): receptive_field[j] = receptive_field[i] expon *= np.array(layer.pool_size) else: conv_mode = False receptive_field[j] = img_size receptive_field[0] = img_size return receptive_field
[ "def", "get_receptive_field", "(", "layers", ",", "img_size", ")", ":", "receptive_field", "=", "np", ".", "zeros", "(", "(", "len", "(", "layers", ")", ",", "2", ")", ")", "conv_mode", "=", "True", "first_conv_layer", "=", "True", "expon", "=", "np", ".", "ones", "(", "(", "1", ",", "2", ")", ")", "for", "(", "i", ",", "layer", ")", "in", "enumerate", "(", "layers", "[", "1", ":", "]", ")", ":", "j", "=", "(", "i", "+", "1", ")", "if", "(", "not", "conv_mode", ")", ":", "receptive_field", "[", "j", "]", "=", "img_size", "continue", "if", "is_conv2d", "(", "layer", ")", ":", "if", "(", "not", "first_conv_layer", ")", ":", "last_field", "=", "receptive_field", "[", "i", "]", "new_field", "=", "(", "last_field", "+", "(", "expon", "*", "(", "np", ".", "array", "(", "layer", ".", "filter_size", ")", "-", "1", ")", ")", ")", "receptive_field", "[", "j", "]", "=", "new_field", "else", ":", "receptive_field", "[", "j", "]", "=", "layer", ".", "filter_size", "first_conv_layer", "=", "False", "elif", "is_maxpool2d", "(", "layer", ")", ":", "receptive_field", "[", "j", "]", "=", "receptive_field", "[", "i", "]", "expon", "*=", "np", ".", "array", "(", "layer", ".", "pool_size", ")", "else", ":", "conv_mode", "=", "False", "receptive_field", "[", "j", "]", "=", "img_size", "receptive_field", "[", "0", "]", "=", "img_size", "return", "receptive_field" ]
get the receptive field size of each layer involved in convolution .
train
true
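A rough sketch only: the stand-in layer classes below are hypothetical, and the real is_conv2d/is_maxpool2d helpers dispatch on the actual framework's layer types, so the call is left commented.

import numpy as np

class InputLayer(object): pass          # hypothetical stand-ins for framework layers
class ConvLayer(object): filter_size = (3, 3)
class PoolLayer(object): pool_size = (2, 2)

layers = [InputLayer(), ConvLayer(), PoolLayer(), ConvLayer()]
# get_receptive_field(layers, img_size=np.array([[28, 28]]))
# -> [[28 28], [3 3], [3 3], [7 7]]: after a 2x2 pool, the second 3x3 conv
#    covers 3 + 2 * (3 - 1) = 7 input pixels per side.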
54,671
def clear_all_actions(): global _populated _all_actions.clear() _top_level_ids.clear() _populated = False
[ "def", "clear_all_actions", "(", ")", ":", "global", "_populated", "_all_actions", ".", "clear", "(", ")", "_top_level_ids", ".", "clear", "(", ")", "_populated", "=", "False" ]
clear all registered actions .
train
false
54,672
def get_user_unique_id_and_display_name(request, mapped_properties): user = mapped_properties['user'] user_id = user.get('id') user_name = (user.get('name') or request.remote_user) if (not any([user_id, user_name])): msg = _('Could not map user while setting ephemeral user identity. Either mapping rules must specify user id/name or REMOTE_USER environment variable must be set.') raise exception.Unauthorized(msg) elif (not user_name): user['name'] = user_id elif (not user_id): user_id = user_name user['id'] = parse.quote(user_id) return (user['id'], user['name'])
[ "def", "get_user_unique_id_and_display_name", "(", "request", ",", "mapped_properties", ")", ":", "user", "=", "mapped_properties", "[", "'user'", "]", "user_id", "=", "user", ".", "get", "(", "'id'", ")", "user_name", "=", "(", "user", ".", "get", "(", "'name'", ")", "or", "request", ".", "remote_user", ")", "if", "(", "not", "any", "(", "[", "user_id", ",", "user_name", "]", ")", ")", ":", "msg", "=", "_", "(", "'Could not map user while setting ephemeral user identity. Either mapping rules must specify user id/name or REMOTE_USER environment variable must be set.'", ")", "raise", "exception", ".", "Unauthorized", "(", "msg", ")", "elif", "(", "not", "user_name", ")", ":", "user", "[", "'name'", "]", "=", "user_id", "elif", "(", "not", "user_id", ")", ":", "user_id", "=", "user_name", "user", "[", "'id'", "]", "=", "parse", ".", "quote", "(", "user_id", ")", "return", "(", "user", "[", "'id'", "]", ",", "user", "[", "'name'", "]", ")" ]
set up the federated user's unique id and display name .
train
false
54,673
def swap_inf_nan(val): if isinstance(val, string_types): return val elif isinstance(val, collections.Sequence): return [swap_inf_nan(v) for v in val] elif isinstance(val, collections.Mapping): return dict([(swap_inf_nan(k), swap_inf_nan(v)) for (k, v) in iteritems(val)]) elif isinstance(val, float): if math.isnan(val): return '__NaN__' elif (val == float('inf')): return '__Infinity__' elif (val == float('-inf')): return '__-Infinity__' else: return val else: return val
[ "def", "swap_inf_nan", "(", "val", ")", ":", "if", "isinstance", "(", "val", ",", "string_types", ")", ":", "return", "val", "elif", "isinstance", "(", "val", ",", "collections", ".", "Sequence", ")", ":", "return", "[", "swap_inf_nan", "(", "v", ")", "for", "v", "in", "val", "]", "elif", "isinstance", "(", "val", ",", "collections", ".", "Mapping", ")", ":", "return", "dict", "(", "[", "(", "swap_inf_nan", "(", "k", ")", ",", "swap_inf_nan", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "val", ")", "]", ")", "elif", "isinstance", "(", "val", ",", "float", ")", ":", "if", "math", ".", "isnan", "(", "val", ")", ":", "return", "'__NaN__'", "elif", "(", "val", "==", "float", "(", "'inf'", ")", ")", ":", "return", "'__Infinity__'", "elif", "(", "val", "==", "float", "(", "'-inf'", ")", ")", ":", "return", "'__-Infinity__'", "else", ":", "return", "val", "else", ":", "return", "val" ]
takes an arbitrary object and replaces nan and infinity floats with string placeholders so it can be jsonified safely .
train
false
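A self-contained sketch; iteritems and string_types are the six compatibility helpers the snippet relies on.

from six import iteritems, string_types

payload = {'score': float('nan'), 'range': [float('inf'), 1.5], 'name': 'run-1'}
swap_inf_nan(payload)
# -> {'score': '__NaN__', 'range': ['__Infinity__', 1.5], 'name': 'run-1'}
# Strings pass through untouched via the string_types branch.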
54,674
def filer_file_from_upload(request, path, upload_data, sha1=None): return _filer_file_from_upload(model=File, request=request, path=path, upload_data=upload_data, sha1=sha1)
[ "def", "filer_file_from_upload", "(", "request", ",", "path", ",", "upload_data", ",", "sha1", "=", "None", ")", ":", "return", "_filer_file_from_upload", "(", "model", "=", "File", ",", "request", "=", "request", ",", "path", "=", "path", ",", "upload_data", "=", "upload_data", ",", "sha1", "=", "sha1", ")" ]
create a filer file object from uploaded data .
train
false
54,675
def last_updated(document): if (config.LAST_UPDATED in document): return document[config.LAST_UPDATED].replace(tzinfo=None) else: return epoch()
[ "def", "last_updated", "(", "document", ")", ":", "if", "(", "config", ".", "LAST_UPDATED", "in", "document", ")", ":", "return", "document", "[", "config", ".", "LAST_UPDATED", "]", ".", "replace", "(", "tzinfo", "=", "None", ")", "else", ":", "return", "epoch", "(", ")" ]
returns the document's last_updated value with timezone info stripped , or the epoch if the field is missing .
train
false
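A sketch assuming config.LAST_UPDATED is '_updated' (Eve's default) and that epoch() returns the framework's zero timestamp.

import datetime

doc = {'_updated': datetime.datetime(2020, 1, 1, 12, 0)}
last_updated(doc)  # -> datetime.datetime(2020, 1, 1, 12, 0), tzinfo stripped if present
last_updated({})   # -> epoch(), the framework's zero timestamp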