Dataset columns:

  id_within_dataset     int64    values 1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k (a token-by-token form of snippet)
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 distinct value
  is_duplicated         bool     2 classes
54,814
def config_value(option):
    return option_list[option]

Return the current configuration value for the given option.
train
false
54,817
def parse_argv(tokens, options, options_first=False):
    parsed = []
    while tokens.current() is not None:
        if tokens.current() == '--':
            return parsed + [Argument(None, v) for v in tokens]
        elif tokens.current().startswith('--'):
            parsed += parse_long(tokens, options)
        elif tokens.current().startswith('-') and tokens.current() != '-':
            parsed += parse_shorts(tokens, options)
        elif options_first:
            return parsed + [Argument(None, v) for v in tokens]
        else:
            parsed.append(Argument(None, tokens.move()))
    return parsed

Parse command-line argument vector.
train
true
54,818
def _make_namespaced_xattr_key(key, namespace='user'):
    namespaced_key = '.'.join([namespace, key])
    return namespaced_key

Create a fully-qualified xattr-key by including the intended namespace.
train
false
54,819
def _zpklp2hp(z, p, k, wo=1.0):
    z = atleast_1d(z)
    p = atleast_1d(p)
    wo = float(wo)
    degree = _relative_degree(z, p)
    z_hp = wo / z
    p_hp = wo / p
    z_hp = append(z_hp, zeros(degree))
    k_hp = k * real(prod(-z) / prod(-p))
    return z_hp, p_hp, k_hp

Transform a lowpass filter prototype to a highpass filter.
train
false
54,821
def snipmate_files_for(ft):
    if ft == 'all':
        ft = '_'
    patterns = ['%s.snippets' % ft,
                os.path.join(ft, '*.snippets'),
                os.path.join(ft, '*.snippet'),
                os.path.join(ft, '*/*.snippet')]
    ret = set()
    for rtp in _vim.eval('&runtimepath').split(','):
        path = os.path.realpath(os.path.expanduser(os.path.join(rtp, 'snippets')))
        for pattern in patterns:
            for fn in glob.glob(os.path.join(path, pattern)):
                ret.add(fn)
    return ret

Returns all snipmate files we need to look at for ft.
train
false
54,822
def dbsize(host=None, port=None, db=None, password=None):
    server = _connect(host, port, db, password)
    return server.dbsize()

Return the number of keys in the selected database. CLI example:
train
true
54,823
def ant(registry, xml_parent, data):
    ant = XML.SubElement(xml_parent, 'hudson.tasks.Ant')
    if type(data) is str:
        data = {'targets': data}
    for setting, value in sorted(data.items()):
        if setting == 'targets':
            targets = XML.SubElement(ant, 'targets')
            targets.text = value
        if setting == 'buildfile':
            buildfile = XML.SubElement(ant, 'buildFile')
            buildfile.text = value
        if setting == 'properties':
            properties = data['properties']
            prop_string = ''
            for prop, val in properties.items():
                prop_string += '%s=%s\n' % (prop, val)
            prop_element = XML.SubElement(ant, 'properties')
            prop_element.text = prop_string
        if setting == 'java-opts':
            javaopts = data['java-opts']
            jopt_string = ' '.join(javaopts)
            jopt_element = XML.SubElement(ant, 'antOpts')
            jopt_element.text = jopt_string
    XML.SubElement(ant, 'antName').text = data.get('ant-name', 'default')

yaml: ant. Execute an Ant target.
train
false
54,825
def full_restart(name):
    restart(name)

Calls s6 restart.
train
false
54,826
def regen(it):
    if isinstance(it, (list, tuple)):
        return it
    return _regen(it)

Convert an iterator to an object that can be consumed multiple times.
train
false
54,827
def is_arity(n, func, sigspec=None):
    sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_arity, n, func)
    if sigspec is None:
        return rv
    num = num_required_args(func, sigspec)
    if num is not None:
        num = num == n
        if not num:
            return False
    varargs = has_varargs(func, sigspec)
    if varargs:
        return False
    keywords = has_keywords(func, sigspec)
    if keywords:
        return False
    if num is None or varargs is None or keywords is None:
        return None
    return True

Does a function have only n positional arguments? This function relies on introspection and does not call the function.
train
false
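For intuition, here is a minimal standalone sketch of the same idea using only the standard library. It is a simplified approximation, not the snippet's private helpers (_check_sigspec and friends), and the name is illustrative:

import inspect

def is_arity_sketch(n, func):
    # Simplified approximation: exactly n positional parameters,
    # no defaults, and no *args or **kwargs.
    params = inspect.signature(func).parameters.values()
    if any(p.kind in (p.VAR_POSITIONAL, p.VAR_KEYWORD) for p in params):
        return False
    if any(p.default is not p.empty for p in params):
        return False
    return len(list(params)) == n

assert is_arity_sketch(2, lambda x, y: x) is True
assert is_arity_sketch(2, lambda *args: args) is False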
54,828
def add_trailing_slash(path):
    if len(path) > 0 and path[-1] == os.sep:
        return path
    else:
        return path + os.sep

If path does not end with os.sep, append it.
train
false
54,829
@api_versions.wraps('2.17')
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
def do_trigger_crash_dump(cs, args):
    server = _find_server(cs, args.server)
    server.trigger_crash_dump()

Trigger crash dump in an instance.
train
false
54,830
def gf_eval(f, a, p, K):
    result = K.zero
    for c in f:
        result *= a
        result += c
        result %= p
    return result

Evaluate f(a) in GF(p) using the Horner scheme.
train
false
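gf_eval is plain Horner evaluation with a reduction mod p after every step. A self-contained sketch over Python ints (standing in for the domain K) behaves the same way:

def horner_mod(coeffs, a, p):
    # Dense big-endian coefficients: [1, 2, 3] represents x**2 + 2*x + 3.
    result = 0
    for c in coeffs:
        result = (result * a + c) % p
    return result

# (x**2 + 2*x + 3) at x = 4 over GF(5): 16 + 8 + 3 = 27, and 27 % 5 == 2.
assert horner_mod([1, 2, 3], 4, 5) == 2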
54,832
def find_native_user_instrumentation_hook(cls):
    return getattr(cls, INSTRUMENTATION_MANAGER, None)

Find user-specified instrumentation management for a class.
train
false
54,833
def addFacesByConcaveLoop(faces, indexedLoop):
    if len(indexedLoop) < 3:
        return
    remainingLoop = indexedLoop[:]
    while len(remainingLoop) > 2:
        remainingLoop = getRemainingLoopAddFace(faces, remainingLoop)

Add faces from a polygon which is concave.
train
false
54,834
def init(mpstate):
    return SerialModule(mpstate)

Initialise module.
train
false
54,835
@require_GET
def aggregated_metrics(request):
    today = date.today()
    locales = get_locales_by_visit(today - timedelta(days=30), today)
    product = _get_product(request)
    return render(request, 'dashboards/aggregated_metrics.html',
                  {'locales_json': json.dumps(settings.SUMO_LANGUAGES),
                   'locales': locales,
                   'product': product,
                   'products': Product.objects.filter(visible=True)})

The aggregated KB metrics dashboard.
train
false
54,836
def RewriteResponse(response_file, response_rewriters=None,
                    request_headers=None, env_dict=None):
    if response_rewriters is None:
        response_rewriters = CreateResponseRewritersChain()
    response = AppServerResponse(response_file)
    for response_rewriter in response_rewriters:
        if response_rewriter.func_code.co_argcount == 1:
            response_rewriter(response)
        elif response_rewriter.func_code.co_argcount == 2:
            response_rewriter(response, request_headers)
        else:
            response_rewriter(response, request_headers, env_dict)
    return response

Allows final rewrite of dev_appserver response.
train
false
54,837
def cachefile(src, dst, api=None, logger=None):
    lcache = os.path.join(os.path.dirname(os.path.dirname(dst)), '.link_cache')
    if not os.path.isdir(lcache):
        os.mkdir(lcache)
    key = hashfile(src, lcache=lcache, logger=logger)
    cachefile = os.path.join(lcache, key)
    if not os.path.exists(cachefile):
        logger.info('trying to create cache file %s' % cachefile)
        copyfile(src, cachefile, api=api, logger=logger)
    logger.debug('trying cachelink %s -> %s -> %s' % (src, cachefile, dst))
    os.link(cachefile, dst)

Copy a file into a cache and link it into place.
train
false
54,838
@not_implemented_for('directed')
def common_neighbors(G, u, v):
    if u not in G:
        raise nx.NetworkXError('u is not in the graph.')
    if v not in G:
        raise nx.NetworkXError('v is not in the graph.')
    return (w for w in G[u] if w in G[v] and w not in (u, v))

Return the common neighbors of two nodes in a graph.
train
false
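A quick usage sketch; common_neighbors ships with networkx, so on a small undirected graph:

import networkx as nx

G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4)])
# Nodes adjacent to both 2 and 3, excluding 2 and 3 themselves.
print(sorted(nx.common_neighbors(G, 2, 3)))  # [1, 4]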
54,839
@utils.arg('ip_range', metavar='<range>', help=_('Address range to create.'))
@utils.arg('--pool', dest='pool', metavar='<pool>', default=None,
           help=_('Pool for new Floating IPs.'))
@utils.arg('--interface', metavar='<interface>', default=None,
           help=_('Interface for new Floating IPs.'))
@deprecated_network
def do_floating_ip_bulk_create(cs, args):
    cs.floating_ips_bulk.create(args.ip_range, args.pool, args.interface)

Bulk create floating IPs by range.
train
false
54,841
def no_real_gs_credentials():
    if parse_boolean_envvar(os.getenv('WALE_GS_INTEGRATION_TESTS')) is not True:
        return True
    if os.getenv('GOOGLE_APPLICATION_CREDENTIALS') is None:
        return True
    return False

Helps skip integration tests without live credentials.
train
false
54,842
def invert_docs_link_map(docs_links):
    files_to_docs = defaultdict(list)
    for doc, files in docs_links.iteritems():
        for file in files:
            files_to_docs[file].append(doc)
            files_to_docs[file] = list(set(files_to_docs[file]))
    return files_to_docs

The docs links map is in this format: "doc_path": ["file_path", ...]; this inverts it to map each file to its docs.
train
false
54,843
def multi_replace(text, word_dic):
    rc = re.compile('|'.join(map(re.escape, word_dic)))

    def translate(match):
        return word_dic[match.group(0)]

    return rc.sub(translate, text)

Takes a string and replaces words that match a key in a dictionary with the associated value.
train
false
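Usage sketch, assuming the function above is in scope. Because the compiled alternation substitutes in a single pass, swapped keys do not cascade into each other:

word_dic = {'cat': 'dog', 'dog': 'cat'}
print(multi_replace('cat chases dog', word_dic))  # dog chases cat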
54,844
def is_automated():
    is_automated = False
    try:
        dist_dir_i = sys.argv.index('--dist-dir')
    except ValueError:
        dist_dir_i = None
    if dist_dir_i is not None:
        dist_dir = sys.argv[dist_dir_i + 1]
        if 'egg-dist-tmp' in dist_dir:
            is_automated = True
    if (sys.argv in [['-c', 'develop', '--no-deps'],
                     ['--no-deps', '-c', 'develop'],
                     ['-c', 'egg_info']]
            or 'pip-egg-info' in sys.argv
            or sys.argv[:3] == ['-c', 'install', '--record']
            or sys.argv[:4] == ['-c', 'install',
                                '--single-version-externally-managed', '--record']):
        is_automated = True
    return is_automated

Check for installation with easy_install or pip.
train
false
54,845
def gf_berlekamp(f, p, K):
    Q = gf_Qmatrix(f, p, K)
    V = gf_Qbasis(Q, p, K)
    for i, v in enumerate(V):
        V[i] = gf_strip(list(reversed(v)))
    factors = [f]
    for k in range(1, len(V)):
        for f in list(factors):
            s = K.zero
            while s < p:
                g = gf_sub_ground(V[k], s, p, K)
                h = gf_gcd(f, g, p, K)
                if h != [K.one] and h != f:
                    factors.remove(f)
                    f = gf_quo(f, h, p, K)
                    factors.extend([f, h])
                if len(factors) == len(V):
                    return _sort_factors(factors, multiple=False)
                s += K.one
    return _sort_factors(factors, multiple=False)

Factor a square-free f in GF(p)[x] for small p.
train
false
54,846
def notify_status(doc, method=None):
    party_type = None
    for key, doctypes in status_depends_on.iteritems():
        if doc.doctype in doctypes:
            party_type = key
            break
    if not party_type:
        return
    name = doc.get(party_type.lower())
    if not name:
        return
    party = frappe.get_doc(party_type, name)
    filters = get_filters_for(doc.doctype)
    party.flags.ignore_mandatory = True
    status = None
    if filters:
        if evaluate_filters(doc, filters):
            status = u'Open'
    if status == u'Open':
        if party.status != u'Open':
            party.status = u'Open'
            party.save(ignore_permissions=True)
    elif party.status == u'Open':
        update_status(party)
    party.update_modified()
    party.notify_update()

Notify status to customer.
train
false
54,849
def get_writer_names():
    return set([k for k, v in six.iteritems(REPORT_WRITERS_MAP) if v])

Get the registered writer names.
train
false
54,850
@not_implemented_for('undirected')
def antichains(G):
    TC = nx.transitive_closure(G)
    antichains_stacks = [([], list(reversed(list(nx.topological_sort(G)))))]
    while antichains_stacks:
        antichain, stack = antichains_stacks.pop()
        yield antichain
        while stack:
            x = stack.pop()
            new_antichain = antichain + [x]
            new_stack = [t for t in stack if not (t in TC[x] or x in TC[t])]
            antichains_stacks.append((new_antichain, new_stack))

Generates antichains from a DAG.
train
false
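On a small DAG this yields the empty antichain first and then every set of mutually incomparable nodes. A sketch assuming networkx, where antichains is a public function; the exact ordering can vary by version:

import networkx as nx

G = nx.DiGraph([(1, 2), (1, 3)])  # 2 and 3 are incomparable
print(list(nx.antichains(G)))
# e.g. [[], [3], [2], [2, 3], [1]]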
54,852
def _blockdevice_id_to_dataset_id(blockdevice_id):
    return UUID(blockdevice_id[len(_PREFIX):])

Computes a dataset_id from a blockdevice_id.
train
false
54,853
def legitimize(text, os=platform.system()):
    text = text.translate({0: None, ord('/'): '-', ord('|'): '-'})
    if os == 'Windows':
        text = text.translate({ord(':'): '-', ord('*'): '-', ord('?'): '-',
                               ord('\\'): '-', ord('"'): "'", ord('+'): '-',
                               ord('<'): '-', ord('>'): '-', ord('['): '(',
                               ord(']'): ')'})
    else:
        if os == 'Darwin':
            text = text.translate({ord(':'): '-'})
        if text.startswith('.'):
            text = text[1:]
    text = text[:82]
    return text

Converts a string to a valid filename.
train
false
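Usage sketch, assuming the function above is in scope; the os argument selects the platform-specific character map:

# '/' is replaced on every platform; ':' and '*' only on Windows.
print(legitimize('a/b:c*d', os='Windows'))  # a-b-c-d
print(legitimize('a/b:c*d', os='Linux'))    # a-b:c*d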
54,854
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        set_status('uploading patch for ' + patch[0])
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            print('Not uploading the patch for ' + patch[0] +
                  ' because the file is too large.')
            continue
        form_fields = [('filename', patch[0])]
        if not options.download_base:
            form_fields.append(('content_upload', '1'))
        files = [('data', 'data.diff', patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = '/%d/upload_patch/%d' % (int(issue), int(patchset))
        print('Uploading patch for ' + patch[0])
        response_body = rpc_server.Send(url, body, content_type=ctype)
        lines = response_body.splitlines()
        if not lines or lines[0] != 'OK':
            StatusUpdate(' --> %s' % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv

Uploads a separate patch for each file in the diff output.
train
false
54,855
@world.absorb
def css_html(css_selector, index=0):
    assert is_css_present(css_selector)
    return retry_on_exception(lambda: css_find(css_selector)[index].html)

Returns the HTML of a css_selector.
train
false
54,858
def location_to_string(locationID):
    loc = ['{}-'.format(locationID >> 24)]
    while locationID & 15728640:
        if len(loc) > 1:
            loc.append('.')
        loc.append('{}'.format((locationID >> 20) & 15))
        locationID <<= 4
    return ''.join(loc)

Helper to calculate port and bus number from locationID.
train
false
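The magic number 15728640 is 0x00F00000, the mask for the topmost port nibble. A worked example, assuming the function above is in scope:

# 0x14200000: bus = 0x14200000 >> 24 = 20; first port nibble
# (0x14200000 >> 20) & 0xF = 2; the remaining nibbles are zero.
assert location_to_string(0x14200000) == '20-2'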
54,859
def get_subordinate_users(user, site):
    from cms.utils.page_permissions import get_change_permissions_id_list
    try:
        user_level = get_user_permission_level(user, site)
    except NoPermissionsException:
        qs = get_user_model().objects.distinct().filter(
            Q(is_staff=True)
            & Q(pageuser__created_by=user)
            & Q(pagepermission__page=None))
        qs = qs.exclude(pk=user.pk).exclude(groups__user__pk=user.pk)
        return qs
    if user_level == ROOT_USER_LEVEL:
        return get_user_model().objects.all()
    page_id_allow_list = get_change_permissions_id_list(user, site, check_global=False)
    qs = get_user_model().objects.distinct().filter(
        (Q(is_staff=True)
         & Q(pagepermission__page__id__in=page_id_allow_list)
         & Q(pagepermission__page__depth__gte=user_level))
        | (Q(pageuser__created_by=user) & Q(pagepermission__page=None)))
    qs = qs.exclude(pk=user.pk).exclude(groups__user__pk=user.pk)
    return qs

Returns users queryset.
train
false
54,860
def get_numpy_status():
    numpy_status = {}
    try:
        import numpy
        numpy_version = numpy.__version__
        numpy_status['up_to_date'] = (parse_version(numpy_version)
                                      >= parse_version(NUMPY_MIN_VERSION))
        numpy_status['version'] = numpy_version
    except ImportError:
        traceback.print_exc()
        numpy_status['up_to_date'] = False
        numpy_status['version'] = ''
    return numpy_status

Returns a dictionary containing a boolean specifying whether numpy is up-to-date.
train
true
54,861
def triangulate_point(x1, x2, P1, P2):
    M = zeros((6, 6))
    M[:3, :4] = P1
    M[3:, :4] = P2
    M[:3, 4] = -x1
    M[3:, 5] = -x2
    U, S, V = linalg.svd(M)
    X = V[-1, :4]
    return X / X[3]

Point pair triangulation from least squares solution.
train
false
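A round-trip sketch: project a known homogeneous 3D point through two cameras, then recover it. The snippet's bare zeros/linalg names suggest a star import from numpy, so this assumes numpy semantics:

import numpy as np

X_true = np.array([1.0, 2.0, 3.0, 1.0])                # homogeneous 3D point
P1 = np.hstack([np.eye(3), np.zeros((3, 1))])          # camera at the origin
P2 = np.hstack([np.eye(3), np.array([[-1.0], [0.0], [0.0]])])  # translated camera
x1, x2 = P1 @ X_true, P2 @ X_true                      # homogeneous image points
X = triangulate_point(x1, x2, P1, P2)                  # the function above
assert np.allclose(X, X_true)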
54,864
@api_versions.wraps('2.10')
@utils.arg('name', metavar='<name>', help=_('Keypair name to delete.'))
@utils.arg('--user', metavar='<user-id>', default=None,
           help=_('ID of key-pair owner (Admin only).'))
def do_keypair_delete(cs, args):
    cs.keypairs.delete(args.name, args.user)

Delete keypair given by its name.
train
false
54,865
def _removeIfPresent(filename):
    try:
        os.unlink(filename)
    except OSError as why:
        if why.errno == ENOENT:
            return 0
        else:
            raise
    else:
        return 1

Attempt to remove a file.
train
true
54,867
def split_at_whitespace(string):
    return re.split(__WHITESPACE_SPLIT, string)

Like string.split(): split the string at whitespace.
train
false
54,868
def test_conversion_qtable_table():
    qt = QTable(MIXIN_COLS)
    names = qt.colnames
    for name in names:
        qt[name].info.description = name
    t = Table(qt)
    for name in names:
        assert t[name].info.description == name
        if name == 'quantity':
            assert np.all(t['quantity'] == qt['quantity'].value)
            assert np.all(t['quantity'].unit is qt['quantity'].unit)
            assert isinstance(t['quantity'], t.ColumnClass)
        else:
            assert_table_name_col_equal(t, name, qt[name])
    qt2 = QTable(qt)
    for name in names:
        assert qt2[name].info.description == name
        assert_table_name_col_equal(qt2, name, qt[name])

Test that a table round trips from QTable => Table => QTable.
train
false
54,869
@frame_transform_graph.transform(coord.StaticMatrixTransform, coord.Galactic, Sagittarius)
def galactic_to_sgr():
    return SGR_MATRIX

Compute the transformation matrix from Galactic spherical to heliocentric Sgr coordinates.
train
false
54,870
def _StructPackDecoder(wire_type, format):
    value_size = struct.calcsize(format)
    local_unpack = struct.unpack

    def InnerDecode(buffer, pos):
        new_pos = pos + value_size
        result = local_unpack(format, buffer[pos:new_pos])[0]
        return (result, new_pos)

    return _SimpleDecoder(wire_type, InnerDecode)

Return a constructor for a decoder for a fixed-width field.
train
true
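At its core the returned decoder is struct.calcsize plus struct.unpack at an offset. A minimal sketch of that inner step for a little-endian 32-bit field:

import struct

fmt = '<I'                     # little-endian unsigned 32-bit
value_size = struct.calcsize(fmt)
buffer = b'\x2a\x00\x00\x00\xff'
pos = 0
result = struct.unpack(fmt, buffer[pos:pos + value_size])[0]
assert (result, pos + value_size) == (42, 4)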
54,871
def pagerank(matrix, d_factor=0.85):
    size = len(matrix)
    epsilon = 0.0001
    matrix = matrix.copy()
    for i in xrange(0, size):
        col_sum = matrix[:, i].sum()
        if col_sum:
            matrix[:, i] /= col_sum
    e = ((1.0 - d_factor) / size) * numpy.ones((size, size))
    matrix = (d_factor * matrix) + e
    result = numpy.ones(size) / size
    prev = numpy.ones(size) / size
    iteration = 0
    while True:
        result = numpy.dot(matrix, result)
        result /= result.sum()
        diff = numpy.abs(result - prev).sum()
        print('Iteration %d, change %f' % (iteration, diff))
        if diff < epsilon:
            break
        prev = result
        iteration += 1
    return result

Calculate the PageRank vector of a given adjacency matrix.
train
false
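Usage sketch on a three-node link matrix, where entry [i][j] is 1.0 when page j links to page i (the column normalization in the snippet implies this orientation). The snippet's xrange is Python 2; substitute range on Python 3:

import numpy

# j -> i adjacency: 0 -> 1, 0 -> 2, 1 -> 2, 2 -> 0
adjacency = numpy.array([[0.0, 0.0, 1.0],
                         [1.0, 0.0, 0.0],
                         [1.0, 1.0, 0.0]])
ranks = pagerank(adjacency)     # the function above
print(ranks, ranks.sum())       # components sum to 1.0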
54,873
def isPointInsideLoop(loop, point):
    return getNumberOfIntersectionsToLeft(loop, point) % 2 == 1

Determine if a point is inside another loop.
train
false
54,874
@frappe.whitelist()
def enroll_student(source_name):
    student = get_mapped_doc(u'Student Applicant', source_name,
                             {u'Student Applicant': {
                                 u'doctype': u'Student',
                                 u'field_map': {u'name': u'student_applicant'}}},
                             ignore_permissions=True)
    student.save()
    program_enrollment = frappe.new_doc(u'Program Enrollment')
    program_enrollment.student = student.name
    program_enrollment.student_name = student.title
    program_enrollment.program = frappe.db.get_value(u'Student Applicant',
                                                     source_name, u'program')
    return program_enrollment

Creates a student record and returns a program enrollment.
train
false
54,875
def fill_gaps(*args, **kwargs):
    generator = fill_gaps_generator(*args, **kwargs)
    return list(generator)

Listify the generator returned by fill_gaps_generator for memoize.
train
false
54,876
@verbose
def tweets_by_user_demo(user='NLTK_org', count=200):
    oauth = credsfromfile()
    client = Query(**oauth)
    client.register(TweetWriter())
    client.user_tweets(user, count)

Use the REST API to search for past tweets by a given user.
train
false
54,877
def grad_clip(x, lower_bound, upper_bound):
    return GradClip(lower_bound, upper_bound)(x)

This op does a view in the forward pass.
train
false
54,880
def _do_surface_dots_subset(intrad, rsurf, rmags, rref, refl, lsurf, rlens,
                            this_nn, cosmags, ws, volume, lut, n_fact, ch_type,
                            idx):
    products = _fast_sphere_dot_r0(intrad, rsurf, rmags, lsurf, rlens, this_nn,
                                   cosmags, None, ws, volume, lut, n_fact,
                                   ch_type).T
    if rref is not None:
        raise NotImplementedError
    return products

Helper for parallelization.
train
false
54,881
def list_job(jid, ext_source=None, display_progress=False):
    ret = {'jid': jid}
    mminion = salt.minion.MasterMinion(__opts__)
    returner = _get_returner((__opts__['ext_job_cache'], ext_source,
                              __opts__['master_job_cache']))
    if display_progress:
        __jid_event__.fire_event(
            {'message': 'Querying returner: {0}'.format(returner)}, 'progress')
    job = mminion.returners['{0}.get_load'.format(returner)](jid)
    ret.update(_format_jid_instance(jid, job))
    ret['Result'] = mminion.returners['{0}.get_jid'.format(returner)](jid)
    fstr = '{0}.get_endtime'.format(__opts__['master_job_cache'])
    if __opts__.get('job_cache_store_endtime') and fstr in mminion.returners:
        endtime = mminion.returners[fstr](jid)
        if endtime:
            ret['EndTime'] = endtime
    return ret

List a specific job given by its jid, using ext_source if provided.
train
true
54,882
def DEFINE_choice(name, default, choices, help):
    CONFIG.AddOption(type_info.Choice(name=name, default=default,
                                      choices=choices, description=help))

A helper for defining choice string options.
train
false
54,883
def list_exports(exports='/etc/exports'):
    ret = {}
    with salt.utils.fopen(exports, 'r') as efl:
        for line in efl.read().splitlines():
            if not line:
                continue
            if line.startswith('#'):
                continue
            comps = line.split()
            ret[comps[0]] = []
            newshares = []
            for perm in comps[1:]:
                if perm.startswith('/'):
                    newshares.append(perm)
                    continue
                permcomps = perm.split('(')
                permcomps[1] = permcomps[1].replace(')', '')
                hosts = permcomps[0].split(',')
                options = permcomps[1].split(',')
                ret[comps[0]].append({'hosts': hosts, 'options': options})
            for share in newshares:
                ret[share] = ret[comps[0]]
    return ret

List configured exports. CLI example:
train
false
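A standalone sketch of the same per-line parse, without the salt plumbing, to show the resulting shape:

line = '/srv/media host1(rw,sync) host2(ro)'
comps = line.split()
entries = []
for perm in comps[1:]:
    hosts, _, opts = perm.partition('(')
    entries.append({'hosts': hosts.split(','),
                    'options': opts.rstrip(')').split(',')})
print({comps[0]: entries})
# {'/srv/media': [{'hosts': ['host1'], 'options': ['rw', 'sync']},
#                 {'hosts': ['host2'], 'options': ['ro']}]}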
54,884
@snippet
def client_list_subscriptions(client, to_delete):
    def do_something_with(sub):
        pass

    for subscription in client.list_subscriptions():
        do_something_with(subscription)

List all subscriptions for a project.
train
false
54,886
@csrf_exempt
def notify_url_handler(request):
    logger1.info('>>notify url handler start...')
    if request.method == 'POST':
        if notify_verify(request.POST):
            logger1.info('pass verification...')
            tn = request.POST.get('out_trade_no')
            logger1.info('Change the status of bill %s' % tn)
            bill = Bill.objects.get(pk=tn)
            trade_status = request.POST.get('trade_status')
            logger1.info('the status of bill %s changed to %s' % (tn, trade_status))
            bill.trade_status = trade_status
            bill.save()
            trade_no = request.POST.get('trade_no')
            if trade_status == 'WAIT_SELLER_SEND_GOODS':
                logger1.info('It is WAIT_SELLER_SEND_GOODS, so upgrade bill')
                upgrade_bill(bill, 6 * 30 + 7)
                url = send_goods_confirm_by_platform(trade_no)
                logger1.info('send goods confirmation. %s' % url)
                req = urllib.urlopen(url)
                return HttpResponse('success')
            else:
                logger1.info('##info: Status of %s' % trade_status)
                return HttpResponse('success')
    return HttpResponse('fail')

Handler for notify_url, for asynchronously updating billing information.
train
false
54,887
def validate_bool_maybe_none(b): if isinstance(b, six.string_types): b = b.lower() if ((b is None) or (b == u'none')): return None if (b in (u't', u'y', u'yes', u'on', u'true', u'1', 1, True)): return True elif (b in (u'f', u'n', u'no', u'off', u'false', u'0', 0, False)): return False else: raise ValueError((u'Could not convert "%s" to boolean' % b))
[ "def", "validate_bool_maybe_none", "(", "b", ")", ":", "if", "isinstance", "(", "b", ",", "six", ".", "string_types", ")", ":", "b", "=", "b", ".", "lower", "(", ")", "if", "(", "(", "b", "is", "None", ")", "or", "(", "b", "==", "u'none'", ")", ")", ":", "return", "None", "if", "(", "b", "in", "(", "u't'", ",", "u'y'", ",", "u'yes'", ",", "u'on'", ",", "u'true'", ",", "u'1'", ",", "1", ",", "True", ")", ")", ":", "return", "True", "elif", "(", "b", "in", "(", "u'f'", ",", "u'n'", ",", "u'no'", ",", "u'off'", ",", "u'false'", ",", "u'0'", ",", "0", ",", "False", ")", ")", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "(", "u'Could not convert \"%s\" to boolean'", "%", "b", ")", ")" ]
convert b to a boolean or none , raising valueerror otherwise .
train
false
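a quick sanity sketch of validate_bool_maybe_none above ; runnable as-is once the function ( and six ) is importable , and the 'maybe' input is illustrative :

    # sanity checks for validate_bool_maybe_none as defined above
    assert validate_bool_maybe_none('Yes') is True
    assert validate_bool_maybe_none('off') is False
    assert validate_bool_maybe_none('none') is None
    assert validate_bool_maybe_none(None) is None
    # any other value raises, e.g. validate_bool_maybe_none('maybe') -> ValueError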
54,889
def has_player(accessing_obj, accessed_obj, *args, **kwargs): return (hasattr(accessing_obj, 'has_player') and accessing_obj.has_player)
[ "def", "has_player", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "(", "hasattr", "(", "accessing_obj", ",", "'has_player'", ")", "and", "accessing_obj", ".", "has_player", ")" ]
only returns true if accessing_obj.has_player is true .
train
false
54,891
def _mocked_presets(*args, **kwargs): return [MockPreset('1')]
[ "def", "_mocked_presets", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "[", "MockPreset", "(", "'1'", ")", "]" ]
return a list of mocked presets .
train
false
54,893
def pportD5(state): global dataReg if (state == 0): dataReg = (dataReg & (~ 32)) else: dataReg = (dataReg | 32) port.DlPortWritePortUchar(baseAddress, dataReg)
[ "def", "pportD5", "(", "state", ")", ":", "global", "dataReg", "if", "(", "state", "==", "0", ")", ":", "dataReg", "=", "(", "dataReg", "&", "(", "~", "32", ")", ")", "else", ":", "dataReg", "=", "(", "dataReg", "|", "32", ")", "port", ".", "DlPortWritePortUchar", "(", "baseAddress", ",", "dataReg", ")" ]
toggle data register d5 bit .
train
false
54,896
def toggle(device, partition, flag): _validate_device(device) try: int(partition) except Exception: raise CommandExecutionError('Invalid partition number passed to partition.toggle') if (flag not in set(['bios_grub', 'legacy_boot', 'boot', 'lba', 'root', 'swap', 'hidden', 'raid', 'LVM', 'PALO', 'PREP', 'DIAG'])): raise CommandExecutionError('Invalid flag passed to partition.toggle') cmd = 'parted -m -s {0} toggle {1} {2}'.format(device, partition, flag) out = __salt__['cmd.run'](cmd).splitlines() return out
[ "def", "toggle", "(", "device", ",", "partition", ",", "flag", ")", ":", "_validate_device", "(", "device", ")", "try", ":", "int", "(", "partition", ")", "except", "Exception", ":", "raise", "CommandExecutionError", "(", "'Invalid partition number passed to partition.toggle'", ")", "if", "(", "flag", "not", "in", "set", "(", "[", "'bios_grub'", ",", "'legacy_boot'", ",", "'boot'", ",", "'lba'", ",", "'root'", ",", "'swap'", ",", "'hidden'", ",", "'raid'", ",", "'LVM'", ",", "'PALO'", ",", "'PREP'", ",", "'DIAG'", "]", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Invalid flag passed to partition.toggle'", ")", "cmd", "=", "'parted -m -s {0} toggle {1} {2}'", ".", "format", "(", "device", ",", "partition", ",", "flag", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "return", "out" ]
toggle the state of <flag> on <partition> .
train
true
54,897
def test_parser_without_subparser_recieves_root_entry(complete_parser): result = convert(complete_parser) assert ('primary' in result['widgets'])
[ "def", "test_parser_without_subparser_recieves_root_entry", "(", "complete_parser", ")", ":", "result", "=", "convert", "(", "complete_parser", ")", "assert", "(", "'primary'", "in", "result", "[", "'widgets'", "]", ")" ]
non-subparser setups should receive a default root key called primary .
train
false
54,898
def test_table_with_no_newline(): table = BytesIO() with pytest.raises(ascii.InconsistentTableError): ascii.read(table) table = BytesIO() with pytest.raises(ValueError) as err: ascii.read(table, guess=False, fast_reader=False, format='basic') assert ('No header line found' in str(err.value)) table = BytesIO() with pytest.raises(ValueError) as err: ascii.read(table, guess=False, fast_reader=True, format='fast_basic') assert ('Inconsistent data column lengths' in str(err.value)) for kwargs in [dict(), dict(guess=False, fast_reader=False, format='basic'), dict(guess=False, fast_reader=True, format='fast_basic')]: table = BytesIO() table.write('a b') t = ascii.read(table, **kwargs) assert (t.colnames == ['a', 'b']) assert (len(t) == 0)
[ "def", "test_table_with_no_newline", "(", ")", ":", "table", "=", "BytesIO", "(", ")", "with", "pytest", ".", "raises", "(", "ascii", ".", "InconsistentTableError", ")", ":", "ascii", ".", "read", "(", "table", ")", "table", "=", "BytesIO", "(", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "err", ":", "ascii", ".", "read", "(", "table", ",", "guess", "=", "False", ",", "fast_reader", "=", "False", ",", "format", "=", "'basic'", ")", "assert", "(", "'No header line found'", "in", "str", "(", "err", ".", "value", ")", ")", "table", "=", "BytesIO", "(", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "err", ":", "ascii", ".", "read", "(", "table", ",", "guess", "=", "False", ",", "fast_reader", "=", "True", ",", "format", "=", "'fast_basic'", ")", "assert", "(", "'Inconsistent data column lengths'", "in", "str", "(", "err", ".", "value", ")", ")", "for", "kwargs", "in", "[", "dict", "(", ")", ",", "dict", "(", "guess", "=", "False", ",", "fast_reader", "=", "False", ",", "format", "=", "'basic'", ")", ",", "dict", "(", "guess", "=", "False", ",", "fast_reader", "=", "True", ",", "format", "=", "'fast_basic'", ")", "]", ":", "table", "=", "BytesIO", "(", ")", "table", ".", "write", "(", "'a b'", ")", "t", "=", "ascii", ".", "read", "(", "table", ",", "**", "kwargs", ")", "assert", "(", "t", ".", "colnames", "==", "[", "'a'", ",", "'b'", "]", ")", "assert", "(", "len", "(", "t", ")", "==", "0", ")" ]
test that an input file which is completely empty fails in the expected way , and that a header-only table with no trailing newline is read correctly .
train
false
54,899
def gauge(): def prep(r): if r.interactive: pass elif (r.representation == 'plain'): r.table.image_url.readable = False return True s3.prep = prep def postp(r, output): if r.interactive: pass elif (r.representation == 'plain'): image_url = r.record.image_url if image_url: output['item'].append(IMG(_src=image_url, _width=400, _height=310)) return output s3.postp = postp output = s3_rest_controller() return output
[ "def", "gauge", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "r", ".", "interactive", ":", "pass", "elif", "(", "r", ".", "representation", "==", "'plain'", ")", ":", "r", ".", "table", ".", "image_url", ".", "readable", "=", "False", "return", "True", "s3", ".", "prep", "=", "prep", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "r", ".", "interactive", ":", "pass", "elif", "(", "r", ".", "representation", "==", "'plain'", ")", ":", "image_url", "=", "r", ".", "record", ".", "image_url", "if", "image_url", ":", "output", "[", "'item'", "]", ".", "append", "(", "IMG", "(", "_src", "=", "image_url", ",", "_width", "=", "400", ",", "_height", "=", "310", ")", ")", "return", "output", "s3", ".", "postp", "=", "postp", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
flood gauges .
train
false
54,900
def matchOnlyAtCol(n): def verifyCol(strg, locn, toks): if (col(locn, strg) != n): raise ParseException(strg, locn, ('matched token not at column %d' % n)) return verifyCol
[ "def", "matchOnlyAtCol", "(", "n", ")", ":", "def", "verifyCol", "(", "strg", ",", "locn", ",", "toks", ")", ":", "if", "(", "col", "(", "locn", ",", "strg", ")", "!=", "n", ")", ":", "raise", "ParseException", "(", "strg", ",", "locn", ",", "(", "'matched token not at column %d'", "%", "n", ")", ")", "return", "verifyCol" ]
helper method for defining parse actions that require matching at a specific column in the input text .
train
true
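a minimal pyparsing sketch of matchOnlyAtCol above ; the grammar and input text are illustrative :

    from pyparsing import Word, nums

    integer = Word(nums)
    integer.setParseAction(matchOnlyAtCol(5))
    # only the number starting in column 5 survives; '34' at column 8 is skipped
    print(integer.searchString('ab  12 34'))   # -> [['12']]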
54,901
def show_key(kwargs=None, call=None): if (call != 'function'): log.error('The list_keys function must be called with -f or --function.') return False if (not kwargs): kwargs = {} if ('keyname' not in kwargs): log.error('A keyname is required.') return False (rcode, data) = query(command='my/keys/{0}'.format(kwargs['keyname']), method='GET') return {'keys': {data['name']: data['key']}}
[ "def", "show_key", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "log", ".", "error", "(", "'The list_keys function must be called with -f or --function.'", ")", "return", "False", "if", "(", "not", "kwargs", ")", ":", "kwargs", "=", "{", "}", "if", "(", "'keyname'", "not", "in", "kwargs", ")", ":", "log", ".", "error", "(", "'A keyname is required.'", ")", "return", "False", "(", "rcode", ",", "data", ")", "=", "query", "(", "command", "=", "'my/keys/{0}'", ".", "format", "(", "kwargs", "[", "'keyname'", "]", ")", ",", "method", "=", "'GET'", ")", "return", "{", "'keys'", ":", "{", "data", "[", "'name'", "]", ":", "data", "[", "'key'", "]", "}", "}" ]
list the keys available .
train
true
54,902
def get_name_levels(node): visitor = _NodeNameCollector() ast.walk(node, visitor) return visitor.names
[ "def", "get_name_levels", "(", "node", ")", ":", "visitor", "=", "_NodeNameCollector", "(", ")", "ast", ".", "walk", "(", "node", ",", "visitor", ")", "return", "visitor", ".", "names" ]
return a list of ( name , level ) tuples for assigned names .  the level is none for simple assignments and is a list of numbers for tuple assignments .
train
true
54,903
def condentropy(px, py, pxpy=None, logbase=2): if ((not _isproperdist(px)) or (not _isproperdist(py))): raise ValueError('px or py is not a proper probability distribution') if ((pxpy != None) and (not _isproperdist(pxpy))): raise ValueError('pxpy is not a proper joint distribtion') if (pxpy == None): pxpy = np.outer(py, px) condent = np.sum((pxpy * np.nan_to_num(np.log2((py / pxpy))))) if (logbase == 2): return condent else: return (logbasechange(2, logbase) * condent)
[ "def", "condentropy", "(", "px", ",", "py", ",", "pxpy", "=", "None", ",", "logbase", "=", "2", ")", ":", "if", "(", "(", "not", "_isproperdist", "(", "px", ")", ")", "or", "(", "not", "_isproperdist", "(", "py", ")", ")", ")", ":", "raise", "ValueError", "(", "'px or py is not a proper probability distribution'", ")", "if", "(", "(", "pxpy", "!=", "None", ")", "and", "(", "not", "_isproperdist", "(", "pxpy", ")", ")", ")", ":", "raise", "ValueError", "(", "'pxpy is not a proper joint distribtion'", ")", "if", "(", "pxpy", "==", "None", ")", ":", "pxpy", "=", "np", ".", "outer", "(", "py", ",", "px", ")", "condent", "=", "np", ".", "sum", "(", "(", "pxpy", "*", "np", ".", "nan_to_num", "(", "np", ".", "log2", "(", "(", "py", "/", "pxpy", ")", ")", ")", ")", ")", "if", "(", "logbase", "==", "2", ")", ":", "return", "condent", "else", ":", "return", "(", "logbasechange", "(", "2", ",", "logbase", ")", "*", "condent", ")" ]
return the conditional entropy of x given y .
train
false
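a worked call for condentropy above , assuming both inputs pass the module's _isproperdist check :

    import numpy as np

    px = np.array([0.5, 0.5])
    py = np.array([0.5, 0.5])
    # pxpy defaults to the independent joint outer(py, px), so for two
    # fair coins H(X|Y) collapses to H(X) = 1 bit
    print(condentropy(px, py))   # -> 1.0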
54,904
def task(*args, **kwargs): kwargs.setdefault('accept_magic_kwargs', False) return app_or_default().task(*args, **kwargs)
[ "def", "task", "(", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'accept_magic_kwargs'", ",", "False", ")", "return", "app_or_default", "(", ")", ".", "task", "(", "*", "args", ",", "**", "kwargs", ")" ]
decorator to create a task class out of any callable .
train
false
54,905
def _is_image_available(context, image): if (hasattr(context, 'auth_token') and context.auth_token): return True def _is_image_public(image): if hasattr(image, 'visibility'): return (str(image.visibility).lower() == 'public') else: return image.is_public if (context.is_admin or _is_image_public(image)): return True properties = image.properties if (context.project_id and ('owner_id' in properties)): return (str(properties['owner_id']) == str(context.project_id)) if (context.project_id and ('project_id' in properties)): return (str(properties['project_id']) == str(context.project_id)) try: user_id = properties['user_id'] except KeyError: return False return (str(user_id) == str(context.user_id))
[ "def", "_is_image_available", "(", "context", ",", "image", ")", ":", "if", "(", "hasattr", "(", "context", ",", "'auth_token'", ")", "and", "context", ".", "auth_token", ")", ":", "return", "True", "def", "_is_image_public", "(", "image", ")", ":", "if", "hasattr", "(", "image", ",", "'visibility'", ")", ":", "return", "(", "str", "(", "image", ".", "visibility", ")", ".", "lower", "(", ")", "==", "'public'", ")", "else", ":", "return", "image", ".", "is_public", "if", "(", "context", ".", "is_admin", "or", "_is_image_public", "(", "image", ")", ")", ":", "return", "True", "properties", "=", "image", ".", "properties", "if", "(", "context", ".", "project_id", "and", "(", "'owner_id'", "in", "properties", ")", ")", ":", "return", "(", "str", "(", "properties", "[", "'owner_id'", "]", ")", "==", "str", "(", "context", ".", "project_id", ")", ")", "if", "(", "context", ".", "project_id", "and", "(", "'project_id'", "in", "properties", ")", ")", ":", "return", "(", "str", "(", "properties", "[", "'project_id'", "]", ")", "==", "str", "(", "context", ".", "project_id", ")", ")", "try", ":", "user_id", "=", "properties", "[", "'user_id'", "]", "except", "KeyError", ":", "return", "False", "return", "(", "str", "(", "user_id", ")", "==", "str", "(", "context", ".", "user_id", ")", ")" ]
check image availability .
train
false
54,906
def subsample(inputs, factor, scope=None): if (factor == 1): return inputs else: return slim.max_pool2d(inputs, [1, 1], stride=factor, scope=scope)
[ "def", "subsample", "(", "inputs", ",", "factor", ",", "scope", "=", "None", ")", ":", "if", "(", "factor", "==", "1", ")", ":", "return", "inputs", "else", ":", "return", "slim", ".", "max_pool2d", "(", "inputs", ",", "[", "1", ",", "1", "]", ",", "stride", "=", "factor", ",", "scope", "=", "scope", ")" ]
subsamples the input along the spatial dimensions .
train
false
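a tf1-era sketch of the resnet subsample helper above ( assumes tensorflow 1.x with tf.contrib.slim importable as slim ) :

    import tensorflow as tf
    import tensorflow.contrib.slim as slim

    inputs = tf.placeholder(tf.float32, [1, 32, 32, 64])
    print(subsample(inputs, factor=1).shape)   # identity: (1, 32, 32, 64)
    print(subsample(inputs, factor=2).shape)   # 1x1 max-pool, stride 2: (1, 16, 16, 64)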
54,907
@membership_required def group_invite(request, slug, template_name='groups/group_invite.html'): group = get_object_or_404(Group, slug=slug, is_active=True) form = GroupInviteForm(initial={'group': group.pk, 'user': request.user.pk}) return render(request, template_name, {'group': group, 'form': form})
[ "@", "membership_required", "def", "group_invite", "(", "request", ",", "slug", ",", "template_name", "=", "'groups/group_invite.html'", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "slug", "=", "slug", ",", "is_active", "=", "True", ")", "form", "=", "GroupInviteForm", "(", "initial", "=", "{", "'group'", ":", "group", ".", "pk", ",", "'user'", ":", "request", ".", "user", ".", "pk", "}", ")", "return", "render", "(", "request", ",", "template_name", ",", "{", "'group'", ":", "group", ",", "'form'", ":", "form", "}", ")" ]
returns an invite form .
train
false
54,908
def strip_files(files, argv_max=(256 * 1024)): while files: cmd = list(STRIPCMD) pathlen = sum(((len(s) + 1) for s in cmd)) while ((pathlen < argv_max) and files): f = files.pop() cmd.append(f) pathlen += (len(f) + 1) if (len(cmd) > len(STRIPCMD)): all_files = cmd[len(STRIPCMD):] unwritable_files = tuple(filter(None, ((None if os.access(x, os.W_OK) else (x, os.stat(x).st_mode)) for x in all_files))) [os.chmod(x, (stat.S_IWRITE | old_mode)) for (x, old_mode) in unwritable_files] subprocess.check_call(cmd) [os.chmod(x, old_mode) for (x, old_mode) in unwritable_files]
[ "def", "strip_files", "(", "files", ",", "argv_max", "=", "(", "256", "*", "1024", ")", ")", ":", "while", "files", ":", "cmd", "=", "list", "(", "STRIPCMD", ")", "pathlen", "=", "sum", "(", "(", "(", "len", "(", "s", ")", "+", "1", ")", "for", "s", "in", "cmd", ")", ")", "while", "(", "(", "pathlen", "<", "argv_max", ")", "and", "files", ")", ":", "f", "=", "files", ".", "pop", "(", ")", "cmd", ".", "append", "(", "f", ")", "pathlen", "+=", "(", "len", "(", "f", ")", "+", "1", ")", "if", "(", "len", "(", "cmd", ")", ">", "len", "(", "STRIPCMD", ")", ")", ":", "all_files", "=", "cmd", "[", "len", "(", "STRIPCMD", ")", ":", "]", "unwritable_files", "=", "tuple", "(", "filter", "(", "None", ",", "(", "(", "None", "if", "os", ".", "access", "(", "x", ",", "os", ".", "W_OK", ")", "else", "(", "x", ",", "os", ".", "stat", "(", "x", ")", ".", "st_mode", ")", ")", "for", "x", "in", "all_files", ")", ")", ")", "[", "os", ".", "chmod", "(", "x", ",", "(", "stat", ".", "S_IWRITE", "|", "old_mode", ")", ")", "for", "(", "x", ",", "old_mode", ")", "in", "unwritable_files", "]", "subprocess", ".", "check_call", "(", "cmd", ")", "[", "os", ".", "chmod", "(", "x", ",", "old_mode", ")", "for", "(", "x", ",", "old_mode", ")", "in", "unwritable_files", "]" ]
strip a list of files .
train
false
54,910
def read_local(tex_root, name): cache_path = _local_cache_path(tex_root) _validate_life_span(cache_path) return _read(cache_path, name)
[ "def", "read_local", "(", "tex_root", ",", "name", ")", ":", "cache_path", "=", "_local_cache_path", "(", "tex_root", ")", "_validate_life_span", "(", "cache_path", ")", "return", "_read", "(", "cache_path", ",", "name", ")" ]
reads the object from the local cache using pickle .
train
false
54,911
@login_required @require_POST def add_leader(request, group_slug): prof = get_object_or_404(GroupProfile, slug=group_slug) if (not _user_can_manage_leaders(request.user, prof)): raise PermissionDenied form = AddUserForm(request.POST) if form.is_valid(): for user in form.cleaned_data['users']: if (prof.group not in user.groups.all()): user.groups.add(prof.group) prof.leaders.add(user) msg = _('{users} added to the group leaders successfully!').format(users=request.POST.get('users')) messages.add_message(request, messages.SUCCESS, msg) return HttpResponseRedirect(prof.get_absolute_url()) msg = _('There were errors adding leaders to the group, see below.') messages.add_message(request, messages.ERROR, msg) return profile(request, group_slug, leader_form=form)
[ "@", "login_required", "@", "require_POST", "def", "add_leader", "(", "request", ",", "group_slug", ")", ":", "prof", "=", "get_object_or_404", "(", "GroupProfile", ",", "slug", "=", "group_slug", ")", "if", "(", "not", "_user_can_manage_leaders", "(", "request", ".", "user", ",", "prof", ")", ")", ":", "raise", "PermissionDenied", "form", "=", "AddUserForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "for", "user", "in", "form", ".", "cleaned_data", "[", "'users'", "]", ":", "if", "(", "prof", ".", "group", "not", "in", "user", ".", "groups", ".", "all", "(", ")", ")", ":", "user", ".", "groups", ".", "add", "(", "prof", ".", "group", ")", "prof", ".", "leaders", ".", "add", "(", "user", ")", "msg", "=", "_", "(", "'{users} added to the group leaders successfully!'", ")", ".", "format", "(", "users", "=", "request", ".", "POST", ".", "get", "(", "'users'", ")", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "msg", ")", "return", "HttpResponseRedirect", "(", "prof", ".", "get_absolute_url", "(", ")", ")", "msg", "=", "_", "(", "'There were errors adding leaders to the group, see below.'", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "ERROR", ",", "msg", ")", "return", "profile", "(", "request", ",", "group_slug", ",", "leader_form", "=", "form", ")" ]
add a leader to the group .
train
false
54,912
@functools.lru_cache() def get_languages(): return OrderedDict(settings.LANGUAGES)
[ "@", "functools", ".", "lru_cache", "(", ")", "def", "get_languages", "(", ")", ":", "return", "OrderedDict", "(", "settings", ".", "LANGUAGES", ")" ]
cache of settings.LANGUAGES as an ordereddict .
train
false
54,913
def get_tenancy(vm_): return config.get_cloud_config_value('tenancy', vm_, __opts__, search_global=False)
[ "def", "get_tenancy", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'tenancy'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")" ]
returns the tenancy to use .
train
false
54,914
def true_dot(x, y, grad_preserves_dense=True): if hasattr(x, 'getnnz'): x = as_sparse_variable(x) assert (x.format in ['csr', 'csc']) if hasattr(y, 'getnnz'): y = as_sparse_variable(y) assert (y.format in ['csr', 'csc']) x_is_sparse_variable = _is_sparse_variable(x) y_is_sparse_variable = _is_sparse_variable(y) if ((not x_is_sparse_variable) and (not y_is_sparse_variable)): raise TypeError() if x_is_sparse_variable: return TrueDot(grad_preserves_dense)(x, y) else: assert y_is_sparse_variable return transpose(TrueDot(grad_preserves_dense)(y.T, x.T))
[ "def", "true_dot", "(", "x", ",", "y", ",", "grad_preserves_dense", "=", "True", ")", ":", "if", "hasattr", "(", "x", ",", "'getnnz'", ")", ":", "x", "=", "as_sparse_variable", "(", "x", ")", "assert", "(", "x", ".", "format", "in", "[", "'csr'", ",", "'csc'", "]", ")", "if", "hasattr", "(", "y", ",", "'getnnz'", ")", ":", "y", "=", "as_sparse_variable", "(", "y", ")", "assert", "(", "y", ".", "format", "in", "[", "'csr'", ",", "'csc'", "]", ")", "x_is_sparse_variable", "=", "_is_sparse_variable", "(", "x", ")", "y_is_sparse_variable", "=", "_is_sparse_variable", "(", "y", ")", "if", "(", "(", "not", "x_is_sparse_variable", ")", "and", "(", "not", "y_is_sparse_variable", ")", ")", ":", "raise", "TypeError", "(", ")", "if", "x_is_sparse_variable", ":", "return", "TrueDot", "(", "grad_preserves_dense", ")", "(", "x", ",", "y", ")", "else", ":", "assert", "y_is_sparse_variable", "return", "transpose", "(", "TrueDot", "(", "grad_preserves_dense", ")", "(", "y", ".", "T", ",", "x", ".", "T", ")", ")" ]
operation for efficiently calculating the dot product when one or both operands are sparse .
train
false
54,915
def parse_xmlrpc(xml_string): handler = XmlRpcReadHandler() xml.sax.parseString(xml_string, handler) return handler
[ "def", "parse_xmlrpc", "(", "xml_string", ")", ":", "handler", "=", "XmlRpcReadHandler", "(", ")", "xml", ".", "sax", ".", "parseString", "(", "xml_string", ",", "handler", ")", "return", "handler" ]
the user should call these functions: parse_xmlrpc and build_xmlrpc .
train
false
54,916
def distrib_release(): with settings(hide('running', 'stdout')): kernel = run('uname -s') if (kernel == 'Linux'): return run('lsb_release -r --short') elif (kernel == 'SunOS'): return run('uname -v')
[ "def", "distrib_release", "(", ")", ":", "with", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ")", ")", ":", "kernel", "=", "run", "(", "'uname -s'", ")", "if", "(", "kernel", "==", "'Linux'", ")", ":", "return", "run", "(", "'lsb_release -r --short'", ")", "elif", "(", "kernel", "==", "'SunOS'", ")", ":", "return", "run", "(", "'uname -v'", ")" ]
get the release number of the distribution .
train
false
54,917
def fit(function, x, y): p0 = [guess_plateau(x, y), 4.0, guess_lag(x, y), 0.1, min(y)] (params, pcov) = curve_fit(function, x, y, p0=p0) return (params, pcov)
[ "def", "fit", "(", "function", ",", "x", ",", "y", ")", ":", "p0", "=", "[", "guess_plateau", "(", "x", ",", "y", ")", ",", "4.0", ",", "guess_lag", "(", "x", ",", "y", ")", ",", "0.1", ",", "min", "(", "y", ")", "]", "(", "params", ",", "pcov", ")", "=", "curve_fit", "(", "function", ",", "x", ",", "y", ",", "p0", "=", "p0", ")", "return", "(", "params", ",", "pcov", ")" ]
fit the provided function to the x and y values .
train
false
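the fit helper above is a thin wrapper over scipy's curve_fit with module-specific initial guesses ( guess_plateau , guess_lag ) ; the same pattern on a toy linear model shows what the ( params , pcov ) pair holds :

    import numpy as np
    from scipy.optimize import curve_fit

    def model(x, a, b):
        return a * x + b

    x = np.linspace(0.0, 1.0, 20)
    y = 3.0 * x + 1.0
    params, pcov = curve_fit(model, x, y, p0=[1.0, 0.0])
    print(params)   # -> approximately [3., 1.]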
54,918
def _tmp_name(input): if (input is not None): input = os.path.dirname(input) (f, fn) = tempfile.mkstemp(dir=input) os.close(f) return fn
[ "def", "_tmp_name", "(", "input", ")", ":", "if", "(", "input", "is", "not", "None", ")", ":", "input", "=", "os", ".", "path", ".", "dirname", "(", "input", ")", "(", "f", ",", "fn", ")", "=", "tempfile", ".", "mkstemp", "(", "dir", "=", "input", ")", "os", ".", "close", "(", "f", ")", "return", "fn" ]
create a temporary file name which should not already exist .
train
false
54,922
def getFabmetheusUtilitiesPath(subName=''): return getJoinedPath(getFabmetheusPath('fabmetheus_utilities'), subName)
[ "def", "getFabmetheusUtilitiesPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getFabmetheusPath", "(", "'fabmetheus_utilities'", ")", ",", "subName", ")" ]
get the fabmetheus utilities directory path .
train
false
54,924
@jit(nopython=True, cache=True) def get_mixed_actions(tableaux, bases): nums_actions = (tableaux[1].shape[0], tableaux[0].shape[0]) num = (nums_actions[0] + nums_actions[1]) out = np.zeros(num) for (pl, (start, stop)) in enumerate(zip((0, nums_actions[0]), (nums_actions[0], num))): sum_ = 0.0 for i in range(nums_actions[(1 - pl)]): k = bases[pl][i] if (start <= k < stop): out[k] = tableaux[pl][(i, (-1))] sum_ += tableaux[pl][(i, (-1))] if (sum_ != 0): out[start:stop] /= sum_ return (out[:nums_actions[0]], out[nums_actions[0]:])
[ "@", "jit", "(", "nopython", "=", "True", ",", "cache", "=", "True", ")", "def", "get_mixed_actions", "(", "tableaux", ",", "bases", ")", ":", "nums_actions", "=", "(", "tableaux", "[", "1", "]", ".", "shape", "[", "0", "]", ",", "tableaux", "[", "0", "]", ".", "shape", "[", "0", "]", ")", "num", "=", "(", "nums_actions", "[", "0", "]", "+", "nums_actions", "[", "1", "]", ")", "out", "=", "np", ".", "zeros", "(", "num", ")", "for", "(", "pl", ",", "(", "start", ",", "stop", ")", ")", "in", "enumerate", "(", "zip", "(", "(", "0", ",", "nums_actions", "[", "0", "]", ")", ",", "(", "nums_actions", "[", "0", "]", ",", "num", ")", ")", ")", ":", "sum_", "=", "0.0", "for", "i", "in", "range", "(", "nums_actions", "[", "(", "1", "-", "pl", ")", "]", ")", ":", "k", "=", "bases", "[", "pl", "]", "[", "i", "]", "if", "(", "start", "<=", "k", "<", "stop", ")", ":", "out", "[", "k", "]", "=", "tableaux", "[", "pl", "]", "[", "(", "i", ",", "(", "-", "1", ")", ")", "]", "sum_", "+=", "tableaux", "[", "pl", "]", "[", "(", "i", ",", "(", "-", "1", ")", ")", "]", "if", "(", "sum_", "!=", "0", ")", ":", "out", "[", "start", ":", "stop", "]", "/=", "sum_", "return", "(", "out", "[", ":", "nums_actions", "[", "0", "]", "]", ",", "out", "[", "nums_actions", "[", "0", "]", ":", "]", ")" ]
extract the mixed actions of the two players from tableaux and bases .
train
false
54,925
def make_pkgng_aware(jname): ret = {'changes': {}} cdir = _config_dir() if (not os.path.isdir(cdir)): os.makedirs(cdir) if os.path.isdir(cdir): ret['changes'] = 'Created poudriere make file dir {0}'.format(cdir) else: return 'Could not create or find required directory {0}'.format(cdir) __salt__['file.write']('{0}-make.conf'.format(os.path.join(cdir, jname)), 'WITH_PKGNG=yes') if os.path.isfile((os.path.join(cdir, jname) + '-make.conf')): ret['changes'] = 'Created {0}'.format(os.path.join(cdir, '{0}-make.conf'.format(jname))) return ret else: return 'Looks like file {0} could not be created'.format(os.path.join(cdir, (jname + '-make.conf')))
[ "def", "make_pkgng_aware", "(", "jname", ")", ":", "ret", "=", "{", "'changes'", ":", "{", "}", "}", "cdir", "=", "_config_dir", "(", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "cdir", ")", ")", ":", "os", ".", "makedirs", "(", "cdir", ")", "if", "os", ".", "path", ".", "isdir", "(", "cdir", ")", ":", "ret", "[", "'changes'", "]", "=", "'Created poudriere make file dir {0}'", ".", "format", "(", "cdir", ")", "else", ":", "return", "'Could not create or find required directory {0}'", ".", "format", "(", "cdir", ")", "__salt__", "[", "'file.write'", "]", "(", "'{0}-make.conf'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "jname", ")", ")", ",", "'WITH_PKGNG=yes'", ")", "if", "os", ".", "path", ".", "isfile", "(", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "jname", ")", "+", "'-make.conf'", ")", ")", ":", "ret", "[", "'changes'", "]", "=", "'Created {0}'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "'{0}-make.conf'", ".", "format", "(", "jname", ")", ")", ")", "return", "ret", "else", ":", "return", "'Looks like file {0} could not be created'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "(", "jname", "+", "'-make.conf'", ")", ")", ")" ]
make jail <jname> pkgng aware .
train
true
54,926
def modify_monitor(hostname, username, password, monitor_type, name, **kwargs): ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} if __opts__['test']: params = {'hostname': hostname, 'username': username, 'password': password, 'monitor_type': monitor_type, 'name': name} for (key, value) in six.iteritems(kwargs): params[key] = value return _test_output(ret, 'modify', params) existing = __salt__['bigip.list_monitor'](hostname, username, password, monitor_type, name) if (existing['code'] == 200): modified = __salt__['bigip.modify_monitor'](hostname, username, password, monitor_type, name, **kwargs) if (modified['code'] == 200): del existing['content']['selfLink'] del modified['content']['selfLink'] ret = _check_for_changes('Monitor', ret, existing, modified) else: ret = _load_result(modified, ret) elif (existing['code'] == 404): ret['comment'] = 'A Monitor with this name was not found.' else: ret = _load_result(existing, ret) return ret
[ "def", "modify_monitor", "(", "hostname", ",", "username", ",", "password", ",", "monitor_type", ",", "name", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "if", "__opts__", "[", "'test'", "]", ":", "params", "=", "{", "'hostname'", ":", "hostname", ",", "'username'", ":", "username", ",", "'password'", ":", "password", ",", "'monitor_type'", ":", "monitor_type", ",", "'name'", ":", "name", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "kwargs", ")", ":", "params", "[", "key", "]", "=", "value", "return", "_test_output", "(", "ret", ",", "'modify'", ",", "params", ")", "existing", "=", "__salt__", "[", "'bigip.list_monitor'", "]", "(", "hostname", ",", "username", ",", "password", ",", "monitor_type", ",", "name", ")", "if", "(", "existing", "[", "'code'", "]", "==", "200", ")", ":", "modified", "=", "__salt__", "[", "'bigip.modify_monitor'", "]", "(", "hostname", ",", "username", ",", "password", ",", "monitor_type", ",", "name", ",", "**", "kwargs", ")", "if", "(", "modified", "[", "'code'", "]", "==", "200", ")", ":", "del", "existing", "[", "'content'", "]", "[", "'selfLink'", "]", "del", "modified", "[", "'content'", "]", "[", "'selfLink'", "]", "ret", "=", "_check_for_changes", "(", "'Monitor'", ",", "ret", ",", "existing", ",", "modified", ")", "else", ":", "ret", "=", "_load_result", "(", "modified", ",", "ret", ")", "elif", "(", "existing", "[", "'code'", "]", "==", "404", ")", ":", "ret", "[", "'comment'", "]", "=", "'A Monitor with this name was not found.'", "else", ":", "ret", "=", "_load_result", "(", "existing", ",", "ret", ")", "return", "ret" ]
modify an existing monitor .
train
false
54,927
@pytest.mark.parametrize('fast_reader', [True, False, 'force']) def test_read_rdb_wrong_type(fast_reader): table = 'col1 DCTB col2\nN DCTB N\n1 DCTB Hello' with pytest.raises(ValueError): ascii.read(table, Reader=ascii.Rdb, fast_reader=fast_reader)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'fast_reader'", ",", "[", "True", ",", "False", ",", "'force'", "]", ")", "def", "test_read_rdb_wrong_type", "(", "fast_reader", ")", ":", "table", "=", "'col1 DCTB col2\\nN DCTB N\\n1 DCTB Hello'", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "ascii", ".", "read", "(", "table", ",", "Reader", "=", "ascii", ".", "Rdb", ",", "fast_reader", "=", "fast_reader", ")" ]
read rdb data with inconsistent data type .
train
false
54,930
def testInterface(algo): emptyalgo = algo() try: emptyalgo.learn(0) return 'Failed to throw missing evaluator error?' except AssertionError: pass emptyalgo.setEvaluator(sf, xa1) emptyalgo.learn(0) algo(sf, xa1) algo(sf, xa100) algo(sf, numParameters=2) try: algo(sf) return 'Failed to throw unknown dimension error' except ValueError: pass algo(ife1) algo(ife2, pc2) return True
[ "def", "testInterface", "(", "algo", ")", ":", "emptyalgo", "=", "algo", "(", ")", "try", ":", "emptyalgo", ".", "learn", "(", "0", ")", "return", "'Failed to throw missing evaluator error?'", "except", "AssertionError", ":", "pass", "emptyalgo", ".", "setEvaluator", "(", "sf", ",", "xa1", ")", "emptyalgo", ".", "learn", "(", "0", ")", "algo", "(", "sf", ",", "xa1", ")", "algo", "(", "sf", ",", "xa100", ")", "algo", "(", "sf", ",", "numParameters", "=", "2", ")", "try", ":", "algo", "(", "sf", ")", "return", "'Failed to throw unknown dimension error'", "except", "ValueError", ":", "pass", "algo", "(", "ife1", ")", "algo", "(", "ife2", ",", "pc2", ")", "return", "True" ]
tests whether the algorithm is properly implementing the correct blackbox-optimization interface .
train
false
54,931
def build_api_error(message, **kwargs): return {'developer_message': message.format(**kwargs), 'user_message': _(message).format(**kwargs)}
[ "def", "build_api_error", "(", "message", ",", "**", "kwargs", ")", ":", "return", "{", "'developer_message'", ":", "message", ".", "format", "(", "**", "kwargs", ")", ",", "'user_message'", ":", "_", "(", "message", ")", ".", "format", "(", "**", "kwargs", ")", "}" ]
build an error dict corresponding to edx api conventions .
train
false
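a self-contained sketch of build_api_error above ; _ is stubbed in place of django's ugettext , which the real module binds , and the message text is illustrative :

    _ = lambda s: s   # stand-in for django.utils.translation.ugettext

    def build_api_error(message, **kwargs):
        return {'developer_message': message.format(**kwargs),
                'user_message': _(message).format(**kwargs)}

    print(build_api_error('Account {username} not found.', username='demo'))
    # -> {'developer_message': 'Account demo not found.',
    #     'user_message': 'Account demo not found.'}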
54,932
def p_definition_token(p): for i in p[3]: if (i[0] not in '\'"'): tokenlist.append(i) if (p[1] == '%left'): preclist.append((('left',) + tuple(p[3]))) elif (p[1] == '%right'): preclist.append((('right',) + tuple(p[3]))) elif (p[1] == '%nonassoc'): preclist.append((('nonassoc',) + tuple(p[3])))
[ "def", "p_definition_token", "(", "p", ")", ":", "for", "i", "in", "p", "[", "3", "]", ":", "if", "(", "i", "[", "0", "]", "not", "in", "'\\'\"'", ")", ":", "tokenlist", ".", "append", "(", "i", ")", "if", "(", "p", "[", "1", "]", "==", "'%left'", ")", ":", "preclist", ".", "append", "(", "(", "(", "'left'", ",", ")", "+", "tuple", "(", "p", "[", "3", "]", ")", ")", ")", "elif", "(", "p", "[", "1", "]", "==", "'%right'", ")", ":", "preclist", ".", "append", "(", "(", "(", "'right'", ",", ")", "+", "tuple", "(", "p", "[", "3", "]", ")", ")", ")", "elif", "(", "p", "[", "1", "]", "==", "'%nonassoc'", ")", ":", "preclist", ".", "append", "(", "(", "(", "'nonassoc'", ",", ")", "+", "tuple", "(", "p", "[", "3", "]", ")", ")", ")" ]
definition : toktype opttype idlist optsemi .
train
false
54,933
def win32_clipboard_get(): try: import win32clipboard except ImportError: raise TryNext('Getting text from the clipboard requires the pywin32 extensions: http://sourceforge.net/projects/pywin32/') win32clipboard.OpenClipboard() try: text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) except (TypeError, win32clipboard.error): try: text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT) text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) except (TypeError, win32clipboard.error): raise ClipboardEmpty finally: win32clipboard.CloseClipboard() return text
[ "def", "win32_clipboard_get", "(", ")", ":", "try", ":", "import", "win32clipboard", "except", "ImportError", ":", "raise", "TryNext", "(", "'Getting text from the clipboard requires the pywin32 extensions: http://sourceforge.net/projects/pywin32/'", ")", "win32clipboard", ".", "OpenClipboard", "(", ")", "try", ":", "text", "=", "win32clipboard", ".", "GetClipboardData", "(", "win32clipboard", ".", "CF_UNICODETEXT", ")", "except", "(", "TypeError", ",", "win32clipboard", ".", "error", ")", ":", "try", ":", "text", "=", "win32clipboard", ".", "GetClipboardData", "(", "win32clipboard", ".", "CF_TEXT", ")", "text", "=", "py3compat", ".", "cast_unicode", "(", "text", ",", "py3compat", ".", "DEFAULT_ENCODING", ")", "except", "(", "TypeError", ",", "win32clipboard", ".", "error", ")", ":", "raise", "ClipboardEmpty", "finally", ":", "win32clipboard", ".", "CloseClipboard", "(", ")", "return", "text" ]
get the current clipboard's text on windows .
train
false
54,934
def _setup_fixtures(doctest_item): def func(): pass doctest_item.funcargs = {} fm = doctest_item.session._fixturemanager doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func, cls=None, funcargs=False) fixture_request = FixtureRequest(doctest_item) fixture_request._fillfixtures() return fixture_request
[ "def", "_setup_fixtures", "(", "doctest_item", ")", ":", "def", "func", "(", ")", ":", "pass", "doctest_item", ".", "funcargs", "=", "{", "}", "fm", "=", "doctest_item", ".", "session", ".", "_fixturemanager", "doctest_item", ".", "_fixtureinfo", "=", "fm", ".", "getfixtureinfo", "(", "node", "=", "doctest_item", ",", "func", "=", "func", ",", "cls", "=", "None", ",", "funcargs", "=", "False", ")", "fixture_request", "=", "FixtureRequest", "(", "doctest_item", ")", "fixture_request", ".", "_fillfixtures", "(", ")", "return", "fixture_request" ]
used by doctesttextfile and doctestitem to set up fixture information .
train
false
54,935
def uniq_stable(elems): seen = set() return [x for x in elems if ((x not in seen) and (not seen.add(x)))]
[ "def", "uniq_stable", "(", "elems", ")", ":", "seen", "=", "set", "(", ")", "return", "[", "x", "for", "x", "in", "elems", "if", "(", "(", "x", "not", "in", "seen", ")", "and", "(", "not", "seen", ".", "add", "(", "x", ")", ")", ")", "]" ]
uniq_stable(elems) -> list .  return from an iterable a list of all the unique elements in the input , maintaining the order in which they first appear .
train
false
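order-preserving dedup , exactly as the one-liner above behaves ; the not seen.add(x) trick works because set.add returns None :

    assert uniq_stable([1, 2, 1, 3, 2]) == [1, 2, 3]
    assert uniq_stable('abracadabra') == ['a', 'b', 'r', 'c', 'd']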
54,936
@register.filter def break_long_headers(header): if ((len(header) > 160) and (u',' in header)): header = mark_safe((u'<br> ' + u', <br>'.join(header.split(u',')))) return header
[ "@", "register", ".", "filter", "def", "break_long_headers", "(", "header", ")", ":", "if", "(", "(", "len", "(", "header", ")", ">", "160", ")", "and", "(", "u','", "in", "header", ")", ")", ":", "header", "=", "mark_safe", "(", "(", "u'<br> '", "+", "u', <br>'", ".", "join", "(", "header", ".", "split", "(", "u','", ")", ")", ")", ")", "return", "header" ]
breaks headers longer than 160 characters when possible .
train
false
54,938
def _convert_java_pattern_to_python(pattern): s = list(pattern) i = 0 while (i < (len(s) - 1)): c = s[i] if ((c == '$') and (s[(i + 1)] in '0123456789')): s[i] = '\\' elif ((c == '\\') and (s[(i + 1)] == '$')): s[i] = '' i += 1 i += 1 return pattern[:0].join(s)
[ "def", "_convert_java_pattern_to_python", "(", "pattern", ")", ":", "s", "=", "list", "(", "pattern", ")", "i", "=", "0", "while", "(", "i", "<", "(", "len", "(", "s", ")", "-", "1", ")", ")", ":", "c", "=", "s", "[", "i", "]", "if", "(", "(", "c", "==", "'$'", ")", "and", "(", "s", "[", "(", "i", "+", "1", ")", "]", "in", "'0123456789'", ")", ")", ":", "s", "[", "i", "]", "=", "'\\\\'", "elif", "(", "(", "c", "==", "'\\\\'", ")", "and", "(", "s", "[", "(", "i", "+", "1", ")", "]", "==", "'$'", ")", ")", ":", "s", "[", "i", "]", "=", "''", "i", "+=", "1", "i", "+=", "1", "return", "pattern", "[", ":", "0", "]", ".", "join", "(", "s", ")" ]
convert a replacement pattern from the java-style $5 to the python-style \5 .
train
false
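two concrete conversions through _convert_java_pattern_to_python above ( the doubled backslashes are python literal escaping ) :

    assert _convert_java_pattern_to_python('$1-$2') == '\\1-\\2'
    assert _convert_java_pattern_to_python('\\$5') == '$5'   # escaped dollar is unescaped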
54,939
def format_correlation_info(corr_coeff, param_p_val, nonparam_p_val, conf_interval, num_permutations, header=''): result = '' if (header != ''): result += (header + '\n') result += ('Correlation coefficient DCTB Parametric p-value DCTB ' + 'Nonparametric p-value DCTB CI (lower) DCTB CI (upper)\n') if (num_permutations > 0): nonparam_p_val_str = format_p_value_for_num_iters(nonparam_p_val, num_permutations) else: nonparam_p_val_str = 'N/A' if (conf_interval == (None, None)): conf_interval_str = 'N/A DCTB N/A' else: conf_interval_str = ('%.4f DCTB %.4f' % conf_interval) result += ('%.4f DCTB %.4f DCTB %s DCTB %s\n' % (corr_coeff, param_p_val, nonparam_p_val_str, conf_interval_str)) return result
[ "def", "format_correlation_info", "(", "corr_coeff", ",", "param_p_val", ",", "nonparam_p_val", ",", "conf_interval", ",", "num_permutations", ",", "header", "=", "''", ")", ":", "result", "=", "''", "if", "(", "header", "!=", "''", ")", ":", "result", "+=", "(", "header", "+", "'\\n'", ")", "result", "+=", "(", "'Correlation coefficient DCTB Parametric p-value DCTB '", "+", "'Nonparametric p-value DCTB CI (lower) DCTB CI (upper)\\n'", ")", "if", "(", "num_permutations", ">", "0", ")", ":", "nonparam_p_val_str", "=", "format_p_value_for_num_iters", "(", "nonparam_p_val", ",", "num_permutations", ")", "else", ":", "nonparam_p_val_str", "=", "'N/A'", "if", "(", "conf_interval", "==", "(", "None", ",", "None", ")", ")", ":", "conf_interval_str", "=", "'N/A DCTB N/A'", "else", ":", "conf_interval_str", "=", "(", "'%.4f DCTB %.4f'", "%", "conf_interval", ")", "result", "+=", "(", "'%.4f DCTB %.4f DCTB %s DCTB %s\\n'", "%", "(", "corr_coeff", ",", "param_p_val", ",", "nonparam_p_val_str", ",", "conf_interval_str", ")", ")", "return", "result" ]
formats correlation information to be suitable for writing to a file .
train
false
54,940
def test_mark_done(pg_xlog): seg = make_segment(1, explicit=False) pg_xlog.touch(seg.name, '.ready') seg.mark_done()
[ "def", "test_mark_done", "(", "pg_xlog", ")", ":", "seg", "=", "make_segment", "(", "1", ",", "explicit", "=", "False", ")", "pg_xlog", ".", "touch", "(", "seg", ".", "name", ",", "'.ready'", ")", "seg", ".", "mark_done", "(", ")" ]
check non-explicit segments can be marked done .
train
false
54,941
def dict_union(*dicts, **kwargs): dicts = list(dicts) if (dicts and isinstance(dicts[0], OrderedDict)): result = OrderedDict() else: result = {} for d in (list(dicts) + [kwargs]): duplicate_keys = (set(result.keys()) & set(d.keys())) if duplicate_keys: raise ValueError('The following keys have duplicate entries: {}'.format(', '.join((str(key) for key in duplicate_keys)))) result.update(d) return result
[ "def", "dict_union", "(", "*", "dicts", ",", "**", "kwargs", ")", ":", "dicts", "=", "list", "(", "dicts", ")", "if", "(", "dicts", "and", "isinstance", "(", "dicts", "[", "0", "]", ",", "OrderedDict", ")", ")", ":", "result", "=", "OrderedDict", "(", ")", "else", ":", "result", "=", "{", "}", "for", "d", "in", "(", "list", "(", "dicts", ")", "+", "[", "kwargs", "]", ")", ":", "duplicate_keys", "=", "(", "set", "(", "result", ".", "keys", "(", ")", ")", "&", "set", "(", "d", ".", "keys", "(", ")", ")", ")", "if", "duplicate_keys", ":", "raise", "ValueError", "(", "'The following keys have duplicate entries: {}'", ".", "format", "(", "', '", ".", "join", "(", "(", "str", "(", "key", ")", "for", "key", "in", "duplicate_keys", ")", ")", ")", ")", "result", ".", "update", "(", "d", ")", "return", "result" ]
return union of a sequence of disjoint dictionaries .
train
false
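a usage sketch for dict_union above ; positional dicts and keyword arguments merge , overlapping keys raise :

    merged = dict_union({'a': 1}, {'b': 2}, c=3)
    print(merged)   # -> {'a': 1, 'b': 2, 'c': 3}

    # duplicates are rejected rather than silently overwritten:
    # dict_union({'a': 1}, {'a': 2}) raises ValueError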
54,942
def make_path_searcher(path_generator, target_predicate, target_normalizer, extra_paths, **kwargs): def path_searcher(target, extra_dirs=extra_paths): matches = itertools.ifilter(target_predicate, path_generator(target, extra_dirs, **kwargs)) paths = itertools.imap(target_normalizer, matches) return next(paths, '') return path_searcher
[ "def", "make_path_searcher", "(", "path_generator", ",", "target_predicate", ",", "target_normalizer", ",", "extra_paths", ",", "**", "kwargs", ")", ":", "def", "path_searcher", "(", "target", ",", "extra_dirs", "=", "extra_paths", ")", ":", "matches", "=", "itertools", ".", "ifilter", "(", "target_predicate", ",", "path_generator", "(", "target", ",", "extra_dirs", ",", "**", "kwargs", ")", ")", "paths", "=", "itertools", ".", "imap", "(", "target_normalizer", ",", "matches", ")", "return", "next", "(", "paths", ",", "''", ")", "return", "path_searcher" ]
universal search function generator using lazy evaluation .
train
false
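a python 2 sketch of wiring make_path_searcher above ( it relies on itertools.ifilter / imap ) ; the candidate generator and search dirs are toy stand-ins :

    import os

    def candidate_paths(target, dirs):
        for d in dirs:
            yield os.path.join(d, target)

    find_file = make_path_searcher(candidate_paths, os.path.isfile,
                                   os.path.abspath, ['/etc', '/tmp'])
    print(find_file('hosts'))   # -> '/etc/hosts' on most unixes, '' when absent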
54,945
@conf.commands.register def tshark(*args, **kargs): sniff(prn=(lambda x: x.display()), *args, **kargs)
[ "@", "conf", ".", "commands", ".", "register", "def", "tshark", "(", "*", "args", ",", "**", "kargs", ")", ":", "sniff", "(", "prn", "=", "(", "lambda", "x", ":", "x", ".", "display", "(", ")", ")", ",", "*", "args", ",", "**", "kargs", ")" ]
sniff packets and print them calling pkt.display() .
train
false
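a hypothetical scapy session for the tshark command above ; sniffing generally needs root privileges , and the interface name is illustrative :

    from scapy.all import tshark

    tshark(iface='eth0', count=5)   # display five sniffed packets, then return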
54,947
def _defaultFetcher(url): try: r = urlfetch.fetch(url, method=urlfetch.GET) except urlfetch.Error as e: log.warn((u'Error opening url=%r: %s' % (url, e)), error=IOError) else: if (r.status_code == 200): mimetype = 'application/octet-stream' try: (mimetype, params) = cgi.parse_header(r.headers['content-type']) encoding = params['charset'] except KeyError: encoding = None if (mimetype != u'text/css'): log.error((u'Expected "text/css" mime type for url %r but found: %r' % (url, mimetype)), error=ValueError) return (encoding, r.content) else: log.warn((u'Error opening url=%r: HTTP status %s' % (url, r.status_code)), error=IOError)
[ "def", "_defaultFetcher", "(", "url", ")", ":", "try", ":", "r", "=", "urlfetch", ".", "fetch", "(", "url", ",", "method", "=", "urlfetch", ".", "GET", ")", "except", "urlfetch", ".", "Error", "as", "e", ":", "log", ".", "warn", "(", "(", "u'Error opening url=%r: %s'", "%", "(", "url", ",", "e", ")", ")", ",", "error", "=", "IOError", ")", "else", ":", "if", "(", "r", ".", "status_code", "==", "200", ")", ":", "mimetype", "=", "'application/octet-stream'", "try", ":", "(", "mimetype", ",", "params", ")", "=", "cgi", ".", "parse_header", "(", "r", ".", "headers", "[", "'content-type'", "]", ")", "encoding", "=", "params", "[", "'charset'", "]", "except", "KeyError", ":", "encoding", "=", "None", "if", "(", "mimetype", "!=", "u'text/css'", ")", ":", "log", ".", "error", "(", "(", "u'Expected \"text/css\" mime type for url %r but found: %r'", "%", "(", "url", ",", "mimetype", ")", ")", ",", "error", "=", "ValueError", ")", "return", "(", "encoding", ",", "r", ".", "content", ")", "else", ":", "log", ".", "warn", "(", "(", "u'Error opening url=%r: HTTP status %s'", "%", "(", "url", ",", "r", ".", "status_code", ")", ")", ",", "error", "=", "IOError", ")" ]
uses googleappengine fetch ; the returned content is the body content of the response .
train
false