id_within_dataset     int64          1 to 55.5k
snippet               stringlengths  19 to 14.2k
tokens                listlengths    6 to 1.63k
nl                    stringlengths  6 to 352
split_within_dataset  stringclasses  1 value
is_duplicated         bool           2 classes
12,324
def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
    if (auxiliary is None):
        H = build_auxiliary_node_connectivity(G)
    else:
        H = auxiliary
    mapping = H.graph.get('mapping', None)
    if (mapping is None):
        raise nx.NetworkXError('Invalid auxiliary digraph.')
    if (G.has_edge(s, t) or G.has_edge(t, s)):
        return []
    kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H)
    edge_cut = minimum_st_edge_cut(H, ('%sB' % mapping[s]), ('%sA' % mapping[t]), **kwargs)
    node_cut = set((H.node[node]['id'] for edge in edge_cut for node in edge))
    return (node_cut - set([s, t]))
[ "def", "minimum_st_node_cut", "(", "G", ",", "s", ",", "t", ",", "flow_func", "=", "None", ",", "auxiliary", "=", "None", ",", "residual", "=", "None", ")", ":", "if", "(", "auxiliary", "is", "None", ")", ":", "H", "=", "build_auxiliary_node_connectivity", "(", "G", ")", "else", ":", "H", "=", "auxiliary", "mapping", "=", "H", ".", "graph", ".", "get", "(", "'mapping'", ",", "None", ")", "if", "(", "mapping", "is", "None", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'Invalid auxiliary digraph.'", ")", "if", "(", "G", ".", "has_edge", "(", "s", ",", "t", ")", "or", "G", ".", "has_edge", "(", "t", ",", "s", ")", ")", ":", "return", "[", "]", "kwargs", "=", "dict", "(", "flow_func", "=", "flow_func", ",", "residual", "=", "residual", ",", "auxiliary", "=", "H", ")", "edge_cut", "=", "minimum_st_edge_cut", "(", "H", ",", "(", "'%sB'", "%", "mapping", "[", "s", "]", ")", ",", "(", "'%sA'", "%", "mapping", "[", "t", "]", ")", ",", "**", "kwargs", ")", "node_cut", "=", "set", "(", "(", "H", ".", "node", "[", "node", "]", "[", "'id'", "]", "for", "edge", "in", "edge_cut", "for", "node", "in", "edge", ")", ")", "return", "(", "node_cut", "-", "set", "(", "[", "s", ",", "t", "]", ")", ")" ]
returns a set of nodes of minimum cardinality that disconnect source from target in g .
train
false
12,325
def useradd(pwfile, user, password, opts='', runas=None):
    if (not os.path.exists(pwfile)):
        opts += 'c'
    cmd = ['htpasswd', '-b{0}'.format(opts), pwfile, user, password]
    return __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
[ "def", "useradd", "(", "pwfile", ",", "user", ",", "password", ",", "opts", "=", "''", ",", "runas", "=", "None", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "pwfile", ")", ")", ":", "opts", "+=", "'c'", "cmd", "=", "[", "'htpasswd'", ",", "'-b{0}'", ".", "format", "(", "opts", ")", ",", "pwfile", ",", "user", ",", "password", "]", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "runas", "=", "runas", ",", "python_shell", "=", "False", ")" ]
add http user using the htpasswd command .
train
true
12,326
@task
def email_render_document_progress(percent_complete, total):
    subject = ('The command `render_document` is %s%% complete' % percent_complete)
    message = ('The command `render_document` is %s%% complete out of a total of %s documents to render.' % (percent_complete, total))
    mail_admins(subject=subject, message=message)
[ "@", "task", "def", "email_render_document_progress", "(", "percent_complete", ",", "total", ")", ":", "subject", "=", "(", "'The command `render_document` is %s%% complete'", "%", "percent_complete", ")", "message", "=", "(", "'The command `render_document` is %s%% complete out of a total of %s documents to render.'", "%", "(", "percent_complete", ",", "total", ")", ")", "mail_admins", "(", "subject", "=", "subject", ",", "message", "=", "message", ")" ]
task to send email for render_document progress notification .
train
false
12,328
def get_identifiers(source_code):
    tokens = set(re.split('[^0-9a-zA-Z_.]', source_code))
    valid = re.compile('[a-zA-Z_]')
    return [token for token in tokens if re.match(valid, token)]
[ "def", "get_identifiers", "(", "source_code", ")", ":", "tokens", "=", "set", "(", "re", ".", "split", "(", "'[^0-9a-zA-Z_.]'", ",", "source_code", ")", ")", "valid", "=", "re", ".", "compile", "(", "'[a-zA-Z_]'", ")", "return", "[", "token", "for", "token", "in", "tokens", "if", "re", ".", "match", "(", "valid", ",", "token", ")", "]" ]
split source code into python identifier-like tokens .
train
true
12,329
def viridis():
    rc(u'image', cmap=u'viridis')
    im = gci()
    if (im is not None):
        im.set_cmap(cm.viridis)
[ "def", "viridis", "(", ")", ":", "rc", "(", "u'image'", ",", "cmap", "=", "u'viridis'", ")", "im", "=", "gci", "(", ")", "if", "(", "im", "is", "not", "None", ")", ":", "im", ".", "set_cmap", "(", "cm", ".", "viridis", ")" ]
set the default colormap to viridis and apply to current image if any .
train
false
12,332
def is_bracket_region(obj): return isinstance(obj, BracketRegion)
[ "def", "is_bracket_region", "(", "obj", ")", ":", "return", "isinstance", "(", "obj", ",", "BracketRegion", ")" ]
check if object is a bracketregion .
train
false
12,333
def test_documentation_carry_over():
    authentication = hug.authentication.basic(hug.authentication.verify('User1', 'mypassword'))
    assert (authentication.__doc__ == 'Basic HTTP Authentication')
[ "def", "test_documentation_carry_over", "(", ")", ":", "authentication", "=", "hug", ".", "authentication", ".", "basic", "(", "hug", ".", "authentication", ".", "verify", "(", "'User1'", ",", "'mypassword'", ")", ")", "assert", "(", "authentication", ".", "__doc__", "==", "'Basic HTTP Authentication'", ")" ]
test to ensure documentation correctly carries over - to address issue #252 .
train
false
12,334
@cache(request.env.path_info, time_expire=5, cache_model=cache.ram)
def cache_controller_and_view():
    t = time.ctime()
    d = dict(time=t, link=A('click to reload', _href=URL(r=request)))
    return response.render(d)
[ "@", "cache", "(", "request", ".", "env", ".", "path_info", ",", "time_expire", "=", "5", ",", "cache_model", "=", "cache", ".", "ram", ")", "def", "cache_controller_and_view", "(", ")", ":", "t", "=", "time", ".", "ctime", "(", ")", "d", "=", "dict", "(", "time", "=", "t", ",", "link", "=", "A", "(", "'click to reload'", ",", "_href", "=", "URL", "(", "r", "=", "request", ")", ")", ")", "return", "response", ".", "render", "(", "d", ")" ]
cache the output of the controller rendered by the view in ram .
train
false
12,335
def is_ip_addr(ip):
    try:
        socket.inet_aton(ip)
        return ip
    except:
        raise ArgumentTypeError(('%r is not an IP address!' % ip))
[ "def", "is_ip_addr", "(", "ip", ")", ":", "try", ":", "socket", ".", "inet_aton", "(", "ip", ")", "return", "ip", "except", ":", "raise", "ArgumentTypeError", "(", "(", "'%r is not an IP address!'", "%", "ip", ")", ")" ]
check that the supplied value is an internet protocol address .
train
false
12,336
def add_interface_router(router, subnet, profile=None):
    conn = _auth(profile)
    return conn.add_interface_router(router, subnet)
[ "def", "add_interface_router", "(", "router", ",", "subnet", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "add_interface_router", "(", "router", ",", "subnet", ")" ]
adds an internal network interface to the specified router cli example: .
train
true
12,337
def CDLHANGINGMAN(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLHANGINGMAN)
[ "def", "CDLHANGINGMAN", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLHANGINGMAN", ")" ]
hanging man .
train
false
12,338
def mutInsert(individual, pset):
    index = random.randrange(len(individual))
    node = individual[index]
    slice_ = individual.searchSubtree(index)
    choice = random.choice
    primitives = [p for p in pset.primitives[node.ret] if (node.ret in p.args)]
    if (len(primitives) == 0):
        return (individual,)
    new_node = choice(primitives)
    new_subtree = ([None] * len(new_node.args))
    position = choice([i for (i, a) in enumerate(new_node.args) if (a == node.ret)])
    for (i, arg_type) in enumerate(new_node.args):
        if (i != position):
            term = choice(pset.terminals[arg_type])
            if isclass(term):
                term = term()
            new_subtree[i] = term
    new_subtree[position:(position + 1)] = individual[slice_]
    new_subtree.insert(0, new_node)
    individual[slice_] = new_subtree
    return (individual,)
[ "def", "mutInsert", "(", "individual", ",", "pset", ")", ":", "index", "=", "random", ".", "randrange", "(", "len", "(", "individual", ")", ")", "node", "=", "individual", "[", "index", "]", "slice_", "=", "individual", ".", "searchSubtree", "(", "index", ")", "choice", "=", "random", ".", "choice", "primitives", "=", "[", "p", "for", "p", "in", "pset", ".", "primitives", "[", "node", ".", "ret", "]", "if", "(", "node", ".", "ret", "in", "p", ".", "args", ")", "]", "if", "(", "len", "(", "primitives", ")", "==", "0", ")", ":", "return", "(", "individual", ",", ")", "new_node", "=", "choice", "(", "primitives", ")", "new_subtree", "=", "(", "[", "None", "]", "*", "len", "(", "new_node", ".", "args", ")", ")", "position", "=", "choice", "(", "[", "i", "for", "(", "i", ",", "a", ")", "in", "enumerate", "(", "new_node", ".", "args", ")", "if", "(", "a", "==", "node", ".", "ret", ")", "]", ")", "for", "(", "i", ",", "arg_type", ")", "in", "enumerate", "(", "new_node", ".", "args", ")", ":", "if", "(", "i", "!=", "position", ")", ":", "term", "=", "choice", "(", "pset", ".", "terminals", "[", "arg_type", "]", ")", "if", "isclass", "(", "term", ")", ":", "term", "=", "term", "(", ")", "new_subtree", "[", "i", "]", "=", "term", "new_subtree", "[", "position", ":", "(", "position", "+", "1", ")", "]", "=", "individual", "[", "slice_", "]", "new_subtree", ".", "insert", "(", "0", ",", "new_node", ")", "individual", "[", "slice_", "]", "=", "new_subtree", "return", "(", "individual", ",", ")" ]
inserts a new branch at a random position in *individual* .
train
false
12,339
def TR111(rv):
    def f(rv):
        if (not (isinstance(rv, Pow) and (rv.base.is_positive or (rv.exp.is_integer and rv.exp.is_negative)))):
            return rv
        if (rv.base.func is tan):
            return (cot(rv.base.args[0]) ** (- rv.exp))
        elif (rv.base.func is sin):
            return (csc(rv.base.args[0]) ** (- rv.exp))
        elif (rv.base.func is cos):
            return (sec(rv.base.args[0]) ** (- rv.exp))
        return rv
    return bottom_up(rv, f)
[ "def", "TR111", "(", "rv", ")", ":", "def", "f", "(", "rv", ")", ":", "if", "(", "not", "(", "isinstance", "(", "rv", ",", "Pow", ")", "and", "(", "rv", ".", "base", ".", "is_positive", "or", "(", "rv", ".", "exp", ".", "is_integer", "and", "rv", ".", "exp", ".", "is_negative", ")", ")", ")", ")", ":", "return", "rv", "if", "(", "rv", ".", "base", ".", "func", "is", "tan", ")", ":", "return", "(", "cot", "(", "rv", ".", "base", ".", "args", "[", "0", "]", ")", "**", "(", "-", "rv", ".", "exp", ")", ")", "elif", "(", "rv", ".", "base", ".", "func", "is", "sin", ")", ":", "return", "(", "csc", "(", "rv", ".", "base", ".", "args", "[", "0", "]", ")", "**", "(", "-", "rv", ".", "exp", ")", ")", "elif", "(", "rv", ".", "base", ".", "func", "is", "cos", ")", ":", "return", "(", "sec", "(", "rv", ".", "base", ".", "args", "[", "0", "]", ")", "**", "(", "-", "rv", ".", "exp", ")", ")", "return", "rv", "return", "bottom_up", "(", "rv", ",", "f", ")" ]
convert f(x)**-i to g(x)**i where either i is an integer or the base is positive and f .
train
false
12,340
def _eval_str_tuple(value):
    if (not (value.startswith('(') and value.endswith(')'))):
        raise ValueError(value)
    orig_value = value
    value = value[1:(-1)]
    result = []
    while value:
        m = _strs.match(value)
        if (m is None):
            raise ValueError(orig_value)
        result.append(m.group(1))
        value = value[len(m.group(0)):]
    return tuple(result)
[ "def", "_eval_str_tuple", "(", "value", ")", ":", "if", "(", "not", "(", "value", ".", "startswith", "(", "'('", ")", "and", "value", ".", "endswith", "(", "')'", ")", ")", ")", ":", "raise", "ValueError", "(", "value", ")", "orig_value", "=", "value", "value", "=", "value", "[", "1", ":", "(", "-", "1", ")", "]", "result", "=", "[", "]", "while", "value", ":", "m", "=", "_strs", ".", "match", "(", "value", ")", "if", "(", "m", "is", "None", ")", ":", "raise", "ValueError", "(", "orig_value", ")", "result", ".", "append", "(", "m", ".", "group", "(", "1", ")", ")", "value", "=", "value", "[", "len", "(", "m", ".", "group", "(", "0", ")", ")", ":", "]", "return", "tuple", "(", "result", ")" ]
input is the repr of a tuple of strings .
train
false
12,341
def int128_to_b36(int128):
    if (not int128):
        return None
    assert (len(int128) == 16), 'should be 16 bytes (128 bits)'
    (a, b) = struct.unpack('>QQ', int128)
    pub_id = ((a << 64) | b)
    return base36encode(pub_id).lower()
[ "def", "int128_to_b36", "(", "int128", ")", ":", "if", "(", "not", "int128", ")", ":", "return", "None", "assert", "(", "len", "(", "int128", ")", "==", "16", ")", ",", "'should be 16 bytes (128 bits)'", "(", "a", ",", "b", ")", "=", "struct", ".", "unpack", "(", "'>QQ'", ",", "int128", ")", "pub_id", "=", "(", "(", "a", "<<", "64", ")", "|", "b", ")", "return", "base36encode", "(", "pub_id", ")", ".", "lower", "(", ")" ]
int128: a 128 bit unsigned integer returns a base-36 string representation .
train
false
12,342
def load_paste_app(app_name=None):
    if (app_name is None):
        app_name = cfg.CONF.prog
    app_name += _get_deployment_flavor()
    conf_file = _get_deployment_config_file()
    if (conf_file is None):
        raise RuntimeError((_('Unable to locate config file [%s]') % cfg.CONF.paste_deploy['api_paste_config']))
    try:
        app = wsgi.paste_deploy_app(conf_file, app_name, cfg.CONF)
        if cfg.CONF.debug:
            cfg.CONF.log_opt_values(logging.getLogger(app_name), logging.DEBUG)
        return app
    except (LookupError, ImportError) as e:
        raise RuntimeError((_('Unable to load %(app_name)s from configuration file %(conf_file)s.\nGot: %(e)r') % {'app_name': app_name, 'conf_file': conf_file, 'e': e}))
[ "def", "load_paste_app", "(", "app_name", "=", "None", ")", ":", "if", "(", "app_name", "is", "None", ")", ":", "app_name", "=", "cfg", ".", "CONF", ".", "prog", "app_name", "+=", "_get_deployment_flavor", "(", ")", "conf_file", "=", "_get_deployment_config_file", "(", ")", "if", "(", "conf_file", "is", "None", ")", ":", "raise", "RuntimeError", "(", "(", "_", "(", "'Unable to locate config file [%s]'", ")", "%", "cfg", ".", "CONF", ".", "paste_deploy", "[", "'api_paste_config'", "]", ")", ")", "try", ":", "app", "=", "wsgi", ".", "paste_deploy_app", "(", "conf_file", ",", "app_name", ",", "cfg", ".", "CONF", ")", "if", "cfg", ".", "CONF", ".", "debug", ":", "cfg", ".", "CONF", ".", "log_opt_values", "(", "logging", ".", "getLogger", "(", "app_name", ")", ",", "logging", ".", "DEBUG", ")", "return", "app", "except", "(", "LookupError", ",", "ImportError", ")", "as", "e", ":", "raise", "RuntimeError", "(", "(", "_", "(", "'Unable to load %(app_name)s from configuration file %(conf_file)s.\\nGot: %(e)r'", ")", "%", "{", "'app_name'", ":", "app_name", ",", "'conf_file'", ":", "conf_file", ",", "'e'", ":", "e", "}", ")", ")" ]
builds and returns a wsgi app from a paste config file .
train
false
12,343
def build_policy(region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if hasattr(conn, 'build_policy'):
        policy = json.loads(conn.build_policy())
    elif hasattr(conn, '_build_policy'):
        policy = json.loads(conn._build_policy())
    else:
        return {}
    for (key, policy_val) in policy.items():
        for statement in policy_val:
            if (isinstance(statement['Action'], list) and (len(statement['Action']) == 1)):
                statement['Action'] = statement['Action'][0]
            if (isinstance(statement['Principal']['Service'], list) and (len(statement['Principal']['Service']) == 1)):
                statement['Principal']['Service'] = statement['Principal']['Service'][0]
    policy['Version'] = '2008-10-17'
    return policy
[ "def", "build_policy", "(", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "hasattr", "(", "conn", ",", "'build_policy'", ")", ":", "policy", "=", "json", ".", "loads", "(", "conn", ".", "build_policy", "(", ")", ")", "elif", "hasattr", "(", "conn", ",", "'_build_policy'", ")", ":", "policy", "=", "json", ".", "loads", "(", "conn", ".", "_build_policy", "(", ")", ")", "else", ":", "return", "{", "}", "for", "(", "key", ",", "policy_val", ")", "in", "policy", ".", "items", "(", ")", ":", "for", "statement", "in", "policy_val", ":", "if", "(", "isinstance", "(", "statement", "[", "'Action'", "]", ",", "list", ")", "and", "(", "len", "(", "statement", "[", "'Action'", "]", ")", "==", "1", ")", ")", ":", "statement", "[", "'Action'", "]", "=", "statement", "[", "'Action'", "]", "[", "0", "]", "if", "(", "isinstance", "(", "statement", "[", "'Principal'", "]", "[", "'Service'", "]", ",", "list", ")", "and", "(", "len", "(", "statement", "[", "'Principal'", "]", "[", "'Service'", "]", ")", "==", "1", ")", ")", ":", "statement", "[", "'Principal'", "]", "[", "'Service'", "]", "=", "statement", "[", "'Principal'", "]", "[", "'Service'", "]", "[", "0", "]", "policy", "[", "'Version'", "]", "=", "'2008-10-17'", "return", "policy" ]
build a default assume role policy .
train
true
12,344
def salt_information(): (yield ('Salt', __version__))
[ "def", "salt_information", "(", ")", ":", "(", "yield", "(", "'Salt'", ",", "__version__", ")", ")" ]
report version of salt .
train
false
12,345
@app.route('/notes', methods=['GET'])
def list_notes():
    id_token = request.headers['Authorization'].split(' ').pop()
    claims = google.oauth2.id_token.verify_firebase_token(id_token, HTTP_REQUEST)
    if (not claims):
        return ('Unauthorized', 401)
    notes = query_database(claims['sub'])
    return jsonify(notes)
[ "@", "app", ".", "route", "(", "'/notes'", ",", "methods", "=", "[", "'GET'", "]", ")", "def", "list_notes", "(", ")", ":", "id_token", "=", "request", ".", "headers", "[", "'Authorization'", "]", ".", "split", "(", "' '", ")", ".", "pop", "(", ")", "claims", "=", "google", ".", "oauth2", ".", "id_token", ".", "verify_firebase_token", "(", "id_token", ",", "HTTP_REQUEST", ")", "if", "(", "not", "claims", ")", ":", "return", "(", "'Unauthorized'", ",", "401", ")", "notes", "=", "query_database", "(", "claims", "[", "'sub'", "]", ")", "return", "jsonify", "(", "notes", ")" ]
returns a list of notes added by the current firebase user .
train
false
12,346
def if_delegate_has_method(delegate):
    if isinstance(delegate, list):
        delegate = tuple(delegate)
    if (not isinstance(delegate, tuple)):
        delegate = (delegate,)
    return (lambda fn: _IffHasAttrDescriptor(fn, delegate, attribute_name=fn.__name__))
[ "def", "if_delegate_has_method", "(", "delegate", ")", ":", "if", "isinstance", "(", "delegate", ",", "list", ")", ":", "delegate", "=", "tuple", "(", "delegate", ")", "if", "(", "not", "isinstance", "(", "delegate", ",", "tuple", ")", ")", ":", "delegate", "=", "(", "delegate", ",", ")", "return", "(", "lambda", "fn", ":", "_IffHasAttrDescriptor", "(", "fn", ",", "delegate", ",", "attribute_name", "=", "fn", ".", "__name__", ")", ")" ]
create a decorator for methods that are delegated to a sub-estimator this enables ducktyping by hasattr returning true according to the sub-estimator .
train
false
12,347
def start_scanning(config, add_entities, client):
    import pyflic
    scan_wizard = pyflic.ScanWizard()

    def scan_completed_callback(scan_wizard, result, address, name):
        'Restart scan wizard to constantly check for new buttons.'
        if (result == pyflic.ScanWizardResult.WizardSuccess):
            _LOGGER.info('Found new button (%s)', address)
        elif (result != pyflic.ScanWizardResult.WizardFailedTimeout):
            _LOGGER.warning('Failed to connect to button (%s). Reason: %s', address, result)
        start_scanning(config, add_entities, client)

    scan_wizard.on_completed = scan_completed_callback
    client.add_scan_wizard(scan_wizard)
[ "def", "start_scanning", "(", "config", ",", "add_entities", ",", "client", ")", ":", "import", "pyflic", "scan_wizard", "=", "pyflic", ".", "ScanWizard", "(", ")", "def", "scan_completed_callback", "(", "scan_wizard", ",", "result", ",", "address", ",", "name", ")", ":", "if", "(", "result", "==", "pyflic", ".", "ScanWizardResult", ".", "WizardSuccess", ")", ":", "_LOGGER", ".", "info", "(", "'Found new button (%s)'", ",", "address", ")", "elif", "(", "result", "!=", "pyflic", ".", "ScanWizardResult", ".", "WizardFailedTimeout", ")", ":", "_LOGGER", ".", "warning", "(", "'Failed to connect to button (%s). Reason: %s'", ",", "address", ",", "result", ")", "start_scanning", "(", "config", ",", "add_entities", ",", "client", ")", "scan_wizard", ".", "on_completed", "=", "scan_completed_callback", "client", ".", "add_scan_wizard", "(", "scan_wizard", ")" ]
start a new flic client for scanning & connecting to new buttons .
train
false
12,348
def get_cookie_opener(gs_username, gs_token, gs_toolname=None):
    cj = http_cookiejar.CookieJar()
    for (cookie_name, cookie_value) in [('gs-token', gs_token), ('gs-username', gs_username)]:
        cookie = http_cookiejar.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        cj.set_cookie(cookie)
    cookie_opener = build_opener(HTTPCookieProcessor(cj))
    cookie_opener.addheaders.append(('gs-toolname', (gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME)))
    return cookie_opener
[ "def", "get_cookie_opener", "(", "gs_username", ",", "gs_token", ",", "gs_toolname", "=", "None", ")", ":", "cj", "=", "http_cookiejar", ".", "CookieJar", "(", ")", "for", "(", "cookie_name", ",", "cookie_value", ")", "in", "[", "(", "'gs-token'", ",", "gs_token", ")", ",", "(", "'gs-username'", ",", "gs_username", ")", "]", ":", "cookie", "=", "http_cookiejar", ".", "Cookie", "(", "version", "=", "0", ",", "name", "=", "cookie_name", ",", "value", "=", "cookie_value", ",", "port", "=", "None", ",", "port_specified", "=", "False", ",", "domain", "=", "''", ",", "domain_specified", "=", "False", ",", "domain_initial_dot", "=", "False", ",", "path", "=", "'/'", ",", "path_specified", "=", "True", ",", "secure", "=", "False", ",", "expires", "=", "None", ",", "discard", "=", "True", ",", "comment", "=", "None", ",", "comment_url", "=", "None", ",", "rest", "=", "{", "'HttpOnly'", ":", "None", "}", ",", "rfc2109", "=", "False", ")", "cj", ".", "set_cookie", "(", "cookie", ")", "cookie_opener", "=", "build_opener", "(", "HTTPCookieProcessor", "(", "cj", ")", ")", "cookie_opener", ".", "addheaders", ".", "append", "(", "(", "'gs-toolname'", ",", "(", "gs_toolname", "or", "DEFAULT_GENOMESPACE_TOOLNAME", ")", ")", ")", "return", "cookie_opener" ]
create a genomespace cookie opener .
train
false
12,349
def test_softplus():
    def softplus(x):
        return np.log((np.ones_like(x) + np.exp(x)))
    x = K.placeholder(ndim=2)
    f = K.function([x], [activations.softplus(x)])
    test_values = get_standard_values()
    result = f([test_values])[0]
    expected = softplus(test_values)
    assert_allclose(result, expected, rtol=1e-05)
[ "def", "test_softplus", "(", ")", ":", "def", "softplus", "(", "x", ")", ":", "return", "np", ".", "log", "(", "(", "np", ".", "ones_like", "(", "x", ")", "+", "np", ".", "exp", "(", "x", ")", ")", ")", "x", "=", "K", ".", "placeholder", "(", "ndim", "=", "2", ")", "f", "=", "K", ".", "function", "(", "[", "x", "]", ",", "[", "activations", ".", "softplus", "(", "x", ")", "]", ")", "test_values", "=", "get_standard_values", "(", ")", "result", "=", "f", "(", "[", "test_values", "]", ")", "[", "0", "]", "expected", "=", "softplus", "(", "test_values", ")", "assert_allclose", "(", "result", ",", "expected", ",", "rtol", "=", "1e-05", ")" ]
test using a reference softplus implementation .
train
false
12,350
def _formatter(nodetext, optionstext, caller=None): return nodetext
[ "def", "_formatter", "(", "nodetext", ",", "optionstext", ",", "caller", "=", "None", ")", ":", "return", "nodetext" ]
do not display the options .
train
false
12,351
def null(): return ValueEvent(None)
[ "def", "null", "(", ")", ":", "return", "ValueEvent", "(", "None", ")" ]
null -> str encode the shellcode raw_bytes such that it does not contain any null bytes .
train
false
12,352
def get_key_policy(key_id, policy_name, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    r = {}
    try:
        key_policy = conn.get_key_policy(key_id, policy_name)
        r['key_policy'] = json.deserialize(key_policy['Policy'], object_pairs_hook=odict.OrderedDict)
    except boto.exception.BotoServerError as e:
        r['error'] = __utils__['boto.get_error'](e)
    return r
[ "def", "get_key_policy", "(", "key_id", ",", "policy_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "r", "=", "{", "}", "try", ":", "key_policy", "=", "conn", ".", "get_key_policy", "(", "key_id", ",", "policy_name", ")", "r", "[", "'key_policy'", "]", "=", "json", ".", "deserialize", "(", "key_policy", "[", "'Policy'", "]", ",", "object_pairs_hook", "=", "odict", ".", "OrderedDict", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "r", "[", "'error'", "]", "=", "__utils__", "[", "'boto.get_error'", "]", "(", "e", ")", "return", "r" ]
get the policy for the specified key .
train
true
12,353
def ensure_s3_bucket(name, region=''):
    bucket = ('s3://' + name)
    list_result = run_quick(('aws s3 ls ' + bucket), echo=False)
    if (not list_result.returncode):
        sys.stderr.write('WARNING: "{bucket}" already exists. Overwriting.\n'.format(bucket=bucket))
    else:
        print 'Creating S3 bucket "{bucket}"'.format(bucket=bucket)
        command = ('aws s3 mb ' + bucket)
        if region:
            command += (' --region ' + region)
        check_run_quick(command, echo=False)
[ "def", "ensure_s3_bucket", "(", "name", ",", "region", "=", "''", ")", ":", "bucket", "=", "(", "'s3://'", "+", "name", ")", "list_result", "=", "run_quick", "(", "(", "'aws s3 ls '", "+", "bucket", ")", ",", "echo", "=", "False", ")", "if", "(", "not", "list_result", ".", "returncode", ")", ":", "sys", ".", "stderr", ".", "write", "(", "'WARNING: \"{bucket}\" already exists. Overwriting.\\n'", ".", "format", "(", "bucket", "=", "bucket", ")", ")", "else", ":", "print", "'Creating S3 bucket \"{bucket}\"'", ".", "format", "(", "bucket", "=", "bucket", ")", "command", "=", "(", "'aws s3 mb '", "+", "bucket", ")", "if", "region", ":", "command", "+=", "(", "' --region '", "+", "region", ")", "check_run_quick", "(", "command", ",", "echo", "=", "False", ")" ]
ensure that the desired s3 bucket exists .
train
false
12,354
def load_libc_function(func_name, log_error=True, fail_if_missing=False, errcheck=False):
    try:
        libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
        func = getattr(libc, func_name)
    except AttributeError:
        if fail_if_missing:
            raise
        if log_error:
            logging.warning(_('Unable to locate %s in libc. Leaving as a no-op.'), func_name)
        return noop_libc_function
    if errcheck:
        def _errcheck(result, f, args):
            if (result == (-1)):
                errcode = ctypes.get_errno()
                raise OSError(errcode, os.strerror(errcode))
            return result
        func.errcheck = _errcheck
    return func
[ "def", "load_libc_function", "(", "func_name", ",", "log_error", "=", "True", ",", "fail_if_missing", "=", "False", ",", "errcheck", "=", "False", ")", ":", "try", ":", "libc", "=", "ctypes", ".", "CDLL", "(", "ctypes", ".", "util", ".", "find_library", "(", "'c'", ")", ",", "use_errno", "=", "True", ")", "func", "=", "getattr", "(", "libc", ",", "func_name", ")", "except", "AttributeError", ":", "if", "fail_if_missing", ":", "raise", "if", "log_error", ":", "logging", ".", "warning", "(", "_", "(", "'Unable to locate %s in libc. Leaving as a no-op.'", ")", ",", "func_name", ")", "return", "noop_libc_function", "if", "errcheck", ":", "def", "_errcheck", "(", "result", ",", "f", ",", "args", ")", ":", "if", "(", "result", "==", "(", "-", "1", ")", ")", ":", "errcode", "=", "ctypes", ".", "get_errno", "(", ")", "raise", "OSError", "(", "errcode", ",", "os", ".", "strerror", "(", "errcode", ")", ")", "return", "result", "func", ".", "errcheck", "=", "_errcheck", "return", "func" ]
attempt to find the function in libc .
train
false
12,355
def openpty():
    try:
        return os.openpty()
    except (AttributeError, OSError):
        pass
    (master_fd, slave_name) = _open_terminal()
    slave_fd = slave_open(slave_name)
    return (master_fd, slave_fd)
[ "def", "openpty", "(", ")", ":", "try", ":", "return", "os", ".", "openpty", "(", ")", "except", "(", "AttributeError", ",", "OSError", ")", ":", "pass", "(", "master_fd", ",", "slave_name", ")", "=", "_open_terminal", "(", ")", "slave_fd", "=", "slave_open", "(", "slave_name", ")", "return", "(", "master_fd", ",", "slave_fd", ")" ]
openpty() -> open a pty master/slave pair .
train
true
12,357
def _butter(order, band, rate=44100):
    global _butter_cache
    _h = hash((order, band, rate))
    if (not (_h in _butter_cache)):
        (low, high) = band
        nyqfreq = (float(rate) / 2)
        lowf = (low / nyqfreq)
        highf = (high / nyqfreq)
        _butter_cache[_h] = butter(order, (lowf, highf), btype='band')
    return _butter_cache[_h]
[ "def", "_butter", "(", "order", ",", "band", ",", "rate", "=", "44100", ")", ":", "global", "_butter_cache", "_h", "=", "hash", "(", "(", "order", ",", "band", ",", "rate", ")", ")", "if", "(", "not", "(", "_h", "in", "_butter_cache", ")", ")", ":", "(", "low", ",", "high", ")", "=", "band", "nyqfreq", "=", "(", "float", "(", "rate", ")", "/", "2", ")", "lowf", "=", "(", "low", "/", "nyqfreq", ")", "highf", "=", "(", "high", "/", "nyqfreq", ")", "_butter_cache", "[", "_h", "]", "=", "butter", "(", "order", ",", "(", "lowf", ",", "highf", ")", ",", "btype", "=", "'band'", ")", "return", "_butter_cache", "[", "_h", "]" ]
cache-ing version of scipy .
train
false
12,359
def make_declare(loop_orders, dtypes, sub):
    decl = ''
    for (i, (loop_order, dtype)) in enumerate(zip(loop_orders, dtypes)):
        var = sub[('lv%i' % i)]
        decl += ('\n %(dtype)s* %(var)s_iter;\n ' % locals())
        for (j, value) in enumerate(loop_order):
            if (value != 'x'):
                decl += ('\n npy_intp %(var)s_n%(value)i;\n ssize_t %(var)s_stride%(value)i;\n int %(var)s_jump%(value)i_%(j)i;\n ' % locals())
            else:
                decl += ('\n int %(var)s_jump%(value)s_%(j)i;\n ' % locals())
    return decl
[ "def", "make_declare", "(", "loop_orders", ",", "dtypes", ",", "sub", ")", ":", "decl", "=", "''", "for", "(", "i", ",", "(", "loop_order", ",", "dtype", ")", ")", "in", "enumerate", "(", "zip", "(", "loop_orders", ",", "dtypes", ")", ")", ":", "var", "=", "sub", "[", "(", "'lv%i'", "%", "i", ")", "]", "decl", "+=", "(", "'\\n %(dtype)s* %(var)s_iter;\\n '", "%", "locals", "(", ")", ")", "for", "(", "j", ",", "value", ")", "in", "enumerate", "(", "loop_order", ")", ":", "if", "(", "value", "!=", "'x'", ")", ":", "decl", "+=", "(", "'\\n npy_intp %(var)s_n%(value)i;\\n ssize_t %(var)s_stride%(value)i;\\n int %(var)s_jump%(value)i_%(j)i;\\n '", "%", "locals", "(", ")", ")", "else", ":", "decl", "+=", "(", "'\\n int %(var)s_jump%(value)s_%(j)i;\\n '", "%", "locals", "(", ")", ")", "return", "decl" ]
produce code to declare all necessary variables .
train
false
12,360
def with_retry(method, should_retry=None, steps=None, sleep=None):
    if (should_retry is None):
        should_retry = retry_always
    if (steps is None):
        steps = get_default_retry_steps()

    def method_with_retry(*a, **kw):
        name = _callable_repr(method)
        action_type = _TRY_UNTIL_SUCCESS
        with start_action(action_type=action_type, function=name):
            return _poll_until_success_returning_result(should_retry, steps, sleep, method, a, kw)

    return method_with_retry
[ "def", "with_retry", "(", "method", ",", "should_retry", "=", "None", ",", "steps", "=", "None", ",", "sleep", "=", "None", ")", ":", "if", "(", "should_retry", "is", "None", ")", ":", "should_retry", "=", "retry_always", "if", "(", "steps", "is", "None", ")", ":", "steps", "=", "get_default_retry_steps", "(", ")", "def", "method_with_retry", "(", "*", "a", ",", "**", "kw", ")", ":", "name", "=", "_callable_repr", "(", "method", ")", "action_type", "=", "_TRY_UNTIL_SUCCESS", "with", "start_action", "(", "action_type", "=", "action_type", ",", "function", "=", "name", ")", ":", "return", "_poll_until_success_returning_result", "(", "should_retry", ",", "steps", ",", "sleep", ",", "method", ",", "a", ",", "kw", ")", "return", "method_with_retry" ]
return a new version of method that retries .
train
false
12,361
def track_for_mbid(recording_id):
    try:
        return mb.track_for_id(recording_id)
    except mb.MusicBrainzAPIError as exc:
        exc.log(log)
[ "def", "track_for_mbid", "(", "recording_id", ")", ":", "try", ":", "return", "mb", ".", "track_for_id", "(", "recording_id", ")", "except", "mb", ".", "MusicBrainzAPIError", "as", "exc", ":", "exc", ".", "log", "(", "log", ")" ]
get a trackinfo object for a musicbrainz recording id .
train
false
12,362
def zeroDepthSearch(expression, value):
    retVal = []
    depth = 0
    for index in xrange(len(expression)):
        if (expression[index] == '('):
            depth += 1
        elif (expression[index] == ')'):
            depth -= 1
        elif ((depth == 0) and (expression[index:(index + len(value))] == value)):
            retVal.append(index)
    return retVal
[ "def", "zeroDepthSearch", "(", "expression", ",", "value", ")", ":", "retVal", "=", "[", "]", "depth", "=", "0", "for", "index", "in", "xrange", "(", "len", "(", "expression", ")", ")", ":", "if", "(", "expression", "[", "index", "]", "==", "'('", ")", ":", "depth", "+=", "1", "elif", "(", "expression", "[", "index", "]", "==", "')'", ")", ":", "depth", "-=", "1", "elif", "(", "(", "depth", "==", "0", ")", "and", "(", "expression", "[", "index", ":", "(", "index", "+", "len", "(", "value", ")", ")", "]", "==", "value", ")", ")", ":", "retVal", ".", "append", "(", "index", ")", "return", "retVal" ]
searches occurrences of value inside expression at 0-depth level regarding the parentheses .
train
false
12,363
def get_pythonpath():
    scrapy_path = import_module('scrapy').__path__[0]
    return ((os.path.dirname(scrapy_path) + os.pathsep) + os.environ.get('PYTHONPATH', ''))
[ "def", "get_pythonpath", "(", ")", ":", "scrapy_path", "=", "import_module", "(", "'scrapy'", ")", ".", "__path__", "[", "0", "]", "return", "(", "(", "os", ".", "path", ".", "dirname", "(", "scrapy_path", ")", "+", "os", ".", "pathsep", ")", "+", "os", ".", "environ", ".", "get", "(", "'PYTHONPATH'", ",", "''", ")", ")" ]
return a pythonpath suitable to use in processes so that they find this installation of scrapy .
train
false
12,364
def rowswap(matlist, index1, index2, K):
    (matlist[index1], matlist[index2]) = (matlist[index2], matlist[index1])
    return matlist
[ "def", "rowswap", "(", "matlist", ",", "index1", ",", "index2", ",", "K", ")", ":", "(", "matlist", "[", "index1", "]", ",", "matlist", "[", "index2", "]", ")", "=", "(", "matlist", "[", "index2", "]", ",", "matlist", "[", "index1", "]", ")", "return", "matlist" ]
returns the matrix with index1 row and index2 row swapped .
train
false
12,365
def ask_timezone(question, default, tzurl):
    lower_tz = [tz.lower() for tz in pytz.all_timezones]
    while True:
        r = ask(question, str_compat, default)
        r = r.strip().replace(u' ', u'_').lower()
        if (r in lower_tz):
            r = pytz.all_timezones[lower_tz.index(r)]
            break
        else:
            print(u'Please enter a valid time zone:\n (check [{0}])'.format(tzurl))
    return r
[ "def", "ask_timezone", "(", "question", ",", "default", ",", "tzurl", ")", ":", "lower_tz", "=", "[", "tz", ".", "lower", "(", ")", "for", "tz", "in", "pytz", ".", "all_timezones", "]", "while", "True", ":", "r", "=", "ask", "(", "question", ",", "str_compat", ",", "default", ")", "r", "=", "r", ".", "strip", "(", ")", ".", "replace", "(", "u' '", ",", "u'_'", ")", ".", "lower", "(", ")", "if", "(", "r", "in", "lower_tz", ")", ":", "r", "=", "pytz", ".", "all_timezones", "[", "lower_tz", ".", "index", "(", "r", ")", "]", "break", "else", ":", "print", "(", "u'Please enter a valid time zone:\\n (check [{0}])'", ".", "format", "(", "tzurl", ")", ")", "return", "r" ]
prompt for time zone and validate input .
train
false
12,367
def post2data(func):
    def wrapper(self, request):
        request.DATA = request.POST
        if ('$$originalJSON' in request.POST):
            request.DATA = json.loads(request.POST['$$originalJSON'])
        return func(self, request)
    return wrapper
[ "def", "post2data", "(", "func", ")", ":", "def", "wrapper", "(", "self", ",", "request", ")", ":", "request", ".", "DATA", "=", "request", ".", "POST", "if", "(", "'$$originalJSON'", "in", "request", ".", "POST", ")", ":", "request", ".", "DATA", "=", "json", ".", "loads", "(", "request", ".", "POST", "[", "'$$originalJSON'", "]", ")", "return", "func", "(", "self", ",", "request", ")", "return", "wrapper" ]
the sole purpose of this decorator is to restore original form values along with their types stored on client-side under key $$originaljson .
train
true
12,368
@pytest.fixture
def project_dir_resources0(project0, subdir0):
    from pootle_app.models import Directory
    from pootle_project.models import ProjectResource
    resources = Directory.objects.live().filter(name=subdir0.name, parent__translationproject__project=project0)
    return ProjectResource(resources, ('/projects/%s/%s' % (project0.code, subdir0.name)))
[ "@", "pytest", ".", "fixture", "def", "project_dir_resources0", "(", "project0", ",", "subdir0", ")", ":", "from", "pootle_app", ".", "models", "import", "Directory", "from", "pootle_project", ".", "models", "import", "ProjectResource", "resources", "=", "Directory", ".", "objects", ".", "live", "(", ")", ".", "filter", "(", "name", "=", "subdir0", ".", "name", ",", "parent__translationproject__project", "=", "project0", ")", "return", "ProjectResource", "(", "resources", ",", "(", "'/projects/%s/%s'", "%", "(", "project0", ".", "code", ",", "subdir0", ".", "name", ")", ")", ")" ]
returns a projectresource object for a directory .
train
false
12,369
def test_HTTPMove():
    assert_raises(AssertionError, HTTPFound)
    assert_raises(AssertionError, HTTPTemporaryRedirect, headers=[('l0cation', '/bing')])
    assert isinstance(HTTPMovedPermanently('This is a message', headers=[('Location', '/bing')]), HTTPRedirection)
    assert isinstance(HTTPUseProxy(headers=[('LOCATION', '/bing')]), HTTPRedirection)
    assert isinstance(HTTPFound('/foobar'), HTTPRedirection)
[ "def", "test_HTTPMove", "(", ")", ":", "assert_raises", "(", "AssertionError", ",", "HTTPFound", ")", "assert_raises", "(", "AssertionError", ",", "HTTPTemporaryRedirect", ",", "headers", "=", "[", "(", "'l0cation'", ",", "'/bing'", ")", "]", ")", "assert", "isinstance", "(", "HTTPMovedPermanently", "(", "'This is a message'", ",", "headers", "=", "[", "(", "'Location'", ",", "'/bing'", ")", "]", ")", ",", "HTTPRedirection", ")", "assert", "isinstance", "(", "HTTPUseProxy", "(", "headers", "=", "[", "(", "'LOCATION'", ",", "'/bing'", ")", "]", ")", ",", "HTTPRedirection", ")", "assert", "isinstance", "(", "HTTPFound", "(", "'/foobar'", ")", ",", "HTTPRedirection", ")" ]
make sure that location is a mandatory attribute of redirects .
train
false
12,371
def compress_weights(W, l):
    (U, s, V) = np.linalg.svd(W, full_matrices=False)
    Ul = U[:, :l]
    sl = s[:l]
    Vl = V[:l, :]
    L = np.dot(np.diag(sl), Vl)
    return (Ul, L)
[ "def", "compress_weights", "(", "W", ",", "l", ")", ":", "(", "U", ",", "s", ",", "V", ")", "=", "np", ".", "linalg", ".", "svd", "(", "W", ",", "full_matrices", "=", "False", ")", "Ul", "=", "U", "[", ":", ",", ":", "l", "]", "sl", "=", "s", "[", ":", "l", "]", "Vl", "=", "V", "[", ":", "l", ",", ":", "]", "L", "=", "np", ".", "dot", "(", "np", ".", "diag", "(", "sl", ")", ",", "Vl", ")", "return", "(", "Ul", ",", "L", ")" ]
compress the weight matrix w of an inner product layer using truncated svd .
train
false
12,372
def parse_auth(rule):
    parser = argparse.ArgumentParser()
    rules = shlex.split(rule)
    rules.pop(0)
    noargs = ('back', 'test', 'nostart', 'kickstart', 'probe', 'enablecache', 'disablecache', 'disablenis', 'enableshadow', 'disableshadow', 'enablemd5', 'disablemd5', 'enableldap', 'enableldapauth', 'enableldaptls', 'disableldap', 'disableldapauth', 'enablekrb5kdcdns', 'disablekrb5kdcdns', 'enablekrb5realmdns', 'disablekrb5realmdns', 'disablekrb5', 'disablehesiod', 'enablesmbauth', 'disablesmbauth', 'enablewinbind', 'enablewinbindauth', 'disablewinbind', 'disablewinbindauth', 'enablewinbindusedefaultdomain', 'disablewinbindusedefaultdomain', 'enablewins', 'disablewins')
    for arg in noargs:
        parser.add_argument('--{0}'.format(arg), dest=arg, action='store_true')
    parser.add_argument('--enablenis', dest='enablenis', action='store')
    parser.add_argument('--hesiodrhs', dest='hesiodrhs', action='store')
    parser.add_argument('--krb5adminserver', dest='krb5adminserver', action='append')
    parser.add_argument('--krb5kdc', dest='krb5kdc', action='append')
    parser.add_argument('--ldapbasedn', dest='ldapbasedn', action='store')
    parser.add_argument('--ldapserver', dest='ldapserver', action='append')
    parser.add_argument('--nisserver', dest='nisserver', action='append')
    parser.add_argument('--passalgo', dest='passalgo', action='store')
    parser.add_argument('--smbidmapgid', dest='smbidmapgid', action='store')
    parser.add_argument('--smbidmapuid', dest='smbidmapuid', action='store')
    parser.add_argument('--smbrealm', dest='smbrealm', action='store')
    parser.add_argument('--smbsecurity', dest='smbsecurity', action='store', choices=['user', 'server', 'domain', 'dns'])
    parser.add_argument('--smbservers', dest='smbservers', action='store')
    parser.add_argument('--smbworkgroup', dest='smbworkgroup', action='store')
    parser.add_argument('--winbindjoin', dest='winbindjoin', action='store')
    parser.add_argument('--winbindseparator', dest='winbindseparator', action='store')
    parser.add_argument('--winbindtemplatehomedir', dest='winbindtemplatehomedir', action='store')
    parser.add_argument('--winbindtemplateprimarygroup', dest='winbindtemplateprimarygroup', action='store')
    parser.add_argument('--winbindtemplateshell', dest='winbindtemplateshell', action='store')
    parser.add_argument('--enablekrb5', dest='enablekrb5', action='store_true')
    if ('--enablekrb5' in rules):
        parser.add_argument('--krb5realm', dest='krb5realm', action='store', required=True)
    parser.add_argument('--enablehesiod', dest='enablehesiod', action='store_true')
    if ('--enablehesiod' in rules):
        parser.add_argument('--hesiodlhs', dest='hesiodlhs', action='store', required=True)
    args = clean_args(vars(parser.parse_args(rules)))
    parser = None
    return args
[ "def", "parse_auth", "(", "rule", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "rules", "=", "shlex", ".", "split", "(", "rule", ")", "rules", ".", "pop", "(", "0", ")", "noargs", "=", "(", "'back'", ",", "'test'", ",", "'nostart'", ",", "'kickstart'", ",", "'probe'", ",", "'enablecache'", ",", "'disablecache'", ",", "'disablenis'", ",", "'enableshadow'", ",", "'disableshadow'", ",", "'enablemd5'", ",", "'disablemd5'", ",", "'enableldap'", ",", "'enableldapauth'", ",", "'enableldaptls'", ",", "'disableldap'", ",", "'disableldapauth'", ",", "'enablekrb5kdcdns'", ",", "'disablekrb5kdcdns'", ",", "'enablekrb5realmdns'", ",", "'disablekrb5realmdns'", ",", "'disablekrb5'", ",", "'disablehe-siod'", ",", "'enablesmbauth'", ",", "'disablesmbauth'", ",", "'enablewinbind'", ",", "'enablewinbindauth'", ",", "'disablewinbind'", ",", "'disablewinbindauth'", ",", "'enablewinbindusedefaultdomain'", ",", "'disablewinbindusedefaultdomain'", ",", "'enablewins'", ",", "'disablewins'", ")", "for", "arg", "in", "noargs", ":", "parser", ".", "add_argument", "(", "'--{0}'", ".", "format", "(", "arg", ")", ",", "dest", "=", "arg", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--enablenis'", ",", "dest", "=", "'enablenis'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--hesiodrhs'", ",", "dest", "=", "'hesiodrhs'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--krb5adminserver'", ",", "dest", "=", "'krb5adminserver'", ",", "action", "=", "'append'", ")", "parser", ".", "add_argument", "(", "'--krb5kdc'", ",", "dest", "=", "'krb5kdc'", ",", "action", "=", "'append'", ")", "parser", ".", "add_argument", "(", "'--ldapbasedn'", ",", "dest", "=", "'ldapbasedn'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--ldapserver'", ",", "dest", "=", "'ldapserver'", ",", "action", "=", "'append'", ")", "parser", ".", "add_argument", "(", "'--nisserver'", ",", "dest", "=", "'nisserver'", ",", "action", "=", "'append'", ")", "parser", ".", "add_argument", "(", "'--passalgo'", ",", "dest", "=", "'passalgo'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--smbidmapgid'", ",", "dest", "=", "'smbidmapgid'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--smbidmapuid'", ",", "dest", "=", "'smbidmapuid'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--smbrealm'", ",", "dest", "=", "'smbrealm'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--smbsecurity'", ",", "dest", "=", "'smbsecurity'", ",", "action", "=", "'store'", ",", "choices", "=", "[", "'user'", ",", "'server'", ",", "'domain'", ",", "'dns'", "]", ")", "parser", ".", "add_argument", "(", "'--smbservers'", ",", "dest", "=", "'smbservers'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--smbworkgroup'", ",", "dest", "=", "'smbworkgroup'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--winbindjoin'", ",", "dest", "=", "'winbindjoin'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--winbindseparator'", ",", "dest", "=", "'winbindseparator'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--winbindtemplatehomedir'", ",", "dest", "=", "'winbindtemplatehomedir'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--winbindtemplateprimarygroup'", ",", "dest", "=", "'winbindtemplateprimarygroup'", 
",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--winbindtemplateshell'", ",", "dest", "=", "'winbindtemplateshell'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--enablekrb5'", ",", "dest", "=", "'enablekrb5'", ",", "action", "=", "'store_true'", ")", "if", "(", "'--enablekrb5'", "in", "rules", ")", ":", "parser", ".", "add_argument", "(", "'--krb5realm'", ",", "dest", "=", "'krb5realm'", ",", "action", "=", "'store'", ",", "required", "=", "True", ")", "parser", ".", "add_argument", "(", "'--enablehesiod'", ",", "dest", "=", "'enablehesiod'", ",", "action", "=", "'store_true'", ")", "if", "(", "'--enablehesiod'", "in", "rules", ")", ":", "parser", ".", "add_argument", "(", "'--hesiodlhs'", ",", "dest", "=", "'hesiodlhs'", ",", "action", "=", "'store'", ",", "required", "=", "True", ")", "args", "=", "clean_args", "(", "vars", "(", "parser", ".", "parse_args", "(", "rules", ")", ")", ")", "parser", "=", "None", "return", "args" ]
parse rfc2617 http authentication header string and return tuple or none .
train
true
12,373
@pytest.fixture
def merge_log_err(hass):
    with mock.patch('homeassistant.config._LOGGER.error') as logerr:
        (yield logerr)
[ "@", "pytest", ".", "fixture", "def", "merge_log_err", "(", "hass", ")", ":", "with", "mock", ".", "patch", "(", "'homeassistant.config._LOGGER.error'", ")", "as", "logerr", ":", "(", "yield", "logerr", ")" ]
patch _merge_log_error from packages .
train
false
12,374
def make_cascading_boolean_array(shape, first_value=True):
    if (len(shape) != 2):
        raise ValueError('Shape must be 2-dimensional. Given shape was {}'.format(shape))
    cascading = np.full(shape, (not first_value), dtype=np.bool)
    ending_col = (shape[1] - 1)
    for row in cascading:
        if (ending_col > 0):
            row[:ending_col] = first_value
            ending_col -= 1
        else:
            break
    return cascading
[ "def", "make_cascading_boolean_array", "(", "shape", ",", "first_value", "=", "True", ")", ":", "if", "(", "len", "(", "shape", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "'Shape must be 2-dimensional. Given shape was {}'", ".", "format", "(", "shape", ")", ")", "cascading", "=", "np", ".", "full", "(", "shape", ",", "(", "not", "first_value", ")", ",", "dtype", "=", "np", ".", "bool", ")", "ending_col", "=", "(", "shape", "[", "1", "]", "-", "1", ")", "for", "row", "in", "cascading", ":", "if", "(", "ending_col", ">", "0", ")", ":", "row", "[", ":", "ending_col", "]", "=", "first_value", "ending_col", "-=", "1", "else", ":", "break", "return", "cascading" ]
create a numpy array with the given shape containing cascading boolean values .
train
false
12,375
def convert_and_import_workflow(has_workflow, **kwds):
    galaxy_interface = kwds.get('galaxy_interface', None)
    if (galaxy_interface is None):
        galaxy_interface = BioBlendImporterGalaxyInterface(**kwds)
    source_type = kwds.get('source_type', None)
    workflow_directory = kwds.get('workflow_directory', None)
    if (source_type == 'path'):
        workflow_path = has_workflow
        if (workflow_directory is None):
            workflow_directory = os.path.dirname(has_workflow)
        with open(workflow_path, 'r') as f:
            has_workflow = yaml.load(f)
    if (workflow_directory is not None):
        workflow_directory = os.path.abspath(workflow_directory)
    if isinstance(has_workflow, dict):
        workflow = python_to_workflow(has_workflow, galaxy_interface, workflow_directory)
    else:
        workflow = yaml_to_workflow(has_workflow, galaxy_interface, workflow_directory)
    publish = kwds.get('publish', False)
    exact_tools = kwds.get('exact_tools', False)
    import_kwds = {}
    if publish:
        import_kwds['publish'] = True
    if exact_tools:
        import_kwds['exact_tools'] = True
    return galaxy_interface.import_workflow(workflow, **import_kwds)
[ "def", "convert_and_import_workflow", "(", "has_workflow", ",", "**", "kwds", ")", ":", "galaxy_interface", "=", "kwds", ".", "get", "(", "'galaxy_interface'", ",", "None", ")", "if", "(", "galaxy_interface", "is", "None", ")", ":", "galaxy_interface", "=", "BioBlendImporterGalaxyInterface", "(", "**", "kwds", ")", "source_type", "=", "kwds", ".", "get", "(", "'source_type'", ",", "None", ")", "workflow_directory", "=", "kwds", ".", "get", "(", "'workflow_directory'", ",", "None", ")", "if", "(", "source_type", "==", "'path'", ")", ":", "workflow_path", "=", "has_workflow", "if", "(", "workflow_directory", "is", "None", ")", ":", "workflow_directory", "=", "os", ".", "path", ".", "dirname", "(", "has_workflow", ")", "with", "open", "(", "workflow_path", ",", "'r'", ")", "as", "f", ":", "has_workflow", "=", "yaml", ".", "load", "(", "f", ")", "if", "(", "workflow_directory", "is", "not", "None", ")", ":", "workflow_directory", "=", "os", ".", "path", ".", "abspath", "(", "workflow_directory", ")", "if", "isinstance", "(", "has_workflow", ",", "dict", ")", ":", "workflow", "=", "python_to_workflow", "(", "has_workflow", ",", "galaxy_interface", ",", "workflow_directory", ")", "else", ":", "workflow", "=", "yaml_to_workflow", "(", "has_workflow", ",", "galaxy_interface", ",", "workflow_directory", ")", "publish", "=", "kwds", ".", "get", "(", "'publish'", ",", "False", ")", "exact_tools", "=", "kwds", ".", "get", "(", "'exact_tools'", ",", "False", ")", "import_kwds", "=", "{", "}", "if", "publish", ":", "import_kwds", "[", "'publish'", "]", "=", "True", "if", "exact_tools", ":", "import_kwds", "[", "'exact_tools'", "]", "=", "True", "return", "galaxy_interface", ".", "import_workflow", "(", "workflow", ",", "**", "import_kwds", ")" ]
function is main entry for conversion and import of format 2 workflows .
train
false
12,377
def _value_check(condition, message):
    if (condition == False):
        raise ValueError(message)
[ "def", "_value_check", "(", "condition", ",", "message", ")", ":", "if", "(", "condition", "==", "False", ")", ":", "raise", "ValueError", "(", "message", ")" ]
check a condition on input value .
train
false
12,379
def column_families(keyspace=None):
    sys = _sys_mgr()
    ksps = sys.list_keyspaces()
    if keyspace:
        if (keyspace in ksps):
            return list(sys.get_keyspace_column_families(keyspace).keys())
        else:
            return None
    else:
        ret = {}
        for kspace in ksps:
            ret[kspace] = list(sys.get_keyspace_column_families(kspace).keys())
        return ret
[ "def", "column_families", "(", "keyspace", "=", "None", ")", ":", "sys", "=", "_sys_mgr", "(", ")", "ksps", "=", "sys", ".", "list_keyspaces", "(", ")", "if", "keyspace", ":", "if", "(", "keyspace", "in", "ksps", ")", ":", "return", "list", "(", "sys", ".", "get_keyspace_column_families", "(", "keyspace", ")", ".", "keys", "(", ")", ")", "else", ":", "return", "None", "else", ":", "ret", "=", "{", "}", "for", "kspace", "in", "ksps", ":", "ret", "[", "kspace", "]", "=", "list", "(", "sys", ".", "get_keyspace_column_families", "(", "kspace", ")", ".", "keys", "(", ")", ")", "return", "ret" ]
return existing column families for all keyspaces or just the provided one .
train
true
12,380
def load_augmentation_class():
    try:
        class_name = desktop.conf.AUTH.USER_AUGMENTOR.get()
        i = class_name.rfind('.')
        (module, attr) = (class_name[:i], class_name[(i + 1):])
        mod = import_module(module)
        klass = getattr(mod, attr)
        LOG.info(('Augmenting users with class: %s' % (klass,)))
        return klass
    except:
        LOG.exception('failed to augment class')
        raise ImproperlyConfigured(('Could not find user_augmentation_class: %s' % (class_name,)))
[ "def", "load_augmentation_class", "(", ")", ":", "try", ":", "class_name", "=", "desktop", ".", "conf", ".", "AUTH", ".", "USER_AUGMENTOR", ".", "get", "(", ")", "i", "=", "class_name", ".", "rfind", "(", "'.'", ")", "(", "module", ",", "attr", ")", "=", "(", "class_name", "[", ":", "i", "]", ",", "class_name", "[", "(", "i", "+", "1", ")", ":", "]", ")", "mod", "=", "import_module", "(", "module", ")", "klass", "=", "getattr", "(", "mod", ",", "attr", ")", "LOG", ".", "info", "(", "(", "'Augmenting users with class: %s'", "%", "(", "klass", ",", ")", ")", ")", "return", "klass", "except", ":", "LOG", ".", "exception", "(", "'failed to augment class'", ")", "raise", "ImproperlyConfigured", "(", "(", "'Could not find user_augmentation_class: %s'", "%", "(", "class_name", ",", ")", ")", ")" ]
loads the user augmentation class .
train
false
12,381
def set_ssh_port(port=22):
    _current = global_settings()
    if (_current['Global Settings']['SSH_PORT']['VALUE'] == port):
        return True
    _xml = '<RIBCL VERSION="2.0">\n <LOGIN USER_LOGIN="adminname" PASSWORD="password">\n <RIB_INFO MODE="write">\n <MOD_GLOBAL_SETTINGS>\n <SSH_PORT value="{0}"/>\n </MOD_GLOBAL_SETTINGS>\n </RIB_INFO>\n </LOGIN>\n </RIBCL>'.format(port)
    return __execute_cmd('Configure_SSH_Port', _xml)
[ "def", "set_ssh_port", "(", "port", "=", "22", ")", ":", "_current", "=", "global_settings", "(", ")", "if", "(", "_current", "[", "'Global Settings'", "]", "[", "'SSH_PORT'", "]", "[", "'VALUE'", "]", "==", "port", ")", ":", "return", "True", "_xml", "=", "'<RIBCL VERSION=\"2.0\">\\n <LOGIN USER_LOGIN=\"adminname\" PASSWORD=\"password\">\\n <RIB_INFO MODE=\"write\">\\n <MOD_GLOBAL_SETTINGS>\\n <SSH_PORT value=\"{0}\"/>\\n </MOD_GLOBAL_SETTINGS>\\n </RIB_INFO>\\n </LOGIN>\\n </RIBCL>'", ".", "format", "(", "port", ")", "return", "__execute_cmd", "(", "'Configure_SSH_Port'", ",", "_xml", ")" ]
set ssh port .
train
true
12,382
def describe_pipelines(pipeline_ids, region=None, key=None, keyid=None, profile=None):
    client = _get_client(region, key, keyid, profile)
    r = {}
    try:
        r['result'] = client.describe_pipelines(pipelineIds=pipeline_ids)
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
        r['error'] = str(e)
    return r
[ "def", "describe_pipelines", "(", "pipeline_ids", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "client", "=", "_get_client", "(", "region", ",", "key", ",", "keyid", ",", "profile", ")", "r", "=", "{", "}", "try", ":", "r", "[", "'result'", "]", "=", "client", ".", "describe_pipelines", "(", "pipelineIds", "=", "pipeline_ids", ")", "except", "(", "botocore", ".", "exceptions", ".", "BotoCoreError", ",", "botocore", ".", "exceptions", ".", "ClientError", ")", "as", "e", ":", "r", "[", "'error'", "]", "=", "str", "(", "e", ")", "return", "r" ]
retrieve metadata about one or more pipelines .
train
true
12,383
def create_server(config=None, **_options):
    if config.has_option('server', 'modules'):
        modules = shlex.split(config.get('server', 'modules'))
        for module in modules:
            e = __import__(module)
    app = Flask(__name__.rsplit('.', 1)[0])
    app.register_blueprint(slicer, config=config, **_options)
    return app
[ "def", "create_server", "(", "config", "=", "None", ",", "**", "_options", ")", ":", "if", "config", ".", "has_option", "(", "'server'", ",", "'modules'", ")", ":", "modules", "=", "shlex", ".", "split", "(", "config", ".", "get", "(", "'server'", ",", "'modules'", ")", ")", "for", "module", "in", "modules", ":", "e", "=", "__import__", "(", "module", ")", "app", "=", "Flask", "(", "__name__", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", ")", "app", ".", "register_blueprint", "(", "slicer", ",", "config", "=", "config", ",", "**", "_options", ")", "return", "app" ]
returns a flask server application .
train
false
12,385
def _time_prefix(fit_time): return (' t=%0.3f:' % fit_time).ljust(17)
[ "def", "_time_prefix", "(", "fit_time", ")", ":", "return", "(", "' t=%0.3f:'", "%", "fit_time", ")", ".", "ljust", "(", "17", ")" ]
helper to format log messages .
train
false
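A minimal usage sketch for the record above; `_time_prefix` is copied from the snippet and the timed work is a stand-in, invented for illustration.

import time

def _time_prefix(fit_time):
    # left-justify the formatted time so log messages line up in columns
    return (' t=%0.3f:' % fit_time).ljust(17)

start = time.time()
sum(range(100000))  # stand-in for a model fit
print(_time_prefix(time.time() - start) + 'fit finished')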
12,386
@importorskip('PIL') @importorskip(modname_tkinter) @skip(reason='"excludedimports" support is non-deterministically broken.') def test_pil_no_tkinter(pyi_builder): pyi_builder.test_source('\n import PIL.Image\n\n # Dynamically importing the Tkinter package should fail with an\n # "ImportError", implying "PIL" package hooks successfully excluded\n # Tkinter. To prevent PyInstaller from parsing this import and thus\n # freezing this extension with this test, this import is dynamic.\n try:\n __import__(\'{modname_tkinter}\')\n raise SystemExit(\'ERROR: Module {modname_tkinter} is bundled.\')\n except ImportError:\n pass\n\n # Dynamically importing the "_tkinter" shared library should also fail.\n try:\n __import__(\'_tkinter\')\n raise SystemExit(\'ERROR: Module _tkinter is bundled.\')\n except ImportError:\n pass\n '.format(modname_tkinter=modname_tkinter))
[ "@", "importorskip", "(", "'PIL'", ")", "@", "importorskip", "(", "modname_tkinter", ")", "@", "skip", "(", "reason", "=", "'\"excludedimports\" support is non-deterministically broken.'", ")", "def", "test_pil_no_tkinter", "(", "pyi_builder", ")", ":", "pyi_builder", ".", "test_source", "(", "'\\n import PIL.Image\\n\\n # Dynamically importing the Tkinter package should fail with an\\n # \"ImportError\", implying \"PIL\" package hooks successfully excluded\\n # Tkinter. To prevent PyInstaller from parsing this import and thus\\n # freezing this extension with this test, this import is dynamic.\\n try:\\n __import__(\\'{modname_tkinter}\\')\\n raise SystemExit(\\'ERROR: Module {modname_tkinter} is bundled.\\')\\n except ImportError:\\n pass\\n\\n # Dynamically importing the \"_tkinter\" shared library should also fail.\\n try:\\n __import__(\\'_tkinter\\')\\n raise SystemExit(\\'ERROR: Module _tkinter is bundled.\\')\\n except ImportError:\\n pass\\n '", ".", "format", "(", "modname_tkinter", "=", "modname_tkinter", ")", ")" ]
ensure that the tkinter package excluded by pil package hooks is unimportable by frozen applications explicitly importing only the latter .
train
false
12,387
def _check_alignment(num_words, num_mots, alignment): assert (type(alignment) is Alignment) if (not all(((0 <= pair[0] < num_words) for pair in alignment))): raise IndexError(u'Alignment is outside boundary of words') if (not all((((pair[1] is None) or (0 <= pair[1] < num_mots)) for pair in alignment))): raise IndexError(u'Alignment is outside boundary of mots')
[ "def", "_check_alignment", "(", "num_words", ",", "num_mots", ",", "alignment", ")", ":", "assert", "(", "type", "(", "alignment", ")", "is", "Alignment", ")", "if", "(", "not", "all", "(", "(", "(", "0", "<=", "pair", "[", "0", "]", "<", "num_words", ")", "for", "pair", "in", "alignment", ")", ")", ")", ":", "raise", "IndexError", "(", "u'Alignment is outside boundary of words'", ")", "if", "(", "not", "all", "(", "(", "(", "(", "pair", "[", "1", "]", "is", "None", ")", "or", "(", "0", "<=", "pair", "[", "1", "]", "<", "num_mots", ")", ")", "for", "pair", "in", "alignment", ")", ")", ")", ":", "raise", "IndexError", "(", "u'Alignment is outside boundary of mots'", ")" ]
check whether the alignments are legal .
train
false
12,388
def getPlaneDot(vec3First, vec3Second): return ((vec3First.x * vec3Second.x) + (vec3First.y * vec3Second.y))
[ "def", "getPlaneDot", "(", "vec3First", ",", "vec3Second", ")", ":", "return", "(", "(", "vec3First", ".", "x", "*", "vec3Second", ".", "x", ")", "+", "(", "vec3First", ".", "y", "*", "vec3Second", ".", "y", ")", ")" ]
get the dot product of the x and y components of a pair of vector3s .
train
false
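A hedged usage sketch; the original operates on engine-provided Vector3 objects, so a namedtuple stands in here purely for illustration.

from collections import namedtuple

Vec3 = namedtuple('Vec3', 'x y z')  # stand-in for the real vector class

def getPlaneDot(vec3First, vec3Second):
    # dot product restricted to the x/y plane; z is ignored
    return (vec3First.x * vec3Second.x) + (vec3First.y * vec3Second.y)

print(getPlaneDot(Vec3(1.0, 2.0, 9.0), Vec3(3.0, 4.0, -9.0)))  # -> 11.0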
12,389
def _tenths_version(full_version): match = _tenths_version_pattern.match(full_version) if match: return match.group(1) return ''
[ "def", "_tenths_version", "(", "full_version", ")", ":", "match", "=", "_tenths_version_pattern", ".", "match", "(", "full_version", ")", "if", "match", ":", "return", "match", ".", "group", "(", "1", ")", "return", "''" ]
return the major and minor version numbers from a full version string .
train
false
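The snippet above depends on a module-level `_tenths_version_pattern` that is not shown; the regex below is an assumption chosen to match the described major.minor behavior.

import re

# assumed definition; the real pattern lives elsewhere in the source module
_tenths_version_pattern = re.compile(r'^(\d+\.\d+)')

def _tenths_version(full_version):
    match = _tenths_version_pattern.match(full_version)
    if match:
        return match.group(1)
    return ''

print(_tenths_version('3.11.4'))   # -> '3.11'
print(_tenths_version('unknown'))  # -> ''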
12,390
def _get_extra_data_path(home_dir=None): global _temp_home_dir if (home_dir is None): home_dir = os.environ.get('_MNE_FAKE_HOME_DIR') if (home_dir is None): if ('nt' == os.name.lower()): home_dir = os.getenv('APPDATA') elif (os.getenv('MNE_DONTWRITE_HOME', '') == 'true'): if (_temp_home_dir is None): _temp_home_dir = tempfile.mkdtemp() atexit.register(partial(shutil.rmtree, _temp_home_dir, ignore_errors=True)) home_dir = _temp_home_dir else: home_dir = os.path.expanduser('~') if (home_dir is None): raise ValueError('mne-python config file path could not be determined, please report this error to mne-python developers') return op.join(home_dir, '.mne')
[ "def", "_get_extra_data_path", "(", "home_dir", "=", "None", ")", ":", "global", "_temp_home_dir", "if", "(", "home_dir", "is", "None", ")", ":", "home_dir", "=", "os", ".", "environ", ".", "get", "(", "'_MNE_FAKE_HOME_DIR'", ")", "if", "(", "home_dir", "is", "None", ")", ":", "if", "(", "'nt'", "==", "os", ".", "name", ".", "lower", "(", ")", ")", ":", "home_dir", "=", "os", ".", "getenv", "(", "'APPDATA'", ")", "elif", "(", "os", ".", "getenv", "(", "'MNE_DONTWRITE_HOME'", ",", "''", ")", "==", "'true'", ")", ":", "if", "(", "_temp_home_dir", "is", "None", ")", ":", "_temp_home_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "atexit", ".", "register", "(", "partial", "(", "shutil", ".", "rmtree", ",", "_temp_home_dir", ",", "ignore_errors", "=", "True", ")", ")", "home_dir", "=", "_temp_home_dir", "else", ":", "home_dir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "if", "(", "home_dir", "is", "None", ")", ":", "raise", "ValueError", "(", "'mne-python config file path could not be determined, please report this error to mne-python developers'", ")", "return", "op", ".", "join", "(", "home_dir", ",", "'.mne'", ")" ]
get path to extra data .
train
false
12,391
def cgsnapshot_creating_from_src(): return IMPL.cgsnapshot_creating_from_src()
[ "def", "cgsnapshot_creating_from_src", "(", ")", ":", "return", "IMPL", ".", "cgsnapshot_creating_from_src", "(", ")" ]
get a filter that checks if a cgsnapshot is being created from a cg .
train
false
12,392
@flaskbb.group() def translations(): pass
[ "@", "flaskbb", ".", "group", "(", ")", "def", "translations", "(", ")", ":", "pass" ]
translations command sub group .
train
false
12,393
def safe_xml_str(s, use_encoding='utf-8'): illegal_xml = re.compile(u'[\x00-\x08\x0b\x0c\x0e-\x1f\ud800-\udfff\ufffe\uffff]') if (not isinstance(s, unicode)): if isinstance(s, str): s = unicode(s, use_encoding, errors='replace') else: s = unicode(s) s = illegal_xml.sub(u'\ufffd', s) return s
[ "def", "safe_xml_str", "(", "s", ",", "use_encoding", "=", "'utf-8'", ")", ":", "illegal_xml", "=", "re", ".", "compile", "(", "u'[\\x00-\\x08\\x0b\\x0c\\x0e-\\x1f\\ud800-\\udfff\\ufffe\\uffff]'", ")", "if", "(", "not", "isinstance", "(", "s", ",", "unicode", ")", ")", ":", "if", "isinstance", "(", "s", ",", "str", ")", ":", "s", "=", "unicode", "(", "s", ",", "use_encoding", ",", "errors", "=", "'replace'", ")", "else", ":", "s", "=", "unicode", "(", "s", ")", "s", "=", "illegal_xml", ".", "sub", "(", "u'\\ufffd'", ",", "s", ")", "return", "s" ]
replace invalid-in-xml unicode control characters with ufffd .
train
false
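The snippet above is Python 2 code (`unicode`/`str` split); a rough Python 3 rendering of the same idea, offered only as a sketch, not the record's own code.

import re

_illegal_xml = re.compile('[\x00-\x08\x0b\x0c\x0e-\x1f\ud800-\udfff\ufffe\uffff]')

def safe_xml_str_py3(s, use_encoding='utf-8'):
    # decode bytes defensively, then replace XML-illegal code points with U+FFFD
    if isinstance(s, bytes):
        s = s.decode(use_encoding, errors='replace')
    return _illegal_xml.sub('\ufffd', s)

print(safe_xml_str_py3('ok\x00bad'))  # -> 'ok\ufffdbad'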
12,397
def save_virtual_workbook(workbook): writer = ExcelWriter(workbook) temp_buffer = StringIO() try: archive = ZipFile(temp_buffer, 'w', ZIP_DEFLATED) writer.write_data(archive) finally: archive.close() virtual_workbook = temp_buffer.getvalue() temp_buffer.close() return virtual_workbook
[ "def", "save_virtual_workbook", "(", "workbook", ")", ":", "writer", "=", "ExcelWriter", "(", "workbook", ")", "temp_buffer", "=", "StringIO", "(", ")", "try", ":", "archive", "=", "ZipFile", "(", "temp_buffer", ",", "'w'", ",", "ZIP_DEFLATED", ")", "writer", ".", "write_data", "(", "archive", ")", "finally", ":", "archive", ".", "close", "(", ")", "virtual_workbook", "=", "temp_buffer", ".", "getvalue", "(", ")", "temp_buffer", ".", "close", "(", ")", "return", "virtual_workbook" ]
return an in-memory workbook .
train
false
12,398
def is_status_update_request(request_data): return any((('status' in update) for update in request_data))
[ "def", "is_status_update_request", "(", "request_data", ")", ":", "return", "any", "(", "(", "(", "'status'", "in", "update", ")", "for", "update", "in", "request_data", ")", ")" ]
returns true if request_data contains status update else false .
train
false
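A quick illustration with made-up payloads; the dict shapes are assumptions, since the record only shows that each update is checked for a 'status' key.

def is_status_update_request(request_data):
    return any(('status' in update) for update in request_data)

print(is_status_update_request([{'status': 'done'}, {'note': 'x'}]))  # -> True
print(is_status_update_request([{'note': 'x'}]))                      # -> False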
12,400
def estimate(path): if (not os.path.exists(path)): raise CommandExecutionError('Path "{0}" was not found.'.format(path)) out = __salt__['cmd.run_all']('xfs_estimate -v {0}'.format(path)) _verify_run(out) return _xfs_estimate_output(out['stdout'])
[ "def", "estimate", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Path \"{0}\" was not found.'", ".", "format", "(", "path", ")", ")", "out", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'xfs_estimate -v {0}'", ".", "format", "(", "path", ")", ")", "_verify_run", "(", "out", ")", "return", "_xfs_estimate_output", "(", "out", "[", "'stdout'", "]", ")" ]
estimate the space that an xfs filesystem will take .
train
true
12,401
@frappe.whitelist() def make(doctype=None, name=None, content=None, subject=None, sent_or_received=u'Sent', sender=None, sender_full_name=None, recipients=None, communication_medium=u'Email', send_email=False, print_html=None, print_format=None, attachments=u'[]', send_me_a_copy=False, cc=None, flags=None, read_receipt=None): is_error_report = ((doctype == u'User') and (name == frappe.session.user) and (subject == u'Error Report')) send_me_a_copy = cint(send_me_a_copy) if (doctype and name and (not is_error_report) and (not frappe.has_permission(doctype, u'email', name)) and (not (flags or {}).get(u'ignore_doctype_permissions'))): raise frappe.PermissionError(u'You are not allowed to send emails related to: {doctype} {name}'.format(doctype=doctype, name=name)) if (not sender): sender = get_formatted_email(frappe.session.user) comm = frappe.get_doc({u'doctype': u'Communication', u'subject': subject, u'content': content, u'sender': sender, u'sender_full_name': sender_full_name, u'recipients': recipients, u'cc': (cc or None), u'communication_medium': communication_medium, u'sent_or_received': sent_or_received, u'reference_doctype': doctype, u'reference_name': name, u'message_id': get_message_id().strip(u' <>'), u'read_receipt': read_receipt}) comm.insert(ignore_permissions=True) if (not doctype): comm.db_set(dict(reference_doctype=u'Communication', reference_name=comm.name)) if isinstance(attachments, basestring): attachments = json.loads(attachments) if attachments: add_attachments(comm.name, attachments) frappe.db.commit() if cint(send_email): comm.send(print_html, print_format, attachments, send_me_a_copy=send_me_a_copy) return {u'name': comm.name, u'emails_not_sent_to': (u', '.join(comm.emails_not_sent_to) if hasattr(comm, u'emails_not_sent_to') else None)}
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "make", "(", "doctype", "=", "None", ",", "name", "=", "None", ",", "content", "=", "None", ",", "subject", "=", "None", ",", "sent_or_received", "=", "u'Sent'", ",", "sender", "=", "None", ",", "sender_full_name", "=", "None", ",", "recipients", "=", "None", ",", "communication_medium", "=", "u'Email'", ",", "send_email", "=", "False", ",", "print_html", "=", "None", ",", "print_format", "=", "None", ",", "attachments", "=", "u'[]'", ",", "send_me_a_copy", "=", "False", ",", "cc", "=", "None", ",", "flags", "=", "None", ",", "read_receipt", "=", "None", ")", ":", "is_error_report", "=", "(", "(", "doctype", "==", "u'User'", ")", "and", "(", "name", "==", "frappe", ".", "session", ".", "user", ")", "and", "(", "subject", "==", "u'Error Report'", ")", ")", "send_me_a_copy", "=", "cint", "(", "send_me_a_copy", ")", "if", "(", "doctype", "and", "name", "and", "(", "not", "is_error_report", ")", "and", "(", "not", "frappe", ".", "has_permission", "(", "doctype", ",", "u'email'", ",", "name", ")", ")", "and", "(", "not", "(", "flags", "or", "{", "}", ")", ".", "get", "(", "u'ignore_doctype_permissions'", ")", ")", ")", ":", "raise", "frappe", ".", "PermissionError", "(", "u'You are not allowed to send emails related to: {doctype} {name}'", ".", "format", "(", "doctype", "=", "doctype", ",", "name", "=", "name", ")", ")", "if", "(", "not", "sender", ")", ":", "sender", "=", "get_formatted_email", "(", "frappe", ".", "session", ".", "user", ")", "comm", "=", "frappe", ".", "get_doc", "(", "{", "u'doctype'", ":", "u'Communication'", ",", "u'subject'", ":", "subject", ",", "u'content'", ":", "content", ",", "u'sender'", ":", "sender", ",", "u'sender_full_name'", ":", "sender_full_name", ",", "u'recipients'", ":", "recipients", ",", "u'cc'", ":", "(", "cc", "or", "None", ")", ",", "u'communication_medium'", ":", "communication_medium", ",", "u'sent_or_received'", ":", "sent_or_received", ",", "u'reference_doctype'", ":", "doctype", ",", "u'reference_name'", ":", "name", ",", "u'message_id'", ":", "get_message_id", "(", ")", ".", "strip", "(", "u' <>'", ")", ",", "u'read_receipt'", ":", "read_receipt", "}", ")", "comm", ".", "insert", "(", "ignore_permissions", "=", "True", ")", "if", "(", "not", "doctype", ")", ":", "comm", ".", "db_set", "(", "dict", "(", "reference_doctype", "=", "u'Communication'", ",", "reference_name", "=", "comm", ".", "name", ")", ")", "if", "isinstance", "(", "attachments", ",", "basestring", ")", ":", "attachments", "=", "json", ".", "loads", "(", "attachments", ")", "if", "attachments", ":", "add_attachments", "(", "comm", ".", "name", ",", "attachments", ")", "frappe", ".", "db", ".", "commit", "(", ")", "if", "cint", "(", "send_email", ")", ":", "comm", ".", "send", "(", "print_html", ",", "print_format", ",", "attachments", ",", "send_me_a_copy", "=", "send_me_a_copy", ")", "return", "{", "u'name'", ":", "comm", ".", "name", ",", "u'emails_not_sent_to'", ":", "(", "u', '", ".", "join", "(", "comm", ".", "emails_not_sent_to", ")", "if", "hasattr", "(", "comm", ",", "u'emails_not_sent_to'", ")", "else", "None", ")", "}" ]
make a new communication record and optionally send it as an email .
train
false
12,402
def safe_no_dnn_workmem_bwd(workmem): if workmem: raise RuntimeError('The option `dnn.conv.workmem_bwd` has been removed and should not be used anymore. Please use the options `dnn.conv.algo_bwd_filter` and `dnn.conv.algo_bwd_data` instead.') return True
[ "def", "safe_no_dnn_workmem_bwd", "(", "workmem", ")", ":", "if", "workmem", ":", "raise", "RuntimeError", "(", "'The option `dnn.conv.workmem_bwd` has been removed and should not be used anymore. Please use the options `dnn.conv.algo_bwd_filter` and `dnn.conv.algo_bwd_data` instead.'", ")", "return", "True" ]
make sure the user is not attempting to use the removed dnn.conv.workmem_bwd option .
train
false
12,403
def feature_sign_search(dictionary, signals, sparsity, max_iter=1000, solution=None): dictionary = np.asarray(dictionary) _feature_sign_checkargs(dictionary, signals, sparsity, max_iter, solution) signals_ndim = signals.ndim signals = np.atleast_2d(signals) if (solution is None): solution = np.zeros((signals.shape[0], dictionary.shape[1]), dtype=signals.dtype) orig_sol = None else: orig_sol = solution solution = np.atleast_2d(solution) for (row, (signal, sol)) in enumerate(izip(signals, solution)): (_, iters) = _feature_sign_search_single(dictionary, signal, sparsity, max_iter, sol) if (iters >= max_iter): log.warning(('maximum number of iterations reached when optimizing code for training case %d; solution may not be optimal' % iters)) if ((orig_sol is not None) and (orig_sol.ndim == 1)): solution = orig_sol elif ((orig_sol is None) and (signals_ndim == 1)): solution = solution.squeeze() return solution
[ "def", "feature_sign_search", "(", "dictionary", ",", "signals", ",", "sparsity", ",", "max_iter", "=", "1000", ",", "solution", "=", "None", ")", ":", "dictionary", "=", "np", ".", "asarray", "(", "dictionary", ")", "_feature_sign_checkargs", "(", "dictionary", ",", "signals", ",", "sparsity", ",", "max_iter", ",", "solution", ")", "signals_ndim", "=", "signals", ".", "ndim", "signals", "=", "np", ".", "atleast_2d", "(", "signals", ")", "if", "(", "solution", "is", "None", ")", ":", "solution", "=", "np", ".", "zeros", "(", "(", "signals", ".", "shape", "[", "0", "]", ",", "dictionary", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "signals", ".", "dtype", ")", "orig_sol", "=", "None", "else", ":", "orig_sol", "=", "solution", "solution", "=", "np", ".", "atleast_2d", "(", "solution", ")", "for", "(", "row", ",", "(", "signal", ",", "sol", ")", ")", "in", "enumerate", "(", "izip", "(", "signals", ",", "solution", ")", ")", ":", "(", "_", ",", "iters", ")", "=", "_feature_sign_search_single", "(", "dictionary", ",", "signal", ",", "sparsity", ",", "max_iter", ",", "sol", ")", "if", "(", "iters", ">=", "max_iter", ")", ":", "log", ".", "warning", "(", "(", "'maximum number of iterations reached when optimizing code for training case %d; solution may not be optimal'", "%", "iters", ")", ")", "if", "(", "(", "orig_sol", "is", "not", "None", ")", "and", "(", "orig_sol", ".", "ndim", "==", "1", ")", ")", ":", "solution", "=", "orig_sol", "elif", "(", "(", "orig_sol", "is", "None", ")", "and", "(", "signals_ndim", "==", "1", ")", ")", ":", "solution", "=", "solution", ".", "squeeze", "(", ")", "return", "solution" ]
solve l1-penalized quadratic minimization problems with feature-sign search .
train
false
12,404
def _get_importer(path_name): cache = sys.path_importer_cache try: importer = cache[path_name] except KeyError: cache[path_name] = None for hook in sys.path_hooks: try: importer = hook(path_name) break except ImportError: pass else: try: importer = imp.NullImporter(path_name) except ImportError: return None cache[path_name] = importer return importer
[ "def", "_get_importer", "(", "path_name", ")", ":", "cache", "=", "sys", ".", "path_importer_cache", "try", ":", "importer", "=", "cache", "[", "path_name", "]", "except", "KeyError", ":", "cache", "[", "path_name", "]", "=", "None", "for", "hook", "in", "sys", ".", "path_hooks", ":", "try", ":", "importer", "=", "hook", "(", "path_name", ")", "break", "except", "ImportError", ":", "pass", "else", ":", "try", ":", "importer", "=", "imp", ".", "NullImporter", "(", "path_name", ")", "except", "ImportError", ":", "return", "None", "cache", "[", "path_name", "]", "=", "importer", "return", "importer" ]
python version of pyimport_getimporter c api function .
train
true
12,405
def safe_file_dump(filename, payload): with safe_open(filename, u'wb') as f: f.write(payload)
[ "def", "safe_file_dump", "(", "filename", ",", "payload", ")", ":", "with", "safe_open", "(", "filename", ",", "u'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "payload", ")" ]
write a string to a file .
train
false
12,406
def smuggle_url(url, data): sdata = compat_urllib_parse.urlencode({u'__youtubedl_smuggle': json.dumps(data)}) return ((url + u'#') + sdata)
[ "def", "smuggle_url", "(", "url", ",", "data", ")", ":", "sdata", "=", "compat_urllib_parse", ".", "urlencode", "(", "{", "u'__youtubedl_smuggle'", ":", "json", ".", "dumps", "(", "data", ")", "}", ")", "return", "(", "(", "url", "+", "u'#'", ")", "+", "sdata", ")" ]
pass additional data in a url for internal use .
train
false
12,408
def writedocs(dir, pkgpath='', done=None): if (done is None): done = {} for (importer, modname, ispkg) in pkgutil.walk_packages([dir], pkgpath): writedoc(modname) return
[ "def", "writedocs", "(", "dir", ",", "pkgpath", "=", "''", ",", "done", "=", "None", ")", ":", "if", "(", "done", "is", "None", ")", ":", "done", "=", "{", "}", "for", "(", "importer", ",", "modname", ",", "ispkg", ")", "in", "pkgutil", ".", "walk_packages", "(", "[", "dir", "]", ",", "pkgpath", ")", ":", "writedoc", "(", "modname", ")", "return" ]
write out html documentation for all modules in a directory tree .
train
false
12,409
def translate(s, a, b=None, c=None): from sympy.core.compatibility import maketrans try: ''.translate(None, '') py3 = False except TypeError: py3 = True mr = {} if (a is None): assert (c is None) if (not b): return s c = b a = b = '' elif (type(a) is dict): short = {} for k in list(a.keys()): if ((len(k) == 1) and (len(a[k]) == 1)): short[k] = a.pop(k) mr = a c = b if short: (a, b) = [''.join(i) for i in list(zip(*short.items()))] else: a = b = '' else: assert (len(a) == len(b)) if py3: if c: s = s.translate(maketrans('', '', c)) s = replace(s, mr) return s.translate(maketrans(a, b)) else: if c: c = list(c) rem = {} for i in range((-1), ((-1) - len(c)), (-1)): if (ord(c[i]) > 255): rem[c[i]] = '' c.pop(i) s = s.translate(None, ''.join(c)) s = replace(s, rem) if a: a = list(a) b = list(b) for i in range((-1), ((-1) - len(a)), (-1)): if ((ord(a[i]) > 255) or (ord(b[i]) > 255)): mr[a.pop(i)] = b.pop(i) a = ''.join(a) b = ''.join(b) s = replace(s, mr) table = maketrans(a, b) if ((type(table) is str) and (type(s) is str)): s = s.translate(table) else: s = s.translate(dict([(i, ord(c)) for (i, c) in enumerate(table)])) return s
[ "def", "translate", "(", "s", ",", "a", ",", "b", "=", "None", ",", "c", "=", "None", ")", ":", "from", "sympy", ".", "core", ".", "compatibility", "import", "maketrans", "try", ":", ".", "translate", "(", "None", ",", "''", ")", "py3", "=", "False", "except", "TypeError", ":", "py3", "=", "True", "mr", "=", "{", "}", "if", "(", "a", "is", "None", ")", ":", "assert", "(", "c", "is", "None", ")", "if", "(", "not", "b", ")", ":", "return", "s", "c", "=", "b", "a", "=", "b", "=", "''", "elif", "(", "type", "(", "a", ")", "is", "dict", ")", ":", "short", "=", "{", "}", "for", "k", "in", "list", "(", "a", ".", "keys", "(", ")", ")", ":", "if", "(", "(", "len", "(", "k", ")", "==", "1", ")", "and", "(", "len", "(", "a", "[", "k", "]", ")", "==", "1", ")", ")", ":", "short", "[", "k", "]", "=", "a", ".", "pop", "(", "k", ")", "mr", "=", "a", "c", "=", "b", "if", "short", ":", "(", "a", ",", "b", ")", "=", "[", "''", ".", "join", "(", "i", ")", "for", "i", "in", "list", "(", "zip", "(", "*", "short", ".", "items", "(", ")", ")", ")", "]", "else", ":", "a", "=", "b", "=", "''", "else", ":", "assert", "(", "len", "(", "a", ")", "==", "len", "(", "b", ")", ")", "if", "py3", ":", "if", "c", ":", "s", "=", "s", ".", "translate", "(", "maketrans", "(", "''", ",", "''", ",", "c", ")", ")", "s", "=", "replace", "(", "s", ",", "mr", ")", "return", "s", ".", "translate", "(", "maketrans", "(", "a", ",", "b", ")", ")", "else", ":", "if", "c", ":", "c", "=", "list", "(", "c", ")", "rem", "=", "{", "}", "for", "i", "in", "range", "(", "(", "-", "1", ")", ",", "(", "(", "-", "1", ")", "-", "len", "(", "c", ")", ")", ",", "(", "-", "1", ")", ")", ":", "if", "(", "ord", "(", "c", "[", "i", "]", ")", ">", "255", ")", ":", "rem", "[", "c", "[", "i", "]", "]", "=", "''", "c", ".", "pop", "(", "i", ")", "s", "=", "s", ".", "translate", "(", "None", ",", "''", ".", "join", "(", "c", ")", ")", "s", "=", "replace", "(", "s", ",", "rem", ")", "if", "a", ":", "a", "=", "list", "(", "a", ")", "b", "=", "list", "(", "b", ")", "for", "i", "in", "range", "(", "(", "-", "1", ")", ",", "(", "(", "-", "1", ")", "-", "len", "(", "a", ")", ")", ",", "(", "-", "1", ")", ")", ":", "if", "(", "(", "ord", "(", "a", "[", "i", "]", ")", ">", "255", ")", "or", "(", "ord", "(", "b", "[", "i", "]", ")", ">", "255", ")", ")", ":", "mr", "[", "a", ".", "pop", "(", "i", ")", "]", "=", "b", ".", "pop", "(", "i", ")", "a", "=", "''", ".", "join", "(", "a", ")", "b", "=", "''", ".", "join", "(", "b", ")", "s", "=", "replace", "(", "s", ",", "mr", ")", "table", "=", "maketrans", "(", "a", ",", "b", ")", "if", "(", "(", "type", "(", "table", ")", "is", "str", ")", "and", "(", "type", "(", "s", ")", "is", "str", ")", ")", ":", "s", "=", "s", ".", "translate", "(", "table", ")", "else", ":", "s", "=", "s", ".", "translate", "(", "dict", "(", "[", "(", "i", ",", "ord", "(", "c", ")", ")", "for", "(", "i", ",", "c", ")", "in", "enumerate", "(", "table", ")", "]", ")", ")", "return", "s" ]
return s where characters have been replaced or deleted .
train
false
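This is sympy's string-translation helper; assuming sympy is installed, it is importable as below, and the three call shapes mirror the branches in the snippet (delete, position-wise map, dict map).

from sympy.utilities.misc import translate

print(translate('abc', None, 'a'))     # delete 'a'           -> 'bc'
print(translate('abc', 'abc', 'xyz'))  # map a->x, b->y, c->z -> 'xyz'
print(translate('abc', {'ab': 'AB'}))  # substring mapping    -> 'ABc'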
12,410
def writeable_directory(arg): arg = directory(arg) if (not os.access(arg, os.W_OK)): raise argparse.ArgumentTypeError('{0} exists but is not writeable with its current permissions'.format(arg)) return arg
[ "def", "writeable_directory", "(", "arg", ")", ":", "arg", "=", "directory", "(", "arg", ")", "if", "(", "not", "os", ".", "access", "(", "arg", ",", "os", ".", "W_OK", ")", ")", ":", "raise", "argparse", ".", "ArgumentTypeError", "(", "'{0} exists but is not writeable with its current permissions'", ".", "format", "(", "arg", ")", ")", "return", "arg" ]
an argument type (for use with the type= argument to argparse .
train
false
12,411
def addPrefixes(path, dirprefix): for folder in dirprefix: if (not folder): continue if (not path): break basepath = os.path.basename(os.path.abspath(path)) if (folder != basepath.lower()): path = os.path.join(path, folder) return path
[ "def", "addPrefixes", "(", "path", ",", "dirprefix", ")", ":", "for", "folder", "in", "dirprefix", ":", "if", "(", "not", "folder", ")", ":", "continue", "if", "(", "not", "path", ")", ":", "break", "basepath", "=", "os", ".", "path", ".", "basename", "(", "os", ".", "path", ".", "abspath", "(", "path", ")", ")", "if", "(", "folder", "!=", "basepath", ".", "lower", "(", ")", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "folder", ")", "return", "path" ]
add list of prefixes as sub folders to path . /my/path and [a, b] will give /my/path/a/b .
train
false
12,412
def _convert_datetime_str(response): if response: return dict([((k, '{0}'.format(v)) if isinstance(v, datetime.date) else (k, v)) for (k, v) in six.iteritems(response)]) return None
[ "def", "_convert_datetime_str", "(", "response", ")", ":", "if", "response", ":", "return", "dict", "(", "[", "(", "(", "k", ",", "'{0}'", ".", "format", "(", "v", ")", ")", "if", "isinstance", "(", "v", ",", "datetime", ".", "date", ")", "else", "(", "k", ",", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "response", ")", "]", ")", "return", "None" ]
modify any key-value pair where value is a datetime object to a string .
train
true
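A self-contained sketch of the same idea without the `six` dependency; the sample response dict is invented.

import datetime

def convert_datetime_str(response):
    # stringify any date/datetime values so the dict is JSON-friendly
    if response:
        return {k: ('{0}'.format(v) if isinstance(v, datetime.date) else v)
                for k, v in response.items()}
    return None

print(convert_datetime_str({'created': datetime.date(2020, 1, 2), 'id': 7}))
# -> {'created': '2020-01-02', 'id': 7}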
12,414
def _increment_sync_num(*args, **kwargs): KEY = datetime.date.today() if (cache.get(KEY) is not None): cache.incr(KEY)
[ "def", "_increment_sync_num", "(", "*", "args", ",", "**", "kwargs", ")", ":", "KEY", "=", "datetime", ".", "date", ".", "today", "(", ")", "if", "(", "cache", ".", "get", "(", "KEY", ")", "is", "not", "None", ")", ":", "cache", ".", "incr", "(", "KEY", ")" ]
increment the sync_num .
train
false
12,415
def runu(command, timeout=30, withexitstatus=False, events=None, extra_args=None, logfile=None, cwd=None, env=None, **kwargs): kwargs.setdefault('encoding', 'utf-8') return run(command, timeout=timeout, withexitstatus=withexitstatus, events=events, extra_args=extra_args, logfile=logfile, cwd=cwd, env=env, **kwargs)
[ "def", "runu", "(", "command", ",", "timeout", "=", "30", ",", "withexitstatus", "=", "False", ",", "events", "=", "None", ",", "extra_args", "=", "None", ",", "logfile", "=", "None", ",", "cwd", "=", "None", ",", "env", "=", "None", ",", "**", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'encoding'", ",", "'utf-8'", ")", "return", "run", "(", "command", ",", "timeout", "=", "timeout", ",", "withexitstatus", "=", "withexitstatus", ",", "events", "=", "events", ",", "extra_args", "=", "extra_args", ",", "logfile", "=", "logfile", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ",", "**", "kwargs", ")" ]
deprecated: pass encoding to run() instead .
train
true
12,416
@register.simple_tag def whiteboard_messages(project=None, subproject=None, language=None): ret = [] whiteboards = WhiteboardMessage.objects.context_filter(project, subproject, language) for whiteboard in whiteboards: ret.append(render_to_string(u'message.html', {u'tags': u' '.join((whiteboard.category, u'whiteboard')), u'message': mark_safe(urlize(whiteboard.message, autoescape=True))})) return mark_safe(u'\n'.join(ret))
[ "@", "register", ".", "simple_tag", "def", "whiteboard_messages", "(", "project", "=", "None", ",", "subproject", "=", "None", ",", "language", "=", "None", ")", ":", "ret", "=", "[", "]", "whiteboards", "=", "WhiteboardMessage", ".", "objects", ".", "context_filter", "(", "project", ",", "subproject", ",", "language", ")", "for", "whiteboard", "in", "whiteboards", ":", "ret", ".", "append", "(", "render_to_string", "(", "u'message.html'", ",", "{", "u'tags'", ":", "u' '", ".", "join", "(", "(", "whiteboard", ".", "category", ",", "u'whiteboard'", ")", ")", ",", "u'message'", ":", "mark_safe", "(", "urlize", "(", "whiteboard", ".", "message", ",", "autoescape", "=", "True", ")", ")", "}", ")", ")", "return", "mark_safe", "(", "u'\\n'", ".", "join", "(", "ret", ")", ")" ]
displays whiteboard messages for given context .
train
false
12,417
def involvedpairs(pairs): for pair in pairs: (yield pair) left = (pair[0], (pair[1] - 1)) if ((left[1] >= 0) and (left not in pairs)): (yield left) top = ((pair[0] - 1), pair[1]) topright = ((pair[0] - 1), (pair[1] + 1)) if ((pair[0] > 0) and (top not in pairs) and (topright not in pairs)): (yield top)
[ "def", "involvedpairs", "(", "pairs", ")", ":", "for", "pair", "in", "pairs", ":", "(", "yield", "pair", ")", "left", "=", "(", "pair", "[", "0", "]", ",", "(", "pair", "[", "1", "]", "-", "1", ")", ")", "if", "(", "(", "left", "[", "1", "]", ">=", "0", ")", "and", "(", "left", "not", "in", "pairs", ")", ")", ":", "(", "yield", "left", ")", "top", "=", "(", "(", "pair", "[", "0", "]", "-", "1", ")", ",", "pair", "[", "1", "]", ")", "topright", "=", "(", "(", "pair", "[", "0", "]", "-", "1", ")", ",", "(", "pair", "[", "1", "]", "+", "1", ")", ")", "if", "(", "(", "pair", "[", "0", "]", ">", "0", ")", "and", "(", "top", "not", "in", "pairs", ")", "and", "(", "topright", "not", "in", "pairs", ")", ")", ":", "(", "yield", "top", ")" ]
get all the pixel pairs whose gradient involves an unknown pixel .
train
false
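A tiny illustration; pairs are (row, col) pixel coordinates, and the input set below is made up.

def involvedpairs(pairs):
    for pair in pairs:
        yield pair
        left = (pair[0], pair[1] - 1)
        if left[1] >= 0 and left not in pairs:
            yield left
        top = (pair[0] - 1, pair[1])
        topright = (pair[0] - 1, pair[1] + 1)
        if pair[0] > 0 and top not in pairs and topright not in pairs:
            yield top

print(sorted(involvedpairs({(1, 1)})))  # -> [(0, 1), (1, 0), (1, 1)]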
12,419
def RegisterCLSID(clsid, pythonClass): mapCLSIDToClass[str(clsid)] = pythonClass
[ "def", "RegisterCLSID", "(", "clsid", ",", "pythonClass", ")", ":", "mapCLSIDToClass", "[", "str", "(", "clsid", ")", "]", "=", "pythonClass" ]
register a class that wraps a clsid . this function allows a clsid to be globally associated with a class .
train
false
12,420
def p_assignment_expression_1(t): pass
[ "def", "p_assignment_expression_1", "(", "t", ")", ":", "pass" ]
assignment_expression : conditional_expression .
train
false
12,423
def proxy_url(url): match = app.url_map.bind('').match(url, method=request.method) response = app.view_functions[match[0]](**match[1]) return make_response(response)
[ "def", "proxy_url", "(", "url", ")", ":", "match", "=", "app", ".", "url_map", ".", "bind", "(", "''", ")", ".", "match", "(", "url", ",", "method", "=", "request", ".", "method", ")", "response", "=", "app", ".", "view_functions", "[", "match", "[", "0", "]", "]", "(", "**", "match", "[", "1", "]", ")", "return", "make_response", "(", "response", ")" ]
call flask view function for a given url .
train
false
12,424
def probe(*devices): for device in devices: _validate_device(device) cmd = 'partprobe -- {0}'.format(' '.join(devices)) out = __salt__['cmd.run'](cmd).splitlines() return out
[ "def", "probe", "(", "*", "devices", ")", ":", "for", "device", "in", "devices", ":", "_validate_device", "(", "device", ")", "cmd", "=", "'partprobe -- {0}'", ".", "format", "(", "' '", ".", "join", "(", "devices", ")", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "return", "out" ]
ask the kernel to update its local partition data .
train
true
12,426
def corner_fast(image, n=12, threshold=0.15): image = _prepare_grayscale_input_2D(image) image = np.ascontiguousarray(image) response = _corner_fast(image, n, threshold) return response
[ "def", "corner_fast", "(", "image", ",", "n", "=", "12", ",", "threshold", "=", "0.15", ")", ":", "image", "=", "_prepare_grayscale_input_2D", "(", "image", ")", "image", "=", "np", ".", "ascontiguousarray", "(", "image", ")", "response", "=", "_corner_fast", "(", "image", ",", "n", ",", "threshold", ")", "return", "response" ]
extract fast corners for a given image .
train
false
12,427
def double_line_checker(full_load, count_str): num = full_load.lower().count(count_str) if (num > 1): lines = full_load.count('\r\n') if (lines > 1): full_load = full_load.split('\r\n')[(-2)] return full_load
[ "def", "double_line_checker", "(", "full_load", ",", "count_str", ")", ":", "num", "=", "full_load", ".", "lower", "(", ")", ".", "count", "(", "count_str", ")", "if", "(", "num", ">", "1", ")", ":", "lines", "=", "full_load", ".", "count", "(", "'\\r\\n'", ")", "if", "(", "lines", ">", "1", ")", ":", "full_load", "=", "full_load", ".", "split", "(", "'\\r\\n'", ")", "[", "(", "-", "2", ")", "]", "return", "full_load" ]
check if count_str shows up twice .
train
false
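An illustration with a fabricated payload; when `count_str` occurs more than once and the load spans several CRLF lines, the second-to-last line is returned.

def double_line_checker(full_load, count_str):
    num = full_load.lower().count(count_str)
    if num > 1:
        lines = full_load.count('\r\n')
        if lines > 1:
            full_load = full_load.split('\r\n')[-2]
    return full_load

load = 'USER a\r\nUSER b\r\n'
print(double_line_checker(load, 'user'))  # -> 'USER b'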
12,428
@loader_option() def load_only(loadopt, *attrs): cloned = loadopt.set_column_strategy(attrs, {'deferred': False, 'instrument': True}) cloned.set_column_strategy('*', {'deferred': True, 'instrument': True}, {'undefer_pks': True}) return cloned
[ "@", "loader_option", "(", ")", "def", "load_only", "(", "loadopt", ",", "*", "attrs", ")", ":", "cloned", "=", "loadopt", ".", "set_column_strategy", "(", "attrs", ",", "{", "'deferred'", ":", "False", ",", "'instrument'", ":", "True", "}", ")", "cloned", ".", "set_column_strategy", "(", "'*'", ",", "{", "'deferred'", ":", "True", ",", "'instrument'", ":", "True", "}", ",", "{", "'undefer_pks'", ":", "True", "}", ")", "return", "cloned" ]
indicate that for a particular entity , only the given list of column-based attribute names should be loaded .
train
false
12,429
@pytest.mark.parametrize('fast_reader', [True, False, 'force']) def test_read_with_names_arg(fast_reader): with pytest.raises(ValueError): dat = ascii.read(['c d', 'e f'], names=('a',), guess=False, fast_reader=fast_reader)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'fast_reader'", ",", "[", "True", ",", "False", ",", "'force'", "]", ")", "def", "test_read_with_names_arg", "(", "fast_reader", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "dat", "=", "ascii", ".", "read", "(", "[", "'c d'", ",", "'e f'", "]", ",", "names", "=", "(", "'a'", ",", ")", ",", "guess", "=", "False", ",", "fast_reader", "=", "fast_reader", ")" ]
test that a bad value of names raises an exception .
train
false
12,430
def isNegative(phrase): return bool(re.search("\\b(no(t)?|don\\'t|stop|end)\\b", phrase, re.IGNORECASE))
[ "def", "isNegative", "(", "phrase", ")", ":", "return", "bool", "(", "re", ".", "search", "(", "\"\\\\b(no(t)?|don\\\\'t|stop|end)\\\\b\"", ",", "phrase", ",", "re", ".", "IGNORECASE", ")", ")" ]
returns true if the input phrase has a negative sentiment .
train
false
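Two quick probes of the regex; the sample phrases are invented, and the pattern below is written with an equivalent unescaped apostrophe.

import re

def isNegative(phrase):
    return bool(re.search(r"\b(no(t)?|don't|stop|end)\b", phrase, re.IGNORECASE))

print(isNegative("No, don't stop the music"))  # -> True
print(isNegative("keep the music playing"))    # -> False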
12,431
def IPAddress(address, version=None): if version: if (version == 4): return IPv4Address(address) elif (version == 6): return IPv6Address(address) try: return IPv4Address(address) except (AddressValueError, NetmaskValueError): pass try: return IPv6Address(address) except (AddressValueError, NetmaskValueError): pass raise ValueError(('%r does not appear to be an IPv4 or IPv6 address' % address))
[ "def", "IPAddress", "(", "address", ",", "version", "=", "None", ")", ":", "if", "version", ":", "if", "(", "version", "==", "4", ")", ":", "return", "IPv4Address", "(", "address", ")", "elif", "(", "version", "==", "6", ")", ":", "return", "IPv6Address", "(", "address", ")", "try", ":", "return", "IPv4Address", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "try", ":", "return", "IPv6Address", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "raise", "ValueError", "(", "(", "'%r does not appear to be an IPv4 or IPv6 address'", "%", "address", ")", ")" ]
take an ip string/int and return an object of the correct type .
train
true
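This factory predates the stdlib `ipaddress` module; on modern Python the same dispatch is available there, shown here for comparison rather than as the record's own code.

import ipaddress

print(ipaddress.ip_address('192.0.2.1'))    # -> 192.0.2.1   (IPv4Address)
print(ipaddress.ip_address('2001:db8::1'))  # -> 2001:db8::1 (IPv6Address)
try:
    ipaddress.ip_address('not-an-ip')
except ValueError as e:
    print(e)  # explains the address is neither IPv4 nor IPv6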
12,434
def _GetModule(fname): mod_name = ('win32com.gen_py.%s' % fname) mod = __import__(mod_name) return sys.modules[mod_name]
[ "def", "_GetModule", "(", "fname", ")", ":", "mod_name", "=", "(", "'win32com.gen_py.%s'", "%", "fname", ")", "mod", "=", "__import__", "(", "mod_name", ")", "return", "sys", ".", "modules", "[", "mod_name", "]" ]
given the name of a module in the gen_py directory , import and return it .
train
false
12,435
def ae(actual, predicted): return np.abs((np.array(actual) - np.array(predicted)))
[ "def", "ae", "(", "actual", ",", "predicted", ")", ":", "return", "np", ".", "abs", "(", "(", "np", ".", "array", "(", "actual", ")", "-", "np", ".", "array", "(", "predicted", ")", ")", ")" ]
computes the absolute error .
train
true
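Assuming numpy is available, the element-wise absolute error looks like:

import numpy as np

def ae(actual, predicted):
    return np.abs(np.array(actual) - np.array(predicted))

print(ae([3.0, -0.5, 2.0], [2.5, 0.0, 2.0]))  # -> [0.5 0.5 0. ]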
12,436
def retry_503(f): @wraps(f) def wrapper(*args, **kwargs): try: return f(*args, **kwargs) except HttpError as e: log.error('HTTP Error calling Google Analytics: %s', e) if (e.resp.status == 503): return f(*args, **kwargs) return wrapper
[ "def", "retry_503", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "except", "HttpError", "as", "e", ":", "log", ".", "error", "(", "'HTTP Error calling Google Analytics: %s'", ",", "e", ")", "if", "(", "e", ".", "resp", ".", "status", "==", "503", ")", ":", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
call f , retrying once if the call raises an http 503 error .
train
false
12,437
def start_map(name, handler_spec, reader_spec, mapper_parameters, shard_count=_DEFAULT_SHARD_COUNT, output_writer_spec=None, mapreduce_parameters=None, base_path=None, queue_name=None, eta=None, countdown=None, hooks_class_name=None, _app=None, transactional=False, transactional_parent=None): if (not shard_count): shard_count = _DEFAULT_SHARD_COUNT if (base_path is None): base_path = base_handler._DEFAULT_BASE_PATH if mapper_parameters: mapper_parameters = dict(mapper_parameters) if mapreduce_parameters: mapreduce_parameters = dict(mapreduce_parameters) mapper_spec = model.MapperSpec(handler_spec, reader_spec, mapper_parameters, shard_count, output_writer_spec=output_writer_spec) if (transactional and (not transactional_parent)): logging.error('transactional_parent should be specified for transactional starts.Your job will fail to start if mapreduce specification is too big.') return handlers.StartJobHandler._start_map(name, mapper_spec, (mapreduce_parameters or {}), base_path=base_path, queue_name=queue_name, eta=eta, countdown=countdown, hooks_class_name=hooks_class_name, _app=_app, transactional=transactional, parent_entity=transactional_parent)
[ "def", "start_map", "(", "name", ",", "handler_spec", ",", "reader_spec", ",", "mapper_parameters", ",", "shard_count", "=", "_DEFAULT_SHARD_COUNT", ",", "output_writer_spec", "=", "None", ",", "mapreduce_parameters", "=", "None", ",", "base_path", "=", "None", ",", "queue_name", "=", "None", ",", "eta", "=", "None", ",", "countdown", "=", "None", ",", "hooks_class_name", "=", "None", ",", "_app", "=", "None", ",", "transactional", "=", "False", ",", "transactional_parent", "=", "None", ")", ":", "if", "(", "not", "shard_count", ")", ":", "shard_count", "=", "_DEFAULT_SHARD_COUNT", "if", "(", "base_path", "is", "None", ")", ":", "base_path", "=", "base_handler", ".", "_DEFAULT_BASE_PATH", "if", "mapper_parameters", ":", "mapper_parameters", "=", "dict", "(", "mapper_parameters", ")", "if", "mapreduce_parameters", ":", "mapreduce_parameters", "=", "dict", "(", "mapreduce_parameters", ")", "mapper_spec", "=", "model", ".", "MapperSpec", "(", "handler_spec", ",", "reader_spec", ",", "mapper_parameters", ",", "shard_count", ",", "output_writer_spec", "=", "output_writer_spec", ")", "if", "(", "transactional", "and", "(", "not", "transactional_parent", ")", ")", ":", "logging", ".", "error", "(", "'transactional_parent should be specified for transactional starts.Your job will fail to start if mapreduce specification is too big.'", ")", "return", "handlers", ".", "StartJobHandler", ".", "_start_map", "(", "name", ",", "mapper_spec", ",", "(", "mapreduce_parameters", "or", "{", "}", ")", ",", "base_path", "=", "base_path", ",", "queue_name", "=", "queue_name", ",", "eta", "=", "eta", ",", "countdown", "=", "countdown", ",", "hooks_class_name", "=", "hooks_class_name", ",", "_app", "=", "_app", ",", "transactional", "=", "transactional", ",", "parent_entity", "=", "transactional_parent", ")" ]
start a new , mapper-only mapreduce .
train
false
12,438
def check_variable_type(allowed_type_funcs): def enumerated_type_check(var_name, val): for func in allowed_type_funcs: if (not func(var_name, val)): return None return (_('%s is not an allowed_type') % (var_name,)) return enumerated_type_check
[ "def", "check_variable_type", "(", "allowed_type_funcs", ")", ":", "def", "enumerated_type_check", "(", "var_name", ",", "val", ")", ":", "for", "func", "in", "allowed_type_funcs", ":", "if", "(", "not", "func", "(", "var_name", ",", "val", ")", ")", ":", "return", "None", "return", "(", "_", "(", "'%s is not an allowed_type'", ")", "%", "(", "var_name", ",", ")", ")", "return", "enumerated_type_check" ]
use this validator if an argument is of a variable type .
train
false
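A sketch of how the validator factory composes; the two per-type check functions are invented, each returning None when the value passes (the convention the snippet implies), and the gettext `_` wrapper is dropped for self-containment.

def check_variable_type(allowed_type_funcs):
    def enumerated_type_check(var_name, val):
        for func in allowed_type_funcs:
            if not func(var_name, val):
                return None  # one allowed type matched
        return '%s is not an allowed_type' % (var_name,)
    return enumerated_type_check

def is_str(name, v):
    return None if isinstance(v, str) else 'not str'

def is_int(name, v):
    return None if isinstance(v, int) else 'not int'

check = check_variable_type([is_str, is_int])
print(check('flavor', 'small'))  # -> None (valid)
print(check('flavor', 1.5))      # -> 'flavor is not an allowed_type'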
12,439
def get_python_exec(ver): try: return PYEXECS[ver] except KeyError: raise ValueError(('Version %s not supported/recognized' % ver))
[ "def", "get_python_exec", "(", "ver", ")", ":", "try", ":", "return", "PYEXECS", "[", "ver", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "'Version %s not supported/recognized'", "%", "ver", ")", ")" ]
return the executable of python for the given version .
train
false
12,441
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a pkcs#1 v1.5 signature scheme object for the given rsa key .
train
false
12,442
def compile_command(source, filename='<input>', symbol='single'): return _maybe_compile(_compile, source, filename, symbol)
[ "def", "compile_command", "(", "source", ",", "filename", "=", "'<input>'", ",", "symbol", "=", "'single'", ")", ":", "return", "_maybe_compile", "(", "_compile", ",", "source", ",", "filename", ",", "symbol", ")" ]
compile a command and determine whether it is incomplete .
train
false
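This wrapper mirrors the stdlib `codeop.compile_command`; a complete statement compiles to a code object, while an incomplete one yields None:

from codeop import compile_command

print(compile_command('x = 1'))  # <code object ...> (complete statement)
print(compile_command('if x:'))  # None (incomplete; more input expected)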
12,444
def _get_servernames(names): whitespace_re = re.compile('\\s+') names = re.sub(whitespace_re, ' ', names) return names.split(' ')
[ "def", "_get_servernames", "(", "names", ")", ":", "whitespace_re", "=", "re", ".", "compile", "(", "'\\\\s+'", ")", "names", "=", "re", ".", "sub", "(", "whitespace_re", ",", "' '", ",", "names", ")", "return", "names", ".", "split", "(", "' '", ")" ]
turns a server_name string into a list of server names .
train
false
12,445
def count_comments_handler(sender, **kwargs): comment = kwargs['comment'] if comment.is_public: entry = comment.content_object if isinstance(entry, Entry): entry.comment_count = (F('comment_count') + 1) entry.save(update_fields=['comment_count'])
[ "def", "count_comments_handler", "(", "sender", ",", "**", "kwargs", ")", ":", "comment", "=", "kwargs", "[", "'comment'", "]", "if", "comment", ".", "is_public", ":", "entry", "=", "comment", ".", "content_object", "if", "isinstance", "(", "entry", ",", "Entry", ")", ":", "entry", ".", "comment_count", "=", "(", "F", "(", "'comment_count'", ")", "+", "1", ")", "entry", ".", "save", "(", "update_fields", "=", "[", "'comment_count'", "]", ")" ]
update the comment_count of the entry when a public comment is posted .
train
true
12,446
def to_dict_of_lists(G, nodelist=None): if (nodelist is None): nodelist = G d = {} for n in nodelist: d[n] = [nbr for nbr in G.neighbors(n) if (nbr in nodelist)] return d
[ "def", "to_dict_of_lists", "(", "G", ",", "nodelist", "=", "None", ")", ":", "if", "(", "nodelist", "is", "None", ")", ":", "nodelist", "=", "G", "d", "=", "{", "}", "for", "n", "in", "nodelist", ":", "d", "[", "n", "]", "=", "[", "nbr", "for", "nbr", "in", "G", ".", "neighbors", "(", "n", ")", "if", "(", "nbr", "in", "nodelist", ")", "]", "return", "d" ]
return adjacency representation of graph as a dictionary of lists .
train
false
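Assuming networkx is installed, a short path graph illustrates the record above (the same helper ships as `networkx.to_dict_of_lists`):

import networkx as nx

G = nx.path_graph(3)  # edges: 0-1, 1-2
print(nx.to_dict_of_lists(G))          # -> {0: [1], 1: [0, 2], 2: [1]}
print(nx.to_dict_of_lists(G, [0, 1]))  # -> {0: [1], 1: [0]}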
12,448
def test_rgb(h, f): if (h[:2] == '\x01\xda'): return 'rgb'
[ "def", "test_rgb", "(", "h", ",", "f", ")", ":", "if", "(", "h", "[", ":", "2", "]", "==", "'\\x01\\xda'", ")", ":", "return", "'rgb'" ]
sgi image library .
train
false