id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
38,885
def ExportStateName(state):
    """Map a numeric state constant to its export string.

    Note that STATE_ERROR deliberately maps to 'NOT_GOT'.  Raises
    KeyError for an unrecognized state.
    """
    names = {
        STATE_READ: 'READ',
        STATE_GETTING: 'GETTING',
        STATE_GOT: 'GOT',
        STATE_ERROR: 'NOT_GOT',
    }
    return names[state]
[ "def", "ExportStateName", "(", "state", ")", ":", "return", "{", "STATE_READ", ":", "'READ'", ",", "STATE_GETTING", ":", "'GETTING'", ",", "STATE_GOT", ":", "'GOT'", ",", "STATE_ERROR", ":", "'NOT_GOT'", "}", "[", "state", "]" ]
converts a numeric state identifier to a string .
train
false
38,886
def user_or_ip(key_prefix):
    """Build a rate-limit key function for the given prefix.

    The returned callable maps a request to 'uip:<prefix>:<key>', where
    <key> is the authenticated user's primary key, or otherwise the
    client IP from X-Cluster-Client-Ip / REMOTE_ADDR.
    """
    def _user_or_ip(request):
        authenticated = hasattr(request, 'user') and request.user.is_authenticated()
        if authenticated:
            ident = str(request.user.pk)
        else:
            ident = request.META.get('HTTP_X_CLUSTER_CLIENT_IP',
                                     request.META['REMOTE_ADDR'])
        return 'uip:%s:%s' % (key_prefix, ident)
    return _user_or_ip
[ "def", "user_or_ip", "(", "key_prefix", ")", ":", "def", "_user_or_ip", "(", "request", ")", ":", "if", "(", "hasattr", "(", "request", ",", "'user'", ")", "and", "request", ".", "user", ".", "is_authenticated", "(", ")", ")", ":", "key", "=", "str", "(", "request", ".", "user", ".", "pk", ")", "else", ":", "key", "=", "request", ".", "META", ".", "get", "(", "'HTTP_X_CLUSTER_CLIENT_IP'", ",", "request", ".", "META", "[", "'REMOTE_ADDR'", "]", ")", "return", "(", "'uip:%s:%s'", "%", "(", "key_prefix", ",", "key", ")", ")", "return", "_user_or_ip" ]
used for generating rate limiting keys .
train
false
38,887
def setAllArgs(obj, argdict):
    """Copy every entry of argdict onto obj as an attribute.

    Only attributes that already exist on obj are set; unknown names
    produce a printed warning.  When obj is an XMLBuildable, set values
    are mirrored into obj.argdict and unknown ones are collected in
    obj._unknown_argdict.
    """
    xmlstore = isinstance(obj, XMLBuildable)
    for name in list(argdict.keys()):
        value = argdict[name]
        if not hasattr(obj, name):
            # NOTE: prints a tuple, reproducing the original output format.
            print(('Warning: parameter name', name, 'not found!'))
            if xmlstore:
                if not hasattr(obj, '_unknown_argdict'):
                    obj._unknown_argdict = {}
                obj._unknown_argdict[name] = value
            continue
        setattr(obj, name, value)
        if xmlstore:
            obj.argdict[name] = value
[ "def", "setAllArgs", "(", "obj", ",", "argdict", ")", ":", "xmlstore", "=", "isinstance", "(", "obj", ",", "XMLBuildable", ")", "for", "n", "in", "list", "(", "argdict", ".", "keys", "(", ")", ")", ":", "if", "hasattr", "(", "obj", ",", "n", ")", ":", "setattr", "(", "obj", ",", "n", ",", "argdict", "[", "n", "]", ")", "if", "xmlstore", ":", "obj", ".", "argdict", "[", "n", "]", "=", "argdict", "[", "n", "]", "else", ":", "print", "(", "(", "'Warning: parameter name'", ",", "n", ",", "'not found!'", ")", ")", "if", "xmlstore", ":", "if", "(", "not", "hasattr", "(", "obj", ",", "'_unknown_argdict'", ")", ")", ":", "obj", ".", "_unknown_argdict", "=", "{", "}", "obj", ".", "_unknown_argdict", "[", "n", "]", "=", "argdict", "[", "n", "]" ]
set all those internal variables which have the same name than an entry in the given objects dictionary .
train
false
38,888
def volume_code(volume):
    """Bin continuous volume data into a categorical code.

    Uses numpy.searchsorted against fixed bin edges, so the result is
    the index of the first edge >= volume (0 through 5).

    NOTE(review): the last two edges are both 10000000.0, which makes
    code 4 unreachable (volumes above 1e7 jump straight to 5).  The
    final edge was presumably meant to be larger — confirm against the
    data before changing it.
    """
    edges = [100000.0, 1000000.0, 5000000.0, 10000000.0, 10000000.0]
    return np.searchsorted(edges, volume)
[ "def", "volume_code", "(", "volume", ")", ":", "ind", "=", "np", ".", "searchsorted", "(", "[", "100000.0", ",", "1000000.0", ",", "5000000.0", ",", "10000000.0", ",", "10000000.0", "]", ",", "volume", ")", "return", "ind" ]
code the continuous volume data categorically .
train
false
38,889
def _areAllSDRsUnique(sdrDict): for (k1, v1) in sdrDict.iteritems(): for (k2, v2) in sdrDict.iteritems(): if ((k2 != k1) and ((v1 == v2).sum() == v1.size)): return False return True
[ "def", "_areAllSDRsUnique", "(", "sdrDict", ")", ":", "for", "(", "k1", ",", "v1", ")", "in", "sdrDict", ".", "iteritems", "(", ")", ":", "for", "(", "k2", ",", "v2", ")", "in", "sdrDict", ".", "iteritems", "(", ")", ":", "if", "(", "(", "k2", "!=", "k1", ")", "and", "(", "(", "v1", "==", "v2", ")", ".", "sum", "(", ")", "==", "v1", ".", "size", ")", ")", ":", "return", "False", "return", "True" ]
return true iff all the sdrs in the dict are unique .
train
false
38,892
def delete_api(name, description=None, region=None, key=None, keyid=None, profile=None):
    """Delete every REST API matching `name` (and optional description).

    Returns {'deleted': True, 'count': N} on success,
    {'deleted': False} when nothing matched, or
    {'deleted': False, 'error': ...} on a botocore ClientError.
    """
    try:
        conn_params = dict(region=region, key=key, keyid=keyid, profile=profile)
        found = _find_apis_by_name(name, description=description, **conn_params)
        matches = found.get('restapi')
        if not matches:
            return {'deleted': False}
        conn = _get_conn(**conn_params)
        for api in matches:
            conn.delete_rest_api(restApiId=api['id'])
        return {'deleted': True, 'count': len(matches)}
    except ClientError as e:
        return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "delete_api", "(", "name", ",", "description", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn_params", "=", "dict", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "r", "=", "_find_apis_by_name", "(", "name", ",", "description", "=", "description", ",", "**", "conn_params", ")", "apis", "=", "r", ".", "get", "(", "'restapi'", ")", "if", "apis", ":", "conn", "=", "_get_conn", "(", "**", "conn_params", ")", "for", "api", "in", "apis", ":", "conn", ".", "delete_rest_api", "(", "restApiId", "=", "api", "[", "'id'", "]", ")", "return", "{", "'deleted'", ":", "True", ",", "'count'", ":", "len", "(", "apis", ")", "}", "else", ":", "return", "{", "'deleted'", ":", "False", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'deleted'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
delete all rest api services with the given name and an optional api description ; returns {deleted: true} on success .
train
true
38,893
def Proxy(f):
    """Create a proxy method that forwards positional args to self.<f>."""
    def Wrapped(self, *args):
        target = getattr(self, f)
        return target(*args)
    return Wrapped
[ "def", "Proxy", "(", "f", ")", ":", "def", "Wrapped", "(", "self", ",", "*", "args", ")", ":", "return", "getattr", "(", "self", ",", "f", ")", "(", "*", "args", ")", "return", "Wrapped" ]
a helper to create a proxy method in a class .
train
true
38,894
def requireModule(name, default=None):
    """Import the module `name` via namedModule; on ImportError return
    `default` instead of raising."""
    try:
        module = namedModule(name)
    except ImportError:
        return default
    return module
[ "def", "requireModule", "(", "name", ",", "default", "=", "None", ")", ":", "try", ":", "return", "namedModule", "(", "name", ")", "except", "ImportError", ":", "return", "default" ]
try to import a module given its name .
train
false
38,895
def _get_hemi(s):
    """Return (hemi_name, hemi_index, surf_id) for a surface source space.

    Raises RuntimeError for non-surface source spaces and ValueError for
    an unrecognized surface ID.
    """
    if s['type'] != 'surf':
        raise RuntimeError('Only surface source spaces supported')
    surf_id = s['id']
    if surf_id == FIFF.FIFFV_MNE_SURF_LEFT_HEMI:
        return ('lh', 0, surf_id)
    if surf_id == FIFF.FIFFV_MNE_SURF_RIGHT_HEMI:
        return ('rh', 1, surf_id)
    raise ValueError('unknown surface ID %s' % surf_id)
[ "def", "_get_hemi", "(", "s", ")", ":", "if", "(", "s", "[", "'type'", "]", "!=", "'surf'", ")", ":", "raise", "RuntimeError", "(", "'Only surface source spaces supported'", ")", "if", "(", "s", "[", "'id'", "]", "==", "FIFF", ".", "FIFFV_MNE_SURF_LEFT_HEMI", ")", ":", "return", "(", "'lh'", ",", "0", ",", "s", "[", "'id'", "]", ")", "elif", "(", "s", "[", "'id'", "]", "==", "FIFF", ".", "FIFFV_MNE_SURF_RIGHT_HEMI", ")", ":", "return", "(", "'rh'", ",", "1", ",", "s", "[", "'id'", "]", ")", "else", ":", "raise", "ValueError", "(", "(", "'unknown surface ID %s'", "%", "s", "[", "'id'", "]", ")", ")" ]
get a hemisphere from a given source space .
train
false
38,897
def getReplaceableExportGcode(nameOfReplaceFile, replaceableExportGcode):
    """Apply alteration-file replacements to the export gcode text.

    Each line of the replace file is split on the ' DCTB ' delimiter
    (with a literal '\\n' standing in for it); the first field is
    replaced everywhere by the remaining fields joined with newlines.
    """
    replaceLines = settings.getAlterationLines(nameOfReplaceFile)
    if len(replaceLines) < 1:
        return replaceableExportGcode
    for replaceLine in replaceLines:
        fields = replaceLine.replace('\\n', ' DCTB ').split(' DCTB ')
        if len(fields) > 0:
            replaceableExportGcode = replaceableExportGcode.replace(
                fields[0], '\n'.join(fields[1:]))
    output = cStringIO.StringIO()
    gcodec.addLinesToCString(output, archive.getTextLines(replaceableExportGcode))
    return output.getvalue()
[ "def", "getReplaceableExportGcode", "(", "nameOfReplaceFile", ",", "replaceableExportGcode", ")", ":", "replaceLines", "=", "settings", ".", "getAlterationLines", "(", "nameOfReplaceFile", ")", "if", "(", "len", "(", "replaceLines", ")", "<", "1", ")", ":", "return", "replaceableExportGcode", "for", "replaceLine", "in", "replaceLines", ":", "splitLine", "=", "replaceLine", ".", "replace", "(", "'\\\\n'", ",", "' DCTB '", ")", ".", "split", "(", "' DCTB '", ")", "if", "(", "len", "(", "splitLine", ")", ">", "0", ")", ":", "replaceableExportGcode", "=", "replaceableExportGcode", ".", "replace", "(", "splitLine", "[", "0", "]", ",", "'\\n'", ".", "join", "(", "splitLine", "[", "1", ":", "]", ")", ")", "output", "=", "cStringIO", ".", "StringIO", "(", ")", "gcodec", ".", "addLinesToCString", "(", "output", ",", "archive", ".", "getTextLines", "(", "replaceableExportGcode", ")", ")", "return", "output", ".", "getvalue", "(", ")" ]
get text with strings replaced according to replace .
train
false
38,900
# Emit UPDATE statements for the value lists collected by
# _collect_post_update_commands().  One cached statement per table is built
# (WHERE-clause over the table's primary-key columns, bound by label), then
# records are grouped by (connection, set-of-param-keys) so each group can be
# executed as a single executemany() call.
# NOTE(review): the lambda rebinds `mapper` inside the comprehension,
# shadowing the parameter — presumably intentional tuple unpacking; verify.
def _emit_post_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): def update_stmt(): clause = sql.and_() for col in mapper._pks_by_table[table]: clause.clauses.append((col == sql.bindparam(col._label, type_=col.type))) return table.update(clause) statement = base_mapper._memo(('post_update', table), update_stmt) for (key, grouper) in groupby(update, (lambda rec: (rec[4], list(rec[2].keys())))): connection = key[0] multiparams = [params for (state, state_dict, params, mapper, conn) in grouper] cached_connections[connection].execute(statement, multiparams)
[ "def", "_emit_post_update_statements", "(", "base_mapper", ",", "uowtransaction", ",", "cached_connections", ",", "mapper", ",", "table", ",", "update", ")", ":", "def", "update_stmt", "(", ")", ":", "clause", "=", "sql", ".", "and_", "(", ")", "for", "col", "in", "mapper", ".", "_pks_by_table", "[", "table", "]", ":", "clause", ".", "clauses", ".", "append", "(", "(", "col", "==", "sql", ".", "bindparam", "(", "col", ".", "_label", ",", "type_", "=", "col", ".", "type", ")", ")", ")", "return", "table", ".", "update", "(", "clause", ")", "statement", "=", "base_mapper", ".", "_memo", "(", "(", "'post_update'", ",", "table", ")", ",", "update_stmt", ")", "for", "(", "key", ",", "grouper", ")", "in", "groupby", "(", "update", ",", "(", "lambda", "rec", ":", "(", "rec", "[", "4", "]", ",", "list", "(", "rec", "[", "2", "]", ".", "keys", "(", ")", ")", ")", ")", ")", ":", "connection", "=", "key", "[", "0", "]", "multiparams", "=", "[", "params", "for", "(", "state", ",", "state_dict", ",", "params", ",", "mapper", ",", "conn", ")", "in", "grouper", "]", "cached_connections", "[", "connection", "]", ".", "execute", "(", "statement", ",", "multiparams", ")" ]
emit update statements corresponding to value lists collected by _collect_post_update_commands() .
train
false
38,901
def gevent_wait_callback(conn, timeout=None):
    """psycopg2 wait callback that cooperates with gevent.

    Polls the connection and yields to gevent (wait_read / wait_write)
    until the connection reports POLL_OK.  Raises OperationalError on an
    unexpected poll state.
    """
    while True:
        state = conn.poll()
        if state == extensions.POLL_OK:
            return
        if state == extensions.POLL_READ:
            wait_read(conn.fileno(), timeout=timeout)
        elif state == extensions.POLL_WRITE:
            wait_write(conn.fileno(), timeout=timeout)
        else:
            raise psycopg2.OperationalError('Bad result from poll: %r' % state)
[ "def", "gevent_wait_callback", "(", "conn", ",", "timeout", "=", "None", ")", ":", "while", "1", ":", "state", "=", "conn", ".", "poll", "(", ")", "if", "(", "state", "==", "extensions", ".", "POLL_OK", ")", ":", "break", "elif", "(", "state", "==", "extensions", ".", "POLL_READ", ")", ":", "wait_read", "(", "conn", ".", "fileno", "(", ")", ",", "timeout", "=", "timeout", ")", "elif", "(", "state", "==", "extensions", ".", "POLL_WRITE", ")", ":", "wait_write", "(", "conn", ".", "fileno", "(", ")", ",", "timeout", "=", "timeout", ")", "else", ":", "raise", "psycopg2", ".", "OperationalError", "(", "(", "'Bad result from poll: %r'", "%", "state", ")", ")" ]
a wait callback useful to allow gevent to work with psycopg .
train
true
38,902
def cache_dir(path, saltenv='base', include_empty=False, include_pat=None, exclude_pat=None):
    """Download and cache everything under `path` from the master.

    Thin wrapper that delegates to the file client's cache_dir.
    """
    client = _client()
    return client.cache_dir(path, saltenv, include_empty, include_pat, exclude_pat)
[ "def", "cache_dir", "(", "path", ",", "saltenv", "=", "'base'", ",", "include_empty", "=", "False", ",", "include_pat", "=", "None", ",", "exclude_pat", "=", "None", ")", ":", "return", "_client", "(", ")", ".", "cache_dir", "(", "path", ",", "saltenv", ",", "include_empty", ",", "include_pat", ",", "exclude_pat", ")" ]
download and cache everything under a directory from the master include_pat : none glob or regex to narrow down the files cached from the given path .
train
true
38,903
def must_be_logged_in(func):
    """View decorator requiring an authenticated user.

    Injects kwargs['auth'] (built from the request args); requests with
    no logged-in user are redirected to the CAS login URL for the
    current request URL.
    """
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        auth = Auth.from_kwargs(request.args.to_dict(), kwargs)
        kwargs['auth'] = auth
        if not auth.logged_in:
            return redirect(cas.get_login_url(request.url))
        return func(*args, **kwargs)
    return wrapped
[ "def", "must_be_logged_in", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'auth'", "]", "=", "Auth", ".", "from_kwargs", "(", "request", ".", "args", ".", "to_dict", "(", ")", ",", "kwargs", ")", "if", "kwargs", "[", "'auth'", "]", ".", "logged_in", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "redirect", "(", "cas", ".", "get_login_url", "(", "request", ".", "url", ")", ")", "return", "wrapped" ]
require that user be logged in .
train
false
38,904
def valid_string(val):
    """Validate that every character of `val` is a legal XML 1.0 Char.

    Allowed: #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] |
    [#x10000-#x10FFFF].  Raises NotValid('string') on any other
    character (or when an element has no ord()); returns True otherwise.
    """
    for ch in val:
        try:
            code = ord(ch)
        except TypeError:
            raise NotValid('string')
        if code in (9, 10, 13):
            continue
        if 32 <= code <= 55295 or 57344 <= code <= 65533 or 65536 <= code <= 1114111:
            continue
        raise NotValid('string')
    return True
[ "def", "valid_string", "(", "val", ")", ":", "for", "char", "in", "val", ":", "try", ":", "char", "=", "ord", "(", "char", ")", "except", "TypeError", ":", "raise", "NotValid", "(", "'string'", ")", "if", "(", "(", "char", "==", "9", ")", "or", "(", "char", "==", "10", ")", "or", "(", "char", "==", "13", ")", ")", ":", "continue", "elif", "(", "32", "<=", "char", "<=", "55295", ")", ":", "continue", "elif", "(", "57344", "<=", "char", "<=", "65533", ")", ":", "continue", "elif", "(", "65536", "<=", "char", "<=", "1114111", ")", ":", "continue", "else", ":", "raise", "NotValid", "(", "'string'", ")", "return", "True" ]
expects unicode char ::= #x9 | #xa | #xd | [#x20-#xd7ff] | [#xe000-#xfffd] | [#x10000-#x10ffff] .
train
true
38,905
def copy_doc(source):
    """Decorator factory that copies `source`'s docstring onto a function.

    The copied docstring is prepended to the target's own docstring (if
    any).  Raises ValueError when `source` has no docstring.
    """
    def wrapper(func):
        if not source.__doc__:
            raise ValueError('Cannot copy docstring: docstring was empty.')
        combined = source.__doc__
        if func.__doc__ is not None:
            combined = combined + func.__doc__
        func.__doc__ = combined
        return func
    return wrapper
[ "def", "copy_doc", "(", "source", ")", ":", "def", "wrapper", "(", "func", ")", ":", "if", "(", "(", "source", ".", "__doc__", "is", "None", ")", "or", "(", "len", "(", "source", ".", "__doc__", ")", "==", "0", ")", ")", ":", "raise", "ValueError", "(", "'Cannot copy docstring: docstring was empty.'", ")", "doc", "=", "source", ".", "__doc__", "if", "(", "func", ".", "__doc__", "is", "not", "None", ")", ":", "doc", "+=", "func", ".", "__doc__", "func", ".", "__doc__", "=", "doc", "return", "func", "return", "wrapper" ]
decorator to copy the docstring from another function .
train
false
38,907
# PLY parser action stub for the rule: postfix_expression : postfix_expression ARROW ID.
# NOTE(review): PLY reads the grammar rule from the function's docstring, which
# appears to have been stripped here — no docstring is added back because that
# would change parser behavior; confirm the rule text before use.
def p_postfix_expression_6(t): pass
[ "def", "p_postfix_expression_6", "(", "t", ")", ":", "pass" ]
postfix_expression : postfix_expression arrow id .
train
false
38,908
def get_function_signature(func):
    """Return a mapping describing `func`'s signature.

    Keys: 'name', 'arg_count', 'arg_names' (tuple), 'arg_defaults'
    (dict of trailing defaults), plus 'var_args' / 'var_kw_args' when
    *args / **kwargs are present.

    Fix: use __code__ / __defaults__ instead of the Python-2-only
    func_code / func_defaults attributes (both spellings work on 2.6+,
    only the dunder forms exist on Python 3).
    """
    code = func.__code__
    arg_count = code.co_argcount
    arg_names = code.co_varnames[:arg_count]
    # Defaults align with the *last* positional names; pair them reversed.
    arg_defaults = {}
    func_defaults = func.__defaults__ if func.__defaults__ is not None else ()
    for (name, value) in zip(arg_names[::-1], func_defaults[::-1]):
        arg_defaults[name] = value
    signature = {'name': func.__name__,
                 'arg_count': arg_count,
                 'arg_names': arg_names,
                 'arg_defaults': arg_defaults}
    non_pos_names = list(code.co_varnames[arg_count:])
    COLLECTS_ARBITRARY_POSITIONAL_ARGS = 4  # CO_VARARGS flag
    if code.co_flags & COLLECTS_ARBITRARY_POSITIONAL_ARGS:
        signature['var_args'] = non_pos_names.pop(0)
    COLLECTS_ARBITRARY_KEYWORD_ARGS = 8  # CO_VARKEYWORDS flag
    if code.co_flags & COLLECTS_ARBITRARY_KEYWORD_ARGS:
        signature['var_kw_args'] = non_pos_names.pop(0)
    return signature
[ "def", "get_function_signature", "(", "func", ")", ":", "arg_count", "=", "func", ".", "func_code", ".", "co_argcount", "arg_names", "=", "func", ".", "func_code", ".", "co_varnames", "[", ":", "arg_count", "]", "arg_defaults", "=", "{", "}", "func_defaults", "=", "(", ")", "if", "(", "func", ".", "func_defaults", "is", "not", "None", ")", ":", "func_defaults", "=", "func", ".", "func_defaults", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "arg_names", "[", ":", ":", "(", "-", "1", ")", "]", ",", "func_defaults", "[", ":", ":", "(", "-", "1", ")", "]", ")", ":", "arg_defaults", "[", "name", "]", "=", "value", "signature", "=", "{", "'name'", ":", "func", ".", "__name__", ",", "'arg_count'", ":", "arg_count", ",", "'arg_names'", ":", "arg_names", ",", "'arg_defaults'", ":", "arg_defaults", "}", "non_pos_names", "=", "list", "(", "func", ".", "func_code", ".", "co_varnames", "[", "arg_count", ":", "]", ")", "COLLECTS_ARBITRARY_POSITIONAL_ARGS", "=", "4", "if", "(", "func", ".", "func_code", ".", "co_flags", "&", "COLLECTS_ARBITRARY_POSITIONAL_ARGS", ")", ":", "signature", "[", "'var_args'", "]", "=", "non_pos_names", ".", "pop", "(", "0", ")", "COLLECTS_ARBITRARY_KEYWORD_ARGS", "=", "8", "if", "(", "func", ".", "func_code", ".", "co_flags", "&", "COLLECTS_ARBITRARY_KEYWORD_ARGS", ")", ":", "signature", "[", "'var_kw_args'", "]", "=", "non_pos_names", ".", "pop", "(", "0", ")", "return", "signature" ]
get the function signature as a mapping of attributes .
train
false
38,909
def run_steps(container, steps, out=sys.stdout):
    """Run each command group in `steps` inside `container`.

    The container is always stopped, even when a step fails or raises.
    Returns the first nonzero exit status, or 0 when all steps succeed.
    """
    container.start()
    try:
        for commands in steps:
            status = container.execute(commands, out)
            if status != 0:
                return status
    finally:
        container.stop()
    return 0
[ "def", "run_steps", "(", "container", ",", "steps", ",", "out", "=", "sys", ".", "stdout", ")", ":", "container", ".", "start", "(", ")", "try", ":", "for", "commands", "in", "steps", ":", "status", "=", "container", ".", "execute", "(", "commands", ",", "out", ")", "if", "(", "status", "!=", "0", ")", ":", "return", "status", "finally", ":", "container", ".", "stop", "(", ")", "return", "0" ]
run a sequence of commands in a container .
train
false
38,910
def generate_info_refs(repo):
    """Generate the info/refs listing for a repository."""
    all_refs = repo.get_refs()
    return write_info_refs(all_refs, repo.object_store)
[ "def", "generate_info_refs", "(", "repo", ")", ":", "refs", "=", "repo", ".", "get_refs", "(", ")", "return", "write_info_refs", "(", "refs", ",", "repo", ".", "object_store", ")" ]
generate an info refs file .
train
false
38,911
def _process_log_to_dict(process_log): if ((not process_log) or (not process_log.total_stages)): return {} else: return {'process_id': process_log.id, 'process_name': process_log.process_name, 'process_percent': process_log.process_percent, 'stage_name': process_log.stage_name, 'stage_percent': process_log.stage_percent, 'stage_status': process_log.stage_status, 'cur_stage_num': (1 + int(math.floor((process_log.total_stages * process_log.process_percent)))), 'total_stages': process_log.total_stages, 'notes': process_log.notes, 'completed': (process_log.completed or (process_log.end_time is not None))}
[ "def", "_process_log_to_dict", "(", "process_log", ")", ":", "if", "(", "(", "not", "process_log", ")", "or", "(", "not", "process_log", ".", "total_stages", ")", ")", ":", "return", "{", "}", "else", ":", "return", "{", "'process_id'", ":", "process_log", ".", "id", ",", "'process_name'", ":", "process_log", ".", "process_name", ",", "'process_percent'", ":", "process_log", ".", "process_percent", ",", "'stage_name'", ":", "process_log", ".", "stage_name", ",", "'stage_percent'", ":", "process_log", ".", "stage_percent", ",", "'stage_status'", ":", "process_log", ".", "stage_status", ",", "'cur_stage_num'", ":", "(", "1", "+", "int", "(", "math", ".", "floor", "(", "(", "process_log", ".", "total_stages", "*", "process_log", ".", "process_percent", ")", ")", ")", ")", ",", "'total_stages'", ":", "process_log", ".", "total_stages", ",", "'notes'", ":", "process_log", ".", "notes", ",", "'completed'", ":", "(", "process_log", ".", "completed", "or", "(", "process_log", ".", "end_time", "is", "not", "None", ")", ")", "}" ]
utility function to convert a process log to a dict .
train
false
38,913
def is_prereg_admin_not_project_admin(request, draft):
    """True iff the requesting user is a prereg admin but NOT an admin
    of the project the draft branched from."""
    user = request.user
    project_admin = draft.branched_from.has_permission(user, osf_permissions.ADMIN)
    return is_prereg_admin(user) and not project_admin
[ "def", "is_prereg_admin_not_project_admin", "(", "request", ",", "draft", ")", ":", "user", "=", "request", ".", "user", "is_project_admin", "=", "draft", ".", "branched_from", ".", "has_permission", "(", "user", ",", "osf_permissions", ".", "ADMIN", ")", "return", "(", "is_prereg_admin", "(", "user", ")", "and", "(", "not", "is_project_admin", ")", ")" ]
returns true if user is prereg admin .
train
false
38,917
def delete_backup_info(backup_info, delete_files=True):
    """Delete a backup record plus (optionally) its files and kind metadata."""
    if backup_info.blob_files:
        # Blobstore-era backup: remove the blob files, then the record itself.
        delete_backup_files(backup_info.filesystem, backup_info.blob_files)
        backup_info.delete(force_writes=True)
        return
    kinds_backup_files = tuple(backup_info.get_kind_backup_files())
    if delete_files:
        all_files = itertools.chain(
            *(kind_files.files for kind_files in kinds_backup_files))
        delete_backup_files(backup_info.filesystem, all_files)
    # Remove the per-kind metadata entities together with the backup record.
    db.delete(kinds_backup_files + (backup_info,), force_writes=True)
[ "def", "delete_backup_info", "(", "backup_info", ",", "delete_files", "=", "True", ")", ":", "if", "backup_info", ".", "blob_files", ":", "delete_backup_files", "(", "backup_info", ".", "filesystem", ",", "backup_info", ".", "blob_files", ")", "backup_info", ".", "delete", "(", "force_writes", "=", "True", ")", "else", ":", "kinds_backup_files", "=", "tuple", "(", "backup_info", ".", "get_kind_backup_files", "(", ")", ")", "if", "delete_files", ":", "delete_backup_files", "(", "backup_info", ".", "filesystem", ",", "itertools", ".", "chain", "(", "*", "(", "kind_backup_files", ".", "files", "for", "kind_backup_files", "in", "kinds_backup_files", ")", ")", ")", "db", ".", "delete", "(", "(", "kinds_backup_files", "+", "(", "backup_info", ",", ")", ")", ",", "force_writes", "=", "True", ")" ]
deletes a backup including its associated files and other metadata .
train
false
38,918
# Parse a requirements text and yield Requirement instances.
# Relies on module-level matcher callables (DISTRO, OBRACKET, CBRACKET,
# VERSION, COMMA, LINE_END, CONTINUE) applied at a running position `p`
# within each line.  The inner scan_list() consumes a comma-separated list
# (extras or version specs), following backslash line continuations via the
# shared `lines` iterator, and returns the (possibly advanced) line, the new
# position, and the collected items.  Raises ValueError on malformed input.
def parse_requirements(strs): lines = iter(yield_lines(strs)) def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): items = [] while (not TERMINATOR(line, p)): if CONTINUE(line, p): try: line = next(lines) p = 0 except StopIteration: raise ValueError('\\ must not appear on the last nonblank line') match = ITEM(line, p) if (not match): msg = (('Expected ' + item_name) + ' in') raise ValueError(msg, line, 'at', line[p:]) items.append(match.group(*groups)) p = match.end() match = COMMA(line, p) if match: p = match.end() elif (not TERMINATOR(line, p)): msg = "Expected ',' or end-of-list in" raise ValueError(msg, line, 'at', line[p:]) match = TERMINATOR(line, p) if match: p = match.end() return (line, p, items) for line in lines: match = DISTRO(line) if (not match): raise ValueError('Missing distribution spec', line) project_name = match.group(1) p = match.end() extras = [] match = OBRACKET(line, p) if match: p = match.end() (line, p, extras) = scan_list(DISTRO, CBRACKET, line, p, (1,), "'extra' name") (line, p, specs) = scan_list(VERSION, LINE_END, line, p, (1, 2), 'version spec') specs = [(op, safe_version(val)) for (op, val) in specs] (yield Requirement(project_name, specs, extras))
[ "def", "parse_requirements", "(", "strs", ")", ":", "lines", "=", "iter", "(", "yield_lines", "(", "strs", ")", ")", "def", "scan_list", "(", "ITEM", ",", "TERMINATOR", ",", "line", ",", "p", ",", "groups", ",", "item_name", ")", ":", "items", "=", "[", "]", "while", "(", "not", "TERMINATOR", "(", "line", ",", "p", ")", ")", ":", "if", "CONTINUE", "(", "line", ",", "p", ")", ":", "try", ":", "line", "=", "next", "(", "lines", ")", "p", "=", "0", "except", "StopIteration", ":", "raise", "ValueError", "(", "'\\\\ must not appear on the last nonblank line'", ")", "match", "=", "ITEM", "(", "line", ",", "p", ")", "if", "(", "not", "match", ")", ":", "msg", "=", "(", "(", "'Expected '", "+", "item_name", ")", "+", "' in'", ")", "raise", "ValueError", "(", "msg", ",", "line", ",", "'at'", ",", "line", "[", "p", ":", "]", ")", "items", ".", "append", "(", "match", ".", "group", "(", "*", "groups", ")", ")", "p", "=", "match", ".", "end", "(", ")", "match", "=", "COMMA", "(", "line", ",", "p", ")", "if", "match", ":", "p", "=", "match", ".", "end", "(", ")", "elif", "(", "not", "TERMINATOR", "(", "line", ",", "p", ")", ")", ":", "msg", "=", "\"Expected ',' or end-of-list in\"", "raise", "ValueError", "(", "msg", ",", "line", ",", "'at'", ",", "line", "[", "p", ":", "]", ")", "match", "=", "TERMINATOR", "(", "line", ",", "p", ")", "if", "match", ":", "p", "=", "match", ".", "end", "(", ")", "return", "(", "line", ",", "p", ",", "items", ")", "for", "line", "in", "lines", ":", "match", "=", "DISTRO", "(", "line", ")", "if", "(", "not", "match", ")", ":", "raise", "ValueError", "(", "'Missing distribution spec'", ",", "line", ")", "project_name", "=", "match", ".", "group", "(", "1", ")", "p", "=", "match", ".", "end", "(", ")", "extras", "=", "[", "]", "match", "=", "OBRACKET", "(", "line", ",", "p", ")", "if", "match", ":", "p", "=", "match", ".", "end", "(", ")", "(", "line", ",", "p", ",", "extras", ")", "=", "scan_list", "(", "DISTRO", ",", "CBRACKET", ",", "line", 
",", "p", ",", "(", "1", ",", ")", ",", "\"'extra' name\"", ")", "(", "line", ",", "p", ",", "specs", ")", "=", "scan_list", "(", "VERSION", ",", "LINE_END", ",", "line", ",", "p", ",", "(", "1", ",", "2", ")", ",", "'version spec'", ")", "specs", "=", "[", "(", "op", ",", "safe_version", "(", "val", ")", ")", "for", "(", "op", ",", "val", ")", "in", "specs", "]", "(", "yield", "Requirement", "(", "project_name", ",", "specs", ",", "extras", ")", ")" ]
parse a requirements file and yield installrequirement instances .
train
true
38,919
def bone():
    """Set the default colormap to 'bone' and apply it to the current
    image, if any."""
    rc('image', cmap='bone')
    current_image = gci()
    if current_image is not None:
        current_image.set_cmap(cm.bone)
    draw_if_interactive()
[ "def", "bone", "(", ")", ":", "rc", "(", "'image'", ",", "cmap", "=", "'bone'", ")", "im", "=", "gci", "(", ")", "if", "(", "im", "is", "not", "None", ")", ":", "im", ".", "set_cmap", "(", "cm", ".", "bone", ")", "draw_if_interactive", "(", ")" ]
set the default colormap to bone and apply to current image if any .
train
false
38,920
def rmconfig(name):
    """Clear cached build options for a port by running ``make rmconfig``
    in the port's directory.

    name: the name of the port to clear.
    """
    portpath = _check_portname(name)
    cmd = ['make', 'rmconfig']
    return __salt__['cmd.run'](cmd, cwd=portpath, python_shell=False)
[ "def", "rmconfig", "(", "name", ")", ":", "portpath", "=", "_check_portname", "(", "name", ")", "return", "__salt__", "[", "'cmd.run'", "]", "(", "[", "'make'", ",", "'rmconfig'", "]", ",", "cwd", "=", "portpath", ",", "python_shell", "=", "False", ")" ]
clear the cached options for the specified port by running a make rmconfig . name : the name of the port to clear . cli example: .
train
true
38,921
def app_pack_compiled(app, request, raise_ex=False):
    """Build a bytecode-compiled .w2p package for the application.

    Args:
        app: application name.
        request: the global request object (used to resolve paths).
        raise_ex: when True, let the original exception propagate.

    Returns the filename of the .w2p file, or None on error.
    """
    try:
        filename = apath('../deposit/%s.w2p' % app, request)
        w2p_pack(filename, apath(app, request), compiled=True)
        return filename
    except Exception:  # fix: the exception was bound to `e` but never used
        if raise_ex:
            raise
        return None
[ "def", "app_pack_compiled", "(", "app", ",", "request", ",", "raise_ex", "=", "False", ")", ":", "try", ":", "filename", "=", "apath", "(", "(", "'../deposit/%s.w2p'", "%", "app", ")", ",", "request", ")", "w2p_pack", "(", "filename", ",", "apath", "(", "app", ",", "request", ")", ",", "compiled", "=", "True", ")", "return", "filename", "except", "Exception", "as", "e", ":", "if", "raise_ex", ":", "raise", "return", "None" ]
builds a w2p bytecode-compiled package for the application args: app: application name request: the global request object returns: filename of the w2p file or none on error .
train
false
38,924
def get_undo_commands(dire):
    """Read the COMMANDS csv file inside `dire` and return its rows."""
    commands_path = os.path.join(dire, 'COMMANDS')
    with open(commands_path) as csvfile:
        return [row for row in csv.reader(csvfile)]
[ "def", "get_undo_commands", "(", "dire", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "dire", ",", "'COMMANDS'", ")", ")", "as", "csvfile", ":", "return", "list", "(", "csv", ".", "reader", "(", "csvfile", ")", ")" ]
get new files .
train
false
38,925
def find_unused_port():
    """Bind to an OS-assigned port on localhost and return its number.

    The socket is closed before returning, so the port is currently free
    (though another process could grab it before the caller binds).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.bind(('127.0.0.1', 0))
        sock.listen(socket.SOMAXCONN)
        port = sock.getsockname()[1]
    finally:
        sock.close()
    return port
[ "def", "find_unused_port", "(", ")", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "sock", ".", "bind", "(", "(", "'127.0.0.1'", ",", "0", ")", ")", "sock", ".", "listen", "(", "socket", ".", "SOMAXCONN", ")", "(", "_", ",", "port", ")", "=", "sock", ".", "getsockname", "(", ")", "finally", ":", "sock", ".", "close", "(", ")", "return", "port" ]
returns an unused port that should be suitable for binding .
train
false
38,926
def check_for_folder(path):
    """Return True if any directory exists anywhere in the tree at `path`."""
    for _dirpath, dirnames, _filenames in os.walk(path):
        if dirnames:
            return True
    return False
[ "def", "check_for_folder", "(", "path", ")", ":", "for", "(", "_root", ",", "dirs", ",", "_files", ")", "in", "os", ".", "walk", "(", "path", ")", ":", "if", "dirs", ":", "return", "True", "return", "False" ]
return true if any folder is found in the tree at path .
train
false
38,927
# Rebuild the Redis-backed email send queue: select up to 500 pending
# ('Not Sent' / 'Partially Sent') Email Queue names whose send_after has
# passed, ordered by priority then creation, clear the old cached queue,
# and push each name onto the 'cache_email_queue' list.
# (The SQL literal's embedded " DCTB " markers are this dataset's encoding
# of tab characters and are preserved byte-for-byte.)
def make_cache_queue(): cache = frappe.cache() emails = frappe.db.sql(u"select\n DCTB DCTB DCTB name\n DCTB DCTB from\n DCTB DCTB DCTB `tabEmail Queue`\n DCTB DCTB where\n DCTB DCTB DCTB (status='Not Sent' or status='Partially Sent') and\n DCTB DCTB DCTB (send_after is null or send_after < %(now)s)\n DCTB DCTB order\n DCTB DCTB DCTB by priority desc, creation asc\n DCTB DCTB limit 500", {u'now': now_datetime()}) cache.delete_value(u'cache_email_queue') for e in emails: cache.rpush(u'cache_email_queue', e[0])
[ "def", "make_cache_queue", "(", ")", ":", "cache", "=", "frappe", ".", "cache", "(", ")", "emails", "=", "frappe", ".", "db", ".", "sql", "(", "u\"select\\n DCTB DCTB DCTB name\\n DCTB DCTB from\\n DCTB DCTB DCTB `tabEmail Queue`\\n DCTB DCTB where\\n DCTB DCTB DCTB (status='Not Sent' or status='Partially Sent') and\\n DCTB DCTB DCTB (send_after is null or send_after < %(now)s)\\n DCTB DCTB order\\n DCTB DCTB DCTB by priority desc, creation asc\\n DCTB DCTB limit 500\"", ",", "{", "u'now'", ":", "now_datetime", "(", ")", "}", ")", "cache", ".", "delete_value", "(", "u'cache_email_queue'", ")", "for", "e", "in", "emails", ":", "cache", ".", "rpush", "(", "u'cache_email_queue'", ",", "e", "[", "0", "]", ")" ]
cache values in queue before sending .
train
false
38,928
def is_user_registered(user_id):
    """Return True iff a UserSettingsModel record exists for `user_id`."""
    if user_id is None:
        return False
    settings_model = user_models.UserSettingsModel.get(user_id, strict=False)
    return bool(settings_model)
[ "def", "is_user_registered", "(", "user_id", ")", ":", "if", "(", "user_id", "is", "None", ")", ":", "return", "False", "user_settings", "=", "user_models", ".", "UserSettingsModel", ".", "get", "(", "user_id", ",", "strict", "=", "False", ")", "return", "bool", "(", "user_settings", ")" ]
checks if a user is registered with given user_id .
train
false
38,931
def cosine_distance(u, v):
    """Return 1 minus the cosine of the angle between vectors u and v."""
    dot_uv = numpy.dot(u, v)
    norm_u = sqrt(numpy.dot(u, u))
    norm_v = sqrt(numpy.dot(v, v))
    return 1 - dot_uv / (norm_u * norm_v)
[ "def", "cosine_distance", "(", "u", ",", "v", ")", ":", "return", "(", "1", "-", "(", "numpy", ".", "dot", "(", "u", ",", "v", ")", "/", "(", "sqrt", "(", "numpy", ".", "dot", "(", "u", ",", "u", ")", ")", "*", "sqrt", "(", "numpy", ".", "dot", "(", "v", ",", "v", ")", ")", ")", ")", ")" ]
returns 1 minus the cosine of the angle between vectors v and u .
train
false
38,934
@membership_required def topic_edit(request, slug, topic_id, template_name='groups/topics/topic_form.html'): group = get_object_or_404(Group, slug=slug) topic = get_object_or_404(GroupTopic, pk=topic_id, group=group, user=request.user) if (request.method == 'POST'): form = GroupTopicForm(request.POST, instance=topic) if form.is_valid(): form.save() return redirect(request, topic) else: form = GroupTopicForm(instance=topic) return render(request, template_name, {'form': form, 'group': group, 'topic': topic})
[ "@", "membership_required", "def", "topic_edit", "(", "request", ",", "slug", ",", "topic_id", ",", "template_name", "=", "'groups/topics/topic_form.html'", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "slug", "=", "slug", ")", "topic", "=", "get_object_or_404", "(", "GroupTopic", ",", "pk", "=", "topic_id", ",", "group", "=", "group", ",", "user", "=", "request", ".", "user", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "form", "=", "GroupTopicForm", "(", "request", ".", "POST", ",", "instance", "=", "topic", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", ".", "save", "(", ")", "return", "redirect", "(", "request", ",", "topic", ")", "else", ":", "form", "=", "GroupTopicForm", "(", "instance", "=", "topic", ")", "return", "render", "(", "request", ",", "template_name", ",", "{", "'form'", ":", "form", ",", "'group'", ":", "group", ",", "'topic'", ":", "topic", "}", ")" ]
returns a group topic form page .
train
false
38,937
def gf_strip(f): if ((not f) or f[0]): return f k = 0 for coeff in f: if coeff: break else: k += 1 return f[k:]
[ "def", "gf_strip", "(", "f", ")", ":", "if", "(", "(", "not", "f", ")", "or", "f", "[", "0", "]", ")", ":", "return", "f", "k", "=", "0", "for", "coeff", "in", "f", ":", "if", "coeff", ":", "break", "else", ":", "k", "+=", "1", "return", "f", "[", "k", ":", "]" ]
remove leading zeros from f .
train
false
38,939
@_noconds_(True) def _sine_cosine_transform(f, x, k, a, b, K, name, simplify=True): F = integrate(((a * f) * K(((b * x) * k))), (x, 0, oo)) if (not F.has(Integral)): return (_simplify(F, simplify), True) if (not F.is_Piecewise): raise IntegralTransformError(name, f, 'could not compute integral') (F, cond) = F.args[0] if F.has(Integral): raise IntegralTransformError(name, f, 'integral in unexpected form') return (_simplify(F, simplify), cond)
[ "@", "_noconds_", "(", "True", ")", "def", "_sine_cosine_transform", "(", "f", ",", "x", ",", "k", ",", "a", ",", "b", ",", "K", ",", "name", ",", "simplify", "=", "True", ")", ":", "F", "=", "integrate", "(", "(", "(", "a", "*", "f", ")", "*", "K", "(", "(", "(", "b", "*", "x", ")", "*", "k", ")", ")", ")", ",", "(", "x", ",", "0", ",", "oo", ")", ")", "if", "(", "not", "F", ".", "has", "(", "Integral", ")", ")", ":", "return", "(", "_simplify", "(", "F", ",", "simplify", ")", ",", "True", ")", "if", "(", "not", "F", ".", "is_Piecewise", ")", ":", "raise", "IntegralTransformError", "(", "name", ",", "f", ",", "'could not compute integral'", ")", "(", "F", ",", "cond", ")", "=", "F", ".", "args", "[", "0", "]", "if", "F", ".", "has", "(", "Integral", ")", ":", "raise", "IntegralTransformError", "(", "name", ",", "f", ",", "'integral in unexpected form'", ")", "return", "(", "_simplify", "(", "F", ",", "simplify", ")", ",", "cond", ")" ]
compute a general sine or cosine-type transform f(k) = a int_0^oo b*sin f(x) dx .
train
false
38,940
def run_async(to_execute, *args, **kwargs): callback = kwargs.pop('at_return', None) errback = kwargs.pop('at_err', None) callback_kwargs = kwargs.pop('at_return_kwargs', {}) errback_kwargs = kwargs.pop('at_err_kwargs', {}) if callable(to_execute): deferred = threads.deferToThread(to_execute, *args, **kwargs) else: raise RuntimeError(("'%s' could not be handled by run_async" % to_execute)) if callback: deferred.addCallback(callback, **callback_kwargs) deferred.addErrback(errback, **errback_kwargs)
[ "def", "run_async", "(", "to_execute", ",", "*", "args", ",", "**", "kwargs", ")", ":", "callback", "=", "kwargs", ".", "pop", "(", "'at_return'", ",", "None", ")", "errback", "=", "kwargs", ".", "pop", "(", "'at_err'", ",", "None", ")", "callback_kwargs", "=", "kwargs", ".", "pop", "(", "'at_return_kwargs'", ",", "{", "}", ")", "errback_kwargs", "=", "kwargs", ".", "pop", "(", "'at_err_kwargs'", ",", "{", "}", ")", "if", "callable", "(", "to_execute", ")", ":", "deferred", "=", "threads", ".", "deferToThread", "(", "to_execute", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "raise", "RuntimeError", "(", "(", "\"'%s' could not be handled by run_async\"", "%", "to_execute", ")", ")", "if", "callback", ":", "deferred", ".", "addCallback", "(", "callback", ",", "**", "callback_kwargs", ")", "deferred", ".", "addErrback", "(", "errback", ",", "**", "errback_kwargs", ")" ]
runs a function or executes a code snippet asynchronously .
train
false
38,941
def test_basic_compound_inverse(): t = (((Shift(2) & Shift(3)) | (Scale(2) & Scale(3))) | Rotation2D(90)) assert_allclose(t.inverse(*t(0, 1)), (0, 1))
[ "def", "test_basic_compound_inverse", "(", ")", ":", "t", "=", "(", "(", "(", "Shift", "(", "2", ")", "&", "Shift", "(", "3", ")", ")", "|", "(", "Scale", "(", "2", ")", "&", "Scale", "(", "3", ")", ")", ")", "|", "Rotation2D", "(", "90", ")", ")", "assert_allclose", "(", "t", ".", "inverse", "(", "*", "t", "(", "0", ",", "1", ")", ")", ",", "(", "0", ",", "1", ")", ")" ]
test basic inversion of compound models in the limited sense supported for models made from compositions and joins only .
train
false
38,942
@nottest def ultra_slow_test(f): f.ultra_slow_test = True f.slow_test = True return f
[ "@", "nottest", "def", "ultra_slow_test", "(", "f", ")", ":", "f", ".", "ultra_slow_test", "=", "True", "f", ".", "slow_test", "=", "True", "return", "f" ]
decorator for ultra slow tests .
train
false
38,943
def make_list_box(parent, width=0, height=0, hbar=0, vbar=1, fill=BOTH, expand=1, pack=1, class_=None, name=None, takefocus=None): (hbar, vbar, frame) = make_scrollbars(parent, hbar, vbar, pack, class_=class_, name=name, takefocus=takefocus) widget = Listbox(frame, name='listbox') if width: widget.config(width=width) if height: widget.config(height=height) widget.pack(expand=expand, fill=fill, side=LEFT) set_scroll_commands(widget, hbar, vbar) return (widget, frame)
[ "def", "make_list_box", "(", "parent", ",", "width", "=", "0", ",", "height", "=", "0", ",", "hbar", "=", "0", ",", "vbar", "=", "1", ",", "fill", "=", "BOTH", ",", "expand", "=", "1", ",", "pack", "=", "1", ",", "class_", "=", "None", ",", "name", "=", "None", ",", "takefocus", "=", "None", ")", ":", "(", "hbar", ",", "vbar", ",", "frame", ")", "=", "make_scrollbars", "(", "parent", ",", "hbar", ",", "vbar", ",", "pack", ",", "class_", "=", "class_", ",", "name", "=", "name", ",", "takefocus", "=", "takefocus", ")", "widget", "=", "Listbox", "(", "frame", ",", "name", "=", "'listbox'", ")", "if", "width", ":", "widget", ".", "config", "(", "width", "=", "width", ")", "if", "height", ":", "widget", ".", "config", "(", "height", "=", "height", ")", "widget", ".", "pack", "(", "expand", "=", "expand", ",", "fill", "=", "fill", ",", "side", "=", "LEFT", ")", "set_scroll_commands", "(", "widget", ",", "hbar", ",", "vbar", ")", "return", "(", "widget", ",", "frame", ")" ]
subroutine to create a list box .
train
false
38,945
def ioff(): matplotlib.interactive(False) uninstall_repl_displayhook()
[ "def", "ioff", "(", ")", ":", "matplotlib", ".", "interactive", "(", "False", ")", "uninstall_repl_displayhook", "(", ")" ]
turn interactive mode off .
train
false
38,946
def check_accept(*types): def decorator(f): @functools.wraps(f) def decorated_function(req): if req.accept: best_match = req.accept.best_match(types) if (not best_match): type_string = ', '.join(types) raise webob.exc.HTTPNotAcceptable((_('Only %(type)s is provided') % {'type': type_string}), json_formatter=json_error_formatter) return f(req) return decorated_function return decorator
[ "def", "check_accept", "(", "*", "types", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "decorated_function", "(", "req", ")", ":", "if", "req", ".", "accept", ":", "best_match", "=", "req", ".", "accept", ".", "best_match", "(", "types", ")", "if", "(", "not", "best_match", ")", ":", "type_string", "=", "', '", ".", "join", "(", "types", ")", "raise", "webob", ".", "exc", ".", "HTTPNotAcceptable", "(", "(", "_", "(", "'Only %(type)s is provided'", ")", "%", "{", "'type'", ":", "type_string", "}", ")", ",", "json_formatter", "=", "json_error_formatter", ")", "return", "f", "(", "req", ")", "return", "decorated_function", "return", "decorator" ]
if accept is set explicitly .
train
false
38,949
def lrange(key, start, stop, host=None, port=None, db=None, password=None): server = _connect(host, port, db, password) return server.lrange(key, start, stop)
[ "def", "lrange", "(", "key", ",", "start", ",", "stop", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "lrange", "(", "key", ",", "start", ",", "stop", ")" ]
get a range of values from a list in redis cli example: .
train
true
38,950
def active_env(): return get_link_suffix('env')
[ "def", "active_env", "(", ")", ":", "return", "get_link_suffix", "(", "'env'", ")" ]
return the revision id of the current environment .
train
false
38,951
@register.inclusion_tag('horizon/_subnav_list.html', takes_context=True) def horizon_dashboard_nav(context): if ('request' not in context): return {} dashboard = context['request'].horizon['dashboard'] panel_groups = dashboard.get_panel_groups() non_empty_groups = [] for group in panel_groups.values(): allowed_panels = [] for panel in group: if (callable(panel.nav) and panel.nav(context)): allowed_panels.append(panel) elif ((not callable(panel.nav)) and panel.nav): allowed_panels.append(panel) if allowed_panels: non_empty_groups.append((group.name, allowed_panels)) return {'components': SortedDict(non_empty_groups), 'user': context['request'].user, 'current': context['request'].horizon['panel'].slug, 'request': context['request']}
[ "@", "register", ".", "inclusion_tag", "(", "'horizon/_subnav_list.html'", ",", "takes_context", "=", "True", ")", "def", "horizon_dashboard_nav", "(", "context", ")", ":", "if", "(", "'request'", "not", "in", "context", ")", ":", "return", "{", "}", "dashboard", "=", "context", "[", "'request'", "]", ".", "horizon", "[", "'dashboard'", "]", "panel_groups", "=", "dashboard", ".", "get_panel_groups", "(", ")", "non_empty_groups", "=", "[", "]", "for", "group", "in", "panel_groups", ".", "values", "(", ")", ":", "allowed_panels", "=", "[", "]", "for", "panel", "in", "group", ":", "if", "(", "callable", "(", "panel", ".", "nav", ")", "and", "panel", ".", "nav", "(", "context", ")", ")", ":", "allowed_panels", ".", "append", "(", "panel", ")", "elif", "(", "(", "not", "callable", "(", "panel", ".", "nav", ")", ")", "and", "panel", ".", "nav", ")", ":", "allowed_panels", ".", "append", "(", "panel", ")", "if", "allowed_panels", ":", "non_empty_groups", ".", "append", "(", "(", "group", ".", "name", ",", "allowed_panels", ")", ")", "return", "{", "'components'", ":", "SortedDict", "(", "non_empty_groups", ")", ",", "'user'", ":", "context", "[", "'request'", "]", ".", "user", ",", "'current'", ":", "context", "[", "'request'", "]", ".", "horizon", "[", "'panel'", "]", ".", "slug", ",", "'request'", ":", "context", "[", "'request'", "]", "}" ]
generates sub-navigation entries for the current dashboard .
train
false
38,952
def get_device_name_for_instance(instance, bdms, device): mappings = block_device.instance_block_mapping(instance, bdms) return get_next_device_name(instance, mappings.values(), mappings['root'], device)
[ "def", "get_device_name_for_instance", "(", "instance", ",", "bdms", ",", "device", ")", ":", "mappings", "=", "block_device", ".", "instance_block_mapping", "(", "instance", ",", "bdms", ")", "return", "get_next_device_name", "(", "instance", ",", "mappings", ".", "values", "(", ")", ",", "mappings", "[", "'root'", "]", ",", "device", ")" ]
validates a device name for instance .
train
false
38,953
def opendocx(file): mydoc = zipfile.ZipFile(file) xmlcontent = mydoc.read('word/document.xml') document = etree.fromstring(xmlcontent) return document
[ "def", "opendocx", "(", "file", ")", ":", "mydoc", "=", "zipfile", ".", "ZipFile", "(", "file", ")", "xmlcontent", "=", "mydoc", ".", "read", "(", "'word/document.xml'", ")", "document", "=", "etree", ".", "fromstring", "(", "xmlcontent", ")", "return", "document" ]
open a docx file .
train
true
38,954
def _utf8(s): if (not isinstance(s, bytes)): s = s.encode('utf8') return s
[ "def", "_utf8", "(", "s", ")", ":", "if", "(", "not", "isinstance", "(", "s", ",", "bytes", ")", ")", ":", "s", "=", "s", ".", "encode", "(", "'utf8'", ")", "return", "s" ]
ensure utf8 bytes .
train
false
38,957
def ssh_config(host_string=None): from fabric.state import env dummy = {} if (not env.use_ssh_config): return dummy if ('_ssh_config' not in env): try: conf = ssh.SSHConfig() path = os.path.expanduser(env.ssh_config_path) with open(path) as fd: conf.parse(fd) env._ssh_config = conf except IOError: warn(("Unable to load SSH config file '%s'" % path)) return dummy host = parse_host_string((host_string or env.host_string))['host'] return env._ssh_config.lookup(host)
[ "def", "ssh_config", "(", "host_string", "=", "None", ")", ":", "from", "fabric", ".", "state", "import", "env", "dummy", "=", "{", "}", "if", "(", "not", "env", ".", "use_ssh_config", ")", ":", "return", "dummy", "if", "(", "'_ssh_config'", "not", "in", "env", ")", ":", "try", ":", "conf", "=", "ssh", ".", "SSHConfig", "(", ")", "path", "=", "os", ".", "path", ".", "expanduser", "(", "env", ".", "ssh_config_path", ")", "with", "open", "(", "path", ")", "as", "fd", ":", "conf", ".", "parse", "(", "fd", ")", "env", ".", "_ssh_config", "=", "conf", "except", "IOError", ":", "warn", "(", "(", "\"Unable to load SSH config file '%s'\"", "%", "path", ")", ")", "return", "dummy", "host", "=", "parse_host_string", "(", "(", "host_string", "or", "env", ".", "host_string", ")", ")", "[", "'host'", "]", "return", "env", ".", "_ssh_config", ".", "lookup", "(", "host", ")" ]
return ssh configuration dict for current env .
train
false
38,958
def py_xor_simple(str1, str2): warnings.warn('deprecated', DeprecationWarning) precondition((len(str1) == len(str2)), 'str1 and str2 are required to be of the same length.', str1=str1, str2=str2) return ''.join(map(chr, map(operator.__xor__, map(ord, str1), map(ord, str2))))
[ "def", "py_xor_simple", "(", "str1", ",", "str2", ")", ":", "warnings", ".", "warn", "(", "'deprecated'", ",", "DeprecationWarning", ")", "precondition", "(", "(", "len", "(", "str1", ")", "==", "len", "(", "str2", ")", ")", ",", "'str1 and str2 are required to be of the same length.'", ",", "str1", "=", "str1", ",", "str2", "=", "str2", ")", "return", "''", ".", "join", "(", "map", "(", "chr", ",", "map", "(", "operator", ".", "__xor__", ",", "map", "(", "ord", ",", "str1", ")", ",", "map", "(", "ord", ",", "str2", ")", ")", ")", ")" ]
benchmarks show that this is the same speed as py_xor() for small strings and much slower for large strings .
train
false
38,959
def parameterized_config(template): FERNET_KEY = generate_fernet_key() all_vars = {k: v for d in [globals(), locals()] for (k, v) in d.items()} return template.format(**all_vars)
[ "def", "parameterized_config", "(", "template", ")", ":", "FERNET_KEY", "=", "generate_fernet_key", "(", ")", "all_vars", "=", "{", "k", ":", "v", "for", "d", "in", "[", "globals", "(", ")", ",", "locals", "(", ")", "]", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", "}", "return", "template", ".", "format", "(", "**", "all_vars", ")" ]
generates a configuration from the provided template + variables defined in current scope .
train
true
38,960
def apply_operation(): codon_table = codon_list.get(codon_list.curselection()) print('Code: {}'.format(codon_table)) seq = ''.join(input_text.get(1.0, tk.END).split()) print('Input sequence: {}'.format(seq)) operation = transform_var.get() print('Operation: {}'.format(operation)) if (operation == 'transcribe'): result = transcribe(seq) elif (operation == 'translate'): result = translate(seq, table=codon_table, to_stop=True) elif (operation == 'back transcribe'): result = back_transcribe(seq) else: result = '' output_text.delete(1.0, tk.END) output_text.insert(tk.END, result) print('Result: {}'.format(result)) return
[ "def", "apply_operation", "(", ")", ":", "codon_table", "=", "codon_list", ".", "get", "(", "codon_list", ".", "curselection", "(", ")", ")", "print", "(", "'Code: {}'", ".", "format", "(", "codon_table", ")", ")", "seq", "=", "''", ".", "join", "(", "input_text", ".", "get", "(", "1.0", ",", "tk", ".", "END", ")", ".", "split", "(", ")", ")", "print", "(", "'Input sequence: {}'", ".", "format", "(", "seq", ")", ")", "operation", "=", "transform_var", ".", "get", "(", ")", "print", "(", "'Operation: {}'", ".", "format", "(", "operation", ")", ")", "if", "(", "operation", "==", "'transcribe'", ")", ":", "result", "=", "transcribe", "(", "seq", ")", "elif", "(", "operation", "==", "'translate'", ")", ":", "result", "=", "translate", "(", "seq", ",", "table", "=", "codon_table", ",", "to_stop", "=", "True", ")", "elif", "(", "operation", "==", "'back transcribe'", ")", ":", "result", "=", "back_transcribe", "(", "seq", ")", "else", ":", "result", "=", "''", "output_text", ".", "delete", "(", "1.0", ",", "tk", ".", "END", ")", "output_text", ".", "insert", "(", "tk", ".", "END", ",", "result", ")", "print", "(", "'Result: {}'", ".", "format", "(", "result", ")", ")", "return" ]
do the selected operation .
train
false
38,961
def add_entry(): print 'Enter your entry. Press ctrl+d when finished.' data = sys.stdin.read().strip() if (data and (raw_input('Save entry? [Yn] ') != 'n')): Entry.create(content=data) print 'Saved successfully.'
[ "def", "add_entry", "(", ")", ":", "print", "'Enter your entry. Press ctrl+d when finished.'", "data", "=", "sys", ".", "stdin", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "(", "data", "and", "(", "raw_input", "(", "'Save entry? [Yn] '", ")", "!=", "'n'", ")", ")", ":", "Entry", ".", "create", "(", "content", "=", "data", ")", "print", "'Saved successfully.'" ]
add entry .
train
true
38,962
def relfreq(a, numbins=10, defaultreallimits=None, weights=None): a = np.asanyarray(a) (h, l, b, e) = _histogram(a, numbins, defaultreallimits, weights=weights) h = (h / float(a.shape[0])) return RelfreqResult(h, l, b, e)
[ "def", "relfreq", "(", "a", ",", "numbins", "=", "10", ",", "defaultreallimits", "=", "None", ",", "weights", "=", "None", ")", ":", "a", "=", "np", ".", "asanyarray", "(", "a", ")", "(", "h", ",", "l", ",", "b", ",", "e", ")", "=", "_histogram", "(", "a", ",", "numbins", ",", "defaultreallimits", ",", "weights", "=", "weights", ")", "h", "=", "(", "h", "/", "float", "(", "a", ".", "shape", "[", "0", "]", ")", ")", "return", "RelfreqResult", "(", "h", ",", "l", ",", "b", ",", "e", ")" ]
returns a relative frequency histogram .
train
false
38,963
def _vindex_slice(block, points): points = [(p if isinstance(p, slice) else list(p)) for p in points] return block[tuple(points)]
[ "def", "_vindex_slice", "(", "block", ",", "points", ")", ":", "points", "=", "[", "(", "p", "if", "isinstance", "(", "p", ",", "slice", ")", "else", "list", "(", "p", ")", ")", "for", "p", "in", "points", "]", "return", "block", "[", "tuple", "(", "points", ")", "]" ]
pull out point-wise slices from block .
train
false
38,964
def get_relay_ip_list(server=_DEFAULT_SERVER): ret = list() setting = 'RelayIpList' lines = _get_wmi_setting('IIsSmtpServerSetting', setting, server) if (not lines): _LOG.debug('%s is empty: %s', setting, lines) if (lines is None): lines = [None] return list(lines) i = 0 while (i < len(lines)): octets = [str(x) for x in lines[i:(i + 4)]] address = '.'.join(octets) ret.append(address) i += 4 return ret
[ "def", "get_relay_ip_list", "(", "server", "=", "_DEFAULT_SERVER", ")", ":", "ret", "=", "list", "(", ")", "setting", "=", "'RelayIpList'", "lines", "=", "_get_wmi_setting", "(", "'IIsSmtpServerSetting'", ",", "setting", ",", "server", ")", "if", "(", "not", "lines", ")", ":", "_LOG", ".", "debug", "(", "'%s is empty: %s'", ",", "setting", ",", "lines", ")", "if", "(", "lines", "is", "None", ")", ":", "lines", "=", "[", "None", "]", "return", "list", "(", "lines", ")", "i", "=", "0", "while", "(", "i", "<", "len", "(", "lines", ")", ")", ":", "octets", "=", "[", "str", "(", "x", ")", "for", "x", "in", "lines", "[", "i", ":", "(", "i", "+", "4", ")", "]", "]", "address", "=", "'.'", ".", "join", "(", "octets", ")", "ret", ".", "append", "(", "address", ")", "i", "+=", "4", "return", "ret" ]
get the relayiplist list for the smtp virtual server .
train
true
38,965
def sort_tables(tables, skip_fn=None, extra_dependencies=None): tables = list(tables) tuples = [] if (extra_dependencies is not None): tuples.extend(extra_dependencies) def visit_foreign_key(fkey): if fkey.use_alter: return elif (skip_fn and skip_fn(fkey)): return parent_table = fkey.column.table if (parent_table in tables): child_table = fkey.parent.table if (parent_table is not child_table): tuples.append((parent_table, child_table)) for table in tables: traverse(table, {'schema_visitor': True}, {'foreign_key': visit_foreign_key}) tuples.extend(([parent, table] for parent in table._extra_dependencies)) return list(topological.sort(tuples, tables))
[ "def", "sort_tables", "(", "tables", ",", "skip_fn", "=", "None", ",", "extra_dependencies", "=", "None", ")", ":", "tables", "=", "list", "(", "tables", ")", "tuples", "=", "[", "]", "if", "(", "extra_dependencies", "is", "not", "None", ")", ":", "tuples", ".", "extend", "(", "extra_dependencies", ")", "def", "visit_foreign_key", "(", "fkey", ")", ":", "if", "fkey", ".", "use_alter", ":", "return", "elif", "(", "skip_fn", "and", "skip_fn", "(", "fkey", ")", ")", ":", "return", "parent_table", "=", "fkey", ".", "column", ".", "table", "if", "(", "parent_table", "in", "tables", ")", ":", "child_table", "=", "fkey", ".", "parent", ".", "table", "if", "(", "parent_table", "is", "not", "child_table", ")", ":", "tuples", ".", "append", "(", "(", "parent_table", ",", "child_table", ")", ")", "for", "table", "in", "tables", ":", "traverse", "(", "table", ",", "{", "'schema_visitor'", ":", "True", "}", ",", "{", "'foreign_key'", ":", "visit_foreign_key", "}", ")", "tuples", ".", "extend", "(", "(", "[", "parent", ",", "table", "]", "for", "parent", "in", "table", ".", "_extra_dependencies", ")", ")", "return", "list", "(", "topological", ".", "sort", "(", "tuples", ",", "tables", ")", ")" ]
sort a collection of table objects in order of their foreign-key dependency .
train
false
38,966
def cache_files(paths, saltenv='base'): return _client().cache_files(paths, saltenv)
[ "def", "cache_files", "(", "paths", ",", "saltenv", "=", "'base'", ")", ":", "return", "_client", "(", ")", ".", "cache_files", "(", "paths", ",", "saltenv", ")" ]
used to gather many files from the master .
train
false
38,967
def _ell(A, m): if ((len(A.shape) != 2) or (A.shape[0] != A.shape[1])): raise ValueError('expected A to be like a square matrix') p = ((2 * m) + 1) choose_2p_p = scipy.special.comb((2 * p), p, exact=True) abs_c_recip = float((choose_2p_p * math.factorial(((2 * p) + 1)))) u = (2 ** (-53)) A_abs_onenorm = _onenorm_matrix_power_nnm(abs(A), p) if (not A_abs_onenorm): return 0 alpha = (A_abs_onenorm / (_onenorm(A) * abs_c_recip)) log2_alpha_div_u = np.log2((alpha / u)) value = int(np.ceil((log2_alpha_div_u / (2 * m)))) return max(value, 0)
[ "def", "_ell", "(", "A", ",", "m", ")", ":", "if", "(", "(", "len", "(", "A", ".", "shape", ")", "!=", "2", ")", "or", "(", "A", ".", "shape", "[", "0", "]", "!=", "A", ".", "shape", "[", "1", "]", ")", ")", ":", "raise", "ValueError", "(", "'expected A to be like a square matrix'", ")", "p", "=", "(", "(", "2", "*", "m", ")", "+", "1", ")", "choose_2p_p", "=", "scipy", ".", "special", ".", "comb", "(", "(", "2", "*", "p", ")", ",", "p", ",", "exact", "=", "True", ")", "abs_c_recip", "=", "float", "(", "(", "choose_2p_p", "*", "math", ".", "factorial", "(", "(", "(", "2", "*", "p", ")", "+", "1", ")", ")", ")", ")", "u", "=", "(", "2", "**", "(", "-", "53", ")", ")", "A_abs_onenorm", "=", "_onenorm_matrix_power_nnm", "(", "abs", "(", "A", ")", ",", "p", ")", "if", "(", "not", "A_abs_onenorm", ")", ":", "return", "0", "alpha", "=", "(", "A_abs_onenorm", "/", "(", "_onenorm", "(", "A", ")", "*", "abs_c_recip", ")", ")", "log2_alpha_div_u", "=", "np", ".", "log2", "(", "(", "alpha", "/", "u", ")", ")", "value", "=", "int", "(", "np", ".", "ceil", "(", "(", "log2_alpha_div_u", "/", "(", "2", "*", "m", ")", ")", ")", ")", "return", "max", "(", "value", ",", "0", ")" ]
a helper function for expm_2009 .
train
false
38,968
def nzseel1(): tablename = ('%s_%s' % (module, resourcename)) table = db[tablename] table.person_id.default = s3_logged_in_person() s3db.configure(tablename, deletable=False, create_next=URL(module, resourcename, args='[id]'), subheadings={'.': 'name', ('%s / %s' % (T('Overall Hazards'), T('Damage'))): 'collapse', '.': 'posting', ('%s:' % T('Further Action Recommended')): 'barricades', '.': 'estimated_damage'}) rheader = nzseel1_rheader output = s3_rest_controller(rheader=rheader) return output
[ "def", "nzseel1", "(", ")", ":", "tablename", "=", "(", "'%s_%s'", "%", "(", "module", ",", "resourcename", ")", ")", "table", "=", "db", "[", "tablename", "]", "table", ".", "person_id", ".", "default", "=", "s3_logged_in_person", "(", ")", "s3db", ".", "configure", "(", "tablename", ",", "deletable", "=", "False", ",", "create_next", "=", "URL", "(", "module", ",", "resourcename", ",", "args", "=", "'[id]'", ")", ",", "subheadings", "=", "{", "'.'", ":", "'name'", ",", "(", "'%s / %s'", "%", "(", "T", "(", "'Overall Hazards'", ")", ",", "T", "(", "'Damage'", ")", ")", ")", ":", "'collapse'", ",", "'.'", ":", "'posting'", ",", "(", "'%s:'", "%", "T", "(", "'Further Action Recommended'", ")", ")", ":", "'barricades'", ",", "'.'", ":", "'estimated_damage'", "}", ")", "rheader", "=", "nzseel1_rheader", "output", "=", "s3_rest_controller", "(", "rheader", "=", "rheader", ")", "return", "output" ]
restful crud controller @todo: action button to create a new l2 assessment from an l1 .
train
false
38,969
def newDerBitString(binstring): if isinstance(binstring, DerObject): der = DerBitString(binstring.encode()) else: der = DerBitString(binstring) return der
[ "def", "newDerBitString", "(", "binstring", ")", ":", "if", "isinstance", "(", "binstring", ",", "DerObject", ")", ":", "der", "=", "DerBitString", "(", "binstring", ".", "encode", "(", ")", ")", "else", ":", "der", "=", "DerBitString", "(", "binstring", ")", "return", "der" ]
create a derstringstring object .
train
false
38,970
def diop_ternary_quadratic(eq): (var, coeff, diop_type) = classify_diop(eq, _dict=False) if (diop_type in ('homogeneous_ternary_quadratic', 'homogeneous_ternary_quadratic_normal')): return _diop_ternary_quadratic(var, coeff)
[ "def", "diop_ternary_quadratic", "(", "eq", ")", ":", "(", "var", ",", "coeff", ",", "diop_type", ")", "=", "classify_diop", "(", "eq", ",", "_dict", "=", "False", ")", "if", "(", "diop_type", "in", "(", "'homogeneous_ternary_quadratic'", ",", "'homogeneous_ternary_quadratic_normal'", ")", ")", ":", "return", "_diop_ternary_quadratic", "(", "var", ",", "coeff", ")" ]
solves the general quadratic ternary form .
train
false
38,972
def make_ssl_context(key_file=None, cert_file=None, pem_file=None, ca_dir=None, verify_peer=False, url=None, method=SSL.TLSv1_METHOD, key_file_passphrase=None): ssl_context = SSL.Context(method) if cert_file: ssl_context.use_certificate_file(cert_file) if key_file_passphrase: passwd_cb = (lambda max_passphrase_len, set_prompt, userdata: key_file_passphrase) ssl_context.set_passwd_cb(passwd_cb) if key_file: ssl_context.use_privatekey_file(key_file) elif cert_file: ssl_context.use_privatekey_file(cert_file) if (pem_file or ca_dir): ssl_context.load_verify_locations(pem_file, ca_dir) def _callback(conn, x509, errnum, errdepth, preverify_ok): 'Default certification verification callback.\n Performs no checks and returns the status passed in.\n ' return preverify_ok verify_callback = _callback if verify_peer: ssl_context.set_verify_depth(9) if url: set_peer_verification_for_url_hostname(ssl_context, url) else: ssl_context.set_verify(SSL.VERIFY_PEER, verify_callback) else: ssl_context.set_verify(SSL.VERIFY_NONE, verify_callback) return ssl_context
[ "def", "make_ssl_context", "(", "key_file", "=", "None", ",", "cert_file", "=", "None", ",", "pem_file", "=", "None", ",", "ca_dir", "=", "None", ",", "verify_peer", "=", "False", ",", "url", "=", "None", ",", "method", "=", "SSL", ".", "TLSv1_METHOD", ",", "key_file_passphrase", "=", "None", ")", ":", "ssl_context", "=", "SSL", ".", "Context", "(", "method", ")", "if", "cert_file", ":", "ssl_context", ".", "use_certificate_file", "(", "cert_file", ")", "if", "key_file_passphrase", ":", "passwd_cb", "=", "(", "lambda", "max_passphrase_len", ",", "set_prompt", ",", "userdata", ":", "key_file_passphrase", ")", "ssl_context", ".", "set_passwd_cb", "(", "passwd_cb", ")", "if", "key_file", ":", "ssl_context", ".", "use_privatekey_file", "(", "key_file", ")", "elif", "cert_file", ":", "ssl_context", ".", "use_privatekey_file", "(", "cert_file", ")", "if", "(", "pem_file", "or", "ca_dir", ")", ":", "ssl_context", ".", "load_verify_locations", "(", "pem_file", ",", "ca_dir", ")", "def", "_callback", "(", "conn", ",", "x509", ",", "errnum", ",", "errdepth", ",", "preverify_ok", ")", ":", "return", "preverify_ok", "verify_callback", "=", "_callback", "if", "verify_peer", ":", "ssl_context", ".", "set_verify_depth", "(", "9", ")", "if", "url", ":", "set_peer_verification_for_url_hostname", "(", "ssl_context", ",", "url", ")", "else", ":", "ssl_context", ".", "set_verify", "(", "SSL", ".", "VERIFY_PEER", ",", "verify_callback", ")", "else", ":", "ssl_context", ".", "set_verify", "(", "SSL", ".", "VERIFY_NONE", ",", "verify_callback", ")", "return", "ssl_context" ]
creates ssl context containing certificate and key file locations .
train
false
38,973
def _is_url(url): try: return (parse_url(url).scheme in _VALID_URLS) except: return False
[ "def", "_is_url", "(", "url", ")", ":", "try", ":", "return", "(", "parse_url", "(", "url", ")", ".", "scheme", "in", "_VALID_URLS", ")", "except", ":", "return", "False" ]
check to see if a url has a valid protocol .
train
false
38,974
def _get_search_url(txt): log.url.debug('Finding search engine for {!r}'.format(txt)) (engine, term) = _parse_search_term(txt) assert term if (engine is None): template = config.get('searchengines', 'DEFAULT') else: template = config.get('searchengines', engine) url = qurl_from_user_input(template.format(urllib.parse.quote(term))) qtutils.ensure_valid(url) return url
[ "def", "_get_search_url", "(", "txt", ")", ":", "log", ".", "url", ".", "debug", "(", "'Finding search engine for {!r}'", ".", "format", "(", "txt", ")", ")", "(", "engine", ",", "term", ")", "=", "_parse_search_term", "(", "txt", ")", "assert", "term", "if", "(", "engine", "is", "None", ")", ":", "template", "=", "config", ".", "get", "(", "'searchengines'", ",", "'DEFAULT'", ")", "else", ":", "template", "=", "config", ".", "get", "(", "'searchengines'", ",", "engine", ")", "url", "=", "qurl_from_user_input", "(", "template", ".", "format", "(", "urllib", ".", "parse", ".", "quote", "(", "term", ")", ")", ")", "qtutils", ".", "ensure_valid", "(", "url", ")", "return", "url" ]
get a search engine url for a text .
train
false
38,975
def is_naive(value): return (value.utcoffset() is None)
[ "def", "is_naive", "(", "value", ")", ":", "return", "(", "value", ".", "utcoffset", "(", ")", "is", "None", ")" ]
return :const:true if :class:~datetime .
train
false
38,976
def hexToString(hexString): string = '' if ((len(hexString) % 2) != 0): hexString = ('0' + hexString) try: for i in range(0, len(hexString), 2): string += chr(int((hexString[i] + hexString[(i + 1)]), 16)) except: return ((-1), 'Error in hexadecimal conversion') return (0, string)
[ "def", "hexToString", "(", "hexString", ")", ":", "string", "=", "''", "if", "(", "(", "len", "(", "hexString", ")", "%", "2", ")", "!=", "0", ")", ":", "hexString", "=", "(", "'0'", "+", "hexString", ")", "try", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "hexString", ")", ",", "2", ")", ":", "string", "+=", "chr", "(", "int", "(", "(", "hexString", "[", "i", "]", "+", "hexString", "[", "(", "i", "+", "1", ")", "]", ")", ",", "16", ")", ")", "except", ":", "return", "(", "(", "-", "1", ")", ",", "'Error in hexadecimal conversion'", ")", "return", "(", "0", ",", "string", ")" ]
simple method to convert an hexadecimal string to ascii string .
train
false
38,978
def zipline_path(paths, environ=None): return join(zipline_root(environ=environ), *paths)
[ "def", "zipline_path", "(", "paths", ",", "environ", "=", "None", ")", ":", "return", "join", "(", "zipline_root", "(", "environ", "=", "environ", ")", ",", "*", "paths", ")" ]
get a path relative to the zipline root .
train
false
38,980
def parse_bool(text): if (text.lower() in ('true', '1')): return True elif (text.lower() in ('false', '0')): return False raise Exception(('Invalid boolean: %s' % text))
[ "def", "parse_bool", "(", "text", ")", ":", "if", "(", "text", ".", "lower", "(", ")", "in", "(", "'true'", ",", "'1'", ")", ")", ":", "return", "True", "elif", "(", "text", ".", "lower", "(", ")", "in", "(", "'false'", ",", "'0'", ")", ")", ":", "return", "False", "raise", "Exception", "(", "(", "'Invalid boolean: %s'", "%", "text", ")", ")" ]
parses a boolean text and converts it into boolean value .
train
false
38,981
def decode_message(message_type, encoded_message): if (not encoded_message.strip()): return message_type() dictionary = json.loads(encoded_message) def find_variant(value): "Find the messages.Variant type that describes this value.\n\n Args:\n value: The value whose variant type is being determined.\n\n Returns:\n The messages.Variant value that best describes value's type, or None if\n it's a type we don't know how to handle.\n " if isinstance(value, (int, long)): return messages.Variant.INT64 elif isinstance(value, float): return messages.Variant.DOUBLE elif isinstance(value, basestring): return messages.Variant.STRING elif isinstance(value, (list, tuple)): variant_priority = [None, messages.Variant.INT64, messages.Variant.DOUBLE, messages.Variant.STRING] chosen_priority = 0 for v in value: variant = find_variant(v) try: priority = variant_priority.index(variant) except IndexError: priority = (-1) if (priority > chosen_priority): chosen_priority = priority return variant_priority[chosen_priority] return None def decode_dictionary(message_type, dictionary): 'Merge dictionary in to message.\n\n Args:\n message: Message to merge dictionary in to.\n dictionary: Dictionary to extract information from. Dictionary\n is as parsed from JSON. 
Nested objects will also be dictionaries.\n ' message = message_type() for (key, value) in dictionary.iteritems(): if (value is None): message.reset(key) continue try: field = message.field_by_name(key) except KeyError: variant = find_variant(value) if variant: message.set_unrecognized_field(key, value, variant) else: logging.warning('No variant found for unrecognized field: %s', key) continue if isinstance(value, list): if (not value): continue else: value = [value] valid_value = [] for item in value: if isinstance(field, messages.EnumField): try: item = field.type(item) except TypeError: raise messages.DecodeError(('Invalid enum value "%s"' % value[0])) elif isinstance(field, messages.BytesField): try: item = base64.b64decode(item) except TypeError as err: raise messages.DecodeError(('Base64 decoding error: %s' % err)) elif isinstance(field, message_types.DateTimeField): try: item = util.decode_datetime(item) except ValueError as err: raise messages.DecodeError(err) elif isinstance(field, messages.MessageField): item = decode_dictionary(field.type, item) elif (isinstance(field, messages.FloatField) and isinstance(item, (int, long, basestring))): try: item = float(item) except: pass elif (isinstance(field, messages.IntegerField) and isinstance(item, basestring)): try: item = int(item) except: pass valid_value.append(item) if field.repeated: existing_value = getattr(message, field.name) setattr(message, field.name, valid_value) else: setattr(message, field.name, valid_value[(-1)]) return message message = decode_dictionary(message_type, dictionary) message.check_initialized() return message
[ "def", "decode_message", "(", "message_type", ",", "encoded_message", ")", ":", "if", "(", "not", "encoded_message", ".", "strip", "(", ")", ")", ":", "return", "message_type", "(", ")", "dictionary", "=", "json", ".", "loads", "(", "encoded_message", ")", "def", "find_variant", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "int", ",", "long", ")", ")", ":", "return", "messages", ".", "Variant", ".", "INT64", "elif", "isinstance", "(", "value", ",", "float", ")", ":", "return", "messages", ".", "Variant", ".", "DOUBLE", "elif", "isinstance", "(", "value", ",", "basestring", ")", ":", "return", "messages", ".", "Variant", ".", "STRING", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "variant_priority", "=", "[", "None", ",", "messages", ".", "Variant", ".", "INT64", ",", "messages", ".", "Variant", ".", "DOUBLE", ",", "messages", ".", "Variant", ".", "STRING", "]", "chosen_priority", "=", "0", "for", "v", "in", "value", ":", "variant", "=", "find_variant", "(", "v", ")", "try", ":", "priority", "=", "variant_priority", ".", "index", "(", "variant", ")", "except", "IndexError", ":", "priority", "=", "(", "-", "1", ")", "if", "(", "priority", ">", "chosen_priority", ")", ":", "chosen_priority", "=", "priority", "return", "variant_priority", "[", "chosen_priority", "]", "return", "None", "def", "decode_dictionary", "(", "message_type", ",", "dictionary", ")", ":", "message", "=", "message_type", "(", ")", "for", "(", "key", ",", "value", ")", "in", "dictionary", ".", "iteritems", "(", ")", ":", "if", "(", "value", "is", "None", ")", ":", "message", ".", "reset", "(", "key", ")", "continue", "try", ":", "field", "=", "message", ".", "field_by_name", "(", "key", ")", "except", "KeyError", ":", "variant", "=", "find_variant", "(", "value", ")", "if", "variant", ":", "message", ".", "set_unrecognized_field", "(", "key", ",", "value", ",", "variant", ")", "else", ":", "logging", ".", "warning", "(", "'No variant 
found for unrecognized field: %s'", ",", "key", ")", "continue", "if", "isinstance", "(", "value", ",", "list", ")", ":", "if", "(", "not", "value", ")", ":", "continue", "else", ":", "value", "=", "[", "value", "]", "valid_value", "=", "[", "]", "for", "item", "in", "value", ":", "if", "isinstance", "(", "field", ",", "messages", ".", "EnumField", ")", ":", "try", ":", "item", "=", "field", ".", "type", "(", "item", ")", "except", "TypeError", ":", "raise", "messages", ".", "DecodeError", "(", "(", "'Invalid enum value \"%s\"'", "%", "value", "[", "0", "]", ")", ")", "elif", "isinstance", "(", "field", ",", "messages", ".", "BytesField", ")", ":", "try", ":", "item", "=", "base64", ".", "b64decode", "(", "item", ")", "except", "TypeError", "as", "err", ":", "raise", "messages", ".", "DecodeError", "(", "(", "'Base64 decoding error: %s'", "%", "err", ")", ")", "elif", "isinstance", "(", "field", ",", "message_types", ".", "DateTimeField", ")", ":", "try", ":", "item", "=", "util", ".", "decode_datetime", "(", "item", ")", "except", "ValueError", "as", "err", ":", "raise", "messages", ".", "DecodeError", "(", "err", ")", "elif", "isinstance", "(", "field", ",", "messages", ".", "MessageField", ")", ":", "item", "=", "decode_dictionary", "(", "field", ".", "type", ",", "item", ")", "elif", "(", "isinstance", "(", "field", ",", "messages", ".", "FloatField", ")", "and", "isinstance", "(", "item", ",", "(", "int", ",", "long", ",", "basestring", ")", ")", ")", ":", "try", ":", "item", "=", "float", "(", "item", ")", "except", ":", "pass", "elif", "(", "isinstance", "(", "field", ",", "messages", ".", "IntegerField", ")", "and", "isinstance", "(", "item", ",", "basestring", ")", ")", ":", "try", ":", "item", "=", "int", "(", "item", ")", "except", ":", "pass", "valid_value", ".", "append", "(", "item", ")", "if", "field", ".", "repeated", ":", "existing_value", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", "setattr", "(", "message", ",", "field", ".", 
"name", ",", "valid_value", ")", "else", ":", "setattr", "(", "message", ",", "field", ".", "name", ",", "valid_value", "[", "(", "-", "1", ")", "]", ")", "return", "message", "message", "=", "decode_dictionary", "(", "message_type", ",", "dictionary", ")", "message", ".", "check_initialized", "(", ")", "return", "message" ]
decode urlencoded content to message .
train
false
38,984
def sniff_date(datestr): dateformats = ('%Y%m%d', '%Y-%m-%d', '%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT', '%Y/%m/%d') for dfmt in dateformats: try: return datetime.datetime.strptime(datestr.strip(), dfmt) except (ValueError, AttributeError): return datetime.datetime.utcnow()
[ "def", "sniff_date", "(", "datestr", ")", ":", "dateformats", "=", "(", "'%Y%m%d'", ",", "'%Y-%m-%d'", ",", "'%Y-%m-%dT%H:%M:%SZ'", ",", "'%Y-%m-%dT'", ",", "'%Y/%m/%d'", ")", "for", "dfmt", "in", "dateformats", ":", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "datestr", ".", "strip", "(", ")", ",", "dfmt", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":", "return", "datetime", ".", "datetime", ".", "utcnow", "(", ")" ]
attempt to parse date into datetime .
train
false
38,985
@testing.requires_testing_data def test_label_in_src(): src = read_source_spaces(src_fname) label = read_label(v1_label_fname) vert_in_src = np.intersect1d(label.vertices, src[0]['vertno'], True) where = np.in1d(label.vertices, vert_in_src) pos_in_src = label.pos[where] values_in_src = label.values[where] label_src = Label(vert_in_src, pos_in_src, values_in_src, hemi='lh').fill(src) vertices_status = np.in1d(src[0]['nearest'], label.vertices) vertices_in = np.nonzero(vertices_status)[0] vertices_out = np.nonzero(np.logical_not(vertices_status))[0] assert_array_equal(label_src.vertices, vertices_in) assert_array_equal(np.in1d(vertices_out, label_src.vertices), False) value_idx = np.digitize(src[0]['nearest'][vertices_in], vert_in_src, True) assert_array_equal(label_src.values, values_in_src[value_idx]) vertices = np.append([(-1)], vert_in_src) assert_raises(ValueError, Label(vertices, hemi='lh').fill, src)
[ "@", "testing", ".", "requires_testing_data", "def", "test_label_in_src", "(", ")", ":", "src", "=", "read_source_spaces", "(", "src_fname", ")", "label", "=", "read_label", "(", "v1_label_fname", ")", "vert_in_src", "=", "np", ".", "intersect1d", "(", "label", ".", "vertices", ",", "src", "[", "0", "]", "[", "'vertno'", "]", ",", "True", ")", "where", "=", "np", ".", "in1d", "(", "label", ".", "vertices", ",", "vert_in_src", ")", "pos_in_src", "=", "label", ".", "pos", "[", "where", "]", "values_in_src", "=", "label", ".", "values", "[", "where", "]", "label_src", "=", "Label", "(", "vert_in_src", ",", "pos_in_src", ",", "values_in_src", ",", "hemi", "=", "'lh'", ")", ".", "fill", "(", "src", ")", "vertices_status", "=", "np", ".", "in1d", "(", "src", "[", "0", "]", "[", "'nearest'", "]", ",", "label", ".", "vertices", ")", "vertices_in", "=", "np", ".", "nonzero", "(", "vertices_status", ")", "[", "0", "]", "vertices_out", "=", "np", ".", "nonzero", "(", "np", ".", "logical_not", "(", "vertices_status", ")", ")", "[", "0", "]", "assert_array_equal", "(", "label_src", ".", "vertices", ",", "vertices_in", ")", "assert_array_equal", "(", "np", ".", "in1d", "(", "vertices_out", ",", "label_src", ".", "vertices", ")", ",", "False", ")", "value_idx", "=", "np", ".", "digitize", "(", "src", "[", "0", "]", "[", "'nearest'", "]", "[", "vertices_in", "]", ",", "vert_in_src", ",", "True", ")", "assert_array_equal", "(", "label_src", ".", "values", ",", "values_in_src", "[", "value_idx", "]", ")", "vertices", "=", "np", ".", "append", "(", "[", "(", "-", "1", ")", "]", ",", "vert_in_src", ")", "assert_raises", "(", "ValueError", ",", "Label", "(", "vertices", ",", "hemi", "=", "'lh'", ")", ".", "fill", ",", "src", ")" ]
test label in src .
train
false
38,986
def contrastive_loss(y_true, y_pred): margin = 1 return K.mean(((y_true * K.square(y_pred)) + ((1 - y_true) * K.square(K.maximum((margin - y_pred), 0)))))
[ "def", "contrastive_loss", "(", "y_true", ",", "y_pred", ")", ":", "margin", "=", "1", "return", "K", ".", "mean", "(", "(", "(", "y_true", "*", "K", ".", "square", "(", "y_pred", ")", ")", "+", "(", "(", "1", "-", "y_true", ")", "*", "K", ".", "square", "(", "K", ".", "maximum", "(", "(", "margin", "-", "y_pred", ")", ",", "0", ")", ")", ")", ")", ")" ]
contrastive loss from hadsell-et-al .
train
true
38,987
def low_light(low_light=True): _sensehat.low_light = low_light return {'low_light': low_light}
[ "def", "low_light", "(", "low_light", "=", "True", ")", ":", "_sensehat", ".", "low_light", "=", "low_light", "return", "{", "'low_light'", ":", "low_light", "}" ]
sets the led matrix to low light mode .
train
false
38,988
def approxChiFunction(dim): dim = float(dim) return (sqrt(dim) * ((1 - (1 / (4 * dim))) + (1 / (21 * (dim ** 2)))))
[ "def", "approxChiFunction", "(", "dim", ")", ":", "dim", "=", "float", "(", "dim", ")", "return", "(", "sqrt", "(", "dim", ")", "*", "(", "(", "1", "-", "(", "1", "/", "(", "4", "*", "dim", ")", ")", ")", "+", "(", "1", "/", "(", "21", "*", "(", "dim", "**", "2", ")", ")", ")", ")", ")" ]
returns chi approximation according to: ostermeier 1997 .
train
false
38,989
def assert_request_user_is_admin_if_user_query_param_is_provided(request, user): requester_user = get_requester() is_admin = request_user_is_admin(request=request) if ((user != requester_user) and (not is_admin)): msg = '"user" attribute can only be provided by admins' raise AccessDeniedError(message=msg, user_db=requester_user)
[ "def", "assert_request_user_is_admin_if_user_query_param_is_provided", "(", "request", ",", "user", ")", ":", "requester_user", "=", "get_requester", "(", ")", "is_admin", "=", "request_user_is_admin", "(", "request", "=", "request", ")", "if", "(", "(", "user", "!=", "requester_user", ")", "and", "(", "not", "is_admin", ")", ")", ":", "msg", "=", "'\"user\" attribute can only be provided by admins'", "raise", "AccessDeniedError", "(", "message", "=", "msg", ",", "user_db", "=", "requester_user", ")" ]
function which asserts that the request user is administator if "user" query parameter is provided and doesnt match the current user .
train
false
38,990
def CDLGAPSIDESIDEWHITE(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLGAPSIDESIDEWHITE)
[ "def", "CDLGAPSIDESIDEWHITE", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLGAPSIDESIDEWHITE", ")" ]
up/down-gap side-by-side white lines .
train
false
38,991
def getRequestHeader(request, name): retVal = None if (request and name): _ = name.upper() retVal = max([(value if (_ == key.upper()) else None) for (key, value) in request.header_items()]) return retVal
[ "def", "getRequestHeader", "(", "request", ",", "name", ")", ":", "retVal", "=", "None", "if", "(", "request", "and", "name", ")", ":", "_", "=", "name", ".", "upper", "(", ")", "retVal", "=", "max", "(", "[", "(", "value", "if", "(", "_", "==", "key", ".", "upper", "(", ")", ")", "else", "None", ")", "for", "(", "key", ",", "value", ")", "in", "request", ".", "header_items", "(", ")", "]", ")", "return", "retVal" ]
solving an issue with an urllib2 request header case sensitivity reference: URL .
train
false
38,993
def object_to_dict(obj, list_depth=1): d = {} for (k, v) in dict(obj).items(): if hasattr(v, '__keylist__'): d[k] = object_to_dict(v, list_depth=list_depth) elif isinstance(v, list): d[k] = [] used = 0 for item in v: used = (used + 1) if (used > list_depth): break if hasattr(item, '__keylist__'): d[k].append(object_to_dict(item, list_depth=list_depth)) else: d[k].append(item) else: d[k] = v return d
[ "def", "object_to_dict", "(", "obj", ",", "list_depth", "=", "1", ")", ":", "d", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "dict", "(", "obj", ")", ".", "items", "(", ")", ":", "if", "hasattr", "(", "v", ",", "'__keylist__'", ")", ":", "d", "[", "k", "]", "=", "object_to_dict", "(", "v", ",", "list_depth", "=", "list_depth", ")", "elif", "isinstance", "(", "v", ",", "list", ")", ":", "d", "[", "k", "]", "=", "[", "]", "used", "=", "0", "for", "item", "in", "v", ":", "used", "=", "(", "used", "+", "1", ")", "if", "(", "used", ">", "list_depth", ")", ":", "break", "if", "hasattr", "(", "item", ",", "'__keylist__'", ")", ":", "d", "[", "k", "]", ".", "append", "(", "object_to_dict", "(", "item", ",", "list_depth", "=", "list_depth", ")", ")", "else", ":", "d", "[", "k", "]", ".", "append", "(", "item", ")", "else", ":", "d", "[", "k", "]", "=", "v", "return", "d" ]
convert suds object into serializable format .
train
false
38,994
def gumbel(loc=0.0, scale=1.0, size=None, dtype=float): rs = uniform(size=size, dtype=dtype) return cupy.ElementwiseKernel('T x, T loc, T scale', 'T y', 'y = loc - log(-log(1 - x)) * scale', 'gumbel_kernel')(rs, loc, scale, rs) return rs
[ "def", "gumbel", "(", "loc", "=", "0.0", ",", "scale", "=", "1.0", ",", "size", "=", "None", ",", "dtype", "=", "float", ")", ":", "rs", "=", "uniform", "(", "size", "=", "size", ",", "dtype", "=", "dtype", ")", "return", "cupy", ".", "ElementwiseKernel", "(", "'T x, T loc, T scale'", ",", "'T y'", ",", "'y = loc - log(-log(1 - x)) * scale'", ",", "'gumbel_kernel'", ")", "(", "rs", ",", "loc", ",", "scale", ",", "rs", ")", "return", "rs" ]
returns an array of samples drawn from a gumbel distribution .
train
false
38,995
def test_options_from_env_vars(script): script.environ['PIP_NO_INDEX'] = '1' result = script.pip('install', '-vvv', 'INITools', expect_error=True) assert ('Ignoring indexes:' in result.stdout), str(result) assert ('DistributionNotFound: No matching distribution found for INITools' in result.stdout)
[ "def", "test_options_from_env_vars", "(", "script", ")", ":", "script", ".", "environ", "[", "'PIP_NO_INDEX'", "]", "=", "'1'", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-vvv'", ",", "'INITools'", ",", "expect_error", "=", "True", ")", "assert", "(", "'Ignoring indexes:'", "in", "result", ".", "stdout", ")", ",", "str", "(", "result", ")", "assert", "(", "'DistributionNotFound: No matching distribution found for INITools'", "in", "result", ".", "stdout", ")" ]
test if configoptionparser reads env vars .
train
false
38,996
@login_required def map_remove(request, mapid, template='maps/map_remove.html'): map_obj = _resolve_map(request, mapid, 'base.delete_resourcebase', _PERMISSION_MSG_VIEW) if (request.method == 'GET'): return render_to_response(template, RequestContext(request, {'map': map_obj})) elif (request.method == 'POST'): if getattr(settings, 'SLACK_ENABLED', False): slack_message = None try: from geonode.contrib.slack.utils import build_slack_message_map slack_message = build_slack_message_map('map_delete', map_obj) except: print 'Could not build slack message for delete map.' delete_map.delay(object_id=map_obj.id) try: from geonode.contrib.slack.utils import send_slack_messages send_slack_messages(slack_message) except: print 'Could not send slack message for delete map.' else: delete_map.delay(object_id=map_obj.id) return HttpResponseRedirect(reverse('maps_browse'))
[ "@", "login_required", "def", "map_remove", "(", "request", ",", "mapid", ",", "template", "=", "'maps/map_remove.html'", ")", ":", "map_obj", "=", "_resolve_map", "(", "request", ",", "mapid", ",", "'base.delete_resourcebase'", ",", "_PERMISSION_MSG_VIEW", ")", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "return", "render_to_response", "(", "template", ",", "RequestContext", "(", "request", ",", "{", "'map'", ":", "map_obj", "}", ")", ")", "elif", "(", "request", ".", "method", "==", "'POST'", ")", ":", "if", "getattr", "(", "settings", ",", "'SLACK_ENABLED'", ",", "False", ")", ":", "slack_message", "=", "None", "try", ":", "from", "geonode", ".", "contrib", ".", "slack", ".", "utils", "import", "build_slack_message_map", "slack_message", "=", "build_slack_message_map", "(", "'map_delete'", ",", "map_obj", ")", "except", ":", "print", "'Could not build slack message for delete map.'", "delete_map", ".", "delay", "(", "object_id", "=", "map_obj", ".", "id", ")", "try", ":", "from", "geonode", ".", "contrib", ".", "slack", ".", "utils", "import", "send_slack_messages", "send_slack_messages", "(", "slack_message", ")", "except", ":", "print", "'Could not send slack message for delete map.'", "else", ":", "delete_map", ".", "delay", "(", "object_id", "=", "map_obj", ".", "id", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'maps_browse'", ")", ")" ]
delete a map .
train
false
38,998
def _rebuild_mod_path(orig_path, package_name, module): sys_path = [_normalize_cached(p) for p in sys.path] def safe_sys_path_index(entry): '\n Workaround for #520 and #513.\n ' try: return sys_path.index(entry) except ValueError: return float('inf') def position_in_sys_path(path): '\n Return the ordinal of the path based on its position in sys.path\n ' path_parts = path.split(os.sep) module_parts = (package_name.count('.') + 1) parts = path_parts[:(- module_parts)] return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) orig_path.sort(key=position_in_sys_path) module.__path__[:] = [_normalize_cached(p) for p in orig_path]
[ "def", "_rebuild_mod_path", "(", "orig_path", ",", "package_name", ",", "module", ")", ":", "sys_path", "=", "[", "_normalize_cached", "(", "p", ")", "for", "p", "in", "sys", ".", "path", "]", "def", "safe_sys_path_index", "(", "entry", ")", ":", "try", ":", "return", "sys_path", ".", "index", "(", "entry", ")", "except", "ValueError", ":", "return", "float", "(", "'inf'", ")", "def", "position_in_sys_path", "(", "path", ")", ":", "path_parts", "=", "path", ".", "split", "(", "os", ".", "sep", ")", "module_parts", "=", "(", "package_name", ".", "count", "(", "'.'", ")", "+", "1", ")", "parts", "=", "path_parts", "[", ":", "(", "-", "module_parts", ")", "]", "return", "safe_sys_path_index", "(", "_normalize_cached", "(", "os", ".", "sep", ".", "join", "(", "parts", ")", ")", ")", "orig_path", ".", "sort", "(", "key", "=", "position_in_sys_path", ")", "module", ".", "__path__", "[", ":", "]", "=", "[", "_normalize_cached", "(", "p", ")", "for", "p", "in", "orig_path", "]" ]
rebuild module .
train
true
38,999
def _expand_expected_codes(codes): retval = set() for code in codes.replace(',', ' ').split(' '): code = code.strip() if (not code): continue elif ('-' in code): (low, hi) = code.split('-')[:2] retval.update((str(i) for i in xrange(int(low), (int(hi) + 1)))) else: retval.add(code) return retval
[ "def", "_expand_expected_codes", "(", "codes", ")", ":", "retval", "=", "set", "(", ")", "for", "code", "in", "codes", ".", "replace", "(", "','", ",", "' '", ")", ".", "split", "(", "' '", ")", ":", "code", "=", "code", ".", "strip", "(", ")", "if", "(", "not", "code", ")", ":", "continue", "elif", "(", "'-'", "in", "code", ")", ":", "(", "low", ",", "hi", ")", "=", "code", ".", "split", "(", "'-'", ")", "[", ":", "2", "]", "retval", ".", "update", "(", "(", "str", "(", "i", ")", "for", "i", "in", "xrange", "(", "int", "(", "low", ")", ",", "(", "int", "(", "hi", ")", "+", "1", ")", ")", ")", ")", "else", ":", "retval", ".", "add", "(", "code", ")", "return", "retval" ]
expand the expected code string in set of codes .
train
false
39,000
@login_required @require_POST def add_to_locale(request, locale_code, role): locale = get_object_or_404(Locale, locale=locale_code) if (not _user_can_edit(request.user, locale)): raise PermissionDenied form = AddUserForm(request.POST) if form.is_valid(): for user in form.cleaned_data['users']: getattr(locale, ROLE_ATTRS[role]).add(user) msg = _('{users} added successfully!').format(users=request.POST.get('users')) messages.add_message(request, messages.SUCCESS, msg) return HttpResponseRedirect(locale.get_absolute_url()) msg = _('There were errors adding users, see below.') messages.add_message(request, messages.ERROR, msg) return locale_details(request, locale_code, **{(role + '_form'): form})
[ "@", "login_required", "@", "require_POST", "def", "add_to_locale", "(", "request", ",", "locale_code", ",", "role", ")", ":", "locale", "=", "get_object_or_404", "(", "Locale", ",", "locale", "=", "locale_code", ")", "if", "(", "not", "_user_can_edit", "(", "request", ".", "user", ",", "locale", ")", ")", ":", "raise", "PermissionDenied", "form", "=", "AddUserForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "for", "user", "in", "form", ".", "cleaned_data", "[", "'users'", "]", ":", "getattr", "(", "locale", ",", "ROLE_ATTRS", "[", "role", "]", ")", ".", "add", "(", "user", ")", "msg", "=", "_", "(", "'{users} added successfully!'", ")", ".", "format", "(", "users", "=", "request", ".", "POST", ".", "get", "(", "'users'", ")", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "msg", ")", "return", "HttpResponseRedirect", "(", "locale", ".", "get_absolute_url", "(", ")", ")", "msg", "=", "_", "(", "'There were errors adding users, see below.'", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "ERROR", ",", "msg", ")", "return", "locale_details", "(", "request", ",", "locale_code", ",", "**", "{", "(", "role", "+", "'_form'", ")", ":", "form", "}", ")" ]
add a user to the locale role .
train
false
39,001
def test_url_completion_delete_quickmark(qtmodeltester, config_stub, web_history, quickmarks, bookmarks, qtbot): config_stub.data['completion'] = {'timestamp-format': '%Y-%m-%d', 'web-history-max-items': 2} model = urlmodel.UrlCompletionModel() qtmodeltester.data_display_may_return_none = True qtmodeltester.check(model) view = _mock_view_index(model, 0, 1, qtbot) model.delete_cur_item(view) assert ('aw' in quickmarks.marks) assert ('ddg' not in quickmarks.marks) assert ('wiki' in quickmarks.marks)
[ "def", "test_url_completion_delete_quickmark", "(", "qtmodeltester", ",", "config_stub", ",", "web_history", ",", "quickmarks", ",", "bookmarks", ",", "qtbot", ")", ":", "config_stub", ".", "data", "[", "'completion'", "]", "=", "{", "'timestamp-format'", ":", "'%Y-%m-%d'", ",", "'web-history-max-items'", ":", "2", "}", "model", "=", "urlmodel", ".", "UrlCompletionModel", "(", ")", "qtmodeltester", ".", "data_display_may_return_none", "=", "True", "qtmodeltester", ".", "check", "(", "model", ")", "view", "=", "_mock_view_index", "(", "model", ",", "0", ",", "1", ",", "qtbot", ")", "model", ".", "delete_cur_item", "(", "view", ")", "assert", "(", "'aw'", "in", "quickmarks", ".", "marks", ")", "assert", "(", "'ddg'", "not", "in", "quickmarks", ".", "marks", ")", "assert", "(", "'wiki'", "in", "quickmarks", ".", "marks", ")" ]
test deleting a bookmark from the url completion model .
train
false
39,002
def cross_above(values1, values2, start=(-2), end=None): return _cross_impl(values1, values2, start, end, (lambda x: (x > 0)))
[ "def", "cross_above", "(", "values1", ",", "values2", ",", "start", "=", "(", "-", "2", ")", ",", "end", "=", "None", ")", ":", "return", "_cross_impl", "(", "values1", ",", "values2", ",", "start", ",", "end", ",", "(", "lambda", "x", ":", "(", "x", ">", "0", ")", ")", ")" ]
checks for a cross above conditions over the specified period between two dataseries objects .
train
false
39,004
@task_filter(protocol='total-size', batch=True) def fetch_by_total_size(keyword, tasks): m = re.match('^(\\d+(?:\\.\\d+)?)([GM])?$', keyword, flags=re.I) assert m, keyword (n, u) = m.groups() limit = (float(n) * {None: 1, 'G': (1000 ** 3), 'g': (1000 ** 3), 'M': (1000 ** 2), 'm': (1000 ** 2)}[u]) total = 0 results = [] for t in tasks: total += t['size'] if (total <= limit): results.append(t) else: return results return results
[ "@", "task_filter", "(", "protocol", "=", "'total-size'", ",", "batch", "=", "True", ")", "def", "fetch_by_total_size", "(", "keyword", ",", "tasks", ")", ":", "m", "=", "re", ".", "match", "(", "'^(\\\\d+(?:\\\\.\\\\d+)?)([GM])?$'", ",", "keyword", ",", "flags", "=", "re", ".", "I", ")", "assert", "m", ",", "keyword", "(", "n", ",", "u", ")", "=", "m", ".", "groups", "(", ")", "limit", "=", "(", "float", "(", "n", ")", "*", "{", "None", ":", "1", ",", "'G'", ":", "(", "1000", "**", "3", ")", ",", "'g'", ":", "(", "1000", "**", "3", ")", ",", "'M'", ":", "(", "1000", "**", "2", ")", ",", "'m'", ":", "(", "1000", "**", "2", ")", "}", "[", "u", "]", ")", "total", "=", "0", "results", "=", "[", "]", "for", "t", "in", "tasks", ":", "total", "+=", "t", "[", "'size'", "]", "if", "(", "total", "<=", "limit", ")", ":", "results", ".", "append", "(", "t", ")", "else", ":", "return", "results", "return", "results" ]
example: lx download total_size:1g lx download 0/total_size:1g lx list total_size:1g .
train
false
39,006
def _ws(text, keep_spaces=False): if keep_spaces: return keep_space(_(text)) else: return websafe(_(text))
[ "def", "_ws", "(", "text", ",", "keep_spaces", "=", "False", ")", ":", "if", "keep_spaces", ":", "return", "keep_space", "(", "_", "(", "text", ")", ")", "else", ":", "return", "websafe", "(", "_", "(", "text", ")", ")" ]
helper function to get html escaped output from gettext .
train
false
39,007
def zero_grad(x): return zero_grad_(x)
[ "def", "zero_grad", "(", "x", ")", ":", "return", "zero_grad_", "(", "x", ")" ]
consider an expression constant when computing gradients .
train
false
39,008
def mc2mvsk(args): (mc, mc2, mc3, mc4) = args skew = np.divide(mc3, (mc2 ** 1.5)) kurt = (np.divide(mc4, (mc2 ** 2.0)) - 3.0) return (mc, mc2, skew, kurt)
[ "def", "mc2mvsk", "(", "args", ")", ":", "(", "mc", ",", "mc2", ",", "mc3", ",", "mc4", ")", "=", "args", "skew", "=", "np", ".", "divide", "(", "mc3", ",", "(", "mc2", "**", "1.5", ")", ")", "kurt", "=", "(", "np", ".", "divide", "(", "mc4", ",", "(", "mc2", "**", "2.0", ")", ")", "-", "3.0", ")", "return", "(", "mc", ",", "mc2", ",", "skew", ",", "kurt", ")" ]
convert central moments to mean .
train
false
39,010
def get_reviews_by_repository_id_changeset_revision(app, repository_id, changeset_revision): sa_session = app.model.context.current return sa_session.query(app.model.RepositoryReview).filter(and_((app.model.RepositoryReview.repository_id == app.security.decode_id(repository_id)), (app.model.RepositoryReview.changeset_revision == changeset_revision))).all()
[ "def", "get_reviews_by_repository_id_changeset_revision", "(", "app", ",", "repository_id", ",", "changeset_revision", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "RepositoryReview", ")", ".", "filter", "(", "and_", "(", "(", "app", ".", "model", ".", "RepositoryReview", ".", "repository_id", "==", "app", ".", "security", ".", "decode_id", "(", "repository_id", ")", ")", ",", "(", "app", ".", "model", ".", "RepositoryReview", ".", "changeset_revision", "==", "changeset_revision", ")", ")", ")", ".", "all", "(", ")" ]
get all repository_reviews from the database via repository id and changeset_revision .
train
false
39,011
def plot_lbp_model(ax, binary_values): theta = np.deg2rad(45) R = 1 r = 0.15 w = 1.5 gray = '0.5' plot_circle(ax, (0, 0), radius=r, color=gray) for (i, facecolor) in enumerate(binary_values): x = (R * np.cos((i * theta))) y = (R * np.sin((i * theta))) plot_circle(ax, (x, y), radius=r, color=str(facecolor)) for x in np.linspace((- w), w, 4): ax.axvline(x, color=gray) ax.axhline(x, color=gray) ax.axis('image') ax.axis('off') size = (w + 0.2) ax.set_xlim((- size), size) ax.set_ylim((- size), size)
[ "def", "plot_lbp_model", "(", "ax", ",", "binary_values", ")", ":", "theta", "=", "np", ".", "deg2rad", "(", "45", ")", "R", "=", "1", "r", "=", "0.15", "w", "=", "1.5", "gray", "=", "'0.5'", "plot_circle", "(", "ax", ",", "(", "0", ",", "0", ")", ",", "radius", "=", "r", ",", "color", "=", "gray", ")", "for", "(", "i", ",", "facecolor", ")", "in", "enumerate", "(", "binary_values", ")", ":", "x", "=", "(", "R", "*", "np", ".", "cos", "(", "(", "i", "*", "theta", ")", ")", ")", "y", "=", "(", "R", "*", "np", ".", "sin", "(", "(", "i", "*", "theta", ")", ")", ")", "plot_circle", "(", "ax", ",", "(", "x", ",", "y", ")", ",", "radius", "=", "r", ",", "color", "=", "str", "(", "facecolor", ")", ")", "for", "x", "in", "np", ".", "linspace", "(", "(", "-", "w", ")", ",", "w", ",", "4", ")", ":", "ax", ".", "axvline", "(", "x", ",", "color", "=", "gray", ")", "ax", ".", "axhline", "(", "x", ",", "color", "=", "gray", ")", "ax", ".", "axis", "(", "'image'", ")", "ax", ".", "axis", "(", "'off'", ")", "size", "=", "(", "w", "+", "0.2", ")", "ax", ".", "set_xlim", "(", "(", "-", "size", ")", ",", "size", ")", "ax", ".", "set_ylim", "(", "(", "-", "size", ")", ",", "size", ")" ]
draw the schematic for a local binary pattern .
train
false
39,013
def env_to_statement(env): source_file = env.get('file', None) if source_file: return ('. %s' % __escape(source_file, env)) execute = env.get('execute', None) if execute: return execute name = env['name'] value = __escape(env['value'], env) return ('%s=%s; export %s' % (name, value, name))
[ "def", "env_to_statement", "(", "env", ")", ":", "source_file", "=", "env", ".", "get", "(", "'file'", ",", "None", ")", "if", "source_file", ":", "return", "(", "'. %s'", "%", "__escape", "(", "source_file", ",", "env", ")", ")", "execute", "=", "env", ".", "get", "(", "'execute'", ",", "None", ")", "if", "execute", ":", "return", "execute", "name", "=", "env", "[", "'name'", "]", "value", "=", "__escape", "(", "env", "[", "'value'", "]", ",", "env", ")", "return", "(", "'%s=%s; export %s'", "%", "(", "name", ",", "value", ",", "name", ")", ")" ]
return the abstraction description of an environment variable definition into a statement for shell script .
train
true
39,014
def GenXinetd(svc='test', disable='no'): defaults = '\n defaults\n {\n instances = 60\n log_type = SYSLOG authpriv\n log_on_success = HOST PID\n log_on_failure = HOST\n cps = 25 30\n }\n includedir /etc/xinetd.d\n ' tmpl = ('\n service %s\n {\n disable = %s\n }\n ' % (svc, disable)) return {'/etc/xinetd.conf': defaults, ('/etc/xinetd.d/%s' % svc): tmpl}
[ "def", "GenXinetd", "(", "svc", "=", "'test'", ",", "disable", "=", "'no'", ")", ":", "defaults", "=", "'\\n defaults\\n {\\n instances = 60\\n log_type = SYSLOG authpriv\\n log_on_success = HOST PID\\n log_on_failure = HOST\\n cps = 25 30\\n }\\n includedir /etc/xinetd.d\\n '", "tmpl", "=", "(", "'\\n service %s\\n {\\n disable = %s\\n }\\n '", "%", "(", "svc", ",", "disable", ")", ")", "return", "{", "'/etc/xinetd.conf'", ":", "defaults", ",", "(", "'/etc/xinetd.d/%s'", "%", "svc", ")", ":", "tmpl", "}" ]
generate xinetd file .
train
false
39,015
def p_expression_number(p): p[0] = p[1]
[ "def", "p_expression_number", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
expression : number .
train
false
39,018
def goal(): return s3_rest_controller()
[ "def", "goal", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful controller for goals .
train
false
39,019
def gcdns_connect(module, provider=None): if (not HAS_LIBCLOUD_BASE): module.fail_json(msg='libcloud must be installed to use this module') provider = (provider or Provider.GOOGLE) return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
[ "def", "gcdns_connect", "(", "module", ",", "provider", "=", "None", ")", ":", "if", "(", "not", "HAS_LIBCLOUD_BASE", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "'libcloud must be installed to use this module'", ")", "provider", "=", "(", "provider", "or", "Provider", ".", "GOOGLE", ")", "return", "gcp_connect", "(", "module", ",", "provider", ",", "get_driver", ",", "USER_AGENT_PRODUCT", ",", "USER_AGENT_VERSION", ")" ]
return a gcp connection for google cloud dns .
train
false
39,020
@register.inclusion_tag('bootstrap/breadcrumb.html', takes_context=True) def breadcrumb_nav(context): breadcrumb = [] dashboard = context.request.horizon['dashboard'] try: panel_groups = dashboard.get_panel_groups() except KeyError: panel_groups = None panel_group = None panel = context.request.horizon['panel'] if panel_groups: for group in panel_groups.values(): if ((panel.slug in group.panels) and (group.slug != 'default')): panel_group = group break if (panel.get_absolute_url() == context.request.path): panel = None custom_breadcrumb = context.get('custom_breadcrumb') breadcrumb.append((dashboard.name, None)) if panel_group: breadcrumb.append((panel_group.name, None)) if panel: breadcrumb.append((panel.name, panel.get_absolute_url())) if custom_breadcrumb: breadcrumb.extend(custom_breadcrumb) breadcrumb.append((context.get('page_title'), None)) return {'breadcrumb': breadcrumb}
[ "@", "register", ".", "inclusion_tag", "(", "'bootstrap/breadcrumb.html'", ",", "takes_context", "=", "True", ")", "def", "breadcrumb_nav", "(", "context", ")", ":", "breadcrumb", "=", "[", "]", "dashboard", "=", "context", ".", "request", ".", "horizon", "[", "'dashboard'", "]", "try", ":", "panel_groups", "=", "dashboard", ".", "get_panel_groups", "(", ")", "except", "KeyError", ":", "panel_groups", "=", "None", "panel_group", "=", "None", "panel", "=", "context", ".", "request", ".", "horizon", "[", "'panel'", "]", "if", "panel_groups", ":", "for", "group", "in", "panel_groups", ".", "values", "(", ")", ":", "if", "(", "(", "panel", ".", "slug", "in", "group", ".", "panels", ")", "and", "(", "group", ".", "slug", "!=", "'default'", ")", ")", ":", "panel_group", "=", "group", "break", "if", "(", "panel", ".", "get_absolute_url", "(", ")", "==", "context", ".", "request", ".", "path", ")", ":", "panel", "=", "None", "custom_breadcrumb", "=", "context", ".", "get", "(", "'custom_breadcrumb'", ")", "breadcrumb", ".", "append", "(", "(", "dashboard", ".", "name", ",", "None", ")", ")", "if", "panel_group", ":", "breadcrumb", ".", "append", "(", "(", "panel_group", ".", "name", ",", "None", ")", ")", "if", "panel", ":", "breadcrumb", ".", "append", "(", "(", "panel", ".", "name", ",", "panel", ".", "get_absolute_url", "(", ")", ")", ")", "if", "custom_breadcrumb", ":", "breadcrumb", ".", "extend", "(", "custom_breadcrumb", ")", "breadcrumb", ".", "append", "(", "(", "context", ".", "get", "(", "'page_title'", ")", ",", "None", ")", ")", "return", "{", "'breadcrumb'", ":", "breadcrumb", "}" ]
a logic heavy function for automagically creating a breadcrumb .
train
true
39,023
def _register_model_handler(handler_class): _registry.frozen = False _registry.register(handler_class(registry=_registry)) _registry.frozen = True
[ "def", "_register_model_handler", "(", "handler_class", ")", ":", "_registry", ".", "frozen", "=", "False", "_registry", ".", "register", "(", "handler_class", "(", "registry", "=", "_registry", ")", ")", "_registry", ".", "frozen", "=", "True" ]
register a new model handler .
train
false