id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
43,894
@hook.command(permissions=['op_mute', 'op'], autohelp=False)
def unmute(text, conn, chan, notice):
    """Bot command: clear channel mode +m ("unmute").

    Delegates to mode_cmd_no_target, which applies the '-m' mode change
    and reports the result via notice(). Requires the op_mute or op
    permission; autohelp is disabled because the argument is optional.
    """
    mode_cmd_no_target('-m', 'unmute', text, chan, conn, notice)
[ "@", "hook", ".", "command", "(", "permissions", "=", "[", "'op_mute'", ",", "'op'", "]", ",", "autohelp", "=", "False", ")", "def", "unmute", "(", "text", ",", "conn", ",", "chan", ",", "notice", ")", ":", "mode_cmd_no_target", "(", "'-m'", ",", "'unmute'", ",", "text", ",", "chan", ",", "conn", ",", "notice", ")" ]
unmute a user .
train
false
43,895
def tsplit(string, delimiters):
    """Split *string* on every delimiter in *delimiters*.

    Every delimiter is first collapsed onto the first one, then a single
    str.split is performed. With an empty delimiter collection the whole
    string is returned as a one-element list.
    """
    delimiters = tuple(delimiters)
    if not delimiters:
        return [string]
    primary, rest = delimiters[0], delimiters[1:]
    for delim in rest:
        string = string.replace(delim, primary)
    return string.split(primary)
[ "def", "tsplit", "(", "string", ",", "delimiters", ")", ":", "delimiters", "=", "tuple", "(", "delimiters", ")", "if", "(", "len", "(", "delimiters", ")", "<", "1", ")", ":", "return", "[", "string", "]", "final_delimiter", "=", "delimiters", "[", "0", "]", "for", "i", "in", "delimiters", "[", "1", ":", "]", ":", "string", "=", "string", ".", "replace", "(", "i", ",", "final_delimiter", ")", "return", "string", ".", "split", "(", "final_delimiter", ")" ]
behaves str .
train
true
43,896
def bf_search(graph, root=None):
    """Breadth-first search of *graph*, returning nodes in visit order.

    graph must expose nodes() -> sequence of nodes and children(node) ->
    sequence of successors. If root is None, the first node returned by
    graph.nodes() is used; an empty graph yields an empty list.

    Fix: the original popped the queue front with ``current[0]`` /
    ``current = current[1:]``, which copies the list each iteration
    (accidental O(n^2)); a deque gives O(1) popleft with identical
    visit order.
    """
    from collections import deque

    seen = {}
    search = []
    if len(graph.nodes()) < 1:
        return search
    if root is None:
        root = graph.nodes()[0]
    seen[root] = 1
    search.append(root)
    queue = deque(graph.children(root))
    while queue:
        node = queue.popleft()
        # Duplicates may sit in the queue; they are filtered on dequeue,
        # exactly as the original did.
        if node not in seen:
            search.append(node)
            seen[node] = 1
            queue.extend(graph.children(node))
    return search
[ "def", "bf_search", "(", "graph", ",", "root", "=", "None", ")", ":", "seen", "=", "{", "}", "search", "=", "[", "]", "if", "(", "len", "(", "graph", ".", "nodes", "(", ")", ")", "<", "1", ")", ":", "return", "search", "if", "(", "root", "is", "None", ")", ":", "root", "=", "graph", ".", "nodes", "(", ")", "[", "0", "]", "seen", "[", "root", "]", "=", "1", "search", ".", "append", "(", "root", ")", "current", "=", "graph", ".", "children", "(", "root", ")", "while", "(", "len", "(", "current", ")", ">", "0", ")", ":", "node", "=", "current", "[", "0", "]", "current", "=", "current", "[", "1", ":", "]", "if", "(", "node", "not", "in", "seen", ")", ":", "search", ".", "append", "(", "node", ")", "seen", "[", "node", "]", "=", "1", "current", ".", "extend", "(", "graph", ".", "children", "(", "node", ")", ")", "return", "search" ]
breadth first search of g .
train
false
43,897
def parse_grep_line(line):
    """Parse one grep output line into (filename, line_number, contents).

    Splits on the first two ':' separators only, so the contents part may
    itself contain colons. Returns None when the line does not have the
    expected shape.
    """
    try:
        filename, line_number, contents = line.split(u':', 2)
    except ValueError:
        return None
    return (filename, line_number, contents)
[ "def", "parse_grep_line", "(", "line", ")", ":", "try", ":", "(", "filename", ",", "line_number", ",", "contents", ")", "=", "line", ".", "split", "(", "u':'", ",", "2", ")", "result", "=", "(", "filename", ",", "line_number", ",", "contents", ")", "except", "ValueError", ":", "result", "=", "None", "return", "result" ]
parse a grep result line into .
train
false
43,898
def _resolve_symlink(path, max_depth=64):
    """Resolve a Windows symlink chain to its final target path.

    Follows readlink() repeatedly until the path is no longer a link,
    guarding against cycles (via the set of paths already visited) and
    against chains longer than max_depth.

    Raises SaltInvocationError on pre-Vista Windows (no symlink support)
    and CommandExecutionError on a loop or an over-deep chain.
    """
    # Vista is NT 6.x; earlier kernels cannot have symlinks at all.
    if (sys.getwindowsversion().major < 6):
        raise SaltInvocationError('Symlinks are only supported on Windows Vista or later.')
    paths_seen = set((path,))
    cur_depth = 0
    while is_link(path):
        path = readlink(path)
        if (path in paths_seen):
            raise CommandExecutionError('The given path is involved in a symlink loop.')
        paths_seen.add(path)
        cur_depth += 1
        if (cur_depth > max_depth):
            raise CommandExecutionError('Too many levels of symbolic links.')
    return path
[ "def", "_resolve_symlink", "(", "path", ",", "max_depth", "=", "64", ")", ":", "if", "(", "sys", ".", "getwindowsversion", "(", ")", ".", "major", "<", "6", ")", ":", "raise", "SaltInvocationError", "(", "'Symlinks are only supported on Windows Vista or later.'", ")", "paths_seen", "=", "set", "(", "(", "path", ",", ")", ")", "cur_depth", "=", "0", "while", "is_link", "(", "path", ")", ":", "path", "=", "readlink", "(", "path", ")", "if", "(", "path", "in", "paths_seen", ")", ":", "raise", "CommandExecutionError", "(", "'The given path is involved in a symlink loop.'", ")", "paths_seen", ".", "add", "(", "path", ")", "cur_depth", "+=", "1", "if", "(", "cur_depth", ">", "max_depth", ")", ":", "raise", "CommandExecutionError", "(", "'Too many levels of symbolic links.'", ")", "return", "path" ]
resolves the given symlink path to its real path .
train
true
43,899
def _nova_to_osvif_subnet(subnet):
    """Convert a Nova subnet model into an os_vif Subnet object.

    DNS server addresses, IPs and routes are always copied (the latter
    two via the companion _nova_to_osvif_* converters); cidr and gateway
    are assigned only when present, since os_vif treats them as optional
    fields.
    """
    dnsaddrs = [ip['address'] for ip in subnet['dns']]
    obj = objects.subnet.Subnet(dns=dnsaddrs, ips=_nova_to_osvif_ips(subnet['ips']), routes=_nova_to_osvif_routes(subnet['routes']))
    if (subnet['cidr'] is not None):
        obj.cidr = subnet['cidr']
    # Gateway may be absent entirely or present with a null address.
    if ((subnet['gateway'] is not None) and (subnet['gateway']['address'] is not None)):
        obj.gateway = subnet['gateway']['address']
    return obj
[ "def", "_nova_to_osvif_subnet", "(", "subnet", ")", ":", "dnsaddrs", "=", "[", "ip", "[", "'address'", "]", "for", "ip", "in", "subnet", "[", "'dns'", "]", "]", "obj", "=", "objects", ".", "subnet", ".", "Subnet", "(", "dns", "=", "dnsaddrs", ",", "ips", "=", "_nova_to_osvif_ips", "(", "subnet", "[", "'ips'", "]", ")", ",", "routes", "=", "_nova_to_osvif_routes", "(", "subnet", "[", "'routes'", "]", ")", ")", "if", "(", "subnet", "[", "'cidr'", "]", "is", "not", "None", ")", ":", "obj", ".", "cidr", "=", "subnet", "[", "'cidr'", "]", "if", "(", "(", "subnet", "[", "'gateway'", "]", "is", "not", "None", ")", "and", "(", "subnet", "[", "'gateway'", "]", "[", "'address'", "]", "is", "not", "None", ")", ")", ":", "obj", ".", "gateway", "=", "subnet", "[", "'gateway'", "]", "[", "'address'", "]", "return", "obj" ]
convert nova subnet object into os_vif object .
train
false
43,900
def tuple_eval(source):
    """Safely evaluate a Python tuple literal of numbers and strings.

    *source* must parse (in eval mode) to a tuple whose elements are all
    string or number literals, optionally with a unary +/- sign on the
    numbers; anything else raises ValueError.

    Fix: the original tested isinstance against ast.Str/ast.Num, which
    are deprecated since Python 3.8 and dropped from modern Python; the
    check now uses ast.Constant with explicit value types. bool is
    excluded, matching the old ast.Num semantics.
    """
    node = ast.parse(source, '<source>', mode='eval')
    if not isinstance(node.body, ast.Tuple):
        raise ValueError(('%r is not a tuple literal' % source))

    def _is_num_or_str(el):
        # A signed element must wrap a plain numeric literal.
        if isinstance(el, ast.UnaryOp) and isinstance(el.op, (ast.UAdd, ast.USub)):
            el = el.operand
            allowed = (int, float, complex)
        else:
            allowed = (int, float, complex, str)
        return (isinstance(el, ast.Constant)
                and isinstance(el.value, allowed)
                and not isinstance(el.value, bool))

    if not all(_is_num_or_str(el) for el in node.body.elts):
        raise ValueError('Can only contain numbers or strings')
    return literal_eval(source)
[ "def", "tuple_eval", "(", "source", ")", ":", "node", "=", "ast", ".", "parse", "(", "source", ",", "'<source>'", ",", "mode", "=", "'eval'", ")", "if", "(", "not", "isinstance", "(", "node", ".", "body", ",", "ast", ".", "Tuple", ")", ")", ":", "raise", "ValueError", "(", "(", "'%r is not a tuple literal'", "%", "source", ")", ")", "if", "(", "not", "all", "(", "(", "(", "isinstance", "(", "el", ",", "(", "ast", ".", "Str", ",", "ast", ".", "Num", ")", ")", "or", "(", "isinstance", "(", "el", ",", "ast", ".", "UnaryOp", ")", "and", "isinstance", "(", "el", ".", "op", ",", "(", "ast", ".", "UAdd", ",", "ast", ".", "USub", ")", ")", "and", "isinstance", "(", "el", ".", "operand", ",", "ast", ".", "Num", ")", ")", ")", "for", "el", "in", "node", ".", "body", ".", "elts", ")", ")", ")", ":", "raise", "ValueError", "(", "'Can only contain numbers or strings'", ")", "return", "literal_eval", "(", "source", ")" ]
evaluate a python tuple literal source where the elements are constrained to be int .
train
false
43,903
def allocate_pixels_buffer(width, height):
    """Allocate a zero-initialized ctypes buffer for a width x height image.

    Builds a 3-D ctypes array type of shape (4, height, width) of _c_uint
    and instantiates it — i.e. 4 * height * width unsigned ints in total,
    suitable to hand to OSMesaMakeCurrent.

    NOTE(review): the layout is 4 planes of (height x width) rather than
    interleaved (height x width x 4) components — presumably only the
    total byte size matters to OSMesa; confirm against the OSMesa docs.
    """
    return (((_c_uint * width) * height) * 4)()
[ "def", "allocate_pixels_buffer", "(", "width", ",", "height", ")", ":", "return", "(", "(", "(", "_c_uint", "*", "width", ")", "*", "height", ")", "*", "4", ")", "(", ")" ]
helper function to allocate a buffer to contain an image of width * height suitable for osmesamakecurrent .
train
false
43,905
@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
    """Context manager that temporarily sets obj.attr to new_val.

    On exit the previous value is restored; if the attribute did not
    exist beforehand, it is deleted again.
    """
    existed = hasattr(obj, attr)
    saved = getattr(obj, attr) if existed else None
    setattr(obj, attr, new_val)
    try:
        yield
    finally:
        if existed:
            setattr(obj, attr, saved)
        else:
            delattr(obj, attr)
[ "@", "contextlib", ".", "contextmanager", "def", "swap_attr", "(", "obj", ",", "attr", ",", "new_val", ")", ":", "if", "hasattr", "(", "obj", ",", "attr", ")", ":", "real_val", "=", "getattr", "(", "obj", ",", "attr", ")", "setattr", "(", "obj", ",", "attr", ",", "new_val", ")", "try", ":", "(", "yield", ")", "finally", ":", "setattr", "(", "obj", ",", "attr", ",", "real_val", ")", "else", ":", "setattr", "(", "obj", ",", "attr", ",", "new_val", ")", "try", ":", "(", "yield", ")", "finally", ":", "delattr", "(", "obj", ",", "attr", ")" ]
temporary swap out an attribute with a new object .
train
false
43,906
def get_ha1_dict_plain(user_password_dict):
    """Build a get_ha1(realm, username) callable from a plaintext
    {username: password} dictionary.

    The returned function computes the HTTP digest-auth HA1 value
    md5_hex('username:realm:password'), or returns None for unknown
    users (or users with a falsy password).
    """
    def get_ha1(realm, username):
        password = user_password_dict.get(username)
        if not password:
            return None
        return md5_hex('%s:%s:%s' % (username, realm, password))

    return get_ha1
[ "def", "get_ha1_dict_plain", "(", "user_password_dict", ")", ":", "def", "get_ha1", "(", "realm", ",", "username", ")", ":", "password", "=", "user_password_dict", ".", "get", "(", "username", ")", "if", "password", ":", "return", "md5_hex", "(", "(", "'%s:%s:%s'", "%", "(", "username", ",", "realm", ",", "password", ")", ")", ")", "return", "None", "return", "get_ha1" ]
returns a get_ha1 function which obtains a plaintext password from a dictionary of the form: {username : password} .
train
false
43,910
def animal_pre_save_check(signal, sender, instance, **kwargs):
    """Signal receiver used to check the type of data loaded from fixtures.

    Appends a human-readable snapshot of the instance's count and weight
    values *and their Python types* to the module-level pre_save_checks
    list, so a test can later assert on them.
    """
    pre_save_checks.append((('Count = %s (%s)' % (instance.count, type(instance.count))), ('Weight = %s (%s)' % (instance.weight, type(instance.weight)))))
[ "def", "animal_pre_save_check", "(", "signal", ",", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "pre_save_checks", ".", "append", "(", "(", "(", "'Count = %s (%s)'", "%", "(", "instance", ".", "count", ",", "type", "(", "instance", ".", "count", ")", ")", ")", ",", "(", "'Weight = %s (%s)'", "%", "(", "instance", ".", "weight", ",", "type", "(", "instance", ".", "weight", ")", ")", ")", ")", ")" ]
a signal that is used to check the type of data loaded from fixtures .
train
false
43,912
def remove_gulp_files():
    """Delete gulp-specific files (gulpfile.js) from the generated project.

    Raises OSError if a file is missing, same as os.remove itself.
    """
    for name in ('gulpfile.js',):
        os.remove(os.path.join(PROJECT_DIRECTORY, name))
[ "def", "remove_gulp_files", "(", ")", ":", "for", "filename", "in", "[", "'gulpfile.js'", "]", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "PROJECT_DIRECTORY", ",", "filename", ")", ")" ]
removes files needed for grunt if it isnt going to be used .
train
false
43,913
def get_host_ref_for_vm(session, instance):
    """Return a moref to the host currently running *instance*.

    Looks up the VM's managed object reference, then reads its
    'runtime.host' property through the vSphere session.
    """
    vm_ref = get_vm_ref(session, instance)
    return session._call_method(vutil, 'get_object_property', vm_ref, 'runtime.host')
[ "def", "get_host_ref_for_vm", "(", "session", ",", "instance", ")", ":", "vm_ref", "=", "get_vm_ref", "(", "session", ",", "instance", ")", "return", "session", ".", "_call_method", "(", "vutil", ",", "'get_object_property'", ",", "vm_ref", ",", "'runtime.host'", ")" ]
get a moref to the esxi host currently running an instance .
train
false
43,914
def balanced_tree(r, h, create_using=None):
    """Return the perfectly balanced r-ary tree of height h.

    The node count is the geometric series 1 + r + r**2 + ... + r**h
    (which also covers the r == 1 case, giving h + 1 nodes); building
    the tree itself is delegated to full_rary_tree.
    """
    n = sum(r ** level for level in range(h + 1))
    return full_rary_tree(r, n, create_using=create_using)
[ "def", "balanced_tree", "(", "r", ",", "h", ",", "create_using", "=", "None", ")", ":", "if", "(", "r", "==", "1", ")", ":", "n", "=", "(", "h", "+", "1", ")", "else", ":", "n", "=", "(", "(", "1", "-", "(", "r", "**", "(", "h", "+", "1", ")", ")", ")", "//", "(", "1", "-", "r", ")", ")", "return", "full_rary_tree", "(", "r", ",", "n", ",", "create_using", "=", "create_using", ")" ]
return the perfectly balanced r-ary tree of height h .
train
false
43,916
def printDecoderBriefs(decoders):
    """Print a brief overview of each decoder (used by --help with a decoder).

    Python 2 code (print statements). For every decoder: its module name,
    a '=' separator line, the long description when available (falling
    back to the short description), and its default filter.
    """
    print
    for d in decoders.values():
        print 'Module name:', d.name
        print ('=' * 20)
        if d.longdescription:
            print d.longdescription
        else:
            print d.description
        print ('Default filter: %s' % d.filter)
    return
[ "def", "printDecoderBriefs", "(", "decoders", ")", ":", "print", "for", "d", "in", "decoders", ".", "values", "(", ")", ":", "print", "'Module name:'", ",", "d", ".", "name", "print", "(", "'='", "*", "20", ")", "if", "d", ".", "longdescription", ":", "print", "d", ".", "longdescription", "else", ":", "print", "d", ".", "description", "print", "(", "'Default filter: %s'", "%", "d", ".", "filter", ")", "return" ]
prints a brief overview of a decoder when using --help with a decoder .
train
false
43,917
@db_api.retry_if_session_inactive()
def remove_provisioning_component(context, object_id, object_type, entity, standard_attr_id=None):
    """Remove one entity's provisioning block for an object, if present.

    Does NOT trigger the provisioning-complete callback. standard_attr_id
    may be passed by callers that already know it, to skip the lookup by
    object_id/object_type.

    Returns True when a record was deleted, False when none existed (or
    the object's standard-attribute row could not be found).
    """
    with context.session.begin(subtransactions=True):
        standard_attr_id = (standard_attr_id or _get_standard_attr_id(context, object_id, object_type))
        if (not standard_attr_id):
            return False
        record = context.session.query(pb_model.ProvisioningBlock).filter_by(standard_attr_id=standard_attr_id, entity=entity).first()
        if record:
            context.session.delete(record)
            return True
        return False
[ "@", "db_api", ".", "retry_if_session_inactive", "(", ")", "def", "remove_provisioning_component", "(", "context", ",", "object_id", ",", "object_type", ",", "entity", ",", "standard_attr_id", "=", "None", ")", ":", "with", "context", ".", "session", ".", "begin", "(", "subtransactions", "=", "True", ")", ":", "standard_attr_id", "=", "(", "standard_attr_id", "or", "_get_standard_attr_id", "(", "context", ",", "object_id", ",", "object_type", ")", ")", "if", "(", "not", "standard_attr_id", ")", ":", "return", "False", "record", "=", "context", ".", "session", ".", "query", "(", "pb_model", ".", "ProvisioningBlock", ")", ".", "filter_by", "(", "standard_attr_id", "=", "standard_attr_id", ",", "entity", "=", "entity", ")", ".", "first", "(", ")", "if", "record", ":", "context", ".", "session", ".", "delete", "(", "record", ")", "return", "True", "return", "False" ]
removes a provisioning block for an object with triggering a callback .
train
false
43,919
def expand_sig(sig):
    """Normalize a signature spec into a 4-tuple.

    Accepts either a bare callable, a (num_pos_only, func) pair, or a
    (num_pos_only, func, keyword_only) triple, and returns
    (num_pos_only, func, keyword_only + keyword_exclude, sigspec),
    where keyword_exclude holds the keyword names derived from the
    introspected signature that must be excluded.
    """
    if isinstance(sig, tuple):
        if (len(sig) == 3):
            (num_pos_only, func, keyword_only) = sig
            # The keyword-only entry must itself be a tuple.
            assert isinstance(sig[(-1)], tuple)
        else:
            (num_pos_only, func) = sig
            keyword_only = ()
        sigspec = signature_or_spec(func)
    else:
        # Bare callable: derive the positional-only count by introspection.
        func = sig
        sigspec = signature_or_spec(func)
        num_pos_only = num_pos_args(sigspec)
        keyword_only = ()
    keyword_exclude = get_exclude_keywords(num_pos_only, sigspec)
    return (num_pos_only, func, (keyword_only + keyword_exclude), sigspec)
[ "def", "expand_sig", "(", "sig", ")", ":", "if", "isinstance", "(", "sig", ",", "tuple", ")", ":", "if", "(", "len", "(", "sig", ")", "==", "3", ")", ":", "(", "num_pos_only", ",", "func", ",", "keyword_only", ")", "=", "sig", "assert", "isinstance", "(", "sig", "[", "(", "-", "1", ")", "]", ",", "tuple", ")", "else", ":", "(", "num_pos_only", ",", "func", ")", "=", "sig", "keyword_only", "=", "(", ")", "sigspec", "=", "signature_or_spec", "(", "func", ")", "else", ":", "func", "=", "sig", "sigspec", "=", "signature_or_spec", "(", "func", ")", "num_pos_only", "=", "num_pos_args", "(", "sigspec", ")", "keyword_only", "=", "(", ")", "keyword_exclude", "=", "get_exclude_keywords", "(", "num_pos_only", ",", "sigspec", ")", "return", "(", "num_pos_only", ",", "func", ",", "(", "keyword_only", "+", "keyword_exclude", ")", ",", "sigspec", ")" ]
convert the signature spec in module_info to add to signatures the input signature spec is one of: - lambda_func - - the output signature spec is: where keyword_exclude includes keyword only arguments and .
train
false
43,921
def interface_details(attrs=None, where=None):
    """Return rows from osquery's interface_details table.

    attrs optionally restricts the selected columns and where adds a
    filter clause; both are passed straight through to _osquery_cmd.
    """
    return _osquery_cmd(table='interface_details', attrs=attrs, where=where)
[ "def", "interface_details", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'interface_details'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return interface_details information from osquery cli example: .
train
false
43,922
def getTypeURIs(service_element):
    """Return the text of every type sub-element of a service element.

    type_tag is a module-level constant naming the element searched for
    (presumably the namespaced XRDS 'Type' tag — confirm at module top).
    """
    return [type_element.text for type_element in service_element.findall(type_tag)]
[ "def", "getTypeURIs", "(", "service_element", ")", ":", "return", "[", "type_element", ".", "text", "for", "type_element", "in", "service_element", ".", "findall", "(", "type_tag", ")", "]" ]
given a service element .
train
false
43,923
def track_voted_event(request, course, obj, vote_value, undo_vote=False):
    """Send an analytics event for a vote (or un-vote) on a thread/response.

    The event name encodes whether the target is a thread or a response;
    the payload carries the commentable id, the target author's username,
    the vote value and whether this undoes a previous vote.
    """
    obj_type = 'thread' if isinstance(obj, cc.Thread) else 'response'
    event_name = _EVENT_NAME_TEMPLATE.format(obj_type=obj_type, action_name='voted')
    event_data = {
        'commentable_id': obj.commentable_id,
        'target_username': obj.get('username'),
        'undo_vote': undo_vote,
        'vote_value': vote_value,
    }
    track_forum_event(request, event_name, course, obj, event_data)
[ "def", "track_voted_event", "(", "request", ",", "course", ",", "obj", ",", "vote_value", ",", "undo_vote", "=", "False", ")", ":", "if", "isinstance", "(", "obj", ",", "cc", ".", "Thread", ")", ":", "obj_type", "=", "'thread'", "else", ":", "obj_type", "=", "'response'", "event_name", "=", "_EVENT_NAME_TEMPLATE", ".", "format", "(", "obj_type", "=", "obj_type", ",", "action_name", "=", "'voted'", ")", "event_data", "=", "{", "'commentable_id'", ":", "obj", ".", "commentable_id", ",", "'target_username'", ":", "obj", ".", "get", "(", "'username'", ")", ",", "'undo_vote'", ":", "undo_vote", ",", "'vote_value'", ":", "vote_value", "}", "track_forum_event", "(", "request", ",", "event_name", ",", "course", ",", "obj", ",", "event_data", ")" ]
send analytics event for a vote on a thread or response .
train
false
43,924
def parse_query_context(context):
    """Parse a '<kind>:<id>' query-context string.

    Returns a ['table'|'design', value] pair — resolving the value to a
    SavedQuery model for 'design' — or None for empty/malformed input
    (logged as an error).
    """
    if not context:
        return None
    parts = context.split(':', 1)
    if len(parts) != 2 or parts[0] not in ('table', 'design'):
        LOG.error('Invalid query context data: %s' % (context,))
        return None
    if parts[0] == 'design':
        parts[1] = models.SavedQuery.get(int(parts[1]))
    return parts
[ "def", "parse_query_context", "(", "context", ")", ":", "if", "(", "not", "context", ")", ":", "return", "None", "pair", "=", "context", ".", "split", "(", "':'", ",", "1", ")", "if", "(", "(", "len", "(", "pair", ")", "!=", "2", ")", "or", "(", "pair", "[", "0", "]", "not", "in", "(", "'table'", ",", "'design'", ")", ")", ")", ":", "LOG", ".", "error", "(", "(", "'Invalid query context data: %s'", "%", "(", "context", ",", ")", ")", ")", "return", "None", "if", "(", "pair", "[", "0", "]", "==", "'design'", ")", ":", "pair", "[", "1", "]", "=", "models", ".", "SavedQuery", ".", "get", "(", "int", "(", "pair", "[", "1", "]", ")", ")", "return", "pair" ]
parse_query_context -> -or- .
train
false
43,925
def set_interactive(enabled=True, app=None):
    """Enable or disable vispy's interactive input-hook integration.

    When enabled, the 'vispy' GUI hook is registered with the input hook
    manager (optionally bound to a specific app); otherwise any active
    GUI hook is disabled.
    """
    if not enabled:
        inputhook_manager.disable_gui()
        return
    inputhook_manager.enable_gui('vispy', app)
[ "def", "set_interactive", "(", "enabled", "=", "True", ",", "app", "=", "None", ")", ":", "if", "enabled", ":", "inputhook_manager", ".", "enable_gui", "(", "'vispy'", ",", "app", ")", "else", ":", "inputhook_manager", ".", "disable_gui", "(", ")" ]
general api for a script specifying that it is being run in an interactive environment .
train
true
43,926
def test_no_upgrade_unless_requested(script):
    """pip must not upgrade an installed package unless explicitly asked.

    Installs INITools==0.1, then runs a plain `pip install INITools` and
    asserts that no new files were created — i.e. no implicit upgrade
    happened.
    """
    script.pip('install', 'INITools==0.1', expect_error=True)
    result = script.pip('install', 'INITools', expect_error=True)
    assert (not result.files_created), 'pip install INITools upgraded when it should not have'
[ "def", "test_no_upgrade_unless_requested", "(", "script", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'INITools==0.1'", ",", "expect_error", "=", "True", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'INITools'", ",", "expect_error", "=", "True", ")", "assert", "(", "not", "result", ".", "files_created", ")", ",", "'pip install INITools upgraded when it should not have'" ]
no upgrade if not specifically requested .
train
false
43,928
def CopyFileInZip(from_zip, from_name, to_zip, to_name=None, signer=None):
    """Copy one member of an open zip archive into another archive.

    The member keeps its name unless to_name is given. When a signer is
    supplied, the data is passed through signer.SignBuffer before being
    written.
    """
    data = from_zip.read(from_name)
    if signer:
        logging.debug('Signing %s', from_name)
        data = signer.SignBuffer(data)
    target = from_name if to_name is None else to_name
    to_zip.writestr(target, data)
[ "def", "CopyFileInZip", "(", "from_zip", ",", "from_name", ",", "to_zip", ",", "to_name", "=", "None", ",", "signer", "=", "None", ")", ":", "data", "=", "from_zip", ".", "read", "(", "from_name", ")", "if", "(", "to_name", "is", "None", ")", ":", "to_name", "=", "from_name", "if", "signer", ":", "logging", ".", "debug", "(", "'Signing %s'", ",", "from_name", ")", "data", "=", "signer", ".", "SignBuffer", "(", "data", ")", "to_zip", ".", "writestr", "(", "to_name", ",", "data", ")" ]
read a file from a zipfile and write it to a new zipfile .
train
true
43,929
def _find_boundaries_subpixel(label_img):
    """See find_boundaries.

    Upsamples label_img by a factor of 2 along every axis (original
    pixels on even coordinates, new "subpixel" positions in between),
    then marks an in-between position as a boundary when its 3**ndim
    neighborhood contains more than two distinct values (the sentinel
    fill value plus at least two different labels).

    Fix: the slice index was built as a *list* of slices, which is an
    error in modern NumPy (deprecated 1.15, removed later); it is now a
    tuple.
    """
    ndim = label_img.ndim
    # Sentinel for the interleaved positions; assumes an integer dtype.
    max_label = np.iinfo(label_img.dtype).max

    label_img_expanded = np.zeros([(2 * s) - 1 for s in label_img.shape],
                                  label_img.dtype)
    # Tuple of step-2 slices selects the original pixel positions.
    pixels = (slice(None, None, 2),) * ndim
    label_img_expanded[pixels] = label_img

    edges = np.ones(label_img_expanded.shape, dtype=bool)
    edges[pixels] = False
    label_img_expanded[edges] = max_label

    windows = view_as_windows(np.pad(label_img_expanded, 1,
                                     mode='constant', constant_values=0),
                              (3,) * ndim)

    boundaries = np.zeros_like(edges)
    for index in np.ndindex(label_img_expanded.shape):
        if edges[index]:
            values = np.unique(windows[index].ravel())
            # More than {sentinel, single label} means two labels meet here.
            if len(values) > 2:
                boundaries[index] = True
    return boundaries
[ "def", "_find_boundaries_subpixel", "(", "label_img", ")", ":", "ndim", "=", "label_img", ".", "ndim", "max_label", "=", "np", ".", "iinfo", "(", "label_img", ".", "dtype", ")", ".", "max", "label_img_expanded", "=", "np", ".", "zeros", "(", "[", "(", "(", "2", "*", "s", ")", "-", "1", ")", "for", "s", "in", "label_img", ".", "shape", "]", ",", "label_img", ".", "dtype", ")", "pixels", "=", "(", "[", "slice", "(", "None", ",", "None", ",", "2", ")", "]", "*", "ndim", ")", "label_img_expanded", "[", "pixels", "]", "=", "label_img", "edges", "=", "np", ".", "ones", "(", "label_img_expanded", ".", "shape", ",", "dtype", "=", "bool", ")", "edges", "[", "pixels", "]", "=", "False", "label_img_expanded", "[", "edges", "]", "=", "max_label", "windows", "=", "view_as_windows", "(", "np", ".", "pad", "(", "label_img_expanded", ",", "1", ",", "mode", "=", "'constant'", ",", "constant_values", "=", "0", ")", ",", "(", "(", "3", ",", ")", "*", "ndim", ")", ")", "boundaries", "=", "np", ".", "zeros_like", "(", "edges", ")", "for", "index", "in", "np", ".", "ndindex", "(", "label_img_expanded", ".", "shape", ")", ":", "if", "edges", "[", "index", "]", ":", "values", "=", "np", ".", "unique", "(", "windows", "[", "index", "]", ".", "ravel", "(", ")", ")", "if", "(", "len", "(", "values", ")", ">", "2", ")", ":", "boundaries", "[", "index", "]", "=", "True", "return", "boundaries" ]
see find_boundaries .
train
false
43,930
def extrema(input, labels=None, index=None):
    """Calculate the minima and maxima (values and positions) of *input* at labels.

    Thin wrapper around _select that converts the flat position indices
    it returns into N-dimensional coordinate tuples.

    Returns (minimums, maximums, min_positions, max_positions) — scalars
    plus single coordinate tuples when one region is selected, lists
    otherwise.
    """
    dims = numpy.array(numpy.asarray(input).shape)
    # Per-axis strides (in elements) for flat-index -> N-d coordinate math.
    dim_prod = numpy.cumprod(([1] + list(dims[:0:(-1)])))[::(-1)]
    (minimums, min_positions, maximums, max_positions) = _select(input, labels, index, find_min=True, find_max=True, find_min_positions=True, find_max_positions=True)
    if numpy.isscalar(minimums):
        # Single region selected: return scalar extrema and plain tuples.
        return (minimums, maximums, tuple(((min_positions // dim_prod) % dims)), tuple(((max_positions // dim_prod) % dims)))
    min_positions = [tuple(v) for v in ((min_positions.reshape((-1), 1) // dim_prod) % dims)]
    max_positions = [tuple(v) for v in ((max_positions.reshape((-1), 1) // dim_prod) % dims)]
    return (minimums, maximums, min_positions, max_positions)
[ "def", "extrema", "(", "input", ",", "labels", "=", "None", ",", "index", "=", "None", ")", ":", "dims", "=", "numpy", ".", "array", "(", "numpy", ".", "asarray", "(", "input", ")", ".", "shape", ")", "dim_prod", "=", "numpy", ".", "cumprod", "(", "(", "[", "1", "]", "+", "list", "(", "dims", "[", ":", "0", ":", "(", "-", "1", ")", "]", ")", ")", ")", "[", ":", ":", "(", "-", "1", ")", "]", "(", "minimums", ",", "min_positions", ",", "maximums", ",", "max_positions", ")", "=", "_select", "(", "input", ",", "labels", ",", "index", ",", "find_min", "=", "True", ",", "find_max", "=", "True", ",", "find_min_positions", "=", "True", ",", "find_max_positions", "=", "True", ")", "if", "numpy", ".", "isscalar", "(", "minimums", ")", ":", "return", "(", "minimums", ",", "maximums", ",", "tuple", "(", "(", "(", "min_positions", "//", "dim_prod", ")", "%", "dims", ")", ")", ",", "tuple", "(", "(", "(", "max_positions", "//", "dim_prod", ")", "%", "dims", ")", ")", ")", "min_positions", "=", "[", "tuple", "(", "v", ")", "for", "v", "in", "(", "(", "min_positions", ".", "reshape", "(", "(", "-", "1", ")", ",", "1", ")", "//", "dim_prod", ")", "%", "dims", ")", "]", "max_positions", "=", "[", "tuple", "(", "v", ")", "for", "v", "in", "(", "(", "max_positions", ".", "reshape", "(", "(", "-", "1", ")", ",", "1", ")", "//", "dim_prod", ")", "%", "dims", ")", "]", "return", "(", "minimums", ",", "maximums", ",", "min_positions", ",", "max_positions", ")" ]
calculate the minimums and maximums of the values of an array at labels .
train
false
43,931
def activelanepermute_wavewidth(src, laneid, identity, useidentity):
    """HSAIL activelanepermute_wavewidth_* intrinsic stub.

    Never executed on the host — calling it directly raises _stub_error.
    Presumably the real cross-lane permute semantics are provided when
    this is lowered by the HSAIL/target compiler (confirm in the backend).
    """
    raise _stub_error
[ "def", "activelanepermute_wavewidth", "(", "src", ",", "laneid", ",", "identity", ",", "useidentity", ")", ":", "raise", "_stub_error" ]
hsail activelanepermute_wavewidth_* .
train
false
43,932
def _ensure_like_indices(time, panels): n_time = len(time) n_panel = len(panels) u_panels = np.unique(panels) u_time = np.unique(time) if (len(u_time) == n_time): time = np.tile(u_time, len(u_panels)) if (len(u_panels) == n_panel): panels = np.repeat(u_panels, len(u_time)) return (time, panels)
[ "def", "_ensure_like_indices", "(", "time", ",", "panels", ")", ":", "n_time", "=", "len", "(", "time", ")", "n_panel", "=", "len", "(", "panels", ")", "u_panels", "=", "np", ".", "unique", "(", "panels", ")", "u_time", "=", "np", ".", "unique", "(", "time", ")", "if", "(", "len", "(", "u_time", ")", "==", "n_time", ")", ":", "time", "=", "np", ".", "tile", "(", "u_time", ",", "len", "(", "u_panels", ")", ")", "if", "(", "len", "(", "u_panels", ")", "==", "n_panel", ")", ":", "panels", "=", "np", ".", "repeat", "(", "u_panels", ",", "len", "(", "u_time", ")", ")", "return", "(", "time", ",", "panels", ")" ]
makes sure that time and panels are conformable .
train
true
43,933
def V(p):
    """Return the vowel weight C_vwl when p is a vowel, otherwise 0.

    Anything listed in the module-level `consonants` collection scores 0.
    """
    return 0 if p in consonants else C_vwl
[ "def", "V", "(", "p", ")", ":", "if", "(", "p", "in", "consonants", ")", ":", "return", "0", "return", "C_vwl" ]
return vowel weight if p is vowel .
train
false
43,934
def parse_rates(lines, parameters):
    """Parse rate information lines into the *parameters* dict.

    Scans phylogenetics output (PAML-style — confirm against caller) for
    rate parameters, per-category rates, the Q matrix (plus its average
    Ts/Tv), alpha, rho and the transition probability matrix, storing
    each under a descriptive key. Returns the updated dict.
    """
    Q_mat_found = False
    trans_probs_found = False
    for line in lines:
        # All floating point numbers appearing on the current line.
        line_floats_res = line_floats_re.findall(line)
        line_floats = [float(val) for val in line_floats_res]
        if (('Rate parameters:' in line) and line_floats):
            parameters['rate parameters'] = line_floats
        elif (('rate: ' in line) and line_floats):
            parameters['rates'] = line_floats
        elif ('matrix Q' in line):
            parameters['Q matrix'] = {'matrix': []}
            if line_floats:
                parameters['Q matrix']['average Ts/Tv'] = line_floats[0]
            Q_mat_found = True
        elif (Q_mat_found and line_floats):
            # Collect the 4 rows of the Q matrix following its header.
            parameters['Q matrix']['matrix'].append(line_floats)
            if (len(parameters['Q matrix']['matrix']) == 4):
                Q_mat_found = False
        elif (('alpha' in line) and line_floats):
            parameters['alpha'] = line_floats[0]
        elif (('rho' in line) and line_floats):
            parameters['rho'] = line_floats[0]
        elif ('transition probabilities' in line):
            parameters['transition probs.'] = []
            trans_probs_found = True
        elif (trans_probs_found and line_floats):
            # One row per rate category; stop once all rows are read.
            parameters['transition probs.'].append(line_floats)
            if (len(parameters['transition probs.']) == len(parameters['rates'])):
                trans_probs_found = False
    return parameters
[ "def", "parse_rates", "(", "lines", ",", "parameters", ")", ":", "Q_mat_found", "=", "False", "trans_probs_found", "=", "False", "for", "line", "in", "lines", ":", "line_floats_res", "=", "line_floats_re", ".", "findall", "(", "line", ")", "line_floats", "=", "[", "float", "(", "val", ")", "for", "val", "in", "line_floats_res", "]", "if", "(", "(", "'Rate parameters:'", "in", "line", ")", "and", "line_floats", ")", ":", "parameters", "[", "'rate parameters'", "]", "=", "line_floats", "elif", "(", "(", "'rate: '", "in", "line", ")", "and", "line_floats", ")", ":", "parameters", "[", "'rates'", "]", "=", "line_floats", "elif", "(", "'matrix Q'", "in", "line", ")", ":", "parameters", "[", "'Q matrix'", "]", "=", "{", "'matrix'", ":", "[", "]", "}", "if", "line_floats", ":", "parameters", "[", "'Q matrix'", "]", "[", "'average Ts/Tv'", "]", "=", "line_floats", "[", "0", "]", "Q_mat_found", "=", "True", "elif", "(", "Q_mat_found", "and", "line_floats", ")", ":", "parameters", "[", "'Q matrix'", "]", "[", "'matrix'", "]", ".", "append", "(", "line_floats", ")", "if", "(", "len", "(", "parameters", "[", "'Q matrix'", "]", "[", "'matrix'", "]", ")", "==", "4", ")", ":", "Q_mat_found", "=", "False", "elif", "(", "(", "'alpha'", "in", "line", ")", "and", "line_floats", ")", ":", "parameters", "[", "'alpha'", "]", "=", "line_floats", "[", "0", "]", "elif", "(", "(", "'rho'", "in", "line", ")", "and", "line_floats", ")", ":", "parameters", "[", "'rho'", "]", "=", "line_floats", "[", "0", "]", "elif", "(", "'transition probabilities'", "in", "line", ")", ":", "parameters", "[", "'transition probs.'", "]", "=", "[", "]", "trans_probs_found", "=", "True", "elif", "(", "trans_probs_found", "and", "line_floats", ")", ":", "parameters", "[", "'transition probs.'", "]", ".", "append", "(", "line_floats", ")", "if", "(", "len", "(", "parameters", "[", "'transition probs.'", "]", ")", "==", "len", "(", "parameters", "[", "'rates'", "]", ")", ")", ":", "trans_probs_found", "=", 
"False", "return", "parameters" ]
parse the rate parameters .
train
false
43,935
def _get_vm_by_name(name, allDetails=False):
    """Find a Proxmox VM by its name.

    Proxmox identifies VMs by numeric id, so this fetches the name-keyed
    VM mapping (optionally with full config when allDetails is True) and
    looks the name up. Returns the VM entry, or False (after logging an
    info message) when no VM with that name exists.
    """
    vms = get_resources_vms(includeConfig=allDetails)
    try:
        return vms[name]
    except KeyError:
        log.info('VM with name "{0}" could not be found.'.format(name))
        return False
[ "def", "_get_vm_by_name", "(", "name", ",", "allDetails", "=", "False", ")", ":", "vms", "=", "get_resources_vms", "(", "includeConfig", "=", "allDetails", ")", "if", "(", "name", "in", "vms", ")", ":", "return", "vms", "[", "name", "]", "log", ".", "info", "(", "'VM with name \"{0}\" could not be found.'", ".", "format", "(", "name", ")", ")", "return", "False" ]
since proxmox works based op ids rather than names as identifiers this requires some filtering to retrieve the required information .
train
true
43,938
def home_directory(name):
    """Return the absolute path to the remote user's home directory.

    Relies on the remote shell expanding `~name`; command echo and
    stdout are hidden so only the result is captured.
    """
    with settings(hide('running', 'stdout')):
        return run(('echo ~' + name))
[ "def", "home_directory", "(", "name", ")", ":", "with", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ")", ")", ":", "return", "run", "(", "(", "'echo ~'", "+", "name", ")", ")" ]
get the absolute path to the users home directory example:: import fabtools home = fabtools .
train
false
43,940
def set_lang(lang, graceful_fail=False, fallback_lang=None, **kwargs): registry = pylons.request.environ['paste.registry'] if (not lang): registry.replace(pylons.translator, NullTranslations()) else: translator = _get_translator(lang, graceful_fail=graceful_fail, **kwargs) (base_lang, is_dialect, dialect) = lang.partition('-') if is_dialect: try: base_translator = _get_translator(base_lang) except LanguageError: pass else: translator.add_fallback(base_translator) if fallback_lang: fallback_translator = _get_translator(fallback_lang, graceful_fail=True) translator.add_fallback(fallback_translator) registry.replace(pylons.translator, translator)
[ "def", "set_lang", "(", "lang", ",", "graceful_fail", "=", "False", ",", "fallback_lang", "=", "None", ",", "**", "kwargs", ")", ":", "registry", "=", "pylons", ".", "request", ".", "environ", "[", "'paste.registry'", "]", "if", "(", "not", "lang", ")", ":", "registry", ".", "replace", "(", "pylons", ".", "translator", ",", "NullTranslations", "(", ")", ")", "else", ":", "translator", "=", "_get_translator", "(", "lang", ",", "graceful_fail", "=", "graceful_fail", ",", "**", "kwargs", ")", "(", "base_lang", ",", "is_dialect", ",", "dialect", ")", "=", "lang", ".", "partition", "(", "'-'", ")", "if", "is_dialect", ":", "try", ":", "base_translator", "=", "_get_translator", "(", "base_lang", ")", "except", "LanguageError", ":", "pass", "else", ":", "translator", ".", "add_fallback", "(", "base_translator", ")", "if", "fallback_lang", ":", "fallback_translator", "=", "_get_translator", "(", "fallback_lang", ",", "graceful_fail", "=", "True", ")", "translator", ".", "add_fallback", "(", "fallback_translator", ")", "registry", ".", "replace", "(", "pylons", ".", "translator", ",", "translator", ")" ]
change the language of the api being requested .
train
false
43,941
def heuristicCheckDbms(injection): retVal = False pushValue(kb.injection) kb.injection = injection for dbms in getPublicTypeMembers(DBMS, True): if (not FROM_DUMMY_TABLE.get(dbms, '')): continue (randStr1, randStr2) = (randomStr(), randomStr()) Backend.forceDbms(dbms) if checkBooleanExpression(("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ''), randStr1))): if (not checkBooleanExpression(("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ''), randStr2)))): retVal = dbms break Backend.flushForcedDbms() kb.injection = popValue() if retVal: infoMsg = 'heuristic (extended) test shows that the back-end DBMS ' infoMsg += ("could be '%s' " % retVal) logger.info(infoMsg) return retVal
[ "def", "heuristicCheckDbms", "(", "injection", ")", ":", "retVal", "=", "False", "pushValue", "(", "kb", ".", "injection", ")", "kb", ".", "injection", "=", "injection", "for", "dbms", "in", "getPublicTypeMembers", "(", "DBMS", ",", "True", ")", ":", "if", "(", "not", "FROM_DUMMY_TABLE", ".", "get", "(", "dbms", ",", "''", ")", ")", ":", "continue", "(", "randStr1", ",", "randStr2", ")", "=", "(", "randomStr", "(", ")", ",", "randomStr", "(", ")", ")", "Backend", ".", "forceDbms", "(", "dbms", ")", "if", "checkBooleanExpression", "(", "(", "\"(SELECT '%s'%s)='%s'\"", "%", "(", "randStr1", ",", "FROM_DUMMY_TABLE", ".", "get", "(", "dbms", ",", "''", ")", ",", "randStr1", ")", ")", ")", ":", "if", "(", "not", "checkBooleanExpression", "(", "(", "\"(SELECT '%s'%s)='%s'\"", "%", "(", "randStr1", ",", "FROM_DUMMY_TABLE", ".", "get", "(", "dbms", ",", "''", ")", ",", "randStr2", ")", ")", ")", ")", ":", "retVal", "=", "dbms", "break", "Backend", ".", "flushForcedDbms", "(", ")", "kb", ".", "injection", "=", "popValue", "(", ")", "if", "retVal", ":", "infoMsg", "=", "'heuristic (extended) test shows that the back-end DBMS '", "infoMsg", "+=", "(", "\"could be '%s' \"", "%", "retVal", ")", "logger", ".", "info", "(", "infoMsg", ")", "return", "retVal" ]
this functions is called when boolean-based blind is identified with a generic payload and the dbms has not yet been fingerprinted to attempt to identify with a simple dbms specific boolean-based test what the dbms may be .
train
false
43,942
def is_removed(exp): return isinstance(exp, Removed)
[ "def", "is_removed", "(", "exp", ")", ":", "return", "isinstance", "(", "exp", ",", "Removed", ")" ]
is exp a removed node .
train
false
43,944
def StringifyJSON(item): if isinstance(item, (tuple, list)): return [StringifyJSON(x) for x in item] elif isinstance(item, dict): result = {} for (k, v) in item.items(): result[k] = StringifyJSON(v) return result elif (type(item) in (int, long, float, bool)): return item elif (item is None): return None else: return utils.SmartUnicode(item)
[ "def", "StringifyJSON", "(", "item", ")", ":", "if", "isinstance", "(", "item", ",", "(", "tuple", ",", "list", ")", ")", ":", "return", "[", "StringifyJSON", "(", "x", ")", "for", "x", "in", "item", "]", "elif", "isinstance", "(", "item", ",", "dict", ")", ":", "result", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "item", ".", "items", "(", ")", ":", "result", "[", "k", "]", "=", "StringifyJSON", "(", "v", ")", "return", "result", "elif", "(", "type", "(", "item", ")", "in", "(", "int", ",", "long", ",", "float", ",", "bool", ")", ")", ":", "return", "item", "elif", "(", "item", "is", "None", ")", ":", "return", "None", "else", ":", "return", "utils", ".", "SmartUnicode", "(", "item", ")" ]
recursively convert item to a string .
train
false
43,945
def _create_rpc(deadline): return apiproxy_stub_map.UserRPC('file', deadline)
[ "def", "_create_rpc", "(", "deadline", ")", ":", "return", "apiproxy_stub_map", ".", "UserRPC", "(", "'file'", ",", "deadline", ")" ]
create rpc object for file service .
train
false
43,947
def gf_pow(f, n, p, K): if (not n): return [K.one] elif (n == 1): return f elif (n == 2): return gf_sqr(f, p, K) h = [K.one] while True: if (n & 1): h = gf_mul(h, f, p, K) n -= 1 n >>= 1 if (not n): break f = gf_sqr(f, p, K) return h
[ "def", "gf_pow", "(", "f", ",", "n", ",", "p", ",", "K", ")", ":", "if", "(", "not", "n", ")", ":", "return", "[", "K", ".", "one", "]", "elif", "(", "n", "==", "1", ")", ":", "return", "f", "elif", "(", "n", "==", "2", ")", ":", "return", "gf_sqr", "(", "f", ",", "p", ",", "K", ")", "h", "=", "[", "K", ".", "one", "]", "while", "True", ":", "if", "(", "n", "&", "1", ")", ":", "h", "=", "gf_mul", "(", "h", ",", "f", ",", "p", ",", "K", ")", "n", "-=", "1", "n", ">>=", "1", "if", "(", "not", "n", ")", ":", "break", "f", "=", "gf_sqr", "(", "f", ",", "p", ",", "K", ")", "return", "h" ]
compute f**n in gf(p)[x] using repeated squaring .
train
false
43,948
def abcd_normalize(A=None, B=None, C=None, D=None): (A, B, C, D) = map(_atleast_2d_or_none, (A, B, C, D)) (MA, NA) = _shape_or_none(A) (MB, NB) = _shape_or_none(B) (MC, NC) = _shape_or_none(C) (MD, ND) = _shape_or_none(D) p = _choice_not_none(MA, MB, NC) q = _choice_not_none(NB, ND) r = _choice_not_none(MC, MD) if ((p is None) or (q is None) or (r is None)): raise ValueError('Not enough information on the system.') (A, B, C, D) = map(_none_to_empty_2d, (A, B, C, D)) A = _restore(A, (p, p)) B = _restore(B, (p, q)) C = _restore(C, (r, p)) D = _restore(D, (r, q)) return (A, B, C, D)
[ "def", "abcd_normalize", "(", "A", "=", "None", ",", "B", "=", "None", ",", "C", "=", "None", ",", "D", "=", "None", ")", ":", "(", "A", ",", "B", ",", "C", ",", "D", ")", "=", "map", "(", "_atleast_2d_or_none", ",", "(", "A", ",", "B", ",", "C", ",", "D", ")", ")", "(", "MA", ",", "NA", ")", "=", "_shape_or_none", "(", "A", ")", "(", "MB", ",", "NB", ")", "=", "_shape_or_none", "(", "B", ")", "(", "MC", ",", "NC", ")", "=", "_shape_or_none", "(", "C", ")", "(", "MD", ",", "ND", ")", "=", "_shape_or_none", "(", "D", ")", "p", "=", "_choice_not_none", "(", "MA", ",", "MB", ",", "NC", ")", "q", "=", "_choice_not_none", "(", "NB", ",", "ND", ")", "r", "=", "_choice_not_none", "(", "MC", ",", "MD", ")", "if", "(", "(", "p", "is", "None", ")", "or", "(", "q", "is", "None", ")", "or", "(", "r", "is", "None", ")", ")", ":", "raise", "ValueError", "(", "'Not enough information on the system.'", ")", "(", "A", ",", "B", ",", "C", ",", "D", ")", "=", "map", "(", "_none_to_empty_2d", ",", "(", "A", ",", "B", ",", "C", ",", "D", ")", ")", "A", "=", "_restore", "(", "A", ",", "(", "p", ",", "p", ")", ")", "B", "=", "_restore", "(", "B", ",", "(", "p", ",", "q", ")", ")", "C", "=", "_restore", "(", "C", ",", "(", "r", ",", "p", ")", ")", "D", "=", "_restore", "(", "D", ",", "(", "r", ",", "q", ")", ")", "return", "(", "A", ",", "B", ",", "C", ",", "D", ")" ]
check state-space matrices and ensure they are two-dimensional .
train
false
43,949
def scourLength(length): length = SVGLength(length) return (scourUnitlessLength(length.value) + Unit.str(length.units))
[ "def", "scourLength", "(", "length", ")", ":", "length", "=", "SVGLength", "(", "length", ")", "return", "(", "scourUnitlessLength", "(", "length", ".", "value", ")", "+", "Unit", ".", "str", "(", "length", ".", "units", ")", ")" ]
scours a length .
train
true
43,950
def _key_for_namespace(namespace, app): if namespace: return db.Key.from_path(metadata.Namespace.KIND_NAME, namespace, _app=app) else: return db.Key.from_path(metadata.Namespace.KIND_NAME, metadata.Namespace.EMPTY_NAMESPACE_ID, _app=app)
[ "def", "_key_for_namespace", "(", "namespace", ",", "app", ")", ":", "if", "namespace", ":", "return", "db", ".", "Key", ".", "from_path", "(", "metadata", ".", "Namespace", ".", "KIND_NAME", ",", "namespace", ",", "_app", "=", "app", ")", "else", ":", "return", "db", ".", "Key", ".", "from_path", "(", "metadata", ".", "Namespace", ".", "KIND_NAME", ",", "metadata", ".", "Namespace", ".", "EMPTY_NAMESPACE_ID", ",", "_app", "=", "app", ")" ]
return the __namespace__ key for a namespace .
train
true
43,951
def get_svn_revision(path=None): rev = None if (path is None): path = django.__path__[0] entries_path = ('%s/.svn/entries' % path) try: entries = open(entries_path, 'r').read() except IOError: pass else: if re.match('(\\d+)', entries): rev_match = re.search('\\d+\\s+dir\\s+(\\d+)', entries) if rev_match: rev = rev_match.groups()[0] else: from xml.dom import minidom dom = minidom.parse(entries_path) rev = dom.getElementsByTagName('entry')[0].getAttribute('revision') if rev: return (u'SVN-%s' % rev) return u'SVN-unknown'
[ "def", "get_svn_revision", "(", "path", "=", "None", ")", ":", "rev", "=", "None", "if", "(", "path", "is", "None", ")", ":", "path", "=", "django", ".", "__path__", "[", "0", "]", "entries_path", "=", "(", "'%s/.svn/entries'", "%", "path", ")", "try", ":", "entries", "=", "open", "(", "entries_path", ",", "'r'", ")", ".", "read", "(", ")", "except", "IOError", ":", "pass", "else", ":", "if", "re", ".", "match", "(", "'(\\\\d+)'", ",", "entries", ")", ":", "rev_match", "=", "re", ".", "search", "(", "'\\\\d+\\\\s+dir\\\\s+(\\\\d+)'", ",", "entries", ")", "if", "rev_match", ":", "rev", "=", "rev_match", ".", "groups", "(", ")", "[", "0", "]", "else", ":", "from", "xml", ".", "dom", "import", "minidom", "dom", "=", "minidom", ".", "parse", "(", "entries_path", ")", "rev", "=", "dom", ".", "getElementsByTagName", "(", "'entry'", ")", "[", "0", "]", ".", "getAttribute", "(", "'revision'", ")", "if", "rev", ":", "return", "(", "u'SVN-%s'", "%", "rev", ")", "return", "u'SVN-unknown'" ]
returns the svn revision in the form svn-xxxx .
train
false
43,952
def set_default_color_cycle(clist): _process_plot_var_args.defaultColors = clist[:] rcParams['lines.color'] = clist[0]
[ "def", "set_default_color_cycle", "(", "clist", ")", ":", "_process_plot_var_args", ".", "defaultColors", "=", "clist", "[", ":", "]", "rcParams", "[", "'lines.color'", "]", "=", "clist", "[", "0", "]" ]
change the default cycle of colors that will be used by the plot command .
train
false
43,953
def get_runnertype_by_name(runnertype_name): try: runnertypes = RunnerType.query(name=runnertype_name) except (ValueError, ValidationError) as e: LOG.error('Database lookup for name="%s" resulted in exception: %s', runnertype_name, e) raise StackStormDBObjectNotFoundError(('Unable to find runnertype with name="%s"' % runnertype_name)) if (not runnertypes): raise StackStormDBObjectNotFoundError(('Unable to find RunnerType with name="%s"' % runnertype_name)) if (len(runnertypes) > 1): LOG.warning('More than one RunnerType returned from DB lookup by name. Result list is: %s', runnertypes) return runnertypes[0]
[ "def", "get_runnertype_by_name", "(", "runnertype_name", ")", ":", "try", ":", "runnertypes", "=", "RunnerType", ".", "query", "(", "name", "=", "runnertype_name", ")", "except", "(", "ValueError", ",", "ValidationError", ")", "as", "e", ":", "LOG", ".", "error", "(", "'Database lookup for name=\"%s\" resulted in exception: %s'", ",", "runnertype_name", ",", "e", ")", "raise", "StackStormDBObjectNotFoundError", "(", "(", "'Unable to find runnertype with name=\"%s\"'", "%", "runnertype_name", ")", ")", "if", "(", "not", "runnertypes", ")", ":", "raise", "StackStormDBObjectNotFoundError", "(", "(", "'Unable to find RunnerType with name=\"%s\"'", "%", "runnertype_name", ")", ")", "if", "(", "len", "(", "runnertypes", ")", ">", "1", ")", ":", "LOG", ".", "warning", "(", "'More than one RunnerType returned from DB lookup by name. Result list is: %s'", ",", "runnertypes", ")", "return", "runnertypes", "[", "0", "]" ]
get an runnertype by name .
train
false
43,955
def _iter_code(code): from array import array from dis import HAVE_ARGUMENT, EXTENDED_ARG bytes = array('b', code.co_code) eof = len(code.co_code) ptr = 0 extended_arg = 0 while (ptr < eof): op = bytes[ptr] if (op >= HAVE_ARGUMENT): arg = ((bytes[(ptr + 1)] + (bytes[(ptr + 2)] * 256)) + extended_arg) ptr += 3 if (op == EXTENDED_ARG): extended_arg = (arg * long_type(65536)) continue else: arg = None ptr += 1 (yield (op, arg))
[ "def", "_iter_code", "(", "code", ")", ":", "from", "array", "import", "array", "from", "dis", "import", "HAVE_ARGUMENT", ",", "EXTENDED_ARG", "bytes", "=", "array", "(", "'b'", ",", "code", ".", "co_code", ")", "eof", "=", "len", "(", "code", ".", "co_code", ")", "ptr", "=", "0", "extended_arg", "=", "0", "while", "(", "ptr", "<", "eof", ")", ":", "op", "=", "bytes", "[", "ptr", "]", "if", "(", "op", ">=", "HAVE_ARGUMENT", ")", ":", "arg", "=", "(", "(", "bytes", "[", "(", "ptr", "+", "1", ")", "]", "+", "(", "bytes", "[", "(", "ptr", "+", "2", ")", "]", "*", "256", ")", ")", "+", "extended_arg", ")", "ptr", "+=", "3", "if", "(", "op", "==", "EXTENDED_ARG", ")", ":", "extended_arg", "=", "(", "arg", "*", "long_type", "(", "65536", ")", ")", "continue", "else", ":", "arg", "=", "None", "ptr", "+=", "1", "(", "yield", "(", "op", ",", "arg", ")", ")" ]
yield pair for each operation in code object code .
train
true
43,956
def get_srid_info(srid, connection): from django.contrib.gis.gdal import SpatialReference global _srid_cache try: SpatialRefSys = connection.ops.spatial_ref_sys() except NotImplementedError: SpatialRefSys = None (alias, get_srs) = ((connection.alias, (lambda srid: SpatialRefSys.objects.using(connection.alias).get(srid=srid).srs)) if SpatialRefSys else (None, SpatialReference)) if (srid not in _srid_cache[alias]): srs = get_srs(srid) (units, units_name) = srs.units sphere_name = srs['spheroid'] spheroid = ('SPHEROID["%s",%s,%s]' % (sphere_name, srs.semi_major, srs.inverse_flattening)) _srid_cache[alias][srid] = (units, units_name, spheroid) return _srid_cache[alias][srid]
[ "def", "get_srid_info", "(", "srid", ",", "connection", ")", ":", "from", "django", ".", "contrib", ".", "gis", ".", "gdal", "import", "SpatialReference", "global", "_srid_cache", "try", ":", "SpatialRefSys", "=", "connection", ".", "ops", ".", "spatial_ref_sys", "(", ")", "except", "NotImplementedError", ":", "SpatialRefSys", "=", "None", "(", "alias", ",", "get_srs", ")", "=", "(", "(", "connection", ".", "alias", ",", "(", "lambda", "srid", ":", "SpatialRefSys", ".", "objects", ".", "using", "(", "connection", ".", "alias", ")", ".", "get", "(", "srid", "=", "srid", ")", ".", "srs", ")", ")", "if", "SpatialRefSys", "else", "(", "None", ",", "SpatialReference", ")", ")", "if", "(", "srid", "not", "in", "_srid_cache", "[", "alias", "]", ")", ":", "srs", "=", "get_srs", "(", "srid", ")", "(", "units", ",", "units_name", ")", "=", "srs", ".", "units", "sphere_name", "=", "srs", "[", "'spheroid'", "]", "spheroid", "=", "(", "'SPHEROID[\"%s\",%s,%s]'", "%", "(", "sphere_name", ",", "srs", ".", "semi_major", ",", "srs", ".", "inverse_flattening", ")", ")", "_srid_cache", "[", "alias", "]", "[", "srid", "]", "=", "(", "units", ",", "units_name", ",", "spheroid", ")", "return", "_srid_cache", "[", "alias", "]", "[", "srid", "]" ]
returns the units .
train
false
43,957
def from_dict_of_lists(d, create_using=None): G = _prep_create_using(create_using) G.add_nodes_from(d) if (G.is_multigraph() and (not G.is_directed())): seen = {} for (node, nbrlist) in d.items(): for nbr in nbrlist: if (nbr not in seen): G.add_edge(node, nbr) seen[node] = 1 else: G.add_edges_from(((node, nbr) for (node, nbrlist) in d.items() for nbr in nbrlist)) return G
[ "def", "from_dict_of_lists", "(", "d", ",", "create_using", "=", "None", ")", ":", "G", "=", "_prep_create_using", "(", "create_using", ")", "G", ".", "add_nodes_from", "(", "d", ")", "if", "(", "G", ".", "is_multigraph", "(", ")", "and", "(", "not", "G", ".", "is_directed", "(", ")", ")", ")", ":", "seen", "=", "{", "}", "for", "(", "node", ",", "nbrlist", ")", "in", "d", ".", "items", "(", ")", ":", "for", "nbr", "in", "nbrlist", ":", "if", "(", "nbr", "not", "in", "seen", ")", ":", "G", ".", "add_edge", "(", "node", ",", "nbr", ")", "seen", "[", "node", "]", "=", "1", "else", ":", "G", ".", "add_edges_from", "(", "(", "(", "node", ",", "nbr", ")", "for", "(", "node", ",", "nbrlist", ")", "in", "d", ".", "items", "(", ")", "for", "nbr", "in", "nbrlist", ")", ")", "return", "G" ]
return a graph from a dictionary of lists .
train
false
43,958
def cloneHost(individual): clone = individual.__class__(individual) clone.fitness.values = individual.fitness.values return clone
[ "def", "cloneHost", "(", "individual", ")", ":", "clone", "=", "individual", ".", "__class__", "(", "individual", ")", "clone", ".", "fitness", ".", "values", "=", "individual", ".", "fitness", ".", "values", "return", "clone" ]
specialized copy function that will work only on a list of tuples with no other member than a fitness .
train
false
43,960
def extractRegexResult(regex, content, flags=0): retVal = None if (regex and content and ('?P<result>' in regex)): match = re.search(regex, content, flags) if match: retVal = match.group('result') return retVal
[ "def", "extractRegexResult", "(", "regex", ",", "content", ",", "flags", "=", "0", ")", ":", "retVal", "=", "None", "if", "(", "regex", "and", "content", "and", "(", "'?P<result>'", "in", "regex", ")", ")", ":", "match", "=", "re", ".", "search", "(", "regex", ",", "content", ",", "flags", ")", "if", "match", ":", "retVal", "=", "match", ".", "group", "(", "'result'", ")", "return", "retVal" ]
returns result group value from a possible match with regex on a given content .
train
false
43,961
def _management_error_handler(http_error): return _general_error_handler(http_error)
[ "def", "_management_error_handler", "(", "http_error", ")", ":", "return", "_general_error_handler", "(", "http_error", ")" ]
simple error handler for management service .
train
false
43,965
def save_file(content, filename): print ('Saving', filename) with codecs.open(filename, 'wb') as storage_file: pickle.dump(content, storage_file, protocol=2)
[ "def", "save_file", "(", "content", ",", "filename", ")", ":", "print", "(", "'Saving'", ",", "filename", ")", "with", "codecs", ".", "open", "(", "filename", ",", "'wb'", ")", "as", "storage_file", ":", "pickle", ".", "dump", "(", "content", ",", "storage_file", ",", "protocol", "=", "2", ")" ]
store content in filename .
train
false
43,967
def fcode(expr, assign_to=None, **settings): return FCodePrinter(settings).doprint(expr, assign_to)
[ "def", "fcode", "(", "expr", ",", "assign_to", "=", "None", ",", "**", "settings", ")", ":", "return", "FCodePrinter", "(", "settings", ")", ".", "doprint", "(", "expr", ",", "assign_to", ")" ]
converts an expr to a string of c code parameters expr : expr a sympy expression to be converted .
train
false
43,968
def get_nat_gateways(client, subnet_id=None, nat_gateway_id=None, states=None, check_mode=False): params = dict() err_msg = '' gateways_retrieved = False existing_gateways = list() if (not states): states = ['available', 'pending'] if nat_gateway_id: params['NatGatewayIds'] = [nat_gateway_id] else: params['Filter'] = [{'Name': 'subnet-id', 'Values': [subnet_id]}, {'Name': 'state', 'Values': states}] try: if (not check_mode): gateways = client.describe_nat_gateways(**params)['NatGateways'] if gateways: for gw in gateways: existing_gateways.append(convert_to_lower(gw)) gateways_retrieved = True else: gateways_retrieved = True if nat_gateway_id: if (DRY_RUN_GATEWAYS[0]['nat_gateway_id'] == nat_gateway_id): existing_gateways = DRY_RUN_GATEWAYS elif subnet_id: if (DRY_RUN_GATEWAYS[0]['subnet_id'] == subnet_id): existing_gateways = DRY_RUN_GATEWAYS err_msg = '{0} Retrieving gateways'.format(DRY_RUN_MSGS) except botocore.exceptions.ClientError as e: err_msg = str(e) return (gateways_retrieved, err_msg, existing_gateways)
[ "def", "get_nat_gateways", "(", "client", ",", "subnet_id", "=", "None", ",", "nat_gateway_id", "=", "None", ",", "states", "=", "None", ",", "check_mode", "=", "False", ")", ":", "params", "=", "dict", "(", ")", "err_msg", "=", "''", "gateways_retrieved", "=", "False", "existing_gateways", "=", "list", "(", ")", "if", "(", "not", "states", ")", ":", "states", "=", "[", "'available'", ",", "'pending'", "]", "if", "nat_gateway_id", ":", "params", "[", "'NatGatewayIds'", "]", "=", "[", "nat_gateway_id", "]", "else", ":", "params", "[", "'Filter'", "]", "=", "[", "{", "'Name'", ":", "'subnet-id'", ",", "'Values'", ":", "[", "subnet_id", "]", "}", ",", "{", "'Name'", ":", "'state'", ",", "'Values'", ":", "states", "}", "]", "try", ":", "if", "(", "not", "check_mode", ")", ":", "gateways", "=", "client", ".", "describe_nat_gateways", "(", "**", "params", ")", "[", "'NatGateways'", "]", "if", "gateways", ":", "for", "gw", "in", "gateways", ":", "existing_gateways", ".", "append", "(", "convert_to_lower", "(", "gw", ")", ")", "gateways_retrieved", "=", "True", "else", ":", "gateways_retrieved", "=", "True", "if", "nat_gateway_id", ":", "if", "(", "DRY_RUN_GATEWAYS", "[", "0", "]", "[", "'nat_gateway_id'", "]", "==", "nat_gateway_id", ")", ":", "existing_gateways", "=", "DRY_RUN_GATEWAYS", "elif", "subnet_id", ":", "if", "(", "DRY_RUN_GATEWAYS", "[", "0", "]", "[", "'subnet_id'", "]", "==", "subnet_id", ")", ":", "existing_gateways", "=", "DRY_RUN_GATEWAYS", "err_msg", "=", "'{0} Retrieving gateways'", ".", "format", "(", "DRY_RUN_MSGS", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "e", ":", "err_msg", "=", "str", "(", "e", ")", "return", "(", "gateways_retrieved", ",", "err_msg", ",", "existing_gateways", ")" ]
retrieve a list of nat gateways args: client : boto3 client kwargs: subnet_id : the subnet_id the nat resides in .
train
false
43,969
def process_id_map(mapping_f, disable_primer_check=False, has_barcodes=True, char_replace='_', variable_len_barcodes=False, added_demultiplex_field=None, strip_quotes=True, suppress_stripping=False): errors = [] warnings = [] (mapping_data, header, comments) = parse_mapping_file(mapping_f, strip_quotes, suppress_stripping) sample_id_ix = 0 desc_ix = (len(header) - 1) bc_ix = 1 linker_primer_ix = 2 (errors, warnings) = check_header(header, errors, warnings, sample_id_ix, desc_ix, bc_ix, linker_primer_ix, added_demultiplex_field) (errors, warnings) = check_data_fields(header, mapping_data, errors, warnings, disable_primer_check, has_barcodes, char_replace, variable_len_barcodes, added_demultiplex_field) return (header, mapping_data, comments, errors, warnings)
[ "def", "process_id_map", "(", "mapping_f", ",", "disable_primer_check", "=", "False", ",", "has_barcodes", "=", "True", ",", "char_replace", "=", "'_'", ",", "variable_len_barcodes", "=", "False", ",", "added_demultiplex_field", "=", "None", ",", "strip_quotes", "=", "True", ",", "suppress_stripping", "=", "False", ")", ":", "errors", "=", "[", "]", "warnings", "=", "[", "]", "(", "mapping_data", ",", "header", ",", "comments", ")", "=", "parse_mapping_file", "(", "mapping_f", ",", "strip_quotes", ",", "suppress_stripping", ")", "sample_id_ix", "=", "0", "desc_ix", "=", "(", "len", "(", "header", ")", "-", "1", ")", "bc_ix", "=", "1", "linker_primer_ix", "=", "2", "(", "errors", ",", "warnings", ")", "=", "check_header", "(", "header", ",", "errors", ",", "warnings", ",", "sample_id_ix", ",", "desc_ix", ",", "bc_ix", ",", "linker_primer_ix", ",", "added_demultiplex_field", ")", "(", "errors", ",", "warnings", ")", "=", "check_data_fields", "(", "header", ",", "mapping_data", ",", "errors", ",", "warnings", ",", "disable_primer_check", ",", "has_barcodes", ",", "char_replace", ",", "variable_len_barcodes", ",", "added_demultiplex_field", ")", "return", "(", "header", ",", "mapping_data", ",", "comments", ",", "errors", ",", "warnings", ")" ]
reads mapping file .
train
false
43,970
def set_restart_freeze(enabled): state = salt.utils.mac_utils.validate_enabled(enabled) cmd = 'systemsetup -setrestartfreeze {0}'.format(state) salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated(state, get_restart_freeze, True)
[ "def", "set_restart_freeze", "(", "enabled", ")", ":", "state", "=", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "enabled", ")", "cmd", "=", "'systemsetup -setrestartfreeze {0}'", ".", "format", "(", "state", ")", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "confirm_updated", "(", "state", ",", "get_restart_freeze", ",", "True", ")" ]
specifies whether the server restarts automatically after a system freeze .
train
true
43,972
def retry_failure(reactor, function, expected=None, steps=None): if (steps is None): steps = repeat(0.1) steps = iter(steps) action = LOOP_UNTIL_ACTION(predicate=function) with action.context(): d = DeferredContext(maybeDeferred(function)) def loop(failure): if (expected and (not failure.check(*expected))): return failure try: interval = steps.next() except StopIteration: return failure d = deferLater(reactor, interval, action.run, function) d.addErrback(loop) return d d.addErrback(loop) def got_result(result): action.add_success_fields(result=result) return result d.addCallback(got_result) d.addActionFinish() return d.result
[ "def", "retry_failure", "(", "reactor", ",", "function", ",", "expected", "=", "None", ",", "steps", "=", "None", ")", ":", "if", "(", "steps", "is", "None", ")", ":", "steps", "=", "repeat", "(", "0.1", ")", "steps", "=", "iter", "(", "steps", ")", "action", "=", "LOOP_UNTIL_ACTION", "(", "predicate", "=", "function", ")", "with", "action", ".", "context", "(", ")", ":", "d", "=", "DeferredContext", "(", "maybeDeferred", "(", "function", ")", ")", "def", "loop", "(", "failure", ")", ":", "if", "(", "expected", "and", "(", "not", "failure", ".", "check", "(", "*", "expected", ")", ")", ")", ":", "return", "failure", "try", ":", "interval", "=", "steps", ".", "next", "(", ")", "except", "StopIteration", ":", "return", "failure", "d", "=", "deferLater", "(", "reactor", ",", "interval", ",", "action", ".", "run", ",", "function", ")", "d", ".", "addErrback", "(", "loop", ")", "return", "d", "d", ".", "addErrback", "(", "loop", ")", "def", "got_result", "(", "result", ")", ":", "action", ".", "add_success_fields", "(", "result", "=", "result", ")", "return", "result", "d", ".", "addCallback", "(", "got_result", ")", "d", ".", "addActionFinish", "(", ")", "return", "d", ".", "result" ]
retry function until it returns successfully .
train
false
43,975
def get_multi_async(keys, **ctx_options): return [key.get_async(**ctx_options) for key in keys]
[ "def", "get_multi_async", "(", "keys", ",", "**", "ctx_options", ")", ":", "return", "[", "key", ".", "get_async", "(", "**", "ctx_options", ")", "for", "key", "in", "keys", "]" ]
fetches a sequence of keys .
train
false
43,977
def vgextend(vgname, devices): if ((not vgname) or (not devices)): return 'Error: vgname and device(s) are both required' if isinstance(devices, six.string_types): devices = devices.split(',') cmd = ['vgextend', vgname] for device in devices: cmd.append(device) out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() vgdata = {'Output from vgextend': out[0].strip()} return vgdata
[ "def", "vgextend", "(", "vgname", ",", "devices", ")", ":", "if", "(", "(", "not", "vgname", ")", "or", "(", "not", "devices", ")", ")", ":", "return", "'Error: vgname and device(s) are both required'", "if", "isinstance", "(", "devices", ",", "six", ".", "string_types", ")", ":", "devices", "=", "devices", ".", "split", "(", "','", ")", "cmd", "=", "[", "'vgextend'", ",", "vgname", "]", "for", "device", "in", "devices", ":", "cmd", ".", "append", "(", "device", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "vgdata", "=", "{", "'Output from vgextend'", ":", "out", "[", "0", "]", ".", "strip", "(", ")", "}", "return", "vgdata" ]
add physical volumes to an lvm volume group cli examples: .
train
true
43,979
def _dec2base(n, base): if ((n < 0) or (base < 2) or (base > 36)): return '' s = '' while True: r = (n % base) s = (digits[r] + s) n = (n // base) if (n == 0): break return s
[ "def", "_dec2base", "(", "n", ",", "base", ")", ":", "if", "(", "(", "n", "<", "0", ")", "or", "(", "base", "<", "2", ")", "or", "(", "base", ">", "36", ")", ")", ":", "return", "''", "s", "=", "''", "while", "True", ":", "r", "=", "(", "n", "%", "base", ")", "s", "=", "(", "digits", "[", "r", "]", "+", "s", ")", "n", "=", "(", "n", "//", "base", ")", "if", "(", "n", "==", "0", ")", ":", "break", "return", "s" ]
convert positive decimal integer n to equivalent in another base URL .
train
false
43,981
def instance_get_active_by_window_joined(context, begin, end=None, project_id=None, host=None, columns_to_join=None, limit=None, marker=None): return IMPL.instance_get_active_by_window_joined(context, begin, end, project_id, host, columns_to_join=columns_to_join, limit=limit, marker=marker)
[ "def", "instance_get_active_by_window_joined", "(", "context", ",", "begin", ",", "end", "=", "None", ",", "project_id", "=", "None", ",", "host", "=", "None", ",", "columns_to_join", "=", "None", ",", "limit", "=", "None", ",", "marker", "=", "None", ")", ":", "return", "IMPL", ".", "instance_get_active_by_window_joined", "(", "context", ",", "begin", ",", "end", ",", "project_id", ",", "host", ",", "columns_to_join", "=", "columns_to_join", ",", "limit", "=", "limit", ",", "marker", "=", "marker", ")" ]
get instances and joins active during a certain time window .
train
false
43,982
def find_visible(node, lines): root = ast_suite_tree(node) return find_visible_for_suite(root, lines)
[ "def", "find_visible", "(", "node", ",", "lines", ")", ":", "root", "=", "ast_suite_tree", "(", "node", ")", "return", "find_visible_for_suite", "(", "root", ",", "lines", ")" ]
return the line which is visible from all lines .
train
false
43,983
def test_powerlaw_stats(): cases = [(1.0, (0.5, (1.0 / 12), 0.0, (-1.2))), (2.0, ((2.0 / 3), (2.0 / 36), (-0.5656854249492473), (-0.6)))] for (a, exact_mvsk) in cases: mvsk = stats.powerlaw.stats(a, moments='mvsk') assert_array_almost_equal(mvsk, exact_mvsk)
[ "def", "test_powerlaw_stats", "(", ")", ":", "cases", "=", "[", "(", "1.0", ",", "(", "0.5", ",", "(", "1.0", "/", "12", ")", ",", "0.0", ",", "(", "-", "1.2", ")", ")", ")", ",", "(", "2.0", ",", "(", "(", "2.0", "/", "3", ")", ",", "(", "2.0", "/", "36", ")", ",", "(", "-", "0.5656854249492473", ")", ",", "(", "-", "0.6", ")", ")", ")", "]", "for", "(", "a", ",", "exact_mvsk", ")", "in", "cases", ":", "mvsk", "=", "stats", ".", "powerlaw", ".", "stats", "(", "a", ",", "moments", "=", "'mvsk'", ")", "assert_array_almost_equal", "(", "mvsk", ",", "exact_mvsk", ")" ]
test the powerlaw stats function .
train
false
43,984
def validates(version): def _validates(cls): validators[version] = cls if (u'id' in cls.META_SCHEMA): meta_schemas[cls.META_SCHEMA[u'id']] = cls return cls return _validates
[ "def", "validates", "(", "version", ")", ":", "def", "_validates", "(", "cls", ")", ":", "validators", "[", "version", "]", "=", "cls", "if", "(", "u'id'", "in", "cls", ".", "META_SCHEMA", ")", ":", "meta_schemas", "[", "cls", ".", "META_SCHEMA", "[", "u'id'", "]", "]", "=", "cls", "return", "cls", "return", "_validates" ]
register the decorated validator for a version of the specification .
train
false
43,985
def test_pickle_unrecognized_unit(): a = u.Unit(u'asdf', parse_strict=u'silent') pickle.loads(pickle.dumps(a))
[ "def", "test_pickle_unrecognized_unit", "(", ")", ":", "a", "=", "u", ".", "Unit", "(", "u'asdf'", ",", "parse_strict", "=", "u'silent'", ")", "pickle", ".", "loads", "(", "pickle", ".", "dumps", "(", "a", ")", ")" ]
issue #2047 .
train
false
43,986
def get_enabled(): return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return the enabled services .
train
false
43,988
def format_attrs(attrs): if (not attrs): return u'' entries = [u'='.join((key, value)) for (key, value) in iteritems(attrs)] return ((u'[' + u', '.join(entries)) + u']')
[ "def", "format_attrs", "(", "attrs", ")", ":", "if", "(", "not", "attrs", ")", ":", "return", "u''", "entries", "=", "[", "u'='", ".", "join", "(", "(", "key", ",", "value", ")", ")", "for", "(", "key", ",", "value", ")", "in", "iteritems", "(", "attrs", ")", "]", "return", "(", "(", "u'['", "+", "u', '", ".", "join", "(", "entries", ")", ")", "+", "u']'", ")" ]
format key .
train
false
43,989
def get_networks_for_instance(context, instance): nw_info = compute_utils.get_nw_info_for_instance(instance) return get_networks_for_instance_from_nw_info(nw_info)
[ "def", "get_networks_for_instance", "(", "context", ",", "instance", ")", ":", "nw_info", "=", "compute_utils", ".", "get_nw_info_for_instance", "(", "instance", ")", "return", "get_networks_for_instance_from_nw_info", "(", "nw_info", ")" ]
returns a prepared nw_info list for passing into the view builders we end up with a data structure like:: {public: {ips: [{address: 10 .
train
false
43,990
def jsonize(data): return json.dumps(data, sort_keys=False, indent=4)
[ "def", "jsonize", "(", "data", ")", ":", "return", "json", ".", "dumps", "(", "data", ",", "sort_keys", "=", "False", ",", "indent", "=", "4", ")" ]
returns json serialized data .
train
false
43,991
def get_all_layers(layer, treat_as_input=None): try: queue = deque(layer) except TypeError: queue = deque([layer]) seen = set() done = set() result = [] if (treat_as_input is not None): seen.update(treat_as_input) while queue: layer = queue[0] if (layer is None): queue.popleft() elif (layer not in seen): seen.add(layer) if hasattr(layer, 'input_layers'): queue.extendleft(reversed(layer.input_layers)) elif hasattr(layer, 'input_layer'): queue.appendleft(layer.input_layer) else: queue.popleft() if (layer not in done): result.append(layer) done.add(layer) return result
[ "def", "get_all_layers", "(", "layer", ",", "treat_as_input", "=", "None", ")", ":", "try", ":", "queue", "=", "deque", "(", "layer", ")", "except", "TypeError", ":", "queue", "=", "deque", "(", "[", "layer", "]", ")", "seen", "=", "set", "(", ")", "done", "=", "set", "(", ")", "result", "=", "[", "]", "if", "(", "treat_as_input", "is", "not", "None", ")", ":", "seen", ".", "update", "(", "treat_as_input", ")", "while", "queue", ":", "layer", "=", "queue", "[", "0", "]", "if", "(", "layer", "is", "None", ")", ":", "queue", ".", "popleft", "(", ")", "elif", "(", "layer", "not", "in", "seen", ")", ":", "seen", ".", "add", "(", "layer", ")", "if", "hasattr", "(", "layer", ",", "'input_layers'", ")", ":", "queue", ".", "extendleft", "(", "reversed", "(", "layer", ".", "input_layers", ")", ")", "elif", "hasattr", "(", "layer", ",", "'input_layer'", ")", ":", "queue", ".", "appendleft", "(", "layer", ".", "input_layer", ")", "else", ":", "queue", ".", "popleft", "(", ")", "if", "(", "layer", "not", "in", "done", ")", ":", "result", ".", "append", "(", "layer", ")", "done", ".", "add", "(", "layer", ")", "return", "result" ]
this function gathers all layers below one or more given :class:layer instances .
train
false
43,993
def get_users_email_preferences(user_ids): user_email_preferences_models = user_models.UserEmailPreferencesModel.get_multi(user_ids) result = [] for email_preferences_model in user_email_preferences_models: if (email_preferences_model is None): result.append(user_domain.UserGlobalPrefs.create_default_prefs()) else: result.append(user_domain.UserGlobalPrefs(email_preferences_model.site_updates, email_preferences_model.editor_role_notifications, email_preferences_model.feedback_message_notifications, email_preferences_model.subscription_notifications)) return result
[ "def", "get_users_email_preferences", "(", "user_ids", ")", ":", "user_email_preferences_models", "=", "user_models", ".", "UserEmailPreferencesModel", ".", "get_multi", "(", "user_ids", ")", "result", "=", "[", "]", "for", "email_preferences_model", "in", "user_email_preferences_models", ":", "if", "(", "email_preferences_model", "is", "None", ")", ":", "result", ".", "append", "(", "user_domain", ".", "UserGlobalPrefs", ".", "create_default_prefs", "(", ")", ")", "else", ":", "result", ".", "append", "(", "user_domain", ".", "UserGlobalPrefs", "(", "email_preferences_model", ".", "site_updates", ",", "email_preferences_model", ".", "editor_role_notifications", ",", "email_preferences_model", ".", "feedback_message_notifications", ",", "email_preferences_model", ".", "subscription_notifications", ")", ")", "return", "result" ]
get email preferences for the list of users .
train
false
43,994
def getFloatGivenBinary(byteIndex, stlData): return unpack('f', stlData[byteIndex:(byteIndex + 4)])[0]
[ "def", "getFloatGivenBinary", "(", "byteIndex", ",", "stlData", ")", ":", "return", "unpack", "(", "'f'", ",", "stlData", "[", "byteIndex", ":", "(", "byteIndex", "+", "4", ")", "]", ")", "[", "0", "]" ]
get vertex given stl vertex line .
train
false
43,995
def _make_type(vendor, field): return ((vendor << 7) | field)
[ "def", "_make_type", "(", "vendor", ",", "field", ")", ":", "return", "(", "(", "vendor", "<<", "7", ")", "|", "field", ")" ]
takes an nxm vendor and field and returns the whole type field .
train
false
43,996
@click.command(u'disable-scheduler') @pass_context def disable_scheduler(context): import frappe.utils.scheduler for site in context.sites: try: frappe.init(site=site) frappe.connect() frappe.utils.scheduler.disable_scheduler() frappe.db.commit() print u'Disabled for', site finally: frappe.destroy()
[ "@", "click", ".", "command", "(", "u'disable-scheduler'", ")", "@", "pass_context", "def", "disable_scheduler", "(", "context", ")", ":", "import", "frappe", ".", "utils", ".", "scheduler", "for", "site", "in", "context", ".", "sites", ":", "try", ":", "frappe", ".", "init", "(", "site", "=", "site", ")", "frappe", ".", "connect", "(", ")", "frappe", ".", "utils", ".", "scheduler", ".", "disable_scheduler", "(", ")", "frappe", ".", "db", ".", "commit", "(", ")", "print", "u'Disabled for'", ",", "site", "finally", ":", "frappe", ".", "destroy", "(", ")" ]
disable scheduler .
train
false
43,997
def funshion_download_by_vid(vid, output_dir='.', merge=False, info_only=False): title = funshion_get_title_by_vid(vid) url_list = funshion_vid_to_urls(vid) for url in url_list: (type, ext, size) = url_info(url) print_info(site_info, title, type, size) if (not info_only): download_urls(url_list, title, ext, total_size=None, output_dir=output_dir, merge=merge)
[ "def", "funshion_download_by_vid", "(", "vid", ",", "output_dir", "=", "'.'", ",", "merge", "=", "False", ",", "info_only", "=", "False", ")", ":", "title", "=", "funshion_get_title_by_vid", "(", "vid", ")", "url_list", "=", "funshion_vid_to_urls", "(", "vid", ")", "for", "url", "in", "url_list", ":", "(", "type", ",", "ext", ",", "size", ")", "=", "url_info", "(", "url", ")", "print_info", "(", "site_info", ",", "title", ",", "type", ",", "size", ")", "if", "(", "not", "info_only", ")", ":", "download_urls", "(", "url_list", ",", "title", ",", "ext", ",", "total_size", "=", "None", ",", "output_dir", "=", "output_dir", ",", "merge", "=", "merge", ")" ]
vid->none secondary wrapper for single video download .
train
false
43,998
@depends(_HAS_WIN32TS_DEPENDENCIES) def disconnect_session(session_id): try: win32ts.WTSDisconnectSession(win32ts.WTS_CURRENT_SERVER_HANDLE, session_id, True) except PyWinError as error: _LOG.error('Error calling WTSDisconnectSession: %s', error) return False return True
[ "@", "depends", "(", "_HAS_WIN32TS_DEPENDENCIES", ")", "def", "disconnect_session", "(", "session_id", ")", ":", "try", ":", "win32ts", ".", "WTSDisconnectSession", "(", "win32ts", ".", "WTS_CURRENT_SERVER_HANDLE", ",", "session_id", ",", "True", ")", "except", "PyWinError", "as", "error", ":", "_LOG", ".", "error", "(", "'Error calling WTSDisconnectSession: %s'", ",", "error", ")", "return", "False", "return", "True" ]
disconnect a session .
train
true
43,999
def _walk_subclasses(klass): for sub in klass.__subclasses__(): (yield sub) for subsub in _walk_subclasses(sub): (yield subsub)
[ "def", "_walk_subclasses", "(", "klass", ")", ":", "for", "sub", "in", "klass", ".", "__subclasses__", "(", ")", ":", "(", "yield", "sub", ")", "for", "subsub", "in", "_walk_subclasses", "(", "sub", ")", ":", "(", "yield", "subsub", ")" ]
recursively walk subclass tree .
train
false
44,000
def RaiseHttpError(url, response_info, response_body, extra_msg=''): if (response_body is not None): stream = cStringIO.StringIO() stream.write(response_body) stream.seek(0) else: stream = None if (not extra_msg): msg = response_info.reason else: msg = ((response_info.reason + ' ') + extra_msg) raise urllib2.HTTPError(url, response_info.status, msg, response_info, stream)
[ "def", "RaiseHttpError", "(", "url", ",", "response_info", ",", "response_body", ",", "extra_msg", "=", "''", ")", ":", "if", "(", "response_body", "is", "not", "None", ")", ":", "stream", "=", "cStringIO", ".", "StringIO", "(", ")", "stream", ".", "write", "(", "response_body", ")", "stream", ".", "seek", "(", "0", ")", "else", ":", "stream", "=", "None", "if", "(", "not", "extra_msg", ")", ":", "msg", "=", "response_info", ".", "reason", "else", ":", "msg", "=", "(", "(", "response_info", ".", "reason", "+", "' '", ")", "+", "extra_msg", ")", "raise", "urllib2", ".", "HTTPError", "(", "url", ",", "response_info", ".", "status", ",", "msg", ",", "response_info", ",", "stream", ")" ]
raise a urllib2 .
train
false
44,001
def normgrad(y, x, params): beta = params[:(-1)] sigma2 = (params[(-1)] * np.ones((len(y), 1))) dmudbeta = mean_grad(x, beta) mu = np.dot(x, beta) params2 = np.column_stack((mu, sigma2)) dllsdms = norm_lls_grad(y, params2) grad = np.column_stack(((dllsdms[:, :1] * dmudbeta), dllsdms[:, :1])) return grad
[ "def", "normgrad", "(", "y", ",", "x", ",", "params", ")", ":", "beta", "=", "params", "[", ":", "(", "-", "1", ")", "]", "sigma2", "=", "(", "params", "[", "(", "-", "1", ")", "]", "*", "np", ".", "ones", "(", "(", "len", "(", "y", ")", ",", "1", ")", ")", ")", "dmudbeta", "=", "mean_grad", "(", "x", ",", "beta", ")", "mu", "=", "np", ".", "dot", "(", "x", ",", "beta", ")", "params2", "=", "np", ".", "column_stack", "(", "(", "mu", ",", "sigma2", ")", ")", "dllsdms", "=", "norm_lls_grad", "(", "y", ",", "params2", ")", "grad", "=", "np", ".", "column_stack", "(", "(", "(", "dllsdms", "[", ":", ",", ":", "1", "]", "*", "dmudbeta", ")", ",", "dllsdms", "[", ":", ",", ":", "1", "]", ")", ")", "return", "grad" ]
jacobian of normal loglikelihood wrt mean mu and variance sigma2 parameters y : array .
train
false
44,004
def ClearAllButEncodingsModules(module_dict): for module_name in module_dict.keys(): if ((not IsEncodingsModule(module_name)) and (module_name != 'sys')): del module_dict[module_name]
[ "def", "ClearAllButEncodingsModules", "(", "module_dict", ")", ":", "for", "module_name", "in", "module_dict", ".", "keys", "(", ")", ":", "if", "(", "(", "not", "IsEncodingsModule", "(", "module_name", ")", ")", "and", "(", "module_name", "!=", "'sys'", ")", ")", ":", "del", "module_dict", "[", "module_name", "]" ]
clear all modules in a module dictionary except for those modules that are in any way related to encodings .
train
false
44,007
def convert_StringProperty(model, prop, kwargs): if prop.multiline: kwargs['validators'].append(validators.length(max=500)) return f.TextAreaField(**kwargs) else: return get_TextField(kwargs)
[ "def", "convert_StringProperty", "(", "model", ",", "prop", ",", "kwargs", ")", ":", "if", "prop", ".", "multiline", ":", "kwargs", "[", "'validators'", "]", ".", "append", "(", "validators", ".", "length", "(", "max", "=", "500", ")", ")", "return", "f", ".", "TextAreaField", "(", "**", "kwargs", ")", "else", ":", "return", "get_TextField", "(", "kwargs", ")" ]
returns a form field for a db .
train
false
44,008
def matrix_to_string(matrix, header=None): if (type(header) is list): header = tuple(header) lengths = [] if header: for column in header: lengths.append(len(column)) for row in matrix: for (i, column) in enumerate(row): column = unicode(column).encode('utf-8') cl = len(column) try: ml = lengths[i] if (cl > ml): lengths[i] = cl except IndexError: lengths.append(cl) lengths = tuple(lengths) format_string = '' for length in lengths: format_string += (('%-' + str(length)) + 's ') format_string += '\n' matrix_str = '' if header: matrix_str += (format_string % header) for row in matrix: matrix_str += (format_string % tuple(row)) return matrix_str
[ "def", "matrix_to_string", "(", "matrix", ",", "header", "=", "None", ")", ":", "if", "(", "type", "(", "header", ")", "is", "list", ")", ":", "header", "=", "tuple", "(", "header", ")", "lengths", "=", "[", "]", "if", "header", ":", "for", "column", "in", "header", ":", "lengths", ".", "append", "(", "len", "(", "column", ")", ")", "for", "row", "in", "matrix", ":", "for", "(", "i", ",", "column", ")", "in", "enumerate", "(", "row", ")", ":", "column", "=", "unicode", "(", "column", ")", ".", "encode", "(", "'utf-8'", ")", "cl", "=", "len", "(", "column", ")", "try", ":", "ml", "=", "lengths", "[", "i", "]", "if", "(", "cl", ">", "ml", ")", ":", "lengths", "[", "i", "]", "=", "cl", "except", "IndexError", ":", "lengths", ".", "append", "(", "cl", ")", "lengths", "=", "tuple", "(", "lengths", ")", "format_string", "=", "''", "for", "length", "in", "lengths", ":", "format_string", "+=", "(", "(", "'%-'", "+", "str", "(", "length", ")", ")", "+", "'s '", ")", "format_string", "+=", "'\\n'", "matrix_str", "=", "''", "if", "header", ":", "matrix_str", "+=", "(", "format_string", "%", "header", ")", "for", "row", "in", "matrix", ":", "matrix_str", "+=", "(", "format_string", "%", "tuple", "(", "row", ")", ")", "return", "matrix_str" ]
return a pretty .
train
false
44,009
@access_log_level(logging.WARN) def log_view(request): if (not request.user.is_superuser): return HttpResponse(_('You must be a superuser.')) hostname = socket.gethostname() l = logging.getLogger() for h in l.handlers: if isinstance(h, desktop.log.log_buffer.FixedBufferHandler): return render('logs.mako', request, dict(log=[l for l in h.buf], query=request.GET.get('q', ''), hostname=hostname)) return render('logs.mako', request, dict(log=[_('No logs found!')], query='', hostname=hostname))
[ "@", "access_log_level", "(", "logging", ".", "WARN", ")", "def", "log_view", "(", "request", ")", ":", "if", "(", "not", "request", ".", "user", ".", "is_superuser", ")", ":", "return", "HttpResponse", "(", "_", "(", "'You must be a superuser.'", ")", ")", "hostname", "=", "socket", ".", "gethostname", "(", ")", "l", "=", "logging", ".", "getLogger", "(", ")", "for", "h", "in", "l", ".", "handlers", ":", "if", "isinstance", "(", "h", ",", "desktop", ".", "log", ".", "log_buffer", ".", "FixedBufferHandler", ")", ":", "return", "render", "(", "'logs.mako'", ",", "request", ",", "dict", "(", "log", "=", "[", "l", "for", "l", "in", "h", ".", "buf", "]", ",", "query", "=", "request", ".", "GET", ".", "get", "(", "'q'", ",", "''", ")", ",", "hostname", "=", "hostname", ")", ")", "return", "render", "(", "'logs.mako'", ",", "request", ",", "dict", "(", "log", "=", "[", "_", "(", "'No logs found!'", ")", "]", ",", "query", "=", "''", ",", "hostname", "=", "hostname", ")", ")" ]
we have a log handler that retains the last x characters of log messages .
train
false
44,010
def next(iter): return iter.next()
[ "def", "next", "(", "iter", ")", ":", "return", "iter", ".", "next", "(", ")" ]
equivalent to iter .
train
false
44,011
def connect_port(address='127.0.0.1', port=9051, password=None, chroot_path=None, controller=stem.control.Controller): try: control_port = stem.socket.ControlPort(address, port) except stem.SocketError as exc: print exc return None return _connect_auth(control_port, password, True, chroot_path, controller)
[ "def", "connect_port", "(", "address", "=", "'127.0.0.1'", ",", "port", "=", "9051", ",", "password", "=", "None", ",", "chroot_path", "=", "None", ",", "controller", "=", "stem", ".", "control", ".", "Controller", ")", ":", "try", ":", "control_port", "=", "stem", ".", "socket", ".", "ControlPort", "(", "address", ",", "port", ")", "except", "stem", ".", "SocketError", "as", "exc", ":", "print", "exc", "return", "None", "return", "_connect_auth", "(", "control_port", ",", "password", ",", "True", ",", "chroot_path", ",", "controller", ")" ]
convenience function for quickly getting a control connection .
train
false
44,012
@must_be_logged_in def user_notifications(auth, **kwargs): return {'mailing_lists': dict((auth.user.mailchimp_mailing_lists.items() + auth.user.osf_mailing_lists.items()))}
[ "@", "must_be_logged_in", "def", "user_notifications", "(", "auth", ",", "**", "kwargs", ")", ":", "return", "{", "'mailing_lists'", ":", "dict", "(", "(", "auth", ".", "user", ".", "mailchimp_mailing_lists", ".", "items", "(", ")", "+", "auth", ".", "user", ".", "osf_mailing_lists", ".", "items", "(", ")", ")", ")", "}" ]
get subscribe data from user .
train
false
44,013
def p_number(p): p[0] = eval(p[1])
[ "def", "p_number", "(", "p", ")", ":", "p", "[", "0", "]", "=", "eval", "(", "p", "[", "1", "]", ")" ]
number : integer | float .
train
false
44,015
def is_same_domain(host, pattern): if (not pattern): return False pattern = pattern.lower() return (((pattern[0] == '.') and (host.endswith(pattern) or (host == pattern[1:]))) or (pattern == host))
[ "def", "is_same_domain", "(", "host", ",", "pattern", ")", ":", "if", "(", "not", "pattern", ")", ":", "return", "False", "pattern", "=", "pattern", ".", "lower", "(", ")", "return", "(", "(", "(", "pattern", "[", "0", "]", "==", "'.'", ")", "and", "(", "host", ".", "endswith", "(", "pattern", ")", "or", "(", "host", "==", "pattern", "[", "1", ":", "]", ")", ")", ")", "or", "(", "pattern", "==", "host", ")", ")" ]
return true if the host is either an exact match or a match to the wildcard pattern .
train
false
44,016
def _makeHDB1(NDB): CMD = 0 EMD = 3 HDB1val = ((((CMD & 1) * pow(2, 7)) | ((EMD & 7) * pow(2, 4))) | (15 & NDB)) return HDB1val
[ "def", "_makeHDB1", "(", "NDB", ")", ":", "CMD", "=", "0", "EMD", "=", "3", "HDB1val", "=", "(", "(", "(", "(", "CMD", "&", "1", ")", "*", "pow", "(", "2", ",", "7", ")", ")", "|", "(", "(", "EMD", "&", "7", ")", "*", "pow", "(", "2", ",", "4", ")", ")", ")", "|", "(", "15", "&", "NDB", ")", ")", "return", "HDB1val" ]
encode header byte 1 .
train
false
44,018
@open_file(0, mode='rb') def read_leda(path, encoding='UTF-8'): lines = (line.decode(encoding) for line in path) G = parse_leda(lines) return G
[ "@", "open_file", "(", "0", ",", "mode", "=", "'rb'", ")", "def", "read_leda", "(", "path", ",", "encoding", "=", "'UTF-8'", ")", ":", "lines", "=", "(", "line", ".", "decode", "(", "encoding", ")", "for", "line", "in", "path", ")", "G", "=", "parse_leda", "(", "lines", ")", "return", "G" ]
read graph in leda format from path .
train
false
44,019
@register.inclusion_tag('zinnia/tags/dummy.html') def get_recent_entries(number=5, template='zinnia/tags/entries_recent.html'): return {'template': template, 'entries': Entry.published.all()[:number]}
[ "@", "register", ".", "inclusion_tag", "(", "'zinnia/tags/dummy.html'", ")", "def", "get_recent_entries", "(", "number", "=", "5", ",", "template", "=", "'zinnia/tags/entries_recent.html'", ")", ":", "return", "{", "'template'", ":", "template", ",", "'entries'", ":", "Entry", ".", "published", ".", "all", "(", ")", "[", ":", "number", "]", "}" ]
return the most recent entries .
train
false
44,021
def validate_email(email, check_mx=False, verify=False): try: assert (re.match(VALID_ADDRESS_REGEXP, email) is not None) check_mx |= verify if check_mx: if (not DNS): raise Exception('For check the mx records or check if the email exists you must have installed pyDNS python package') DNS.DiscoverNameServers() hostname = email[(email.find('@') + 1):] mx_hosts = DNS.mxlookup(hostname) for mx in mx_hosts: try: smtp = smtplib.SMTP() smtp.connect(mx[1]) if (not verify): return True (status, _) = smtp.helo() if (status != 250): continue smtp.mail('') (status, _) = smtp.rcpt(email) if (status != 250): return False break except smtplib.SMTPServerDisconnected: break except smtplib.SMTPConnectError: continue except (AssertionError, ServerError): return False return True
[ "def", "validate_email", "(", "email", ",", "check_mx", "=", "False", ",", "verify", "=", "False", ")", ":", "try", ":", "assert", "(", "re", ".", "match", "(", "VALID_ADDRESS_REGEXP", ",", "email", ")", "is", "not", "None", ")", "check_mx", "|=", "verify", "if", "check_mx", ":", "if", "(", "not", "DNS", ")", ":", "raise", "Exception", "(", "'For check the mx records or check if the email exists you must have installed pyDNS python package'", ")", "DNS", ".", "DiscoverNameServers", "(", ")", "hostname", "=", "email", "[", "(", "email", ".", "find", "(", "'@'", ")", "+", "1", ")", ":", "]", "mx_hosts", "=", "DNS", ".", "mxlookup", "(", "hostname", ")", "for", "mx", "in", "mx_hosts", ":", "try", ":", "smtp", "=", "smtplib", ".", "SMTP", "(", ")", "smtp", ".", "connect", "(", "mx", "[", "1", "]", ")", "if", "(", "not", "verify", ")", ":", "return", "True", "(", "status", ",", "_", ")", "=", "smtp", ".", "helo", "(", ")", "if", "(", "status", "!=", "250", ")", ":", "continue", "smtp", ".", "mail", "(", "''", ")", "(", "status", ",", "_", ")", "=", "smtp", ".", "rcpt", "(", "email", ")", "if", "(", "status", "!=", "250", ")", ":", "return", "False", "break", "except", "smtplib", ".", "SMTPServerDisconnected", ":", "break", "except", "smtplib", ".", "SMTPConnectError", ":", "continue", "except", "(", "AssertionError", ",", "ServerError", ")", ":", "return", "False", "return", "True" ]
indicate whether the given string is a valid email address according to the addr-spec portion of rfc 2822 .
train
false
44,023
@handle_response_format @treeio_login_required def group_view(request, group_id, response_format='html'): group = get_object_or_404(Group, pk=group_id) contacts = Object.filter_by_request(request, Contact.objects.filter(related_user=group).order_by('name')) members = User.objects.filter((Q(default_group=group) | Q(other_groups=group))).distinct() subgroups = Group.objects.filter(parent=group) types = Object.filter_by_request(request, ContactType.objects.order_by('name')) return render_to_response('identities/group_view', {'group': group, 'subgroups': subgroups, 'members': members, 'contacts': contacts, 'types': types}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "group_view", "(", "request", ",", "group_id", ",", "response_format", "=", "'html'", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "pk", "=", "group_id", ")", "contacts", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Contact", ".", "objects", ".", "filter", "(", "related_user", "=", "group", ")", ".", "order_by", "(", "'name'", ")", ")", "members", "=", "User", ".", "objects", ".", "filter", "(", "(", "Q", "(", "default_group", "=", "group", ")", "|", "Q", "(", "other_groups", "=", "group", ")", ")", ")", ".", "distinct", "(", ")", "subgroups", "=", "Group", ".", "objects", ".", "filter", "(", "parent", "=", "group", ")", "types", "=", "Object", ".", "filter_by_request", "(", "request", ",", "ContactType", ".", "objects", ".", "order_by", "(", "'name'", ")", ")", "return", "render_to_response", "(", "'identities/group_view'", ",", "{", "'group'", ":", "group", ",", "'subgroups'", ":", "subgroups", ",", "'members'", ":", "members", ",", "'contacts'", ":", "contacts", ",", "'types'", ":", "types", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
group view .
train
false
44,024
def _VersionList(release): return [int(part) for part in str(release).split('.')]
[ "def", "_VersionList", "(", "release", ")", ":", "return", "[", "int", "(", "part", ")", "for", "part", "in", "str", "(", "release", ")", ".", "split", "(", "'.'", ")", "]" ]
parse a version string into a list of ints .
train
false
44,026
def get_annotation(var, cls): for annotation in getattr(var.tag, 'annotations', []): if isinstance(annotation, cls): return annotation
[ "def", "get_annotation", "(", "var", ",", "cls", ")", ":", "for", "annotation", "in", "getattr", "(", "var", ".", "tag", ",", "'annotations'", ",", "[", "]", ")", ":", "if", "isinstance", "(", "annotation", ",", "cls", ")", ":", "return", "annotation" ]
a helper function to retrieve an annotation of a particular type .
train
false
44,031
def demo_update_one(filename): f = get_contents(filename) if (not len(f.strip())): return '' f = remove_doublesharp_trailing_whitespace(f) f = add_shbang_encoding_future(f) f = add_win_close_quit_demo_license(f) f = convert_inline_comments(f) f = replace_xrange(f) f = replace_commas_etc(f) f = replace_PatchStim(f) f = replace_myWin_win(f) f = replace_equals(f) f = uk_to_us_spelling(f) f = split_multiline(f) return f
[ "def", "demo_update_one", "(", "filename", ")", ":", "f", "=", "get_contents", "(", "filename", ")", "if", "(", "not", "len", "(", "f", ".", "strip", "(", ")", ")", ")", ":", "return", "''", "f", "=", "remove_doublesharp_trailing_whitespace", "(", "f", ")", "f", "=", "add_shbang_encoding_future", "(", "f", ")", "f", "=", "add_win_close_quit_demo_license", "(", "f", ")", "f", "=", "convert_inline_comments", "(", "f", ")", "f", "=", "replace_xrange", "(", "f", ")", "f", "=", "replace_commas_etc", "(", "f", ")", "f", "=", "replace_PatchStim", "(", "f", ")", "f", "=", "replace_myWin_win", "(", "f", ")", "f", "=", "replace_equals", "(", "f", ")", "f", "=", "uk_to_us_spelling", "(", "f", ")", "f", "=", "split_multiline", "(", "f", ")", "return", "f" ]
convert file contents to updated style etc .
train
false
44,032
def raise_error(error_code, error_detail=''): raise apiproxy_errors.ApplicationError(error_code, error_detail=error_detail)
[ "def", "raise_error", "(", "error_code", ",", "error_detail", "=", "''", ")", ":", "raise", "apiproxy_errors", ".", "ApplicationError", "(", "error_code", ",", "error_detail", "=", "error_detail", ")" ]
raise application error helper method .
train
false
44,035
def validate_uuid_representation(dummy, value): try: return _UUID_REPRESENTATIONS[value] except KeyError: raise ValueError(('%s is an invalid UUID representation. Must be one of %s' % (value, tuple(_UUID_REPRESENTATIONS))))
[ "def", "validate_uuid_representation", "(", "dummy", ",", "value", ")", ":", "try", ":", "return", "_UUID_REPRESENTATIONS", "[", "value", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "'%s is an invalid UUID representation. Must be one of %s'", "%", "(", "value", ",", "tuple", "(", "_UUID_REPRESENTATIONS", ")", ")", ")", ")" ]
validate the uuid representation option selected in the uri .
train
true
44,036
def getElementNodesByKey(elementNode, key): if (key not in elementNode.attributes): return [] word = str(elementNode.attributes[key]).strip() evaluatedLinkValue = getEvaluatedLinkValue(elementNode, word) if (evaluatedLinkValue.__class__.__name__ == 'ElementNode'): return [evaluatedLinkValue] if (evaluatedLinkValue.__class__ == list): return evaluatedLinkValue print 'Warning, could not get ElementNodes in getElementNodesByKey in evaluate for:' print key print evaluatedLinkValue print elementNode return []
[ "def", "getElementNodesByKey", "(", "elementNode", ",", "key", ")", ":", "if", "(", "key", "not", "in", "elementNode", ".", "attributes", ")", ":", "return", "[", "]", "word", "=", "str", "(", "elementNode", ".", "attributes", "[", "key", "]", ")", ".", "strip", "(", ")", "evaluatedLinkValue", "=", "getEvaluatedLinkValue", "(", "elementNode", ",", "word", ")", "if", "(", "evaluatedLinkValue", ".", "__class__", ".", "__name__", "==", "'ElementNode'", ")", ":", "return", "[", "evaluatedLinkValue", "]", "if", "(", "evaluatedLinkValue", ".", "__class__", "==", "list", ")", ":", "return", "evaluatedLinkValue", "print", "'Warning, could not get ElementNodes in getElementNodesByKey in evaluate for:'", "print", "key", "print", "evaluatedLinkValue", "print", "elementNode", "return", "[", "]" ]
get the xml elements by key .
train
false
44,038
def _swap_keys(obj, key1, key2): (val1, val2) = (None, None) try: val2 = obj.pop(key1) except KeyError: pass try: val1 = obj.pop(key2) except KeyError: pass if (val2 is not None): obj[key2] = val2 if (val1 is not None): obj[key1] = val1
[ "def", "_swap_keys", "(", "obj", ",", "key1", ",", "key2", ")", ":", "(", "val1", ",", "val2", ")", "=", "(", "None", ",", "None", ")", "try", ":", "val2", "=", "obj", ".", "pop", "(", "key1", ")", "except", "KeyError", ":", "pass", "try", ":", "val1", "=", "obj", ".", "pop", "(", "key2", ")", "except", "KeyError", ":", "pass", "if", "(", "val2", "is", "not", "None", ")", ":", "obj", "[", "key2", "]", "=", "val2", "if", "(", "val1", "is", "not", "None", ")", ":", "obj", "[", "key1", "]", "=", "val1" ]
swap obj[key1] with obj[key2] .
train
false
44,039
def run_symilar(): from pylint.checkers.similar import Run Run(sys.argv[1:])
[ "def", "run_symilar", "(", ")", ":", "from", "pylint", ".", "checkers", ".", "similar", "import", "Run", "Run", "(", "sys", ".", "argv", "[", "1", ":", "]", ")" ]
run symilar .
train
false