Dataset columns:

  id_within_dataset     int64     values 1 to 55.5k
  snippet               string    lengths 19 to 14.2k
  tokens                list      lengths 6 to 1.63k
  nl                    string    lengths 6 to 352
  split_within_dataset  string    1 value
  is_duplicated         bool      2 classes
6,351
def obtain_lock_id(pid=None):
    id = (-1)
    id = obtain_lock_id_to_hog()
    try:
        if (id >= 0):
            if (pid is None):
                pid = os.getpid()
            _launch_reaper(id, pid)
    except:
        free_lock(id)
        id = (-1)
    return id
[ "def", "obtain_lock_id", "(", "pid", "=", "None", ")", ":", "id", "=", "(", "-", "1", ")", "id", "=", "obtain_lock_id_to_hog", "(", ")", "try", ":", "if", "(", "id", ">=", "0", ")", ":", "if", "(", "pid", "is", "None", ")", ":", "pid", "=", "os", ".", "getpid", "(", ")", "_launch_reaper", "(", "id", ",", "pid", ")", "except", ":", "free_lock", "(", "id", ")", "id", "=", "(", "-", "1", ")", "return", "id" ]
finds a free id .
train
false
6,352
def _next_device():
    return _select_free_device(_find_allocated_devices())
[ "def", "_next_device", "(", ")", ":", "return", "_select_free_device", "(", "_find_allocated_devices", "(", ")", ")" ]
get the next available ebs device name for this ec2 instance .
train
false
6,353
def get_log_formats(config):
    log_format = (((Literal('log_format') + parameter) + Group(OneOrMore(parameter))) + semicolon)
    log_format.ignore(pythonStyleComment)
    for directive in log_format.searchString(config).asList():
        name = directive[1]
        format_string = ''.join(directive[2])
        (yield (name, format_string))
[ "def", "get_log_formats", "(", "config", ")", ":", "log_format", "=", "(", "(", "(", "Literal", "(", "'log_format'", ")", "+", "parameter", ")", "+", "Group", "(", "OneOrMore", "(", "parameter", ")", ")", ")", "+", "semicolon", ")", "log_format", ".", "ignore", "(", "pythonStyleComment", ")", "for", "directive", "in", "log_format", ".", "searchString", "(", "config", ")", ".", "asList", "(", ")", ":", "name", "=", "directive", "[", "1", "]", "format_string", "=", "''", ".", "join", "(", "directive", "[", "2", "]", ")", "(", "yield", "(", "name", ",", "format_string", ")", ")" ]
parse config for log_format directives :return: iterator over tuple of found directives .
train
true
6,355
def image_volume_cache_get_by_volume_id(context, volume_id):
    return IMPL.image_volume_cache_get_by_volume_id(context, volume_id)
[ "def", "image_volume_cache_get_by_volume_id", "(", "context", ",", "volume_id", ")", ":", "return", "IMPL", ".", "image_volume_cache_get_by_volume_id", "(", "context", ",", "volume_id", ")" ]
query to see if a volume id is an image-volume contained in the cache .
train
false
6,356
def set_key(key, value, host=None, port=None, db=None, password=None):
    server = _connect(host, port, db, password)
    return server.set(key, value)
[ "def", "set_key", "(", "key", ",", "value", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "set", "(", "key", ",", "value", ")" ]
updates a parsed yaml structure setting a key to a value .
train
true
6,357
def drag(page, source_index, target_index, placeholder_height=0):
    draggables = page.q(css='.drag-handle')
    source = draggables[source_index]
    target = draggables[target_index]
    action = ActionChains(page.browser)
    action.click_and_hold(source).move_to_element_with_offset(target, 0, placeholder_height)
    if (placeholder_height == 0):
        action.release(target).perform()
    else:
        action.release().perform()
    wait_for_notification(page)
[ "def", "drag", "(", "page", ",", "source_index", ",", "target_index", ",", "placeholder_height", "=", "0", ")", ":", "draggables", "=", "page", ".", "q", "(", "css", "=", "'.drag-handle'", ")", "source", "=", "draggables", "[", "source_index", "]", "target", "=", "draggables", "[", "target_index", "]", "action", "=", "ActionChains", "(", "page", ".", "browser", ")", "action", ".", "click_and_hold", "(", "source", ")", ".", "move_to_element_with_offset", "(", "target", ",", "0", ",", "placeholder_height", ")", "if", "(", "placeholder_height", "==", "0", ")", ":", "action", ".", "release", "(", "target", ")", ".", "perform", "(", ")", "else", ":", "action", ".", "release", "(", ")", ".", "perform", "(", ")", "wait_for_notification", "(", "page", ")" ]
gets the drag handle with index source_index and drags it to the location of the drag handle with target_index .
train
false
6,358
def make_grantip(app, global_conf, clobber_username=False, **kw):
    from paste.deploy.converters import asbool
    clobber_username = asbool(clobber_username)
    ip_map = {}
    for (key, value) in kw.items():
        if (':' in value):
            (username, role) = value.split(':', 1)
        else:
            username = value
            role = ''
        if (username == '-'):
            username = ''
        if (role == '-'):
            role = ''
        ip_map[key] = value
    return GrantIPMiddleware(app, ip_map, clobber_username)
[ "def", "make_grantip", "(", "app", ",", "global_conf", ",", "clobber_username", "=", "False", ",", "**", "kw", ")", ":", "from", "paste", ".", "deploy", ".", "converters", "import", "asbool", "clobber_username", "=", "asbool", "(", "clobber_username", ")", "ip_map", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "kw", ".", "items", "(", ")", ":", "if", "(", "':'", "in", "value", ")", ":", "(", "username", ",", "role", ")", "=", "value", ".", "split", "(", "':'", ",", "1", ")", "else", ":", "username", "=", "value", "role", "=", "''", "if", "(", "username", "==", "'-'", ")", ":", "username", "=", "''", "if", "(", "role", "==", "'-'", ")", ":", "role", "=", "''", "ip_map", "[", "key", "]", "=", "value", "return", "GrantIPMiddleware", "(", "app", ",", "ip_map", ",", "clobber_username", ")" ]
grant roles or usernames based on ip addresses .
train
false
6,360
def _bistochastic_normalize(X, max_iter=1000, tol=1e-05):
    X = make_nonnegative(X)
    X_scaled = X
    dist = None
    for _ in range(max_iter):
        (X_new, _, _) = _scale_normalize(X_scaled)
        if issparse(X):
            dist = norm((X_scaled.data - X.data))
        else:
            dist = norm((X_scaled - X_new))
        X_scaled = X_new
        if ((dist is not None) and (dist < tol)):
            break
    return X_scaled
[ "def", "_bistochastic_normalize", "(", "X", ",", "max_iter", "=", "1000", ",", "tol", "=", "1e-05", ")", ":", "X", "=", "make_nonnegative", "(", "X", ")", "X_scaled", "=", "X", "dist", "=", "None", "for", "_", "in", "range", "(", "max_iter", ")", ":", "(", "X_new", ",", "_", ",", "_", ")", "=", "_scale_normalize", "(", "X_scaled", ")", "if", "issparse", "(", "X", ")", ":", "dist", "=", "norm", "(", "(", "X_scaled", ".", "data", "-", "X", ".", "data", ")", ")", "else", ":", "dist", "=", "norm", "(", "(", "X_scaled", "-", "X_new", ")", ")", "X_scaled", "=", "X_new", "if", "(", "(", "dist", "is", "not", "None", ")", "and", "(", "dist", "<", "tol", ")", ")", ":", "break", "return", "X_scaled" ]
normalize rows and columns of x simultaneously so that all rows sum to one constant and all columns sum to a different constant .
train
false
6,361
def export(from_dir, to_dir, blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, verbose=0):
    try:
        mkdir(to_dir)
    except OSError:
        pass
    for (directory, dirnames, filenames) in walk(from_dir):
        for norecurs in blacklist:
            try:
                dirnames.remove(norecurs)
            except ValueError:
                continue
        for dirname in dirnames:
            src = join(directory, dirname)
            dest = (to_dir + src[len(from_dir):])
            if isdir(src):
                if (not exists(dest)):
                    mkdir(dest)
        for filename in filenames:
            if any([filename.endswith(ext) for ext in ignore_ext]):
                continue
            src = join(directory, filename)
            dest = (to_dir + src[len(from_dir):])
            if verbose:
                print(src, '->', dest, file=sys.stderr)
            if exists(dest):
                remove(dest)
            shutil.copy2(src, dest)
[ "def", "export", "(", "from_dir", ",", "to_dir", ",", "blacklist", "=", "BASE_BLACKLIST", ",", "ignore_ext", "=", "IGNORED_EXTENSIONS", ",", "verbose", "=", "0", ")", ":", "try", ":", "mkdir", "(", "to_dir", ")", "except", "OSError", ":", "pass", "for", "(", "directory", ",", "dirnames", ",", "filenames", ")", "in", "walk", "(", "from_dir", ")", ":", "for", "norecurs", "in", "blacklist", ":", "try", ":", "dirnames", ".", "remove", "(", "norecurs", ")", "except", "ValueError", ":", "continue", "for", "dirname", "in", "dirnames", ":", "src", "=", "join", "(", "directory", ",", "dirname", ")", "dest", "=", "(", "to_dir", "+", "src", "[", "len", "(", "from_dir", ")", ":", "]", ")", "if", "isdir", "(", "src", ")", ":", "if", "(", "not", "exists", "(", "dest", ")", ")", ":", "mkdir", "(", "dest", ")", "for", "filename", "in", "filenames", ":", "if", "any", "(", "[", "filename", ".", "endswith", "(", "ext", ")", "for", "ext", "in", "ignore_ext", "]", ")", ":", "continue", "src", "=", "join", "(", "directory", ",", "filename", ")", "dest", "=", "(", "to_dir", "+", "src", "[", "len", "(", "from_dir", ")", ":", "]", ")", "if", "verbose", ":", "print", "(", "src", ",", "'->'", ",", "dest", ",", "file", "=", "sys", ".", "stderr", ")", "if", "exists", "(", "dest", ")", ":", "remove", "(", "dest", ")", "shutil", ".", "copy2", "(", "src", ",", "dest", ")" ]
export an image description for kiwi .
train
false
6,363
def convertElementNodeRenameByPaths(elementNode, geometryOutput):
    createLinkPath(elementNode)
    for geometryOutputChild in geometryOutput:
        pathElement = xml_simple_reader.ElementNode()
        pathElement.setParentAddToChildNodes(elementNode)
        convertElementNodeByPath(pathElement, geometryOutputChild)
[ "def", "convertElementNodeRenameByPaths", "(", "elementNode", ",", "geometryOutput", ")", ":", "createLinkPath", "(", "elementNode", ")", "for", "geometryOutputChild", "in", "geometryOutput", ":", "pathElement", "=", "xml_simple_reader", ".", "ElementNode", "(", ")", "pathElement", ".", "setParentAddToChildNodes", "(", "elementNode", ")", "convertElementNodeByPath", "(", "pathElement", ",", "geometryOutputChild", ")" ]
convert the xml element to a path xml element and add paths .
train
false
6,367
def get_pack_file_abs_path(pack_ref, file_path):
    pack_base_path = get_pack_base_path(pack_name=pack_ref)
    path_components = []
    path_components.append(pack_base_path)
    normalized_file_path = os.path.normpath(('/' + file_path)).lstrip('/')
    if (normalized_file_path != file_path):
        raise ValueError(('Invalid file path: %s' % file_path))
    path_components.append(normalized_file_path)
    result = os.path.join(*path_components)
    assert (normalized_file_path in result)
    common_prefix = os.path.commonprefix([pack_base_path, result])
    if (common_prefix != pack_base_path):
        raise ValueError(('Invalid file_path: %s' % file_path))
    return result
[ "def", "get_pack_file_abs_path", "(", "pack_ref", ",", "file_path", ")", ":", "pack_base_path", "=", "get_pack_base_path", "(", "pack_name", "=", "pack_ref", ")", "path_components", "=", "[", "]", "path_components", ".", "append", "(", "pack_base_path", ")", "normalized_file_path", "=", "os", ".", "path", ".", "normpath", "(", "(", "'/'", "+", "file_path", ")", ")", ".", "lstrip", "(", "'/'", ")", "if", "(", "normalized_file_path", "!=", "file_path", ")", ":", "raise", "ValueError", "(", "(", "'Invalid file path: %s'", "%", "file_path", ")", ")", "path_components", ".", "append", "(", "normalized_file_path", ")", "result", "=", "os", ".", "path", ".", "join", "(", "*", "path_components", ")", "assert", "(", "normalized_file_path", "in", "result", ")", "common_prefix", "=", "os", ".", "path", ".", "commonprefix", "(", "[", "pack_base_path", ",", "result", "]", ")", "if", "(", "common_prefix", "!=", "pack_base_path", ")", ":", "raise", "ValueError", "(", "(", "'Invalid file_path: %s'", "%", "file_path", ")", ")", "return", "result" ]
retrieve full absolute path to the pack file .
train
false
6,369
def test_timeout():
    from time import sleep
    sleep(2)
[ "def", "test_timeout", "(", ")", ":", "from", "time", "import", "sleep", "sleep", "(", "2", ")" ]
test whether timeout calls the functions .
train
false
6,371
def axapi_call_v3(module, url, method=None, body=None, signature=None):
    if signature:
        headers = {'content-type': 'application/json', 'Authorization': ('A10 %s' % signature)}
    else:
        headers = {'content-type': 'application/json'}
    (rsp, info) = fetch_url(module, url, method=method, data=body, headers=headers)
    if ((not rsp) or (info['status'] >= 400)):
        module.fail_json(msg=('failed to connect (status code %s), error was %s' % (info['status'], info.get('msg', 'no error given'))))
    try:
        raw_data = rsp.read()
        data = json.loads(raw_data)
    except ValueError:
        if ('status="ok"' in raw_data.lower()):
            data = {'response': {'status': 'OK'}}
        else:
            data = {'response': {'status': 'fail', 'err': {'msg': raw_data}}}
    except:
        module.fail_json(msg='could not read the result from the host')
    finally:
        rsp.close()
    return data
[ "def", "axapi_call_v3", "(", "module", ",", "url", ",", "method", "=", "None", ",", "body", "=", "None", ",", "signature", "=", "None", ")", ":", "if", "signature", ":", "headers", "=", "{", "'content-type'", ":", "'application/json'", ",", "'Authorization'", ":", "(", "'A10 %s'", "%", "signature", ")", "}", "else", ":", "headers", "=", "{", "'content-type'", ":", "'application/json'", "}", "(", "rsp", ",", "info", ")", "=", "fetch_url", "(", "module", ",", "url", ",", "method", "=", "method", ",", "data", "=", "body", ",", "headers", "=", "headers", ")", "if", "(", "(", "not", "rsp", ")", "or", "(", "info", "[", "'status'", "]", ">=", "400", ")", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'failed to connect (status code %s), error was %s'", "%", "(", "info", "[", "'status'", "]", ",", "info", ".", "get", "(", "'msg'", ",", "'no error given'", ")", ")", ")", ")", "try", ":", "raw_data", "=", "rsp", ".", "read", "(", ")", "data", "=", "json", ".", "loads", "(", "raw_data", ")", "except", "ValueError", ":", "if", "(", "'status=\"ok\"'", "in", "raw_data", ".", "lower", "(", ")", ")", ":", "data", "=", "{", "'response'", ":", "{", "'status'", ":", "'OK'", "}", "}", "else", ":", "data", "=", "{", "'response'", ":", "{", "'status'", ":", "'fail'", ",", "'err'", ":", "{", "'msg'", ":", "raw_data", "}", "}", "}", "except", ":", "module", ".", "fail_json", "(", "msg", "=", "'could not read the result from the host'", ")", "finally", ":", "rsp", ".", "close", "(", ")", "return", "data" ]
returns a datastructure based on the result of the api call .
train
false
6,372
def list_dvs(service_instance):
    return list_objects(service_instance, vim.DistributedVirtualSwitch)
[ "def", "list_dvs", "(", "service_instance", ")", ":", "return", "list_objects", "(", "service_instance", ",", "vim", ".", "DistributedVirtualSwitch", ")" ]
returns a list of distributed virtual switches for the the specified host .
train
false
6,373
def load_config_module(name, options, tags):
    if isinstance(name, str):
        LOG.info('Loading %s', name)
        d = {}
        module = __import__(name[:(-3)], d, d)
    else:
        module = reload(name)
    onload = module.__dict__.get('onload')
    if callable(onload):
        try:
            onload(options, tags)
        except:
            LOG.fatal('Exception while loading %s', name)
            raise
    return module
[ "def", "load_config_module", "(", "name", ",", "options", ",", "tags", ")", ":", "if", "isinstance", "(", "name", ",", "str", ")", ":", "LOG", ".", "info", "(", "'Loading %s'", ",", "name", ")", "d", "=", "{", "}", "module", "=", "__import__", "(", "name", "[", ":", "(", "-", "3", ")", "]", ",", "d", ",", "d", ")", "else", ":", "module", "=", "reload", "(", "name", ")", "onload", "=", "module", ".", "__dict__", ".", "get", "(", "'onload'", ")", "if", "callable", "(", "onload", ")", ":", "try", ":", "onload", "(", "options", ",", "tags", ")", "except", ":", "LOG", ".", "fatal", "(", "'Exception while loading %s'", ",", "name", ")", "raise", "return", "module" ]
imports the config module of the given name the name argument can be a string .
train
false
6,374
def describe_cluster(module, redshift):
    identifier = module.params.get('identifier')
    try:
        resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    return (True, _collect_facts(resource))
[ "def", "describe_cluster", "(", "module", ",", "redshift", ")", ":", "identifier", "=", "module", ".", "params", ".", "get", "(", "'identifier'", ")", "try", ":", "resource", "=", "redshift", ".", "describe_clusters", "(", "identifier", ")", "[", "'DescribeClustersResponse'", "]", "[", "'DescribeClustersResult'", "]", "[", "'Clusters'", "]", "[", "0", "]", "except", "boto", ".", "exception", ".", "JSONResponseError", "as", "e", ":", "module", ".", "fail_json", "(", "msg", "=", "str", "(", "e", ")", ")", "return", "(", "True", ",", "_collect_facts", "(", "resource", ")", ")" ]
collect data about the cluster .
train
false
6,375
def get_ssh_certificate_tokens(module, ssh_cert_path):
    (rc, stdout, stderr) = module.run_command(['openssl', 'x509', '-in', ssh_cert_path, '-fingerprint', '-noout'])
    if (rc != 0):
        module.fail_json(msg=('failed to generate the key fingerprint, error was: %s' % stderr))
    fingerprint = stdout.strip()[17:].replace(':', '')
    (rc, stdout, stderr) = module.run_command(['openssl', 'pkcs12', '-export', '-in', ssh_cert_path, '-nokeys', '-password', 'pass:'])
    if (rc != 0):
        module.fail_json(msg=('failed to generate the pkcs12 signature from the certificate, error was: %s' % stderr))
    pkcs12_base64 = base64.b64encode(stdout.strip())
    return (fingerprint, pkcs12_base64)
[ "def", "get_ssh_certificate_tokens", "(", "module", ",", "ssh_cert_path", ")", ":", "(", "rc", ",", "stdout", ",", "stderr", ")", "=", "module", ".", "run_command", "(", "[", "'openssl'", ",", "'x509'", ",", "'-in'", ",", "ssh_cert_path", ",", "'-fingerprint'", ",", "'-noout'", "]", ")", "if", "(", "rc", "!=", "0", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'failed to generate the key fingerprint, error was: %s'", "%", "stderr", ")", ")", "fingerprint", "=", "stdout", ".", "strip", "(", ")", "[", "17", ":", "]", ".", "replace", "(", "':'", ",", "''", ")", "(", "rc", ",", "stdout", ",", "stderr", ")", "=", "module", ".", "run_command", "(", "[", "'openssl'", ",", "'pkcs12'", ",", "'-export'", ",", "'-in'", ",", "ssh_cert_path", ",", "'-nokeys'", ",", "'-password'", ",", "'pass:'", "]", ")", "if", "(", "rc", "!=", "0", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'failed to generate the pkcs12 signature from the certificate, error was: %s'", "%", "stderr", ")", ")", "pkcs12_base64", "=", "base64", ".", "b64encode", "(", "stdout", ".", "strip", "(", ")", ")", "return", "(", "fingerprint", ",", "pkcs12_base64", ")" ]
returns the sha1 fingerprint and a base64-encoded pkcs12 version of the certificate .
train
false
6,376
def list_format_strings_from_aliases(aliases):
    patterns = []
    for alias in aliases:
        for format_ in alias.formats:
            (display, representations) = normalise_alias_format_string(format_)
            if (display and (len(representations) == 0)):
                patterns.extend([(display, [])])
            else:
                patterns.extend([(display, representation) for representation in representations])
    return patterns
[ "def", "list_format_strings_from_aliases", "(", "aliases", ")", ":", "patterns", "=", "[", "]", "for", "alias", "in", "aliases", ":", "for", "format_", "in", "alias", ".", "formats", ":", "(", "display", ",", "representations", ")", "=", "normalise_alias_format_string", "(", "format_", ")", "if", "(", "display", "and", "(", "len", "(", "representations", ")", "==", "0", ")", ")", ":", "patterns", ".", "extend", "(", "[", "(", "display", ",", "[", "]", ")", "]", ")", "else", ":", "patterns", ".", "extend", "(", "[", "(", "display", ",", "representation", ")", "for", "representation", "in", "representations", "]", ")", "return", "patterns" ]
list patterns from a collection of alias objects .
train
false
6,377
def start_tls_server(test, port, context_factory):
    server_endpoint = SSL4ServerEndpoint(reactor, port, context_factory, interface='127.0.0.1')
    server_factory = WaitForDisconnectsFactory.forProtocol(SendingProtocol)
    test.addCleanup((lambda : server_factory.wait_for_disconnects()))
    d = server_endpoint.listen(server_factory)
    d.addCallback((lambda port: test.addCleanup(port.stopListening)))
    return d
[ "def", "start_tls_server", "(", "test", ",", "port", ",", "context_factory", ")", ":", "server_endpoint", "=", "SSL4ServerEndpoint", "(", "reactor", ",", "port", ",", "context_factory", ",", "interface", "=", "'127.0.0.1'", ")", "server_factory", "=", "WaitForDisconnectsFactory", ".", "forProtocol", "(", "SendingProtocol", ")", "test", ".", "addCleanup", "(", "(", "lambda", ":", "server_factory", ".", "wait_for_disconnects", "(", ")", ")", ")", "d", "=", "server_endpoint", ".", "listen", "(", "server_factory", ")", "d", ".", "addCallback", "(", "(", "lambda", "port", ":", "test", ".", "addCleanup", "(", "port", ".", "stopListening", ")", ")", ")", "return", "d" ]
start a tls server on the given port .
train
false
6,380
def _lowess_robustify_fit(x_copy, y_copy, fitted, weights, k, n):
    nn_indices = [0, k]
    X = np.ones((k, 2))
    residual_weights = np.copy(y_copy)
    residual_weights.shape = (n,)
    residual_weights -= fitted
    residual_weights = np.absolute(residual_weights)
    s = np.median(residual_weights)
    residual_weights /= (6 * s)
    too_big = (residual_weights >= 1)
    _lowess_bisquare(residual_weights)
    residual_weights[too_big] = 0
    for i in range(n):
        total_weights = (weights[i, :] * np.sqrt(residual_weights[nn_indices[0]:nn_indices[1]]))
        X[:, 1] = x_copy[nn_indices[0]:nn_indices[1]]
        y_i = (total_weights * y_copy[nn_indices[0]:nn_indices[1]])
        total_weights.shape = (k, 1)
        beta = lstsq((total_weights * X), y_i)[0]
        fitted[i] = (beta[0] + (beta[1] * x_copy[i]))
        _lowess_update_nn(x_copy, nn_indices, (i + 1))
[ "def", "_lowess_robustify_fit", "(", "x_copy", ",", "y_copy", ",", "fitted", ",", "weights", ",", "k", ",", "n", ")", ":", "nn_indices", "=", "[", "0", ",", "k", "]", "X", "=", "np", ".", "ones", "(", "(", "k", ",", "2", ")", ")", "residual_weights", "=", "np", ".", "copy", "(", "y_copy", ")", "residual_weights", ".", "shape", "=", "(", "n", ",", ")", "residual_weights", "-=", "fitted", "residual_weights", "=", "np", ".", "absolute", "(", "residual_weights", ")", "s", "=", "np", ".", "median", "(", "residual_weights", ")", "residual_weights", "/=", "(", "6", "*", "s", ")", "too_big", "=", "(", "residual_weights", ">=", "1", ")", "_lowess_bisquare", "(", "residual_weights", ")", "residual_weights", "[", "too_big", "]", "=", "0", "for", "i", "in", "range", "(", "n", ")", ":", "total_weights", "=", "(", "weights", "[", "i", ",", ":", "]", "*", "np", ".", "sqrt", "(", "residual_weights", "[", "nn_indices", "[", "0", "]", ":", "nn_indices", "[", "1", "]", "]", ")", ")", "X", "[", ":", ",", "1", "]", "=", "x_copy", "[", "nn_indices", "[", "0", "]", ":", "nn_indices", "[", "1", "]", "]", "y_i", "=", "(", "total_weights", "*", "y_copy", "[", "nn_indices", "[", "0", "]", ":", "nn_indices", "[", "1", "]", "]", ")", "total_weights", ".", "shape", "=", "(", "k", ",", "1", ")", "beta", "=", "lstsq", "(", "(", "total_weights", "*", "X", ")", ",", "y_i", ")", "[", "0", "]", "fitted", "[", "i", "]", "=", "(", "beta", "[", "0", "]", "+", "(", "beta", "[", "1", "]", "*", "x_copy", "[", "i", "]", ")", ")", "_lowess_update_nn", "(", "x_copy", ",", "nn_indices", ",", "(", "i", "+", "1", ")", ")" ]
additional weighted local linear regressions .
train
false
6,381
@handle_response_format
@treeio_login_required
def folder_add(request, response_format='html'):
    if request.POST:
        if ('cancel' not in request.POST):
            folder = Folder()
            form = FolderForm(request.user.profile, None, request.POST, instance=folder)
            if form.is_valid():
                folder = form.save()
                folder.set_user_from_request(request)
                return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
        else:
            return HttpResponseRedirect(reverse('document_index'))
    else:
        form = FolderForm(request.user.profile, None)
    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('documents/folder_add', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "folder_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "folder", "=", "Folder", "(", ")", "form", "=", "FolderForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "request", ".", "POST", ",", "instance", "=", "folder", ")", "if", "form", ".", "is_valid", "(", ")", ":", "folder", "=", "form", ".", "save", "(", ")", "folder", ".", "set_user_from_request", "(", "request", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'documents_folder_view'", ",", "args", "=", "[", "folder", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'document_index'", ")", ")", "else", ":", "form", "=", "FolderForm", "(", "request", ".", "user", ".", "profile", ",", "None", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'form'", ":", "form", "}", ")", "return", "render_to_response", "(", "'documents/folder_add'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
new folder form .
train
false
6,383
@dispatch(Projection)
def rowfunc(t):
    from toolz.itertoolz import getter
    indices = [t._child.fields.index(col) for col in t.fields]
    return getter(indices)
[ "@", "dispatch", "(", "Projection", ")", "def", "rowfunc", "(", "t", ")", ":", "from", "toolz", ".", "itertoolz", "import", "getter", "indices", "=", "[", "t", ".", "_child", ".", "fields", ".", "index", "(", "col", ")", "for", "col", "in", "t", ".", "fields", "]", "return", "getter", "(", "indices", ")" ]
rowfunc provides a function that can be mapped onto a sequence .
train
false
6,384
def getSliceElementNodes(elementNode):
    gElementNodes = elementNode.getElementsByLocalName('g')
    sliceElementNodes = []
    for gElementNode in gElementNodes:
        if ('id' in gElementNode.attributes):
            idValue = gElementNode.attributes['id'].strip()
            if idValue.startswith('z:'):
                sliceElementNodes.append(gElementNode)
    return sliceElementNodes
[ "def", "getSliceElementNodes", "(", "elementNode", ")", ":", "gElementNodes", "=", "elementNode", ".", "getElementsByLocalName", "(", "'g'", ")", "sliceElementNodes", "=", "[", "]", "for", "gElementNode", "in", "gElementNodes", ":", "if", "(", "'id'", "in", "gElementNode", ".", "attributes", ")", ":", "idValue", "=", "gElementNode", ".", "attributes", "[", "'id'", "]", ".", "strip", "(", ")", "if", "idValue", ".", "startswith", "(", "'z:'", ")", ":", "sliceElementNodes", ".", "append", "(", "gElementNode", ")", "return", "sliceElementNodes" ]
get the slice elements .
train
false
6,386
def open_repo_closing(path_or_repo):
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
[ "def", "open_repo_closing", "(", "path_or_repo", ")", ":", "if", "isinstance", "(", "path_or_repo", ",", "BaseRepo", ")", ":", "return", "_noop_context_manager", "(", "path_or_repo", ")", "return", "closing", "(", "Repo", "(", "path_or_repo", ")", ")" ]
open an argument that can be a repository or a path for a repository .
train
false
6,388
def test_info_stdout_logging(caplog, info_logger, info_messages):
    [stream_handler] = info_logger.handlers
    assert isinstance(stream_handler, logging.StreamHandler)
    assert (stream_handler.level == logging.INFO)
    create_log_records()
    stream_messages = [stream_handler.format(r) for r in caplog.records if (r.levelno >= stream_handler.level)]
    assert (stream_messages == info_messages)
[ "def", "test_info_stdout_logging", "(", "caplog", ",", "info_logger", ",", "info_messages", ")", ":", "[", "stream_handler", "]", "=", "info_logger", ".", "handlers", "assert", "isinstance", "(", "stream_handler", ",", "logging", ".", "StreamHandler", ")", "assert", "(", "stream_handler", ".", "level", "==", "logging", ".", "INFO", ")", "create_log_records", "(", ")", "stream_messages", "=", "[", "stream_handler", ".", "format", "(", "r", ")", "for", "r", "in", "caplog", ".", "records", "if", "(", "r", ".", "levelno", ">=", "stream_handler", ".", "level", ")", "]", "assert", "(", "stream_messages", "==", "info_messages", ")" ]
test that stdout logs use info format and level .
train
false
6,389
def is_site_enabled(site_name):
    return is_link(_site_link_path(site_name))
[ "def", "is_site_enabled", "(", "site_name", ")", ":", "return", "is_link", "(", "_site_link_path", "(", "site_name", ")", ")" ]
check if an apache site is enabled .
train
false
6,391
def potential_domain_matches(domain):
    matches = [domain]
    try:
        start = (domain.index('.') + 1)
        end = domain.rindex('.')
        while (start < end):
            matches.append(domain[start:])
            start = (domain.index('.', start) + 1)
    except ValueError:
        pass
    return (matches + [('.' + d) for d in matches])
[ "def", "potential_domain_matches", "(", "domain", ")", ":", "matches", "=", "[", "domain", "]", "try", ":", "start", "=", "(", "domain", ".", "index", "(", "'.'", ")", "+", "1", ")", "end", "=", "domain", ".", "rindex", "(", "'.'", ")", "while", "(", "start", "<", "end", ")", ":", "matches", ".", "append", "(", "domain", "[", "start", ":", "]", ")", "start", "=", "(", "domain", ".", "index", "(", "'.'", ",", "start", ")", "+", "1", ")", "except", "ValueError", ":", "pass", "return", "(", "matches", "+", "[", "(", "'.'", "+", "d", ")", "for", "d", "in", "matches", "]", ")" ]
potential domain matches for a cookie .
train
false
6,392
def _logging_manager_aware_logger__find_caller(unused):
    f = sys._getframe(2).f_back
    rv = ('(unknown file)', 0, '(unknown function)')
    while hasattr(f, 'f_code'):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        if (filename == logging._srcfile):
            f = f.f_back
            continue
        if (co in _caller_code_to_skip_in_logging_stack):
            f = f.f_back
            continue
        rv = (filename, f.f_lineno, co.co_name)
        break
    return rv
[ "def", "_logging_manager_aware_logger__find_caller", "(", "unused", ")", ":", "f", "=", "sys", ".", "_getframe", "(", "2", ")", ".", "f_back", "rv", "=", "(", "'(unknown file)'", ",", "0", ",", "'(unknown function)'", ")", "while", "hasattr", "(", "f", ",", "'f_code'", ")", ":", "co", "=", "f", ".", "f_code", "filename", "=", "os", ".", "path", ".", "normcase", "(", "co", ".", "co_filename", ")", "if", "(", "filename", "==", "logging", ".", "_srcfile", ")", ":", "f", "=", "f", ".", "f_back", "continue", "if", "(", "co", "in", "_caller_code_to_skip_in_logging_stack", ")", ":", "f", "=", "f", ".", "f_back", "continue", "rv", "=", "(", "filename", ",", "f", ".", "f_lineno", ",", "co", ".", "co_name", ")", "break", "return", "rv" ]
find the stack frame of the caller so that we can note the source file name .
train
false
6,393
def s3_parse_datetime(string, dtfmt=None):
    if (not string):
        return None
    if (dtfmt is None):
        dtfmt = ISOFORMAT
    try:
        (y, m, d, hh, mm, ss, t0, t1, t2) = time.strptime(string, dtfmt)
        dt = datetime.datetime(y, m, d, hh, mm, ss)
    except ValueError:
        dt = None
    return dt
[ "def", "s3_parse_datetime", "(", "string", ",", "dtfmt", "=", "None", ")", ":", "if", "(", "not", "string", ")", ":", "return", "None", "if", "(", "dtfmt", "is", "None", ")", ":", "dtfmt", "=", "ISOFORMAT", "try", ":", "(", "y", ",", "m", ",", "d", ",", "hh", ",", "mm", ",", "ss", ",", "t0", ",", "t1", ",", "t2", ")", "=", "time", ".", "strptime", "(", "string", ",", "dtfmt", ")", "dt", "=", "datetime", ".", "datetime", "(", "y", ",", "m", ",", "d", ",", "hh", ",", "mm", ",", "ss", ")", "except", "ValueError", ":", "dt", "=", "None", "return", "dt" ]
parse a date/time string according to the given format .
train
false
6,396
def mark_downloaded(*submissions):
    for submission in submissions:
        submission.downloaded = True
    db.db_session.commit()
[ "def", "mark_downloaded", "(", "*", "submissions", ")", ":", "for", "submission", "in", "submissions", ":", "submission", ".", "downloaded", "=", "True", "db", ".", "db_session", ".", "commit", "(", ")" ]
mark *submissions* as downloaded in the database .
train
false
6,397
@pytest.fixture
def issue_2401_po(po_directory, settings, afrikaans_tutorial):
    return _require_store(afrikaans_tutorial, settings.POOTLE_TRANSLATION_DIRECTORY, 'issue_2401.po')
[ "@", "pytest", ".", "fixture", "def", "issue_2401_po", "(", "po_directory", ",", "settings", ",", "afrikaans_tutorial", ")", ":", "return", "_require_store", "(", "afrikaans_tutorial", ",", "settings", ".", "POOTLE_TRANSLATION_DIRECTORY", ",", "'issue_2401.po'", ")" ]
require the /af/tutorial/issue_2401 .
train
false
6,398
def get_num_escape_turns(x, y):
    c = complex(x, y)
    z = complex(x, y)
    num_iterations = 0
    while ((MIN_MAGNITUDE < np.absolute(z) < ESCAPE_MAGNITUDE) and (num_iterations < MAX_ITERATIONS)):
        z = ((z ** 2) + c)
        num_iterations += 1
    return (float(num_iterations) / float(MAX_ITERATIONS))
[ "def", "get_num_escape_turns", "(", "x", ",", "y", ")", ":", "c", "=", "complex", "(", "x", ",", "y", ")", "z", "=", "complex", "(", "x", ",", "y", ")", "num_iterations", "=", "0", "while", "(", "(", "MIN_MAGNITUDE", "<", "np", ".", "absolute", "(", "z", ")", "<", "ESCAPE_MAGNITUDE", ")", "and", "(", "num_iterations", "<", "MAX_ITERATIONS", ")", ")", ":", "z", "=", "(", "(", "z", "**", "2", ")", "+", "c", ")", "num_iterations", "+=", "1", "return", "(", "float", "(", "num_iterations", ")", "/", "float", "(", "MAX_ITERATIONS", ")", ")" ]
returns the number of iterations it took to escape as normalized values .
train
false
6,399
def simple_keyword():
    print 'You have used the simplest keyword.'
[ "def", "simple_keyword", "(", ")", ":", "print", "'You have used the simplest keyword.'" ]
log a message .
train
false
6,400
def haystack_get_app_modules():
    return [i.module for i in apps.get_app_configs()]
[ "def", "haystack_get_app_modules", "(", ")", ":", "return", "[", "i", ".", "module", "for", "i", "in", "apps", ".", "get_app_configs", "(", ")", "]" ]
return the python module for each installed app .
train
false
6,402
def get_ldev(obj):
    if (not obj):
        return None
    ldev = obj.get('provider_location')
    if ((not ldev) or (not ldev.isdigit())):
        return None
    return int(ldev)
[ "def", "get_ldev", "(", "obj", ")", ":", "if", "(", "not", "obj", ")", ":", "return", "None", "ldev", "=", "obj", ".", "get", "(", "'provider_location'", ")", "if", "(", "(", "not", "ldev", ")", "or", "(", "not", "ldev", ".", "isdigit", "(", ")", ")", ")", ":", "return", "None", "return", "int", "(", "ldev", ")" ]
get the ldev number from the given object and return it as integer .
train
false
6,404
def PrintPosition(pos, with_returns=False):
    print ' Position :', pos.position_title
    print ' Ticker ID :', pos.ticker_id
    print ' Symbol :', pos.symbol
    print ' Last updated :', pos.updated.text
    d = pos.position_data
    print ' Shares :', d.shares
    if with_returns:
        print ' Gain % :', d.gain_percentage
        PrRtn(' Returns :', d)
    print ' Cost basis :', d.cost_basis
    print ' Days gain :', d.days_gain
    print ' Gain :', d.gain
    print ' Market value :', d.market_value
    print
    if pos.transactions:
        print ' <inlined transactions>\n'
        PrintTransactions(pos.transactions)
        print ' </inlined transactions>\n'
[ "def", "PrintPosition", "(", "pos", ",", "with_returns", "=", "False", ")", ":", "print", "' Position :'", ",", "pos", ".", "position_title", "print", "' Ticker ID :'", ",", "pos", ".", "ticker_id", "print", "' Symbol :'", ",", "pos", ".", "symbol", "print", "' Last updated :'", ",", "pos", ".", "updated", ".", "text", "d", "=", "pos", ".", "position_data", "print", "' Shares :'", ",", "d", ".", "shares", "if", "with_returns", ":", "print", "' Gain % :'", ",", "d", ".", "gain_percentage", "PrRtn", "(", "' Returns :'", ",", "d", ")", "print", "' Cost basis :'", ",", "d", ".", "cost_basis", "print", "' Days gain :'", ",", "d", ".", "days_gain", "print", "' Gain :'", ",", "d", ".", "gain", "print", "' Market value :'", ",", "d", ".", "market_value", "print", "if", "pos", ".", "transactions", ":", "print", "' <inlined transactions>\\n'", "PrintTransactions", "(", "pos", ".", "transactions", ")", "print", "' </inlined transactions>\\n'" ]
print single position .
train
false
6,405
def init(mpstate):
    return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
mount the named image via qemu-nbd and return the mounted roots cli example: .
train
false
6,406
def perform_translation(unit, form, request):
    oldchecks = set(unit.active_checks().values_list(u'check', flat=True))
    if (not unit.translation.is_template()):
        (new_target, fixups) = fix_target(form.cleaned_data[u'target'], unit)
    else:
        new_target = form.cleaned_data[u'target']
        fixups = []
    saved = unit.translate(request, new_target, form.cleaned_data[u'fuzzy'])
    if (len(fixups) > 0):
        messages.info(request, (_(u'Following fixups were applied to translation: %s') % u', '.join([force_text(f) for f in fixups])))
    newchecks = set(unit.active_checks().values_list(u'check', flat=True))
    if (saved and (newchecks > oldchecks)):
        messages.error(request, _(u'Some checks have failed on your translation: {0}').format(u', '.join([force_text(CHECKS[check].name) for check in newchecks])))
        return False
    return True
[ "def", "perform_translation", "(", "unit", ",", "form", ",", "request", ")", ":", "oldchecks", "=", "set", "(", "unit", ".", "active_checks", "(", ")", ".", "values_list", "(", "u'check'", ",", "flat", "=", "True", ")", ")", "if", "(", "not", "unit", ".", "translation", ".", "is_template", "(", ")", ")", ":", "(", "new_target", ",", "fixups", ")", "=", "fix_target", "(", "form", ".", "cleaned_data", "[", "u'target'", "]", ",", "unit", ")", "else", ":", "new_target", "=", "form", ".", "cleaned_data", "[", "u'target'", "]", "fixups", "=", "[", "]", "saved", "=", "unit", ".", "translate", "(", "request", ",", "new_target", ",", "form", ".", "cleaned_data", "[", "u'fuzzy'", "]", ")", "if", "(", "len", "(", "fixups", ")", ">", "0", ")", ":", "messages", ".", "info", "(", "request", ",", "(", "_", "(", "u'Following fixups were applied to translation: %s'", ")", "%", "u', '", ".", "join", "(", "[", "force_text", "(", "f", ")", "for", "f", "in", "fixups", "]", ")", ")", ")", "newchecks", "=", "set", "(", "unit", ".", "active_checks", "(", ")", ".", "values_list", "(", "u'check'", ",", "flat", "=", "True", ")", ")", "if", "(", "saved", "and", "(", "newchecks", ">", "oldchecks", ")", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "u'Some checks have failed on your translation: {0}'", ")", ".", "format", "(", "u', '", ".", "join", "(", "[", "force_text", "(", "CHECKS", "[", "check", "]", ".", "name", ")", "for", "check", "in", "newchecks", "]", ")", ")", ")", "return", "False", "return", "True" ]
handles translation and stores it to a backend .
train
false
6,408
def test_skip_dt_decorator2():
    dtargs = (['x', 'y'], None, 'k', (1,))
    dtargsr = getargspec(doctest_bad)
    assert (dtargsr == dtargs), ('Incorrectly reconstructed args for doctest_bad: %s' % (dtargsr,))
[ "def", "test_skip_dt_decorator2", "(", ")", ":", "dtargs", "=", "(", "[", "'x'", ",", "'y'", "]", ",", "None", ",", "'k'", ",", "(", "1", ",", ")", ")", "dtargsr", "=", "getargspec", "(", "doctest_bad", ")", "assert", "(", "dtargsr", "==", "dtargs", ")", ",", "(", "'Incorrectly reconstructed args for doctest_bad: %s'", "%", "(", "dtargsr", ",", ")", ")" ]
doctest-skipping decorator should preserve function signature .
train
false
6,409
def norm_open(path):
    return open(path, 'U')
[ "def", "norm_open", "(", "path", ")", ":", "return", "open", "(", "path", ",", "'U'", ")" ]
return a stream for a file with content with normalized line feeds .
train
false
6,410
def test_arraytransforms():
    ra = (np.ones((4,), dtype=float) * u.deg)
    dec = ((2 * np.ones((4,), dtype=float)) * u.deg)
    distance = (np.ones((4,), dtype=float) * u.au)
    test_icrs = ICRS(ra=ra, dec=dec, distance=distance)
    test_gcrs = GCRS(test_icrs.data)
    bary_arr = test_icrs.transform_to(BarycentricTrueEcliptic)
    assert (bary_arr.shape == ra.shape)
    helio_arr = test_icrs.transform_to(HeliocentricTrueEcliptic)
    assert (helio_arr.shape == ra.shape)
    geo_arr = test_gcrs.transform_to(GeocentricTrueEcliptic)
    assert (geo_arr.shape == ra.shape)
    bary_icrs = bary_arr.transform_to(ICRS)
    assert (bary_icrs.shape == test_icrs.shape)
    helio_icrs = helio_arr.transform_to(ICRS)
    assert (helio_icrs.shape == test_icrs.shape)
    geo_gcrs = geo_arr.transform_to(GCRS)
    assert (geo_gcrs.shape == test_gcrs.shape)
[ "def", "test_arraytransforms", "(", ")", ":", "ra", "=", "(", "np", ".", "ones", "(", "(", "4", ",", ")", ",", "dtype", "=", "float", ")", "*", "u", ".", "deg", ")", "dec", "=", "(", "(", "2", "*", "np", ".", "ones", "(", "(", "4", ",", ")", ",", "dtype", "=", "float", ")", ")", "*", "u", ".", "deg", ")", "distance", "=", "(", "np", ".", "ones", "(", "(", "4", ",", ")", ",", "dtype", "=", "float", ")", "*", "u", ".", "au", ")", "test_icrs", "=", "ICRS", "(", "ra", "=", "ra", ",", "dec", "=", "dec", ",", "distance", "=", "distance", ")", "test_gcrs", "=", "GCRS", "(", "test_icrs", ".", "data", ")", "bary_arr", "=", "test_icrs", ".", "transform_to", "(", "BarycentricTrueEcliptic", ")", "assert", "(", "bary_arr", ".", "shape", "==", "ra", ".", "shape", ")", "helio_arr", "=", "test_icrs", ".", "transform_to", "(", "HeliocentricTrueEcliptic", ")", "assert", "(", "helio_arr", ".", "shape", "==", "ra", ".", "shape", ")", "geo_arr", "=", "test_gcrs", ".", "transform_to", "(", "GeocentricTrueEcliptic", ")", "assert", "(", "geo_arr", ".", "shape", "==", "ra", ".", "shape", ")", "bary_icrs", "=", "bary_arr", ".", "transform_to", "(", "ICRS", ")", "assert", "(", "bary_icrs", ".", "shape", "==", "test_icrs", ".", "shape", ")", "helio_icrs", "=", "helio_arr", ".", "transform_to", "(", "ICRS", ")", "assert", "(", "helio_icrs", ".", "shape", "==", "test_icrs", ".", "shape", ")", "geo_gcrs", "=", "geo_arr", ".", "transform_to", "(", "GCRS", ")", "assert", "(", "geo_gcrs", ".", "shape", "==", "test_gcrs", ".", "shape", ")" ]
test that transforms to/from ecliptic coordinates work on array coordinates .
train
false
6,411
@hook.command('twitter', 'tw', 'twatter')
def twitter(text):
    if (tw_api is None):
        return 'This command requires a twitter API key.'
    if re.match('^\\d+$', text):
        try:
            tweet = tw_api.get_status(text)
        except tweepy.error.TweepError as e:
            if ('404' in e.reason):
                return 'Could not find tweet.'
            else:
                return 'Error: {}'.format(e.reason)
        user = tweet.user
    elif (re.match('^\\w{1,15}$', text) or re.match('^\\w{1,15}\\s+\\d+$', text)):
        if (text.find(' ') == (-1)):
            username = text
            tweet_number = 0
        else:
            (username, tweet_number) = text.split()
            tweet_number = (int(tweet_number) - 1)
        if (tweet_number > 200):
            return 'This command can only find the last \x02200\x02 tweets.'
        try:
            user = tw_api.get_user(username)
        except tweepy.error.TweepError as e:
            if ('404' in e.reason):
                return 'Could not find user.'
            else:
                return 'Error: {}'.format(e.reason)
        user_timeline = tw_api.user_timeline(id=user.id, count=(tweet_number + 1))
        if (not user_timeline):
            return 'The user \x02{}\x02 has no tweets.'.format(user.screen_name)
        try:
            tweet = user_timeline[tweet_number]
        except IndexError:
            tweet_count = len(user_timeline)
            return 'The user \x02{}\x02 only has \x02{}\x02 tweets.'.format(user.screen_name, tweet_count)
    elif re.match('^#\\w+$', text):
        search = tw_api.search(text)
        if (not search):
            return 'No tweets found.'
        tweet = random.choice(search)
        user = tweet.user
    else:
        return 'Invalid Input'
    text = ' '.join(tweet.text.split())
    if user.verified:
        prefix = '\\u2713'
    else:
        prefix = ''
    time = timeformat.time_since(tweet.created_at, datetime.utcnow())
    return '{}@\x02{}\x02 ({}): {} ({} ago)'.format(prefix, user.screen_name, user.name, text, time)
[ "@", "hook", ".", "command", "(", "'twitter'", ",", "'tw'", ",", "'twatter'", ")", "def", "twitter", "(", "text", ")", ":", "if", "(", "tw_api", "is", "None", ")", ":", "return", "'This command requires a twitter API key.'", "if", "re", ".", "match", "(", "'^\\\\d+$'", ",", "text", ")", ":", "try", ":", "tweet", "=", "tw_api", ".", "get_status", "(", "text", ")", "except", "tweepy", ".", "error", ".", "TweepError", "as", "e", ":", "if", "(", "'404'", "in", "e", ".", "reason", ")", ":", "return", "'Could not find tweet.'", "else", ":", "return", "'Error: {}'", ".", "format", "(", "e", ".", "reason", ")", "user", "=", "tweet", ".", "user", "elif", "(", "re", ".", "match", "(", "'^\\\\w{1,15}$'", ",", "text", ")", "or", "re", ".", "match", "(", "'^\\\\w{1,15}\\\\s+\\\\d+$'", ",", "text", ")", ")", ":", "if", "(", "text", ".", "find", "(", "' '", ")", "==", "(", "-", "1", ")", ")", ":", "username", "=", "text", "tweet_number", "=", "0", "else", ":", "(", "username", ",", "tweet_number", ")", "=", "text", ".", "split", "(", ")", "tweet_number", "=", "(", "int", "(", "tweet_number", ")", "-", "1", ")", "if", "(", "tweet_number", ">", "200", ")", ":", "return", "'This command can only find the last \\x02200\\x02 tweets.'", "try", ":", "user", "=", "tw_api", ".", "get_user", "(", "username", ")", "except", "tweepy", ".", "error", ".", "TweepError", "as", "e", ":", "if", "(", "'404'", "in", "e", ".", "reason", ")", ":", "return", "'Could not find user.'", "else", ":", "return", "'Error: {}'", ".", "format", "(", "e", ".", "reason", ")", "user_timeline", "=", "tw_api", ".", "user_timeline", "(", "id", "=", "user", ".", "id", ",", "count", "=", "(", "tweet_number", "+", "1", ")", ")", "if", "(", "not", "user_timeline", ")", ":", "return", "'The user \\x02{}\\x02 has no tweets.'", ".", "format", "(", "user", ".", "screen_name", ")", "try", ":", "tweet", "=", "user_timeline", "[", "tweet_number", "]", "except", "IndexError", ":", "tweet_count", "=", "len", "(", "user_timeline", ")", "return", "'The user \\x02{}\\x02 only has \\x02{}\\x02 tweets.'", ".", "format", "(", "user", ".", "screen_name", ",", "tweet_count", ")", "elif", "re", ".", "match", "(", "'^#\\\\w+$'", ",", "text", ")", ":", "search", "=", "tw_api", ".", "search", "(", "text", ")", "if", "(", "not", "search", ")", ":", "return", "'No tweets found.'", "tweet", "=", "random", ".", "choice", "(", "search", ")", "user", "=", "tweet", ".", "user", "else", ":", "return", "'Invalid Input'", "text", "=", "' '", ".", "join", "(", "tweet", ".", "text", ".", "split", "(", ")", ")", "if", "user", ".", "verified", ":", "prefix", "=", "'\\\\u2713'", "else", ":", "prefix", "=", "''", "time", "=", "timeformat", ".", "time_since", "(", "tweet", ".", "created_at", ",", "datetime", ".", "utcnow", "(", ")", ")", "return", "'{}@\\x02{}\\x02 ({}): {} ({} ago)'", ".", "format", "(", "prefix", ",", "user", ".", "screen_name", ",", "user", ".", "name", ",", "text", ",", "time", ")" ]
twitter restful controller @todo: action button to update async .
train
false
6,412
def _replacer(data, key):
    if (not isinstance(key, six.string_types)):
        return key
    try:
        return sanitize_sequence(data[key])
    except KeyError:
        return key
[ "def", "_replacer", "(", "data", ",", "key", ")", ":", "if", "(", "not", "isinstance", "(", "key", ",", "six", ".", "string_types", ")", ")", ":", "return", "key", "try", ":", "return", "sanitize_sequence", "(", "data", "[", "key", "]", ")", "except", "KeyError", ":", "return", "key" ]
replace a number with its hexadecimal representation .
train
false
6,413
def salt_config_to_yaml(configuration, line_break='\n'):
    return yaml.dump(configuration, line_break=line_break, default_flow_style=False, Dumper=SafeOrderedDumper)
[ "def", "salt_config_to_yaml", "(", "configuration", ",", "line_break", "=", "'\\n'", ")", ":", "return", "yaml", ".", "dump", "(", "configuration", ",", "line_break", "=", "line_break", ",", "default_flow_style", "=", "False", ",", "Dumper", "=", "SafeOrderedDumper", ")" ]
return a salt configuration dictionary .
train
false
6,414
@public
def sqf(f, *gens, **args):
    return _generic_factor(f, gens, args, method='sqf')
[ "@", "public", "def", "sqf", "(", "f", ",", "*", "gens", ",", "**", "args", ")", ":", "return", "_generic_factor", "(", "f", ",", "gens", ",", "args", ",", "method", "=", "'sqf'", ")" ]
compute square-free factorization of f .
train
false
6,415
def distributed_server_test(f):
    return x_server_test(f, (not settings.CENTRAL_SERVER), 'Distributed server test')
[ "def", "distributed_server_test", "(", "f", ")", ":", "return", "x_server_test", "(", "f", ",", "(", "not", "settings", ".", "CENTRAL_SERVER", ")", ",", "'Distributed server test'", ")" ]
run the test only on the distributed server .
train
false
6,416
def windowdiff(seg1, seg2, k, boundary='1', weighted=False):
    if (len(seg1) != len(seg2)):
        raise ValueError('Segmentations have unequal length')
    if (k > len(seg1)):
        raise ValueError('Window width k should be smaller or equal than segmentation lengths')
    wd = 0
    for i in range(((len(seg1) - k) + 1)):
        ndiff = abs((seg1[i:(i + k)].count(boundary) - seg2[i:(i + k)].count(boundary)))
        if weighted:
            wd += ndiff
        else:
            wd += min(1, ndiff)
    return (wd / ((len(seg1) - k) + 1.0))
[ "def", "windowdiff", "(", "seg1", ",", "seg2", ",", "k", ",", "boundary", "=", "'1'", ",", "weighted", "=", "False", ")", ":", "if", "(", "len", "(", "seg1", ")", "!=", "len", "(", "seg2", ")", ")", ":", "raise", "ValueError", "(", "'Segmentations have unequal length'", ")", "if", "(", "k", ">", "len", "(", "seg1", ")", ")", ":", "raise", "ValueError", "(", "'Window width k should be smaller or equal than segmentation lengths'", ")", "wd", "=", "0", "for", "i", "in", "range", "(", "(", "(", "len", "(", "seg1", ")", "-", "k", ")", "+", "1", ")", ")", ":", "ndiff", "=", "abs", "(", "(", "seg1", "[", "i", ":", "(", "i", "+", "k", ")", "]", ".", "count", "(", "boundary", ")", "-", "seg2", "[", "i", ":", "(", "i", "+", "k", ")", "]", ".", "count", "(", "boundary", ")", ")", ")", "if", "weighted", ":", "wd", "+=", "ndiff", "else", ":", "wd", "+=", "min", "(", "1", ",", "ndiff", ")", "return", "(", "wd", "/", "(", "(", "len", "(", "seg1", ")", "-", "k", ")", "+", "1.0", ")", ")" ]
compute the windowdiff score for a pair of segmentations .
train
false
6,417
def transferPathsToNestedRings(nestedRings, paths):
    for nestedRing in nestedRings:
        nestedRing.transferPaths(paths)
[ "def", "transferPathsToNestedRings", "(", "nestedRings", ",", "paths", ")", ":", "for", "nestedRing", "in", "nestedRings", ":", "nestedRing", ".", "transferPaths", "(", "paths", ")" ]
transfer paths to nested rings .
train
false
6,418
def kaiser_atten(numtaps, width):
    a = ((((2.285 * (numtaps - 1)) * np.pi) * width) + 7.95)
    return a
[ "def", "kaiser_atten", "(", "numtaps", ",", "width", ")", ":", "a", "=", "(", "(", "(", "(", "2.285", "*", "(", "numtaps", "-", "1", ")", ")", "*", "np", ".", "pi", ")", "*", "width", ")", "+", "7.95", ")", "return", "a" ]
compute the attenuation of a kaiser fir filter .
train
false
6,420
def brick_get_connector_properties(multipath=False, enforce_multipath=False):
    root_helper = get_root_helper()
    return connector.get_connector_properties(root_helper, CONF.my_ip, multipath, enforce_multipath)
[ "def", "brick_get_connector_properties", "(", "multipath", "=", "False", ",", "enforce_multipath", "=", "False", ")", ":", "root_helper", "=", "get_root_helper", "(", ")", "return", "connector", ".", "get_connector_properties", "(", "root_helper", ",", "CONF", ".", "my_ip", ",", "multipath", ",", "enforce_multipath", ")" ]
wrapper to automatically set root_helper in brick calls .
train
false
6,422
def recall():
    a = TpPd(pd=3)
    b = MessageType(mesType=11)
    c = RecallType()
    d = Facility()
    packet = (((a / b) / c) / d)
    return packet
[ "def", "recall", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "11", ")", "c", "=", "RecallType", "(", ")", "d", "=", "Facility", "(", ")", "packet", "=", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "return", "packet" ]
recall metric .
train
true
6,424
def test_prewitt_vertical():
    (i, j) = np.mgrid[(-5):6, (-5):6]
    image = (j >= 0).astype(float)
    result = (filters.prewitt(image) * np.sqrt(2))
    j[(np.abs(i) == 5)] = 10000
    assert_allclose(result[(j == 0)], 1)
    assert_allclose(result[(np.abs(j) > 1)], 0, atol=1e-10)
[ "def", "test_prewitt_vertical", "(", ")", ":", "(", "i", ",", "j", ")", "=", "np", ".", "mgrid", "[", "(", "-", "5", ")", ":", "6", ",", "(", "-", "5", ")", ":", "6", "]", "image", "=", "(", "j", ">=", "0", ")", ".", "astype", "(", "float", ")", "result", "=", "(", "filters", ".", "prewitt", "(", "image", ")", "*", "np", ".", "sqrt", "(", "2", ")", ")", "j", "[", "(", "np", ".", "abs", "(", "i", ")", "==", "5", ")", "]", "=", "10000", "assert_allclose", "(", "result", "[", "(", "j", "==", "0", ")", "]", ",", "1", ")", "assert_allclose", "(", "result", "[", "(", "np", ".", "abs", "(", "j", ")", ">", "1", ")", "]", ",", "0", ",", "atol", "=", "1e-10", ")" ]
prewitt on a vertical edge should be a vertical line .
train
false
6,428
def volume_update_status_based_on_attachment(context, volume_id):
    return IMPL.volume_update_status_based_on_attachment(context, volume_id)
[ "def", "volume_update_status_based_on_attachment", "(", "context", ",", "volume_id", ")", ":", "return", "IMPL", ".", "volume_update_status_based_on_attachment", "(", "context", ",", "volume_id", ")" ]
update volume status based on attachment .
train
false
6,429
def get_accumulator_dir(cachedir):
    fn_ = os.path.join(cachedir, 'accumulator')
    if (not os.path.isdir(fn_)):
        os.makedirs(fn_)
    return fn_
[ "def", "get_accumulator_dir", "(", "cachedir", ")", ":", "fn_", "=", "os", ".", "path", ".", "join", "(", "cachedir", ",", "'accumulator'", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "fn_", ")", ")", ":", "os", ".", "makedirs", "(", "fn_", ")", "return", "fn_" ]
return the directory that accumulator data is stored in .
train
true
6,431
def test_content_type():
    transformer = hug.transform.content_type({'application/json': int, 'text/plain': str})

    class FakeRequest(object, ):
        content_type = 'application/json'

    request = FakeRequest()
    assert (transformer('1', request) == 1)
    request.content_type = 'text/plain'
    assert (transformer(2, request) == '2')
    request.content_type = 'undefined'
    (transformer({'data': 'value'}, request) == {'data': 'value'})
[ "def", "test_content_type", "(", ")", ":", "transformer", "=", "hug", ".", "transform", ".", "content_type", "(", "{", "'application/json'", ":", "int", ",", "'text/plain'", ":", "str", "}", ")", "class", "FakeRequest", "(", "object", ",", ")", ":", "content_type", "=", "'application/json'", "request", "=", "FakeRequest", "(", ")", "assert", "(", "transformer", "(", "'1'", ",", "request", ")", "==", "1", ")", "request", ".", "content_type", "=", "'text/plain'", "assert", "(", "transformer", "(", "2", ",", "request", ")", "==", "'2'", ")", "request", ".", "content_type", "=", "'undefined'", "(", "transformer", "(", "{", "'data'", ":", "'value'", "}", ",", "request", ")", "==", "{", "'data'", ":", "'value'", "}", ")" ]
test to ensure the transformer used can change based on the provided content-type .
train
false
6,434
def incr_ratelimit(user, domain='all'):
    (list_key, set_key, _) = redis_key(user, domain)
    now = time.time()
    if (len(rules) == 0):
        return
    with client.pipeline() as pipe:
        count = 0
        while True:
            try:
                pipe.watch(list_key)
                last_val = pipe.lindex(list_key, (max_api_calls(user) - 1))
                pipe.multi()
                pipe.lpush(list_key, now)
                pipe.ltrim(list_key, 0, (max_api_calls(user) - 1))
                pipe.zadd(set_key, now, now)
                if (last_val is not None):
                    pipe.zrem(set_key, last_val)
                api_window = max_api_window(user)
                pipe.expire(list_key, api_window)
                pipe.expire(set_key, api_window)
                pipe.execute()
                break
            except redis.WatchError:
                if (count > 10):
                    logging.error('Failed to complete incr_ratelimit transaction without interference 10 times in a row! Aborting rate-limit increment')
                    break
                count += 1
                continue
[ "def", "incr_ratelimit", "(", "user", ",", "domain", "=", "'all'", ")", ":", "(", "list_key", ",", "set_key", ",", "_", ")", "=", "redis_key", "(", "user", ",", "domain", ")", "now", "=", "time", ".", "time", "(", ")", "if", "(", "len", "(", "rules", ")", "==", "0", ")", ":", "return", "with", "client", ".", "pipeline", "(", ")", "as", "pipe", ":", "count", "=", "0", "while", "True", ":", "try", ":", "pipe", ".", "watch", "(", "list_key", ")", "last_val", "=", "pipe", ".", "lindex", "(", "list_key", ",", "(", "max_api_calls", "(", "user", ")", "-", "1", ")", ")", "pipe", ".", "multi", "(", ")", "pipe", ".", "lpush", "(", "list_key", ",", "now", ")", "pipe", ".", "ltrim", "(", "list_key", ",", "0", ",", "(", "max_api_calls", "(", "user", ")", "-", "1", ")", ")", "pipe", ".", "zadd", "(", "set_key", ",", "now", ",", "now", ")", "if", "(", "last_val", "is", "not", "None", ")", ":", "pipe", ".", "zrem", "(", "set_key", ",", "last_val", ")", "api_window", "=", "max_api_window", "(", "user", ")", "pipe", ".", "expire", "(", "list_key", ",", "api_window", ")", "pipe", ".", "expire", "(", "set_key", ",", "api_window", ")", "pipe", ".", "execute", "(", ")", "break", "except", "redis", ".", "WatchError", ":", "if", "(", "count", ">", "10", ")", ":", "logging", ".", "error", "(", "'Failed to complete incr_ratelimit transaction without interference 10 times in a row! Aborting rate-limit increment'", ")", "break", "count", "+=", "1", "continue" ]
increases the rate-limit for the specified user .
train
false
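the incr_ratelimit snippet above is built on redis-py's optimistic-locking pattern: WATCH a key, queue commands after MULTI, and retry on WatchError. a minimal standalone sketch of that pattern, using a hypothetical 'counter' key and default connection settings:

    import redis

    client = redis.StrictRedis()  # hypothetical connection settings
    with client.pipeline() as pipe:
        while True:
            try:
                pipe.watch('counter')             # start optimistic lock
                current = int(pipe.get('counter') or 0)
                pipe.multi()                      # switch to buffered mode
                pipe.set('counter', current + 1)
                pipe.execute()                    # raises if 'counter' changed
                break
            except redis.WatchError:
                continue                          # lost the race; retry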
6,437
def subfn(pattern, format, string, count=0, flags=0, pos=None, endpos=None, concurrent=None, **kwargs): return _compile(pattern, flags, kwargs).subfn(format, string, count, pos, endpos, concurrent)
[ "def", "subfn", "(", "pattern", ",", "format", ",", "string", ",", "count", "=", "0", ",", "flags", "=", "0", ",", "pos", "=", "None", ",", "endpos", "=", "None", ",", "concurrent", "=", "None", ",", "**", "kwargs", ")", ":", "return", "_compile", "(", "pattern", ",", "flags", ",", "kwargs", ")", ".", "subfn", "(", "format", ",", "string", ",", "count", ",", "pos", ",", "endpos", ",", "concurrent", ")" ]
return a 2-tuple containing the new string and the number of substitutions made .
train
false
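a hedged usage sketch for subfn above, assuming it is the module-level wrapper from the third-party regex package; the replacement is a str.format-style template, and the result pairs the new string with the substitution count:

    import regex

    regex.subfn(r'(?P<word>\w+)', '{word}!', 'hi there')
    # ('hi! there!', 2)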
6,438
def needs_label(model_field, field_name): default_label = field_name.replace('_', ' ').capitalize() return (capfirst(model_field.verbose_name) != default_label)
[ "def", "needs_label", "(", "model_field", ",", "field_name", ")", ":", "default_label", "=", "field_name", ".", "replace", "(", "'_'", ",", "' '", ")", ".", "capitalize", "(", ")", "return", "(", "capfirst", "(", "model_field", ".", "verbose_name", ")", "!=", "default_label", ")" ]
returns true if the label based on the model's verbose name is not equal to the default label it would have based on its field name .
train
true
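a minimal sketch for needs_label above, assuming a Django model; the Book model and its field are hypothetical:

    from django.db import models

    class Book(models.Model):
        title = models.CharField(max_length=100, verbose_name='Book title')

    needs_label(Book._meta.get_field('title'), 'title')
    # True: capfirst('Book title') != 'Title', so an explicit label is needed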
6,439
def get_common_complete_suffix(document, completions): def doesnt_change_before_cursor(completion): end = completion.text[:(- completion.start_position)] return document.text_before_cursor.endswith(end) completions2 = [c for c in completions if doesnt_change_before_cursor(c)] if (len(completions2) != len(completions)): return u'' def get_suffix(completion): return completion.text[(- completion.start_position):] return _commonprefix([get_suffix(c) for c in completions2])
[ "def", "get_common_complete_suffix", "(", "document", ",", "completions", ")", ":", "def", "doesnt_change_before_cursor", "(", "completion", ")", ":", "end", "=", "completion", ".", "text", "[", ":", "(", "-", "completion", ".", "start_position", ")", "]", "return", "document", ".", "text_before_cursor", ".", "endswith", "(", "end", ")", "completions2", "=", "[", "c", "for", "c", "in", "completions", "if", "doesnt_change_before_cursor", "(", "c", ")", "]", "if", "(", "len", "(", "completions2", ")", "!=", "len", "(", "completions", ")", ")", ":", "return", "u''", "def", "get_suffix", "(", "completion", ")", ":", "return", "completion", ".", "text", "[", "(", "-", "completion", ".", "start_position", ")", ":", "]", "return", "_commonprefix", "(", "[", "get_suffix", "(", "c", ")", "for", "c", "in", "completions2", "]", ")" ]
return the common suffix for all completions .
train
true
6,441
def load_regions(): endpoints = load_endpoint_json(boto.ENDPOINTS_PATH) additional_path = None if os.environ.get('BOTO_ENDPOINTS'): additional_path = os.environ['BOTO_ENDPOINTS'] elif boto.config.get('Boto', 'endpoints_path'): additional_path = boto.config.get('Boto', 'endpoints_path') if additional_path: additional = load_endpoint_json(additional_path) endpoints = merge_endpoints(endpoints, additional) return endpoints
[ "def", "load_regions", "(", ")", ":", "endpoints", "=", "load_endpoint_json", "(", "boto", ".", "ENDPOINTS_PATH", ")", "additional_path", "=", "None", "if", "os", ".", "environ", ".", "get", "(", "'BOTO_ENDPOINTS'", ")", ":", "additional_path", "=", "os", ".", "environ", "[", "'BOTO_ENDPOINTS'", "]", "elif", "boto", ".", "config", ".", "get", "(", "'Boto'", ",", "'endpoints_path'", ")", ":", "additional_path", "=", "boto", ".", "config", ".", "get", "(", "'Boto'", ",", "'endpoints_path'", ")", "if", "additional_path", ":", "additional", "=", "load_endpoint_json", "(", "additional_path", ")", "endpoints", "=", "merge_endpoints", "(", "endpoints", ",", "additional", ")", "return", "endpoints" ]
actually load the region/endpoint information from the json files .
train
false
6,442
def speakers_marshal_with(fields=None, fields_private=None): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): user = getattr(g, 'user', None) event_id = kwargs.get('event_id') if ((user is None) and current_user.is_authenticated): user = current_user if (user and (user.has_role(event_id) or user.is_staff)): model = (fields if fields else SPEAKER) else: model = (fields_private if fields_private else SPEAKER_PRIVATE) func2 = marshal_with(model)(func) return func2(*args, **kwargs) return wrapper return decorator
[ "def", "speakers_marshal_with", "(", "fields", "=", "None", ",", "fields_private", "=", "None", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "user", "=", "getattr", "(", "g", ",", "'user'", ",", "None", ")", "event_id", "=", "kwargs", ".", "get", "(", "'event_id'", ")", "if", "(", "(", "user", "is", "None", ")", "and", "current_user", ".", "is_authenticated", ")", ":", "user", "=", "current_user", "if", "(", "user", "and", "(", "user", ".", "has_role", "(", "event_id", ")", "or", "user", ".", "is_staff", ")", ")", ":", "model", "=", "(", "fields", "if", "fields", "else", "SPEAKER", ")", "else", ":", "model", "=", "(", "fields_private", "if", "fields_private", "else", "SPEAKER_PRIVATE", ")", "func2", "=", "marshal_with", "(", "model", ")", "(", "func", ")", "return", "func2", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
response marshalling for speakers .
train
false
6,443
def sanitize_win_path_string(winpath): intab = '<>:|?*' outtab = ('_' * len(intab)) trantab = (''.maketrans(intab, outtab) if six.PY3 else string.maketrans(intab, outtab)) if isinstance(winpath, str): winpath = winpath.translate(trantab) elif isinstance(winpath, six.text_type): winpath = winpath.translate(dict(((ord(c), u'_') for c in intab))) return winpath
[ "def", "sanitize_win_path_string", "(", "winpath", ")", ":", "intab", "=", "'<>:|?*'", "outtab", "=", "(", "'_'", "*", "len", "(", "intab", ")", ")", "trantab", "=", "(", "''", ".", "maketrans", "(", "intab", ",", "outtab", ")", "if", "six", ".", "PY3", "else", "string", ".", "maketrans", "(", "intab", ",", "outtab", ")", ")", "if", "isinstance", "(", "winpath", ",", "str", ")", ":", "winpath", "=", "winpath", ".", "translate", "(", "trantab", ")", "elif", "isinstance", "(", "winpath", ",", "six", ".", "text_type", ")", ":", "winpath", "=", "winpath", ".", "translate", "(", "dict", "(", "(", "(", "ord", "(", "c", ")", ",", "u'_'", ")", "for", "c", "in", "intab", ")", ")", ")", "return", "winpath" ]
remove illegal path characters for windows .
train
false
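usage sketch for sanitize_win_path_string above; each of the six illegal characters <>:|?* becomes an underscore:

    sanitize_win_path_string('logs<2016>:run?.txt')
    # 'logs_2016__run_.txt'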
6,444
def process_npm_assets(): def copy_vendor_library(library, skip_if_missing=False): '\n Copies a vendor library to the shared vendor directory.\n ' library_path = 'node_modules/{library}'.format(library=library) if os.path.exists(library_path): sh('/bin/cp -rf {library_path} {vendor_dir}'.format(library_path=library_path, vendor_dir=NPM_VENDOR_DIRECTORY)) elif (not skip_if_missing): raise Exception('Missing vendor file {library_path}'.format(library_path=library_path)) if tasks.environment.dry_run: tasks.environment.info('install npm_assets') return NPM_VENDOR_DIRECTORY.mkdir_p() print('Copying vendor files into static directory') for library in NPM_INSTALLED_LIBRARIES: copy_vendor_library(library) print('Copying developer vendor files into static directory') for library in NPM_INSTALLED_DEVELOPER_LIBRARIES: copy_vendor_library(library, skip_if_missing=True)
[ "def", "process_npm_assets", "(", ")", ":", "def", "copy_vendor_library", "(", "library", ",", "skip_if_missing", "=", "False", ")", ":", "library_path", "=", "'node_modules/{library}'", ".", "format", "(", "library", "=", "library", ")", "if", "os", ".", "path", ".", "exists", "(", "library_path", ")", ":", "sh", "(", "'/bin/cp -rf {library_path} {vendor_dir}'", ".", "format", "(", "library_path", "=", "library_path", ",", "vendor_dir", "=", "NPM_VENDOR_DIRECTORY", ")", ")", "elif", "(", "not", "skip_if_missing", ")", ":", "raise", "Exception", "(", "'Missing vendor file {library_path}'", ".", "format", "(", "library_path", "=", "library_path", ")", ")", "if", "tasks", ".", "environment", ".", "dry_run", ":", "tasks", ".", "environment", ".", "info", "(", "'install npm_assets'", ")", "return", "NPM_VENDOR_DIRECTORY", ".", "mkdir_p", "(", ")", "print", "(", "'Copying vendor files into static directory'", ")", "for", "library", "in", "NPM_INSTALLED_LIBRARIES", ":", "copy_vendor_library", "(", "library", ")", "print", "(", "'Copying developer vendor files into static directory'", ")", "for", "library", "in", "NPM_INSTALLED_DEVELOPER_LIBRARIES", ":", "copy_vendor_library", "(", "library", ",", "skip_if_missing", "=", "True", ")" ]
process vendor libraries installed via npm .
train
false
6,445
def debug_ssh(function): def wrapper(self, *args, **kwargs): try: return function(self, *args, **kwargs) except tempest.lib.exceptions.SSHTimeout: try: original_exception = sys.exc_info() caller = (test_utils.find_test_caller() or 'not found') if self.server: msg = 'Caller: %s. Timeout trying to ssh to server %s' LOG.debug(msg, caller, self.server) if (self.log_console and self.servers_client): try: msg = 'Console log for server %s: %s' console_log = self.servers_client.get_console_output(self.server['id'])['output'] LOG.debug(msg, self.server['id'], console_log) except Exception: msg = 'Could not get console_log for server %s' LOG.debug(msg, self.server['id']) six.reraise(*original_exception) finally: (_, _, trace) = original_exception del trace return wrapper
[ "def", "debug_ssh", "(", "function", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "function", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "except", "tempest", ".", "lib", ".", "exceptions", ".", "SSHTimeout", ":", "try", ":", "original_exception", "=", "sys", ".", "exc_info", "(", ")", "caller", "=", "(", "test_utils", ".", "find_test_caller", "(", ")", "or", "'not found'", ")", "if", "self", ".", "server", ":", "msg", "=", "'Caller: %s. Timeout trying to ssh to server %s'", "LOG", ".", "debug", "(", "msg", ",", "caller", ",", "self", ".", "server", ")", "if", "(", "self", ".", "log_console", "and", "self", ".", "servers_client", ")", ":", "try", ":", "msg", "=", "'Console log for server %s: %s'", "console_log", "=", "self", ".", "servers_client", ".", "get_console_output", "(", "self", ".", "server", "[", "'id'", "]", ")", "[", "'output'", "]", "LOG", ".", "debug", "(", "msg", ",", "self", ".", "server", "[", "'id'", "]", ",", "console_log", ")", "except", "Exception", ":", "msg", "=", "'Could not get console_log for server %s'", "LOG", ".", "debug", "(", "msg", ",", "self", ".", "server", "[", "'id'", "]", ")", "six", ".", "reraise", "(", "*", "original_exception", ")", "finally", ":", "(", "_", ",", "_", ",", "trace", ")", "=", "original_exception", "del", "trace", "return", "wrapper" ]
decorator to generate extra debug info in case of ssh failure .
train
false
6,446
def get_node_cpu_times(reactor, runner, node, known_name, processes): process_list = list(processes) if (known_name in processes): delete_known_name = False else: process_list.append(known_name) delete_known_name = True parser = CPUParser(reactor) d = runner.run(node, (_GET_CPUTIME_COMMAND + [','.join(process_list)]), handle_stdout=parser.lineReceived) def get_parser_result(ignored): result = parser.result if (delete_known_name and (known_name in result)): del result[known_name] return result d.addCallback(get_parser_result) return d
[ "def", "get_node_cpu_times", "(", "reactor", ",", "runner", ",", "node", ",", "known_name", ",", "processes", ")", ":", "process_list", "=", "list", "(", "processes", ")", "if", "(", "known_name", "in", "processes", ")", ":", "delete_known_name", "=", "False", "else", ":", "process_list", ".", "append", "(", "known_name", ")", "delete_known_name", "=", "True", "parser", "=", "CPUParser", "(", "reactor", ")", "d", "=", "runner", ".", "run", "(", "node", ",", "(", "_GET_CPUTIME_COMMAND", "+", "[", "','", ".", "join", "(", "process_list", ")", "]", ")", ",", "handle_stdout", "=", "parser", ".", "lineReceived", ")", "def", "get_parser_result", "(", "ignored", ")", ":", "result", "=", "parser", ".", "result", "if", "(", "delete_known_name", "and", "(", "known_name", "in", "result", ")", ")", ":", "del", "result", "[", "known_name", "]", "return", "result", "d", ".", "addCallback", "(", "get_parser_result", ")", "return", "d" ]
get the cpu times for processes running on a node .
train
false
6,447
def addNegativesPositives(derivation, negatives, paths, positives): for path in paths: endMultiplier = None normal = euclidean.getNormalByPath(path) if (normal.dot(derivation.normal) < 0.0): endMultiplier = 1.000001 loopListsByPath = getLoopListsByPath(derivation, endMultiplier, path) geometryOutput = triangle_mesh.getPillarsOutput(loopListsByPath) if (endMultiplier == None): positives.append(geometryOutput) else: negatives.append(geometryOutput)
[ "def", "addNegativesPositives", "(", "derivation", ",", "negatives", ",", "paths", ",", "positives", ")", ":", "for", "path", "in", "paths", ":", "endMultiplier", "=", "None", "normal", "=", "euclidean", ".", "getNormalByPath", "(", "path", ")", "if", "(", "normal", ".", "dot", "(", "derivation", ".", "normal", ")", "<", "0.0", ")", ":", "endMultiplier", "=", "1.000001", "loopListsByPath", "=", "getLoopListsByPath", "(", "derivation", ",", "endMultiplier", ",", "path", ")", "geometryOutput", "=", "triangle_mesh", ".", "getPillarsOutput", "(", "loopListsByPath", ")", "if", "(", "endMultiplier", "==", "None", ")", ":", "positives", ".", "append", "(", "geometryOutput", ")", "else", ":", "negatives", ".", "append", "(", "geometryOutput", ")" ]
add pillars output to negatives and positives .
train
false
6,448
def _ncut_relabel(rag, thresh, num_cuts): (d, w) = _ncut.DW_matrices(rag) m = w.shape[0] if (m > 2): d2 = d.copy() d2.data = np.reciprocal(np.sqrt(d2.data, out=d2.data), out=d2.data) (vals, vectors) = linalg.eigsh(((d2 * (d - w)) * d2), which='SM', k=min(100, (m - 2))) (vals, vectors) = (np.real(vals), np.real(vectors)) index2 = _ncut_cy.argmin2(vals) ev = vectors[:, index2] (cut_mask, mcut) = get_min_ncut(ev, d, w, num_cuts) if (mcut < thresh): (sub1, sub2) = partition_by_cut(cut_mask, rag) _ncut_relabel(sub1, thresh, num_cuts) _ncut_relabel(sub2, thresh, num_cuts) return _label_all(rag, 'ncut label')
[ "def", "_ncut_relabel", "(", "rag", ",", "thresh", ",", "num_cuts", ")", ":", "(", "d", ",", "w", ")", "=", "_ncut", ".", "DW_matrices", "(", "rag", ")", "m", "=", "w", ".", "shape", "[", "0", "]", "if", "(", "m", ">", "2", ")", ":", "d2", "=", "d", ".", "copy", "(", ")", "d2", ".", "data", "=", "np", ".", "reciprocal", "(", "np", ".", "sqrt", "(", "d2", ".", "data", ",", "out", "=", "d2", ".", "data", ")", ",", "out", "=", "d2", ".", "data", ")", "(", "vals", ",", "vectors", ")", "=", "linalg", ".", "eigsh", "(", "(", "(", "d2", "*", "(", "d", "-", "w", ")", ")", "*", "d2", ")", ",", "which", "=", "'SM'", ",", "k", "=", "min", "(", "100", ",", "(", "m", "-", "2", ")", ")", ")", "(", "vals", ",", "vectors", ")", "=", "(", "np", ".", "real", "(", "vals", ")", ",", "np", ".", "real", "(", "vectors", ")", ")", "index2", "=", "_ncut_cy", ".", "argmin2", "(", "vals", ")", "ev", "=", "vectors", "[", ":", ",", "index2", "]", "(", "cut_mask", ",", "mcut", ")", "=", "get_min_ncut", "(", "ev", ",", "d", ",", "w", ",", "num_cuts", ")", "if", "(", "mcut", "<", "thresh", ")", ":", "(", "sub1", ",", "sub2", ")", "=", "partition_by_cut", "(", "cut_mask", ",", "rag", ")", "_ncut_relabel", "(", "sub1", ",", "thresh", ",", "num_cuts", ")", "_ncut_relabel", "(", "sub2", ",", "thresh", ",", "num_cuts", ")", "return", "_label_all", "(", "rag", ",", "'ncut label'", ")" ]
perform normalized graph cut on the region adjacency graph .
train
false
6,450
def parse_tls(message, throw_on_incomplete=False): extra_fragment_data = '' original_message = message try: while message: try: (record, size) = types.TlsRecord.from_stream(message, previous_fragment_data=extra_fragment_data) return (record, message[size:]) except types.TlsMessageFragmentedError as e: extra_fragment_data += e.fragment_data message = message[e.data_consumed:] if (not message): if throw_on_incomplete: raise e else: return (None, original_message) except (IndexError, ValueError, struct.error): return (None, original_message) except types.TlsRecordIncompleteError as e: if throw_on_incomplete: raise e else: return (None, original_message) return (None, original_message)
[ "def", "parse_tls", "(", "message", ",", "throw_on_incomplete", "=", "False", ")", ":", "extra_fragment_data", "=", "''", "original_message", "=", "message", "try", ":", "while", "message", ":", "try", ":", "(", "record", ",", "size", ")", "=", "types", ".", "TlsRecord", ".", "from_stream", "(", "message", ",", "previous_fragment_data", "=", "extra_fragment_data", ")", "return", "(", "record", ",", "message", "[", "size", ":", "]", ")", "except", "types", ".", "TlsMessageFragmentedError", "as", "e", ":", "extra_fragment_data", "+=", "e", ".", "fragment_data", "message", "=", "message", "[", "e", ".", "data_consumed", ":", "]", "if", "(", "not", "message", ")", ":", "if", "throw_on_incomplete", ":", "raise", "e", "else", ":", "return", "(", "None", ",", "original_message", ")", "except", "(", "IndexError", ",", "ValueError", ",", "struct", ".", "error", ")", ":", "return", "(", "None", ",", "original_message", ")", "except", "types", ".", "TlsRecordIncompleteError", "as", "e", ":", "if", "throw_on_incomplete", ":", "raise", "e", "else", ":", "return", "(", "None", ",", "original_message", ")", "return", "(", "None", ",", "original_message", ")" ]
try to parse a tls record ; returns the record and any remaining bytes , or none and the original message on failure .
train
false
6,451
def check_current_lock(con, host, port, warning, critical, perf_data): warning = (warning or 10) critical = (critical or 30) data = get_server_status(con) lockTime = float(data['globalLock']['lockTime']) totalTime = float(data['globalLock']['totalTime']) (err, delta) = maintain_delta([totalTime, lockTime], host, port, 'locktime') if (err == 0): lock_percentage = ((delta[2] / delta[1]) * 100) message = ('Current Lock Percentage: %.2f%%' % lock_percentage) message += performance_data(perf_data, [(('%.2f' % lock_percentage), 'current_lock_percentage', warning, critical)]) return check_levels(lock_percentage, warning, critical, message) else: return exit_with_general_warning('problem reading data from temp file')
[ "def", "check_current_lock", "(", "con", ",", "host", ",", "port", ",", "warning", ",", "critical", ",", "perf_data", ")", ":", "warning", "=", "(", "warning", "or", "10", ")", "critical", "=", "(", "critical", "or", "30", ")", "data", "=", "get_server_status", "(", "con", ")", "lockTime", "=", "float", "(", "data", "[", "'globalLock'", "]", "[", "'lockTime'", "]", ")", "totalTime", "=", "float", "(", "data", "[", "'globalLock'", "]", "[", "'totalTime'", "]", ")", "(", "err", ",", "delta", ")", "=", "maintain_delta", "(", "[", "totalTime", ",", "lockTime", "]", ",", "host", ",", "port", ",", "'locktime'", ")", "if", "(", "err", "==", "0", ")", ":", "lock_percentage", "=", "(", "(", "delta", "[", "2", "]", "/", "delta", "[", "1", "]", ")", "*", "100", ")", "message", "=", "(", "'Current Lock Percentage: %.2f%%'", "%", "lock_percentage", ")", "message", "+=", "performance_data", "(", "perf_data", ",", "[", "(", "(", "'%.2f'", "%", "lock_percentage", ")", ",", "'current_lock_percentage'", ",", "warning", ",", "critical", ")", "]", ")", "return", "check_levels", "(", "lock_percentage", ",", "warning", ",", "critical", ",", "message", ")", "else", ":", "return", "exit_with_general_warning", "(", "'problem reading data from temp file'", ")" ]
a function to get the current lock percentage rather than the global one .
train
false
6,452
def f4(t): if t: x = 1 else: y = 2 print sorted(locals().items()) print sorted(vars().items())
[ "def", "f4", "(", "t", ")", ":", "if", "t", ":", "x", "=", "1", "else", ":", "y", "=", "2", "print", "sorted", "(", "locals", "(", ")", ".", "items", "(", ")", ")", "print", "sorted", "(", "vars", "(", ")", ".", "items", "(", ")", ")" ]
testing synthetic is_defined variables .
train
false
6,453
def set_default_keychain(keychain, domain='user', user=None): cmd = 'security default-keychain -d {0} -s {1}'.format(domain, keychain) return __salt__['cmd.run'](cmd, runas=user)
[ "def", "set_default_keychain", "(", "keychain", ",", "domain", "=", "'user'", ",", "user", "=", "None", ")", ":", "cmd", "=", "'security default-keychain -d {0} -s {1}'", ".", "format", "(", "domain", ",", "keychain", ")", "return", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "runas", "=", "user", ")" ]
set the default keychain . keychain : the location of the keychain to set as default . domain : the domain to use ; valid values are user|system|common|dynamic .
train
true
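set_default_keychain is a salt execution-module function, so it is normally invoked from the salt CLI; a hedged example, assuming the module is loaded under the keychain name (the keychain path is hypothetical):

    salt '*' keychain.set_default_keychain /Users/fred/Library/Keychains/login.keychain domain=user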
6,454
def list_role_policies(role_name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: response = conn.list_role_policies(role_name) _list = response.list_role_policies_response.list_role_policies_result return _list.policy_names except boto.exception.BotoServerError as e: log.debug(e) return []
[ "def", "list_role_policies", "(", "role_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "response", "=", "conn", ".", "list_role_policies", "(", "role_name", ")", "_list", "=", "response", ".", "list_role_policies_response", ".", "list_role_policies_result", "return", "_list", ".", "policy_names", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "return", "[", "]" ]
get a list of policy names from a role .
train
true
6,456
@pytest.yield_fixture(params=[None, tdata]) def temp_add_server(request): data = request.param s = Server(copy(data), formats=all_formats, allow_add=True) s.app.testing = True with s.app.test_client() as c: (yield c)
[ "@", "pytest", ".", "yield_fixture", "(", "params", "=", "[", "None", ",", "tdata", "]", ")", "def", "temp_add_server", "(", "request", ")", ":", "data", "=", "request", ".", "param", "s", "=", "Server", "(", "copy", "(", "data", ")", ",", "formats", "=", "all_formats", ",", "allow_add", "=", "True", ")", "s", ".", "app", ".", "testing", "=", "True", "with", "s", ".", "app", ".", "test_client", "(", ")", "as", "c", ":", "(", "yield", "c", ")" ]
for when we want to mutate the server .
train
false
6,457
def str2size(s, scale=1024): if (not s): return 0 if isinstance(s, six.integer_types): return s match = re.match('^([\\.\\d]+)\\s*([BbKkMmGgTtPpEeZzYy]?)', s) if (match is None): raise ValueError((_('Invalid value: "%s"') % s)) groups = match.groups() value = float(groups[0]) suffix = (((len(groups) > 1) and groups[1].upper()) or 'B') types = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') for (i, t) in enumerate(types): if (suffix == t): return int((value * pow(scale, i)))
[ "def", "str2size", "(", "s", ",", "scale", "=", "1024", ")", ":", "if", "(", "not", "s", ")", ":", "return", "0", "if", "isinstance", "(", "s", ",", "six", ".", "integer_types", ")", ":", "return", "s", "match", "=", "re", ".", "match", "(", "'^([\\\\.\\\\d]+)\\\\s*([BbKkMmGgTtPpEeZzYy]?)'", ",", "s", ")", "if", "(", "match", "is", "None", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "'Invalid value: \"%s\"'", ")", "%", "s", ")", ")", "groups", "=", "match", ".", "groups", "(", ")", "value", "=", "float", "(", "groups", "[", "0", "]", ")", "suffix", "=", "(", "(", "(", "len", "(", "groups", ")", ">", "1", ")", "and", "groups", "[", "1", "]", ".", "upper", "(", ")", ")", "or", "'B'", ")", "types", "=", "(", "'B'", ",", "'K'", ",", "'M'", ",", "'G'", ",", "'T'", ",", "'P'", ",", "'E'", ",", "'Z'", ",", "'Y'", ")", "for", "(", "i", ",", "t", ")", "in", "enumerate", "(", "types", ")", ":", "if", "(", "suffix", "==", "t", ")", ":", "return", "int", "(", "(", "value", "*", "pow", "(", "scale", ",", "i", ")", ")", ")" ]
convert a size string of the form <size>[B|K|M|G|T|P|E|Z|Y] to an integer ; the suffix defaults to B .
train
false
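usage sketch for str2size above, with the default scale of 1024:

    str2size('1K')     # 1024
    str2size('2.5G')   # 2684354560
    str2size('10')     # 10 (no suffix defaults to bytes)
    str2size(None)     # 0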
6,458
def test_named_args(): print 'TODO'
[ "def", "test_named_args", "(", ")", ":", "print", "'TODO'" ]
URL : existing use of named args tests look woefully insufficient .
train
false
6,459
def cell_create(context, values): return IMPL.cell_create(context, values)
[ "def", "cell_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "cell_create", "(", "context", ",", "values", ")" ]
create a new child cell entry .
train
false
6,460
def legacy_html_escape(string): return re.sub('([&<"\\\'>])', (lambda m: xml_escapes[m.group()]), string)
[ "def", "legacy_html_escape", "(", "string", ")", ":", "return", "re", ".", "sub", "(", "'([&<\"\\\\\\'>])'", ",", "(", "lambda", "m", ":", "xml_escapes", "[", "m", ".", "group", "(", ")", "]", ")", ",", "string", ")" ]
legacy html escape for non-unicode mode .
train
false
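a hedged usage sketch for legacy_html_escape above, assuming the module-level xml_escapes dict maps the five matched characters to the usual entities (as Mako's does):

    legacy_html_escape('<a href="x">&')
    # '&lt;a href=&#34;x&#34;&gt;&amp;'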
6,461
def open_stream(stream): global stream_fd try: stream_fd = stream.open() except StreamError as err: raise StreamError('Could not open stream: {0}'.format(err)) try: console.logger.debug('Pre-buffering 8192 bytes') prebuffer = stream_fd.read(8192) except IOError as err: raise StreamError('Failed to read data from stream: {0}'.format(err)) if (not prebuffer): raise StreamError('No data returned from stream') return (stream_fd, prebuffer)
[ "def", "open_stream", "(", "stream", ")", ":", "global", "stream_fd", "try", ":", "stream_fd", "=", "stream", ".", "open", "(", ")", "except", "StreamError", "as", "err", ":", "raise", "StreamError", "(", "'Could not open stream: {0}'", ".", "format", "(", "err", ")", ")", "try", ":", "console", ".", "logger", ".", "debug", "(", "'Pre-buffering 8192 bytes'", ")", "prebuffer", "=", "stream_fd", ".", "read", "(", "8192", ")", "except", "IOError", "as", "err", ":", "raise", "StreamError", "(", "'Failed to read data from stream: {0}'", ".", "format", "(", "err", ")", ")", "if", "(", "not", "prebuffer", ")", ":", "raise", "StreamError", "(", "'No data returned from stream'", ")", "return", "(", "stream_fd", ",", "prebuffer", ")" ]
opens a stream and reads 8192 bytes from it .
train
false
6,462
def isValidUSState(field_data, all_data): states = ['AA', 'AE', 'AK', 'AL', 'AP', 'AR', 'AS', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'FM', 'GA', 'GU', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MH', 'MI', 'MN', 'MO', 'MP', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'PW', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY'] if (field_data.upper() not in states): raise ValidationError, gettext('Enter a valid U.S. state abbreviation.')
[ "def", "isValidUSState", "(", "field_data", ",", "all_data", ")", ":", "states", "=", "[", "'AA'", ",", "'AE'", ",", "'AK'", ",", "'AL'", ",", "'AP'", ",", "'AR'", ",", "'AS'", ",", "'AZ'", ",", "'CA'", ",", "'CO'", ",", "'CT'", ",", "'DC'", ",", "'DE'", ",", "'FL'", ",", "'FM'", ",", "'GA'", ",", "'GU'", ",", "'HI'", ",", "'IA'", ",", "'ID'", ",", "'IL'", ",", "'IN'", ",", "'KS'", ",", "'KY'", ",", "'LA'", ",", "'MA'", ",", "'MD'", ",", "'ME'", ",", "'MH'", ",", "'MI'", ",", "'MN'", ",", "'MO'", ",", "'MP'", ",", "'MS'", ",", "'MT'", ",", "'NC'", ",", "'ND'", ",", "'NE'", ",", "'NH'", ",", "'NJ'", ",", "'NM'", ",", "'NV'", ",", "'NY'", ",", "'OH'", ",", "'OK'", ",", "'OR'", ",", "'PA'", ",", "'PR'", ",", "'PW'", ",", "'RI'", ",", "'SC'", ",", "'SD'", ",", "'TN'", ",", "'TX'", ",", "'UT'", ",", "'VA'", ",", "'VI'", ",", "'VT'", ",", "'WA'", ",", "'WI'", ",", "'WV'", ",", "'WY'", "]", "if", "(", "field_data", ".", "upper", "(", ")", "not", "in", "states", ")", ":", "raise", "ValidationError", ",", "gettext", "(", "'Enter a valid U.S. state abbreviation.'", ")" ]
checks that the given string is a valid two-letter u.s. state abbreviation .
train
false
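usage sketch for isValidUSState above; the check is case-insensitive, and a failure raises ValidationError rather than returning a value:

    isValidUSState('ca', all_data={})   # passes silently
    isValidUSState('XX', all_data={})   # raises ValidationError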
6,464
def version_gte(version, cmp_version_str): if (not isinstance(version, string_types)): raise TypeError(('%r is not a string' % version)) if (not isinstance(cmp_version_str, string_types)): raise TypeError(('%r is not a string' % cmp_version_str)) return (LooseVersion(version) >= LooseVersion(cmp_version_str))
[ "def", "version_gte", "(", "version", ",", "cmp_version_str", ")", ":", "if", "(", "not", "isinstance", "(", "version", ",", "string_types", ")", ")", ":", "raise", "TypeError", "(", "(", "'%r is not a string'", "%", "version", ")", ")", "if", "(", "not", "isinstance", "(", "cmp_version_str", ",", "string_types", ")", ")", ":", "raise", "TypeError", "(", "(", "'%r is not a string'", "%", "cmp_version_str", ")", ")", "return", "(", "LooseVersion", "(", "version", ")", ">=", "LooseVersion", "(", "cmp_version_str", ")", ")" ]
return true if version >= *cmp_version_str* .
train
false
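usage sketch for version_gte above; LooseVersion compares numeric components, so '1.10' correctly sorts after '1.9':

    version_gte('1.10.0', '1.9')   # True
    version_gte('0.9', '1.0')      # False
    version_gte(1.0, '1.0')        # raises TypeError: not a string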
6,465
def network_get_by_uuid(context, uuid): return IMPL.network_get_by_uuid(context, uuid)
[ "def", "network_get_by_uuid", "(", "context", ",", "uuid", ")", ":", "return", "IMPL", ".", "network_get_by_uuid", "(", "context", ",", "uuid", ")" ]
get a network by uuid or raise if it does not exist .
train
false
6,466
def canberra(u, v): u = _validate_vector(u) v = _validate_vector(v, dtype=np.float64) olderr = np.seterr(invalid='ignore') try: d = np.nansum((abs((u - v)) / (abs(u) + abs(v)))) finally: np.seterr(**olderr) return d
[ "def", "canberra", "(", "u", ",", "v", ")", ":", "u", "=", "_validate_vector", "(", "u", ")", "v", "=", "_validate_vector", "(", "v", ",", "dtype", "=", "np", ".", "float64", ")", "olderr", "=", "np", ".", "seterr", "(", "invalid", "=", "'ignore'", ")", "try", ":", "d", "=", "np", ".", "nansum", "(", "(", "abs", "(", "(", "u", "-", "v", ")", ")", "/", "(", "abs", "(", "u", ")", "+", "abs", "(", "v", ")", ")", ")", ")", "finally", ":", "np", ".", "seterr", "(", "**", "olderr", ")", "return", "d" ]
computes the canberra distance between two 1-d arrays .
train
false
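usage sketch for canberra above; coordinates where both inputs are zero are skipped, because their 0/0 terms become nan and nansum ignores them:

    import numpy as np

    canberra(np.array([1, 0, 0]), np.array([0, 1, 0]))
    # 2.0, i.e. |1-0|/(1+0) + |0-1|/(0+1), with the 0/0 term dropped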
6,468
def _update_access_token(user, graph): profile = try_get_profile(user) model_or_profile = get_instance_for_attribute(user, profile, 'access_token') if model_or_profile: new_token = (graph.access_token != model_or_profile.access_token) token_message = ('a new' if new_token else 'the same') logger.info('found %s token %s', token_message, graph.access_token[:10]) if new_token: logger.info('access token changed, updating now') model_or_profile.update_access_token(graph.access_token) model_or_profile.save() model_or_profile.extend_access_token()
[ "def", "_update_access_token", "(", "user", ",", "graph", ")", ":", "profile", "=", "try_get_profile", "(", "user", ")", "model_or_profile", "=", "get_instance_for_attribute", "(", "user", ",", "profile", ",", "'access_token'", ")", "if", "model_or_profile", ":", "new_token", "=", "(", "graph", ".", "access_token", "!=", "model_or_profile", ".", "access_token", ")", "token_message", "=", "(", "'a new'", "if", "new_token", "else", "'the same'", ")", "logger", ".", "info", "(", "'found %s token %s'", ",", "token_message", ",", "graph", ".", "access_token", "[", ":", "10", "]", ")", "if", "new_token", ":", "logger", ".", "info", "(", "'access token changed, updating now'", ")", "model_or_profile", ".", "update_access_token", "(", "graph", ".", "access_token", ")", "model_or_profile", ".", "save", "(", ")", "model_or_profile", ".", "extend_access_token", "(", ")" ]
conditionally updates the access token in the database .
train
false
6,470
def _get_vm_ref_from_uuid(session, instance_uuid): vms = session._call_method(vim_util, 'get_objects', 'VirtualMachine', ['name']) return _get_object_from_results(session, vms, instance_uuid, _get_object_for_value)
[ "def", "_get_vm_ref_from_uuid", "(", "session", ",", "instance_uuid", ")", ":", "vms", "=", "session", ".", "_call_method", "(", "vim_util", ",", "'get_objects'", ",", "'VirtualMachine'", ",", "[", "'name'", "]", ")", "return", "_get_object_from_results", "(", "session", ",", "vms", ",", "instance_uuid", ",", "_get_object_for_value", ")" ]
get reference to the vm with the uuid specified .
train
false
6,472
def p_command_read_bad(p): p[0] = 'MALFORMED VARIABLE LIST IN READ'
[ "def", "p_command_read_bad", "(", "p", ")", ":", "p", "[", "0", "]", "=", "'MALFORMED VARIABLE LIST IN READ'" ]
command : read error .
train
false
6,474
def build_list(context, builder, list_type, items): nitems = len(items) inst = ListInstance.allocate(context, builder, list_type, nitems) inst.size = context.get_constant(types.intp, nitems) for (i, val) in enumerate(items): inst.setitem(context.get_constant(types.intp, i), val) return impl_ret_new_ref(context, builder, list_type, inst.value)
[ "def", "build_list", "(", "context", ",", "builder", ",", "list_type", ",", "items", ")", ":", "nitems", "=", "len", "(", "items", ")", "inst", "=", "ListInstance", ".", "allocate", "(", "context", ",", "builder", ",", "list_type", ",", "nitems", ")", "inst", ".", "size", "=", "context", ".", "get_constant", "(", "types", ".", "intp", ",", "nitems", ")", "for", "(", "i", ",", "val", ")", "in", "enumerate", "(", "items", ")", ":", "inst", ".", "setitem", "(", "context", ".", "get_constant", "(", "types", ".", "intp", ",", "i", ")", ",", "val", ")", "return", "impl_ret_new_ref", "(", "context", ",", "builder", ",", "list_type", ",", "inst", ".", "value", ")" ]
build a list of the given type .
train
false
6,475
def collect_attribute_columns(**specs): selected_specs = {spec_name: spec for (spec_name, spec) in iteritems(specs) if spec.columns} spec_cols = list(itertools.chain.from_iterable([spec.columns for spec in selected_specs.values()])) return ordered_set(spec_cols)
[ "def", "collect_attribute_columns", "(", "**", "specs", ")", ":", "selected_specs", "=", "{", "spec_name", ":", "spec", "for", "(", "spec_name", ",", "spec", ")", "in", "iteritems", "(", "specs", ")", "if", "spec", ".", "columns", "}", "spec_cols", "=", "list", "(", "itertools", ".", "chain", ".", "from_iterable", "(", "[", "spec", ".", "columns", "for", "spec", "in", "selected_specs", ".", "values", "(", ")", "]", ")", ")", "return", "ordered_set", "(", "spec_cols", ")" ]
collect list of unique and ordered columns across attribute specifications .
train
false
6,476
def native_value(value): if isinstance(value, six.string_types): if (value.lower() in ['on', 'true', 'yes']): value = True elif (value.lower() in ['off', 'false', 'no']): value = False try: return ast.literal_eval(value) except (TypeError, ValueError, SyntaxError): pass return value
[ "def", "native_value", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "if", "(", "value", ".", "lower", "(", ")", "in", "[", "'on'", ",", "'true'", ",", "'yes'", "]", ")", ":", "value", "=", "True", "elif", "(", "value", ".", "lower", "(", ")", "in", "[", "'off'", ",", "'false'", ",", "'no'", "]", ")", ":", "value", "=", "False", "try", ":", "return", "ast", ".", "literal_eval", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ",", "SyntaxError", ")", ":", "pass", "return", "value" ]
convert a string value to a native python value .
train
false
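usage sketch for native_value above; boolean words are special-cased, and everything else goes through ast.literal_eval with a fallback to the original value:

    native_value('yes')      # True
    native_value('off')      # False
    native_value('3.14')     # 3.14
    native_value('[1, 2]')   # [1, 2]
    native_value('hello')    # 'hello' (literal_eval fails, value returned as-is)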
6,477
def _CheckHtml(html): return _ValidateString(html, 'html', MAXIMUM_FIELD_VALUE_LENGTH, empty_ok=True)
[ "def", "_CheckHtml", "(", "html", ")", ":", "return", "_ValidateString", "(", "html", ",", "'html'", ",", "MAXIMUM_FIELD_VALUE_LENGTH", ",", "empty_ok", "=", "True", ")" ]
checks that the html field is a valid html string .
train
false
6,478
def CreateInstance(clsid, reqIID): try: addnPaths = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, (regAddnPath % clsid)).split(';') for newPath in addnPaths: if (newPath not in sys.path): sys.path.insert(0, newPath) except win32api.error: pass try: policy = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, (regPolicy % clsid)) policy = resolve_func(policy) except win32api.error: policy = DefaultPolicy try: dispatcher = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, (regDispatcher % clsid)) if dispatcher: dispatcher = resolve_func(dispatcher) except win32api.error: dispatcher = None if dispatcher: retObj = dispatcher(policy, None) else: retObj = policy(None) return retObj._CreateInstance_(clsid, reqIID)
[ "def", "CreateInstance", "(", "clsid", ",", "reqIID", ")", ":", "try", ":", "addnPaths", "=", "win32api", ".", "RegQueryValue", "(", "win32con", ".", "HKEY_CLASSES_ROOT", ",", "(", "regAddnPath", "%", "clsid", ")", ")", ".", "split", "(", "';'", ")", "for", "newPath", "in", "addnPaths", ":", "if", "(", "newPath", "not", "in", "sys", ".", "path", ")", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "newPath", ")", "except", "win32api", ".", "error", ":", "pass", "try", ":", "policy", "=", "win32api", ".", "RegQueryValue", "(", "win32con", ".", "HKEY_CLASSES_ROOT", ",", "(", "regPolicy", "%", "clsid", ")", ")", "policy", "=", "resolve_func", "(", "policy", ")", "except", "win32api", ".", "error", ":", "policy", "=", "DefaultPolicy", "try", ":", "dispatcher", "=", "win32api", ".", "RegQueryValue", "(", "win32con", ".", "HKEY_CLASSES_ROOT", ",", "(", "regDispatcher", "%", "clsid", ")", ")", "if", "dispatcher", ":", "dispatcher", "=", "resolve_func", "(", "dispatcher", ")", "except", "win32api", ".", "error", ":", "dispatcher", "=", "None", "if", "dispatcher", ":", "retObj", "=", "dispatcher", "(", "policy", ",", "None", ")", "else", ":", "retObj", "=", "policy", "(", "None", ")", "return", "retObj", ".", "_CreateInstance_", "(", "clsid", ",", "reqIID", ")" ]
create a new instance of the specified iid . the com framework **always** calls this function to create a new instance for the specified clsid .
train
false
6,479
def intercept_renderer(path, context): response = render_to_response(path, context) response.mako_context = context response.mako_template = path return response
[ "def", "intercept_renderer", "(", "path", ",", "context", ")", ":", "response", "=", "render_to_response", "(", "path", ",", "context", ")", "response", ".", "mako_context", "=", "context", "response", ".", "mako_template", "=", "path", "return", "response" ]
intercept calls to render_to_response and attach the context dict to the response for examination in unit tests .
train
false
6,481
def shouldLogEvent(predicates, event): for predicate in predicates: result = predicate(event) if (result == PredicateResult.yes): return True if (result == PredicateResult.no): return False if (result == PredicateResult.maybe): continue raise TypeError('Invalid predicate result: {0!r}'.format(result)) return True
[ "def", "shouldLogEvent", "(", "predicates", ",", "event", ")", ":", "for", "predicate", "in", "predicates", ":", "result", "=", "predicate", "(", "event", ")", "if", "(", "result", "==", "PredicateResult", ".", "yes", ")", ":", "return", "True", "if", "(", "result", "==", "PredicateResult", ".", "no", ")", ":", "return", "False", "if", "(", "result", "==", "PredicateResult", ".", "maybe", ")", ":", "continue", "raise", "TypeError", "(", "'Invalid predicate result: {0!r}'", ".", "format", "(", "result", ")", ")", "return", "True" ]
determine whether an event should be logged .
train
false
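a minimal sketch for shouldLogEvent above; the predicate is hypothetical, and PredicateResult is assumed to be the yes/no/maybe constants the function already references:

    def drop_debug(event):
        # hypothetical predicate: veto debug-level events, abstain otherwise
        if event.get('log_level_name') == 'debug':
            return PredicateResult.no
        return PredicateResult.maybe

    shouldLogEvent([drop_debug], {'log_level_name': 'debug'})   # False
    shouldLogEvent([drop_debug], {'log_level_name': 'warn'})    # True (default)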
6,482
def populate_db(db, num_participants=100, ntips=200, num_teams=5): print 'Making Participants' make_flag_tester = (num_participants > 1) participants = [] for i in xrange(((num_participants - 1) if make_flag_tester else num_participants)): participants.append(fake_participant(db)) if make_flag_tester: flag_tester = fake_participant(db, random_identities=False) participants.append(flag_tester) nepal = db.one("SELECT id FROM countries WHERE code='NP'") flag_tester.store_identity_info(nepal, 'nothing-enforced', {}) flag_tester.set_identity_verification(nepal, True) vatican = db.one("SELECT id FROM countries WHERE code='VA'") flag_tester.store_identity_info(vatican, 'nothing-enforced', {}) flag_tester.set_identity_verification(vatican, True) print 'Making Teams' teams = [] teamowners = random.sample(participants, num_teams) for teamowner in teamowners: teams.append(fake_team(db, teamowner)) teamowner = random.choice(participants) teams.append(fake_team(db, teamowner, 'Gratipay')) print 'Making Payment Instructions' npayment_instructions = 0 payment_instructions = [] for participant in participants: for team in teams: if (participant.username != team.owner): npayment_instructions += 1 if (npayment_instructions > ntips): break payment_instructions.append(fake_payment_instruction(db, participant, team)) if (npayment_instructions > ntips): break print 'Making Elsewheres' for p in participants: num_elsewheres = random.randint(1, 3) for platform_name in random.sample(PLATFORMS, num_elsewheres): fake_elsewhere(db, p, platform_name) min_date = min((x['mtime'] for x in payment_instructions)) max_date = max((x['mtime'] for x in payment_instructions)) payday_counter = 0 date = min_date paydays_total = (((max_date - min_date).days / 7) + 1) while (date < max_date): payday_counter += 1 end_date = (date + datetime.timedelta(days=7)) week_payment_instructions = filter((lambda x: (x['mtime'] < date)), payment_instructions) params = dict(ts_start=date, ts_end=end_date) with db.get_cursor() as cursor: payday_id = cursor.one('\n INSERT INTO paydays\n (ts_start, ts_end)\n VALUES (%(ts_start)s, %(ts_end)s)\n RETURNING id\n ', params) sys.stdout.write(('\rMaking Paydays (%i/%i)' % (payday_id, paydays_total))) sys.stdout.flush() week_payments = [] for payment_instruction in week_payment_instructions: participant = Participant.from_id(payment_instruction['participant_id']) team = Team.from_id(payment_instruction['team_id']) amount = payment_instruction['amount'] assert (participant.username != team.owner) week_payments.append(fake_payment(db=db, participant=participant.username, team=team.slug, timestamp=date, amount=amount, payday=payday_id, direction='to-team')) if (amount != 0): fee = (amount * D('0.02')) fee = abs(fee.quantize(D('.01'))) fake_exchange(db=db, participant=participant, amount=amount, fee=fee, timestamp=(date + datetime.timedelta(seconds=1))) for team in teams: week_payments_to_team = filter((lambda x: (x['team'] == team.slug)), week_payments) pay_out = sum((t['amount'] for t in week_payments_to_team)) if pay_out: week_payments.append(fake_payment(db=db, participant=team.owner, team=team.slug, timestamp=date, amount=pay_out, payday=payday_id, direction='to-participant')) actives = set() actives.update((x['participant'] for x in week_payments)) params = dict(nusers=len(actives), volume=sum((x['amount'] for x in week_payment_instructions)), payday_id=payday_id) db.run('\n UPDATE paydays\n SET nusers=%(nusers)s, volume=%(volume)s\n WHERE id=%(payday_id)s\n ', params) date = end_date print ''
[ "def", "populate_db", "(", "db", ",", "num_participants", "=", "100", ",", "ntips", "=", "200", ",", "num_teams", "=", "5", ")", ":", "print", "'Making Participants'", "make_flag_tester", "=", "(", "num_participants", ">", "1", ")", "participants", "=", "[", "]", "for", "i", "in", "xrange", "(", "(", "(", "num_participants", "-", "1", ")", "if", "make_flag_tester", "else", "num_participants", ")", ")", ":", "participants", ".", "append", "(", "fake_participant", "(", "db", ")", ")", "if", "make_flag_tester", ":", "flag_tester", "=", "fake_participant", "(", "db", ",", "random_identities", "=", "False", ")", "participants", ".", "append", "(", "flag_tester", ")", "nepal", "=", "db", ".", "one", "(", "\"SELECT id FROM countries WHERE code='NP'\"", ")", "flag_tester", ".", "store_identity_info", "(", "nepal", ",", "'nothing-enforced'", ",", "{", "}", ")", "flag_tester", ".", "set_identity_verification", "(", "nepal", ",", "True", ")", "vatican", "=", "db", ".", "one", "(", "\"SELECT id FROM countries WHERE code='VA'\"", ")", "flag_tester", ".", "store_identity_info", "(", "vatican", ",", "'nothing-enforced'", ",", "{", "}", ")", "flag_tester", ".", "set_identity_verification", "(", "vatican", ",", "True", ")", "print", "'Making Teams'", "teams", "=", "[", "]", "teamowners", "=", "random", ".", "sample", "(", "participants", ",", "num_teams", ")", "for", "teamowner", "in", "teamowners", ":", "teams", ".", "append", "(", "fake_team", "(", "db", ",", "teamowner", ")", ")", "teamowner", "=", "random", ".", "choice", "(", "participants", ")", "teams", ".", "append", "(", "fake_team", "(", "db", ",", "teamowner", ",", "'Gratipay'", ")", ")", "print", "'Making Payment Instructions'", "npayment_instructions", "=", "0", "payment_instructions", "=", "[", "]", "for", "participant", "in", "participants", ":", "for", "team", "in", "teams", ":", "if", "(", "participant", ".", "username", "!=", "team", ".", "owner", ")", ":", "npayment_instructions", "+=", "1", "if", "(", "npayment_instructions", ">", "ntips", ")", ":", "break", "payment_instructions", ".", "append", "(", "fake_payment_instruction", "(", "db", ",", "participant", ",", "team", ")", ")", "if", "(", "npayment_instructions", ">", "ntips", ")", ":", "break", "print", "'Making Elsewheres'", "for", "p", "in", "participants", ":", "num_elsewheres", "=", "random", ".", "randint", "(", "1", ",", "3", ")", "for", "platform_name", "in", "random", ".", "sample", "(", "PLATFORMS", ",", "num_elsewheres", ")", ":", "fake_elsewhere", "(", "db", ",", "p", ",", "platform_name", ")", "min_date", "=", "min", "(", "(", "x", "[", "'mtime'", "]", "for", "x", "in", "payment_instructions", ")", ")", "max_date", "=", "max", "(", "(", "x", "[", "'mtime'", "]", "for", "x", "in", "payment_instructions", ")", ")", "payday_counter", "=", "0", "date", "=", "min_date", "paydays_total", "=", "(", "(", "(", "max_date", "-", "min_date", ")", ".", "days", "/", "7", ")", "+", "1", ")", "while", "(", "date", "<", "max_date", ")", ":", "payday_counter", "+=", "1", "end_date", "=", "(", "date", "+", "datetime", ".", "timedelta", "(", "days", "=", "7", ")", ")", "week_payment_instructions", "=", "filter", "(", "(", "lambda", "x", ":", "(", "x", "[", "'mtime'", "]", "<", "date", ")", ")", ",", "payment_instructions", ")", "params", "=", "dict", "(", "ts_start", "=", "date", ",", "ts_end", "=", "end_date", ")", "with", "db", ".", "get_cursor", "(", ")", "as", "cursor", ":", "payday_id", "=", "cursor", ".", "one", "(", "'\\n                INSERT INTO paydays\\n                (ts_start, ts_end)\\n                VALUES (%(ts_start)s, %(ts_end)s)\\n                RETURNING id\\n            '", ",", "params", ")", "sys", ".", "stdout", ".", "write", "(", "(", "'\\rMaking Paydays (%i/%i)'", "%", "(", "payday_id", ",", "paydays_total", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "week_payments", "=", "[", "]", "for", "payment_instruction", "in", "week_payment_instructions", ":", "participant", "=", "Participant", ".", "from_id", "(", "payment_instruction", "[", "'participant_id'", "]", ")", "team", "=", "Team", ".", "from_id", "(", "payment_instruction", "[", "'team_id'", "]", ")", "amount", "=", "payment_instruction", "[", "'amount'", "]", "assert", "(", "participant", ".", "username", "!=", "team", ".", "owner", ")", "week_payments", ".", "append", "(", "fake_payment", "(", "db", "=", "db", ",", "participant", "=", "participant", ".", "username", ",", "team", "=", "team", ".", "slug", ",", "timestamp", "=", "date", ",", "amount", "=", "amount", ",", "payday", "=", "payday_id", ",", "direction", "=", "'to-team'", ")", ")", "if", "(", "amount", "!=", "0", ")", ":", "fee", "=", "(", "amount", "*", "D", "(", "'0.02'", ")", ")", "fee", "=", "abs", "(", "fee", ".", "quantize", "(", "D", "(", "'.01'", ")", ")", ")", "fake_exchange", "(", "db", "=", "db", ",", "participant", "=", "participant", ",", "amount", "=", "amount", ",", "fee", "=", "fee", ",", "timestamp", "=", "(", "date", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "1", ")", ")", ")", "for", "team", "in", "teams", ":", "week_payments_to_team", "=", "filter", "(", "(", "lambda", "x", ":", "(", "x", "[", "'team'", "]", "==", "team", ".", "slug", ")", ")", ",", "week_payments", ")", "pay_out", "=", "sum", "(", "(", "t", "[", "'amount'", "]", "for", "t", "in", "week_payments_to_team", ")", ")", "if", "pay_out", ":", "week_payments", ".", "append", "(", "fake_payment", "(", "db", "=", "db", ",", "participant", "=", "team", ".", "owner", ",", "team", "=", "team", ".", "slug", ",", "timestamp", "=", "date", ",", "amount", "=", "pay_out", ",", "payday", "=", "payday_id", ",", "direction", "=", "'to-participant'", ")", ")", "actives", "=", "set", "(", ")", "actives", ".", "update", "(", "(", "x", "[", "'participant'", "]", "for", "x", "in", "week_payments", ")", ")", "params", "=", "dict", "(", "nusers", "=", "len", "(", "actives", ")", ",", "volume", "=", "sum", "(", "(", "x", "[", "'amount'", "]", "for", "x", "in", "week_payment_instructions", ")", ")", ",", "payday_id", "=", "payday_id", ")", "db", ".", "run", "(", "'\\n            UPDATE paydays\\n            SET nusers=%(nusers)s, volume=%(volume)s\\n            WHERE id=%(payday_id)s\\n            '", ",", "params", ")", "date", "=", "end_date", "print", "''" ]
populate the database with num_participants simulated users and their actions .
train
false
6,483
def make_remote_view(data, settings, more_excluded_names=None): data = get_remote_data(data, settings, mode='editable', more_excluded_names=more_excluded_names) remote = {} for (key, value) in list(data.items()): view = value_to_display(value, minmax=settings['minmax']) remote[key] = {'type': get_human_readable_type(value), 'size': get_size(value), 'color': get_color_name(value), 'view': view} return remote
[ "def", "make_remote_view", "(", "data", ",", "settings", ",", "more_excluded_names", "=", "None", ")", ":", "data", "=", "get_remote_data", "(", "data", ",", "settings", ",", "mode", "=", "'editable'", ",", "more_excluded_names", "=", "more_excluded_names", ")", "remote", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "list", "(", "data", ".", "items", "(", ")", ")", ":", "view", "=", "value_to_display", "(", "value", ",", "minmax", "=", "settings", "[", "'minmax'", "]", ")", "remote", "[", "key", "]", "=", "{", "'type'", ":", "get_human_readable_type", "(", "value", ")", ",", "'size'", ":", "get_size", "(", "value", ")", ",", "'color'", ":", "get_color_name", "(", "value", ")", ",", "'view'", ":", "view", "}", "return", "remote" ]
make a remote view of dictionary *data* -> globals explorer .
train
true
6,484
@register.simple_tag(takes_context=True) def querystring(context, **kwargs): request = context[u'request'] querydict = request.GET.copy() for (key, value) in kwargs.items(): if (value is None): querydict.pop(key, None) else: querydict[key] = value return (u'?' + querydict.urlencode())
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "querystring", "(", "context", ",", "**", "kwargs", ")", ":", "request", "=", "context", "[", "u'request'", "]", "querydict", "=", "request", ".", "GET", ".", "copy", "(", ")", "for", "(", "key", ",", "value", ")", "in", "kwargs", ".", "items", "(", ")", ":", "if", "(", "value", "is", "None", ")", ":", "querydict", ".", "pop", "(", "key", ",", "None", ")", "else", ":", "querydict", "[", "key", "]", "=", "value", "return", "(", "u'?'", "+", "querydict", ".", "urlencode", "(", ")", ")" ]
creates a url derived from the current url's querystring .
train
false
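usage sketch for the querystring tag above, in Django template syntax; with a current URL of ?page=1&sort=name, passing None removes a key:

    {% querystring page=2 %}      {# -> ?page=2&sort=name #}
    {% querystring sort=None %}   {# -> ?page=1 #}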
6,485
def fake_bdm_object(context, bdm_dict): return objects.BlockDeviceMapping._from_db_object(context, objects.BlockDeviceMapping(), FakeDbBlockDeviceDict(bdm_dict.copy()))
[ "def", "fake_bdm_object", "(", "context", ",", "bdm_dict", ")", ":", "return", "objects", ".", "BlockDeviceMapping", ".", "_from_db_object", "(", "context", ",", "objects", ".", "BlockDeviceMapping", "(", ")", ",", "FakeDbBlockDeviceDict", "(", "bdm_dict", ".", "copy", "(", ")", ")", ")" ]
creates a blockdevicemapping object from the given bdm_dict .
train
false
6,486
def transformToArray(tr): if isinstance(tr, QtGui.QTransform): return np.array([[tr.m11(), tr.m21(), tr.m31()], [tr.m12(), tr.m22(), tr.m32()], [tr.m13(), tr.m23(), tr.m33()]]) elif isinstance(tr, QtGui.QMatrix4x4): return np.array(tr.copyDataTo()).reshape(4, 4) else: raise Exception('Transform argument must be either QTransform or QMatrix4x4.')
[ "def", "transformToArray", "(", "tr", ")", ":", "if", "isinstance", "(", "tr", ",", "QtGui", ".", "QTransform", ")", ":", "return", "np", ".", "array", "(", "[", "[", "tr", ".", "m11", "(", ")", ",", "tr", ".", "m21", "(", ")", ",", "tr", ".", "m31", "(", ")", "]", ",", "[", "tr", ".", "m12", "(", ")", ",", "tr", ".", "m22", "(", ")", ",", "tr", ".", "m32", "(", ")", "]", ",", "[", "tr", ".", "m13", "(", ")", ",", "tr", ".", "m23", "(", ")", ",", "tr", ".", "m33", "(", ")", "]", "]", ")", "elif", "isinstance", "(", "tr", ",", "QtGui", ".", "QMatrix4x4", ")", ":", "return", "np", ".", "array", "(", "tr", ".", "copyDataTo", "(", ")", ")", ".", "reshape", "(", "4", ",", "4", ")", "else", ":", "raise", "Exception", "(", "'Transform argument must be either QTransform or QMatrix4x4.'", ")" ]
given a qtransform , return a 3x3 numpy array ; given a qmatrix4x4 , return a 4x4 numpy array .
train
false
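usage sketch for transformToArray above; an identity QTransform maps to the 3x3 identity matrix (QtGui here is assumed to come from pyqtgraph's Qt wrapper):

    from pyqtgraph.Qt import QtGui

    transformToArray(QtGui.QTransform())
    # array([[ 1.,  0.,  0.],
    #        [ 0.,  1.,  0.],
    #        [ 0.,  0.,  1.]])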
6,488
def _minpoly_add(x, dom, *a): mp = _minpoly_op_algebraic_element(Add, a[0], a[1], x, dom) p = (a[0] + a[1]) for px in a[2:]: mp = _minpoly_op_algebraic_element(Add, p, px, x, dom, mp1=mp) p = (p + px) return mp
[ "def", "_minpoly_add", "(", "x", ",", "dom", ",", "*", "a", ")", ":", "mp", "=", "_minpoly_op_algebraic_element", "(", "Add", ",", "a", "[", "0", "]", ",", "a", "[", "1", "]", ",", "x", ",", "dom", ")", "p", "=", "(", "a", "[", "0", "]", "+", "a", "[", "1", "]", ")", "for", "px", "in", "a", "[", "2", ":", "]", ":", "mp", "=", "_minpoly_op_algebraic_element", "(", "Add", ",", "p", ",", "px", ",", "x", ",", "dom", ",", "mp1", "=", "mp", ")", "p", "=", "(", "p", "+", "px", ")", "return", "mp" ]
returns minpoly(add(*a), dom, x) .
train
false
6,489
def generate_collection(addon, app=None): if (app is None): application = None else: application = app.id c = create_collection(application=application) generate_translations(c) CollectionAddon.objects.create(addon=addon, collection=c) if (app is not None): FeaturedCollection.objects.create(application=application, collection=c)
[ "def", "generate_collection", "(", "addon", ",", "app", "=", "None", ")", ":", "if", "(", "app", "is", "None", ")", ":", "application", "=", "None", "else", ":", "application", "=", "app", ".", "id", "c", "=", "create_collection", "(", "application", "=", "application", ")", "generate_translations", "(", "c", ")", "CollectionAddon", ".", "objects", ".", "create", "(", "addon", "=", "addon", ",", "collection", "=", "c", ")", "if", "(", "app", "is", "not", "None", ")", ":", "FeaturedCollection", ".", "objects", ".", "create", "(", "application", "=", "application", ",", "collection", "=", "c", ")" ]
generate a collection .
train
false
6,490
def get_hosted_registry_insecure(): hosted_registry_insecure = None if os.path.exists('/etc/sysconfig/docker'): try: ini_str = text_type(('[root]\n' + open('/etc/sysconfig/docker', 'r').read()), 'utf-8') ini_fp = io.StringIO(ini_str) config = configparser.RawConfigParser() config.readfp(ini_fp) options = config.get('root', 'OPTIONS') if ('insecure-registry' in options): hosted_registry_insecure = True except Exception: pass return hosted_registry_insecure
[ "def", "get_hosted_registry_insecure", "(", ")", ":", "hosted_registry_insecure", "=", "None", "if", "os", ".", "path", ".", "exists", "(", "'/etc/sysconfig/docker'", ")", ":", "try", ":", "ini_str", "=", "text_type", "(", "(", "'[root]\\n'", "+", "open", "(", "'/etc/sysconfig/docker'", ",", "'r'", ")", ".", "read", "(", ")", ")", ",", "'utf-8'", ")", "ini_fp", "=", "io", ".", "StringIO", "(", "ini_str", ")", "config", "=", "configparser", ".", "RawConfigParser", "(", ")", "config", ".", "readfp", "(", "ini_fp", ")", "options", "=", "config", ".", "get", "(", "'root'", ",", "'OPTIONS'", ")", "if", "(", "'insecure-registry'", "in", "options", ")", ":", "hosted_registry_insecure", "=", "True", "except", "Exception", ":", "pass", "return", "hosted_registry_insecure" ]
parses options from /etc/sysconfig/docker to determine if the registry is currently insecure .
train
false
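a sketch of the input get_hosted_registry_insecure parses; with an /etc/sysconfig/docker containing the line below the function returns True, and it returns None when the file or option is absent:

    # /etc/sysconfig/docker
    OPTIONS='--insecure-registry 172.30.0.0/16'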