Columns (name / dtype / observed range):
  id_within_dataset     int64    1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 distinct value
  is_duplicated         bool     2 classes
53,353
def iter_fastq(in_fasta, quals, label_transform=split_lib_transform):
    for (label, seq) in parse_fasta(in_fasta):
        (new_label, qual_id) = label_transform(label)
        seq_id = label.split()[0]
        if seq_id.startswith('>'):
            seq_id = seq_id[1:]
        qual = quals[qual_id]
        (yield (make_fastq_rec(new_label, seq, qual), seq_id))
[ "def", "iter_fastq", "(", "in_fasta", ",", "quals", ",", "label_transform", "=", "split_lib_transform", ")", ":", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "in_fasta", ")", ":", "(", "new_label", ",", "qual_id", ")", "=", "label_transform", "(", "label", ")", "seq_id", "=", "label", ".", "split", "(", ")", "[", "0", "]", "if", "seq_id", ".", "startswith", "(", "'>'", ")", ":", "seq_id", "=", "seq_id", "[", "1", ":", "]", "qual", "=", "quals", "[", "qual_id", "]", "(", "yield", "(", "make_fastq_rec", "(", "new_label", ",", "seq", ",", "qual", ")", ",", "seq_id", ")", ")" ]
iterate over fastq records .
train
false
53,354
def pgcalc(sa_session, id, dryrun=False):
    sql_calc = 'SELECT COALESCE(SUM(total_size), 0)\n FROM ( SELECT DISTINCT ON (d.id) d.total_size, d.id\n FROM history_dataset_association hda\n JOIN history h ON h.id = hda.history_id\n JOIN dataset d ON hda.dataset_id = d.id\n WHERE h.user_id = :id\n AND h.purged = false\n AND hda.purged = false\n AND d.purged = false\n AND d.id NOT IN (SELECT dataset_id\n FROM library_dataset_dataset_association)\n ) sizes'
    sql_update = ('UPDATE galaxy_user\n SET disk_usage = (%s)\n WHERE id = :id\n RETURNING disk_usage;' % sql_calc)
    if dryrun:
        r = sa_session.execute(sql_calc, {'id': id})
    else:
        r = sa_session.execute(sql_update, {'id': id})
    return r.fetchone()[0]
[ "def", "pgcalc", "(", "sa_session", ",", "id", ",", "dryrun", "=", "False", ")", ":", "sql_calc", "=", "'SELECT COALESCE(SUM(total_size), 0)\\n FROM ( SELECT DISTINCT ON (d.id) d.total_size, d.id\\n FROM history_dataset_association hda\\n JOIN history h ON h.id = hda.history_id\\n JOIN dataset d ON hda.dataset_id = d.id\\n WHERE h.user_id = :id\\n AND h.purged = false\\n AND hda.purged = false\\n AND d.purged = false\\n AND d.id NOT IN (SELECT dataset_id\\n FROM library_dataset_dataset_association)\\n ) sizes'", "sql_update", "=", "(", "'UPDATE galaxy_user\\n SET disk_usage = (%s)\\n WHERE id = :id\\n RETURNING disk_usage;'", "%", "sql_calc", ")", "if", "dryrun", ":", "r", "=", "sa_session", ".", "execute", "(", "sql_calc", ",", "{", "'id'", ":", "id", "}", ")", "else", ":", "r", "=", "sa_session", ".", "execute", "(", "sql_update", ",", "{", "'id'", ":", "id", "}", ")", "return", "r", ".", "fetchone", "(", ")", "[", "0", "]" ]
utility method for quickly recalculating user disk usage in postgres .
train
false
53,355
def thumbnails(html):
    from django.conf import settings
    from bs4 import BeautifulSoup
    from mezzanine.core.templatetags.mezzanine_tags import thumbnail
    if (settings.MEDIA_URL.lower() not in html.lower()):
        return html
    dom = BeautifulSoup(html, u'html.parser')
    for img in dom.findAll(u'img'):
        src = img.get(u'src', u'')
        src_in_media = src.lower().startswith(settings.MEDIA_URL.lower())
        width = img.get(u'width')
        height = img.get(u'height')
        if (src_in_media and width and height):
            img[u'src'] = (settings.MEDIA_URL + thumbnail(src, width, height))
    return str(dom).replace(u'</br>', u'')
[ "def", "thumbnails", "(", "html", ")", ":", "from", "django", ".", "conf", "import", "settings", "from", "bs4", "import", "BeautifulSoup", "from", "mezzanine", ".", "core", ".", "templatetags", ".", "mezzanine_tags", "import", "thumbnail", "if", "(", "settings", ".", "MEDIA_URL", ".", "lower", "(", ")", "not", "in", "html", ".", "lower", "(", ")", ")", ":", "return", "html", "dom", "=", "BeautifulSoup", "(", "html", ",", "u'html.parser'", ")", "for", "img", "in", "dom", ".", "findAll", "(", "u'img'", ")", ":", "src", "=", "img", ".", "get", "(", "u'src'", ",", "u''", ")", "src_in_media", "=", "src", ".", "lower", "(", ")", ".", "startswith", "(", "settings", ".", "MEDIA_URL", ".", "lower", "(", ")", ")", "width", "=", "img", ".", "get", "(", "u'width'", ")", "height", "=", "img", ".", "get", "(", "u'height'", ")", "if", "(", "src_in_media", "and", "width", "and", "height", ")", ":", "img", "[", "u'src'", "]", "=", "(", "settings", ".", "MEDIA_URL", "+", "thumbnail", "(", "src", ",", "width", ",", "height", ")", ")", "return", "str", "(", "dom", ")", ".", "replace", "(", "u'</br>'", ",", "u''", ")" ]
given an html string, convert image sources under MEDIA_URL to thumbnail paths where width and height are set.
train
false
53,357
def is_public_ip(ipstr):
    addr = ipstr.split(':')[0]
    addr = int(addr, 16)
    byte1 = (addr & 255)
    byte2 = ((addr >> 8) & 255)
    if (byte1 in (10, 0, 127)):
        return False
    if ((byte1 == 172) and (byte2 > 16)):
        return False
    if ((byte1 == 192) and (byte2 == 168)):
        return False
    return True
[ "def", "is_public_ip", "(", "ipstr", ")", ":", "addr", "=", "ipstr", ".", "split", "(", "':'", ")", "[", "0", "]", "addr", "=", "int", "(", "addr", ",", "16", ")", "byte1", "=", "(", "addr", "&", "255", ")", "byte2", "=", "(", "(", "addr", ">>", "8", ")", "&", "255", ")", "if", "(", "byte1", "in", "(", "10", ",", "0", ",", "127", ")", ")", ":", "return", "False", "if", "(", "(", "byte1", "==", "172", ")", "and", "(", "byte2", ">", "16", ")", ")", ":", "return", "False", "if", "(", "(", "byte1", "==", "192", ")", "and", "(", "byte2", "==", "168", ")", ")", ":", "return", "False", "return", "True" ]
take a /proc/net/tcp encoded src or dest string; return true if it is coming from public ip space.
train
false
53,358
def test_from_object():
    assert (hug.api.from_object(TestAPI) == api)
[ "def", "test_from_object", "(", ")", ":", "assert", "(", "hug", ".", "api", ".", "from_object", "(", "TestAPI", ")", "==", "api", ")" ]
test to ensure it's possible to retrieve an api singleton from an arbitrary object.
train
false
53,360
def _babel_locale(locale):
    return locale.replace('-', '_')
[ "def", "_babel_locale", "(", "locale", ")", ":", "return", "locale", ".", "replace", "(", "'-'", ",", "'_'", ")" ]
return the babel locale code .
train
false
53,361
def branching_weight(G, attr='weight', default=1):
    return sum((edge[2].get(attr, default) for edge in G.edges(data=True)))
[ "def", "branching_weight", "(", "G", ",", "attr", "=", "'weight'", ",", "default", "=", "1", ")", ":", "return", "sum", "(", "(", "edge", "[", "2", "]", ".", "get", "(", "attr", ",", "default", ")", "for", "edge", "in", "G", ".", "edges", "(", "data", "=", "True", ")", ")", ")" ]
returns the total weight of a branching .
train
false
53,362
def fisher_population_correlation(corrcoefs, sample_sizes):
    tmp_rs = array(corrcoefs)
    tmp_ns = array(sample_sizes)
    rs = tmp_rs[(~ isnan(tmp_rs))]
    ns = tmp_ns[(~ isnan(tmp_rs))]
    if (not (ns > 3).all()):
        return (nan, nan)
    if (not (len(ns) > 1)):
        return (nan, nan)
    if (rs >= 1.0).any():
        raise ValueError("A correlation coefficient >= 1 was passed. This is a non real valured correlation coefficient and it's Fisher Z transform cannot be computed.")
    zs = array([fisher_z_transform(float(i)) for i in rs])
    z_bar = ((zs * (ns - 3)).sum() / float((ns - 3).sum()))
    rho = inverse_fisher_z_transform(z_bar)
    x_2 = ((ns - 3) * ((zs - z_bar) ** 2)).sum()
    h_val = chi2prob(x_2, (len(ns) - 1), direction='high')
    return (rho, h_val)
[ "def", "fisher_population_correlation", "(", "corrcoefs", ",", "sample_sizes", ")", ":", "tmp_rs", "=", "array", "(", "corrcoefs", ")", "tmp_ns", "=", "array", "(", "sample_sizes", ")", "rs", "=", "tmp_rs", "[", "(", "~", "isnan", "(", "tmp_rs", ")", ")", "]", "ns", "=", "tmp_ns", "[", "(", "~", "isnan", "(", "tmp_rs", ")", ")", "]", "if", "(", "not", "(", "ns", ">", "3", ")", ".", "all", "(", ")", ")", ":", "return", "(", "nan", ",", "nan", ")", "if", "(", "not", "(", "len", "(", "ns", ")", ">", "1", ")", ")", ":", "return", "(", "nan", ",", "nan", ")", "if", "(", "rs", ">=", "1.0", ")", ".", "any", "(", ")", ":", "raise", "ValueError", "(", "\"A correlation coefficient >= 1 was passed. This is a non real valured correlation coefficient and it's Fisher Z transform cannot be computed.\"", ")", "zs", "=", "array", "(", "[", "fisher_z_transform", "(", "float", "(", "i", ")", ")", "for", "i", "in", "rs", "]", ")", "z_bar", "=", "(", "(", "zs", "*", "(", "ns", "-", "3", ")", ")", ".", "sum", "(", ")", "/", "float", "(", "(", "ns", "-", "3", ")", ".", "sum", "(", ")", ")", ")", "rho", "=", "inverse_fisher_z_transform", "(", "z_bar", ")", "x_2", "=", "(", "(", "ns", "-", "3", ")", "*", "(", "(", "zs", "-", "z_bar", ")", "**", "2", ")", ")", ".", "sum", "(", ")", "h_val", "=", "chi2prob", "(", "x_2", ",", "(", "len", "(", "ns", ")", "-", "1", ")", ",", "direction", "=", "'high'", ")", "return", "(", "rho", ",", "h_val", ")" ]
calculate population rho .
train
false
53,364
def rzpad(value, total_length):
    return (value + ('\x00' * max(0, (total_length - len(value)))))
[ "def", "rzpad", "(", "value", ",", "total_length", ")", ":", "return", "(", "value", "+", "(", "'\\x00'", "*", "max", "(", "0", ",", "(", "total_length", "-", "len", "(", "value", ")", ")", ")", ")", ")" ]
right zero pad value at least to length total_length.
train
false
53,365
def distribution():
    return s3_rest_controller()
[ "def", "distribution", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
53,366
def get_tiles_height_width(n_tiles, desired_width=None):
    if (desired_width == None):
        width = int(np.ceil(np.sqrt(n_tiles)))
        height = width
    else:
        assert isinstance(desired_width, int)
        width = desired_width
        height = int(np.ceil((float(n_tiles) / width)))
    return (height, width)
[ "def", "get_tiles_height_width", "(", "n_tiles", ",", "desired_width", "=", "None", ")", ":", "if", "(", "desired_width", "==", "None", ")", ":", "width", "=", "int", "(", "np", ".", "ceil", "(", "np", ".", "sqrt", "(", "n_tiles", ")", ")", ")", "height", "=", "width", "else", ":", "assert", "isinstance", "(", "desired_width", ",", "int", ")", "width", "=", "desired_width", "height", "=", "int", "(", "np", ".", "ceil", "(", "(", "float", "(", "n_tiles", ")", "/", "width", ")", ")", ")", "return", "(", "height", ",", "width", ")" ]
get a height x width size that will fit n_tiles tiles .
train
false
53,367
@main.command()
@click.option('-b', '--bundle', default='quantopian-quandl', metavar='BUNDLE-NAME', show_default=True, help='The data bundle to ingest.')
@click.option('--assets-version', type=int, multiple=True, help='Version of the assets db to which to downgrade.')
@click.option('--show-progress/--no-show-progress', default=True, help='Print progress information to the terminal.')
def ingest(bundle, assets_version, show_progress):
    bundles_module.ingest(bundle, os.environ, pd.Timestamp.utcnow(), assets_version, show_progress)
[ "@", "main", ".", "command", "(", ")", "@", "click", ".", "option", "(", "'-b'", ",", "'--bundle'", ",", "default", "=", "'quantopian-quandl'", ",", "metavar", "=", "'BUNDLE-NAME'", ",", "show_default", "=", "True", ",", "help", "=", "'The data bundle to ingest.'", ")", "@", "click", ".", "option", "(", "'--assets-version'", ",", "type", "=", "int", ",", "multiple", "=", "True", ",", "help", "=", "'Version of the assets db to which to downgrade.'", ")", "@", "click", ".", "option", "(", "'--show-progress/--no-show-progress'", ",", "default", "=", "True", ",", "help", "=", "'Print progress information to the terminal.'", ")", "def", "ingest", "(", "bundle", ",", "assets_version", ",", "show_progress", ")", ":", "bundles_module", ".", "ingest", "(", "bundle", ",", "os", ".", "environ", ",", "pd", ".", "Timestamp", ".", "utcnow", "(", ")", ",", "assets_version", ",", "show_progress", ")" ]
ingest the data for the given bundle .
train
false
53,368
def shared(value, name=None, strict=False, allow_downcast=None, **kwargs):
    try:
        if isinstance(value, Variable):
            raise TypeError('Shared variable constructor needs numeric values and not symbolic variables.')
        for ctor in reversed(shared.constructors):
            try:
                var = ctor(value, name=name, strict=strict, allow_downcast=allow_downcast, **kwargs)
                utils.add_tag_trace(var)
                return var
            except TypeError:
                continue
    except MemoryError as e:
        e.args = (e.args + ("you might consider using 'theano.shared(..., borrow=True)'",))
        raise
    raise TypeError(('No suitable SharedVariable constructor could be found. Are you sure all kwargs are supported? We do not support the parameter dtype or type. value="%s". parameters="%s"' % (value, kwargs)))
[ "def", "shared", "(", "value", ",", "name", "=", "None", ",", "strict", "=", "False", ",", "allow_downcast", "=", "None", ",", "**", "kwargs", ")", ":", "try", ":", "if", "isinstance", "(", "value", ",", "Variable", ")", ":", "raise", "TypeError", "(", "'Shared variable constructor needs numeric values and not symbolic variables.'", ")", "for", "ctor", "in", "reversed", "(", "shared", ".", "constructors", ")", ":", "try", ":", "var", "=", "ctor", "(", "value", ",", "name", "=", "name", ",", "strict", "=", "strict", ",", "allow_downcast", "=", "allow_downcast", ",", "**", "kwargs", ")", "utils", ".", "add_tag_trace", "(", "var", ")", "return", "var", "except", "TypeError", ":", "continue", "except", "MemoryError", "as", "e", ":", "e", ".", "args", "=", "(", "e", ".", "args", "+", "(", "\"you might consider using 'theano.shared(..., borrow=True)'\"", ",", ")", ")", "raise", "raise", "TypeError", "(", "(", "'No suitable SharedVariable constructor could be found. Are you sure all kwargs are supported? We do not support the parameter dtype or type. value=\"%s\". parameters=\"%s\"'", "%", "(", "value", ",", "kwargs", ")", ")", ")" ]
return a sharedvariable variable .
train
false
53,369
def set_expired_mode(course_id):
    _VERIFIED_MODE_EXPIRED.append(course_id)
[ "def", "set_expired_mode", "(", "course_id", ")", ":", "_VERIFIED_MODE_EXPIRED", ".", "append", "(", "course_id", ")" ]
set course verified mode as expired .
train
false
53,370
@must_have_permission(WRITE)
@must_not_be_registration
def add_pointers(auth, node, **kwargs):
    node_ids = request.json.get('nodeIds')
    if (not node_ids):
        raise HTTPError(http.BAD_REQUEST)
    nodes = [Node.load(node_id) for node_id in node_ids]
    try:
        _add_pointers(node, nodes, auth)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)
    return {}
[ "@", "must_have_permission", "(", "WRITE", ")", "@", "must_not_be_registration", "def", "add_pointers", "(", "auth", ",", "node", ",", "**", "kwargs", ")", ":", "node_ids", "=", "request", ".", "json", ".", "get", "(", "'nodeIds'", ")", "if", "(", "not", "node_ids", ")", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ")", "nodes", "=", "[", "Node", ".", "load", "(", "node_id", ")", "for", "node_id", "in", "node_ids", "]", "try", ":", "_add_pointers", "(", "node", ",", "nodes", ",", "auth", ")", "except", "ValueError", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ")", "return", "{", "}" ]
add pointers to a node .
train
false
53,372
def clear_static_cache(path):
    fully_qualified_path = os.path.join(django.conf.settings.WEB_ROOT, path)
    try:
        new_temp_path = tempfile.mkdtemp(dir=(django.conf.settings.WEB_ROOT + '/'))
    except OSError as e:
        if (e.errno == 28):
            shutil.rmtree(fully_qualified_path)
        return
    try:
        os.rename(fully_qualified_path, os.path.join(new_temp_path, 'old'))
    except OSError as e:
        if (e.errno == 2):
            pass
        else:
            raise
    shutil.rmtree(new_temp_path)
[ "def", "clear_static_cache", "(", "path", ")", ":", "fully_qualified_path", "=", "os", ".", "path", ".", "join", "(", "django", ".", "conf", ".", "settings", ".", "WEB_ROOT", ",", "path", ")", "try", ":", "new_temp_path", "=", "tempfile", ".", "mkdtemp", "(", "dir", "=", "(", "django", ".", "conf", ".", "settings", ".", "WEB_ROOT", "+", "'/'", ")", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "==", "28", ")", ":", "shutil", ".", "rmtree", "(", "fully_qualified_path", ")", "return", "try", ":", "os", ".", "rename", "(", "fully_qualified_path", ",", "os", ".", "path", ".", "join", "(", "new_temp_path", ",", "'old'", ")", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "==", "2", ")", ":", "pass", "else", ":", "raise", "shutil", ".", "rmtree", "(", "new_temp_path", ")" ]
atomically destroy the static file cache for a certain path .
train
false
53,373
def _find_directive(directives, directive_name):
    if ((not directives) or isinstance(directives, str) or (len(directives) == 0)):
        return None
    if (directives[0] == directive_name):
        return directives
    matches = (_find_directive(line, directive_name) for line in directives)
    return next((m for m in matches if (m is not None)), None)
[ "def", "_find_directive", "(", "directives", ",", "directive_name", ")", ":", "if", "(", "(", "not", "directives", ")", "or", "isinstance", "(", "directives", ",", "str", ")", "or", "(", "len", "(", "directives", ")", "==", "0", ")", ")", ":", "return", "None", "if", "(", "directives", "[", "0", "]", "==", "directive_name", ")", ":", "return", "directives", "matches", "=", "(", "_find_directive", "(", "line", ",", "directive_name", ")", "for", "line", "in", "directives", ")", "return", "next", "(", "(", "m", "for", "m", "in", "matches", "if", "(", "m", "is", "not", "None", ")", ")", ",", "None", ")" ]
find a directive of type directive_name in directives .
train
false
53,376
def find_documentation(module_data):
    start_line = (-1)
    mod_ast_tree = ast.parse(module_data)
    for child in mod_ast_tree.body:
        if isinstance(child, ast.Assign):
            for target in child.targets:
                if (target.id == 'DOCUMENTATION'):
                    start_line = (child.lineno - 1)
                    break
    return start_line
[ "def", "find_documentation", "(", "module_data", ")", ":", "start_line", "=", "(", "-", "1", ")", "mod_ast_tree", "=", "ast", ".", "parse", "(", "module_data", ")", "for", "child", "in", "mod_ast_tree", ".", "body", ":", "if", "isinstance", "(", "child", ",", "ast", ".", "Assign", ")", ":", "for", "target", "in", "child", ".", "targets", ":", "if", "(", "target", ".", "id", "==", "'DOCUMENTATION'", ")", ":", "start_line", "=", "(", "child", ".", "lineno", "-", "1", ")", "break", "return", "start_line" ]
find the documentation metadata for a module file .
train
false
53,377
def use_wildcard_certificate(bench_path, ret):
    from bench.config.common_site_config import get_config
    config = get_config(bench_path=bench_path)
    wildcard = config.get('wildcard')
    if (not wildcard):
        return
    domain = wildcard['domain']
    ssl_certificate = wildcard['ssl_certificate']
    ssl_certificate_key = wildcard['ssl_certificate_key']
    if domain.startswith('*.'):
        domain = domain[1:]
    else:
        domain = ('.' + domain)
    for site in ret:
        if site.get('ssl_certificate'):
            continue
        if (site.get('domain') or site['name']).endswith(domain):
            site['ssl_certificate'] = ssl_certificate
            site['ssl_certificate_key'] = ssl_certificate_key
            site['wildcard'] = 1
[ "def", "use_wildcard_certificate", "(", "bench_path", ",", "ret", ")", ":", "from", "bench", ".", "config", ".", "common_site_config", "import", "get_config", "config", "=", "get_config", "(", "bench_path", "=", "bench_path", ")", "wildcard", "=", "config", ".", "get", "(", "'wildcard'", ")", "if", "(", "not", "wildcard", ")", ":", "return", "domain", "=", "wildcard", "[", "'domain'", "]", "ssl_certificate", "=", "wildcard", "[", "'ssl_certificate'", "]", "ssl_certificate_key", "=", "wildcard", "[", "'ssl_certificate_key'", "]", "if", "domain", ".", "startswith", "(", "'*.'", ")", ":", "domain", "=", "domain", "[", "1", ":", "]", "else", ":", "domain", "=", "(", "'.'", "+", "domain", ")", "for", "site", "in", "ret", ":", "if", "site", ".", "get", "(", "'ssl_certificate'", ")", ":", "continue", "if", "(", "site", ".", "get", "(", "'domain'", ")", "or", "site", "[", "'name'", "]", ")", ".", "endswith", "(", "domain", ")", ":", "site", "[", "'ssl_certificate'", "]", "=", "ssl_certificate", "site", "[", "'ssl_certificate_key'", "]", "=", "ssl_certificate_key", "site", "[", "'wildcard'", "]", "=", "1" ]
use the wildcard ssl certificate stored in common_site_config for matching sites.
train
false
53,378
def get_exact_bc_matches(curr_bc, all_bcs):
    if (curr_bc in all_bcs):
        return curr_bc
    else:
        return None
[ "def", "get_exact_bc_matches", "(", "curr_bc", ",", "all_bcs", ")", ":", "if", "(", "curr_bc", "in", "all_bcs", ")", ":", "return", "curr_bc", "else", ":", "return", "None" ]
checks existing barcodes for an exact match .
train
false
53,379
def dictize_job(job):
    return {u'id': job.id, u'title': job.meta.get(u'title'), u'created': job.created_at.strftime(u'%Y-%m-%dT%H:%M:%S'), u'queue': remove_queue_name_prefix(job.origin)}
[ "def", "dictize_job", "(", "job", ")", ":", "return", "{", "u'id'", ":", "job", ".", "id", ",", "u'title'", ":", "job", ".", "meta", ".", "get", "(", "u'title'", ")", ",", "u'created'", ":", "job", ".", "created_at", ".", "strftime", "(", "u'%Y-%m-%dT%H:%M:%S'", ")", ",", "u'queue'", ":", "remove_queue_name_prefix", "(", "job", ".", "origin", ")", "}" ]
convert a job to a dict .
train
false
53,383
def check_paths():
    if (not hasattr(settings, 'PEP_REPO_PATH')):
        raise ImproperlyConfigured('No PEP_REPO_PATH in settings')
    if (not os.path.exists(settings.PEP_REPO_PATH)):
        raise ImproperlyConfigured('PEP_REPO_PATH in settings does not exist')
[ "def", "check_paths", "(", ")", ":", "if", "(", "not", "hasattr", "(", "settings", ",", "'PEP_REPO_PATH'", ")", ")", ":", "raise", "ImproperlyConfigured", "(", "'No PEP_REPO_PATH in settings'", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "settings", ".", "PEP_REPO_PATH", ")", ")", ":", "raise", "ImproperlyConfigured", "(", "'PEP_REPO_PATH in settings does not exist'", ")" ]
method to check all paths have correct substitutions .
train
false
53,384
def get_view_extensions():
    view_extensions = {}
    all_extensions = extensions.view.get_extensions()
    for extension in all_extensions:
        view_extensions[extension.get_id()] = extension.get_title()
    return view_extensions
[ "def", "get_view_extensions", "(", ")", ":", "view_extensions", "=", "{", "}", "all_extensions", "=", "extensions", ".", "view", ".", "get_extensions", "(", ")", "for", "extension", "in", "all_extensions", ":", "view_extensions", "[", "extension", ".", "get_id", "(", ")", "]", "=", "extension", ".", "get_title", "(", ")", "return", "view_extensions" ]
return all enabled view extensions .
train
false
53,385
def json_to_python(json_string):
    python_dict = json.loads(json_string)
    return python_dict
[ "def", "json_to_python", "(", "json_string", ")", ":", "python_dict", "=", "json", ".", "loads", "(", "json_string", ")", "return", "python_dict" ]
i receive error mails encoded as json .
train
false
53,386
def get_network_settings():
    return _read_file(_RH_NETWORK_FILE)
[ "def", "get_network_settings", "(", ")", ":", "return", "_read_file", "(", "_RH_NETWORK_FILE", ")" ]
return the contents of the global network script .
train
false
53,387
def is_notebook():
    try:
        get_ipython()
        return True
    except NameError:
        return False
[ "def", "is_notebook", "(", ")", ":", "try", ":", "get_ipython", "(", ")", "return", "True", "except", "NameError", ":", "return", "False" ]
test whether we are inside an ipython notebook .
train
false
53,391
def is_possible_number_string(number, region_dialing_from):
    try:
        return is_possible_number(parse(number, region_dialing_from))
    except NumberParseException:
        return False
[ "def", "is_possible_number_string", "(", "number", ",", "region_dialing_from", ")", ":", "try", ":", "return", "is_possible_number", "(", "parse", "(", "number", ",", "region_dialing_from", ")", ")", "except", "NumberParseException", ":", "return", "False" ]
check whether a phone number string is a possible number .
train
false
53,393
@treeio_login_required
@handle_response_format
def lead_delete(request, lead_id, response_format='html'):
    lead = get_object_or_404(Lead, pk=lead_id)
    if ((not request.user.profile.has_permission(lead, mode='w')) and (not request.user.profile.is_admin('treeio.sales'))):
        return user_denied(request, "You don't have access to this Sale Status", response_format)
    if request.POST:
        if ('delete' in request.POST):
            if ('trash' in request.POST):
                lead.trash = True
                lead.save()
            else:
                lead.delete()
            return HttpResponseRedirect(reverse('sales_lead_index'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('sales_lead_view', args=[lead.id]))
    all_products = Object.filter_by_request(request, Product.objects.filter(parent__isnull=True))
    all_leads = Object.filter_by_request(request, Lead.objects)
    return render_to_response('sales/lead_delete', {'lead': lead, 'leads': all_leads, 'products': all_products}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "lead_delete", "(", "request", ",", "lead_id", ",", "response_format", "=", "'html'", ")", ":", "lead", "=", "get_object_or_404", "(", "Lead", ",", "pk", "=", "lead_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "lead", ",", "mode", "=", "'w'", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Sale Status\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "lead", ".", "trash", "=", "True", "lead", ".", "save", "(", ")", "else", ":", "lead", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_lead_index'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_lead_view'", ",", "args", "=", "[", "lead", ".", "id", "]", ")", ")", "all_products", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Product", ".", "objects", ".", "filter", "(", "parent__isnull", "=", "True", ")", ")", "all_leads", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Lead", ".", "objects", ")", "return", "render_to_response", "(", "'sales/lead_delete'", ",", "{", "'lead'", ":", "lead", ",", "'leads'", ":", "all_leads", ",", "'products'", ":", "all_products", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
lead delete .
train
false
53,394
def write_properties_core(properties):
    root = Element('cp:coreProperties', {'xmlns:cp': NAMESPACES['cp'], 'xmlns:xsi': NAMESPACES['xsi'], 'xmlns:dc': NAMESPACES['dc'], 'xmlns:dcterms': NAMESPACES['dcterms'], 'xmlns:dcmitype': NAMESPACES['dcmitype']})
    SubElement(root, 'dc:creator').text = properties.creator
    SubElement(root, 'cp:lastModifiedBy').text = properties.last_modified_by
    SubElement(root, 'dcterms:created', {'xsi:type': 'dcterms:W3CDTF'}).text = datetime_to_W3CDTF(properties.created)
    SubElement(root, 'dcterms:modified', {'xsi:type': 'dcterms:W3CDTF'}).text = datetime_to_W3CDTF(properties.modified)
    return get_document_content(root)
[ "def", "write_properties_core", "(", "properties", ")", ":", "root", "=", "Element", "(", "'cp:coreProperties'", ",", "{", "'xmlns:cp'", ":", "NAMESPACES", "[", "'cp'", "]", ",", "'xmlns:xsi'", ":", "NAMESPACES", "[", "'xsi'", "]", ",", "'xmlns:dc'", ":", "NAMESPACES", "[", "'dc'", "]", ",", "'xmlns:dcterms'", ":", "NAMESPACES", "[", "'dcterms'", "]", ",", "'xmlns:dcmitype'", ":", "NAMESPACES", "[", "'dcmitype'", "]", "}", ")", "SubElement", "(", "root", ",", "'dc:creator'", ")", ".", "text", "=", "properties", ".", "creator", "SubElement", "(", "root", ",", "'cp:lastModifiedBy'", ")", ".", "text", "=", "properties", ".", "last_modified_by", "SubElement", "(", "root", ",", "'dcterms:created'", ",", "{", "'xsi:type'", ":", "'dcterms:W3CDTF'", "}", ")", ".", "text", "=", "datetime_to_W3CDTF", "(", "properties", ".", "created", ")", "SubElement", "(", "root", ",", "'dcterms:modified'", ",", "{", "'xsi:type'", ":", "'dcterms:W3CDTF'", "}", ")", ".", "text", "=", "datetime_to_W3CDTF", "(", "properties", ".", "modified", ")", "return", "get_document_content", "(", "root", ")" ]
write the core properties to xml .
train
false
53,396
def bash(command='bash'):
    bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh')
    child = pexpect.spawn(command, ['--rcfile', bashrc], echo=False, encoding='utf-8')
    ps1 = ((PEXPECT_PROMPT[:5] + u'\\[\\]') + PEXPECT_PROMPT[5:])
    ps2 = ((PEXPECT_CONTINUATION_PROMPT[:5] + u'\\[\\]') + PEXPECT_CONTINUATION_PROMPT[5:])
    prompt_change = u"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''".format(ps1, ps2)
    return REPLWrapper(child, u'\\$', prompt_change, extra_init_cmd='export PAGER=cat')
[ "def", "bash", "(", "command", "=", "'bash'", ")", ":", "bashrc", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'bashrc.sh'", ")", "child", "=", "pexpect", ".", "spawn", "(", "command", ",", "[", "'--rcfile'", ",", "bashrc", "]", ",", "echo", "=", "False", ",", "encoding", "=", "'utf-8'", ")", "ps1", "=", "(", "(", "PEXPECT_PROMPT", "[", ":", "5", "]", "+", "u'\\\\[\\\\]'", ")", "+", "PEXPECT_PROMPT", "[", "5", ":", "]", ")", "ps2", "=", "(", "(", "PEXPECT_CONTINUATION_PROMPT", "[", ":", "5", "]", "+", "u'\\\\[\\\\]'", ")", "+", "PEXPECT_CONTINUATION_PROMPT", "[", "5", ":", "]", ")", "prompt_change", "=", "u\"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''\"", ".", "format", "(", "ps1", ",", "ps2", ")", "return", "REPLWrapper", "(", "child", ",", "u'\\\\$'", ",", "prompt_change", ",", "extra_init_cmd", "=", "'export PAGER=cat'", ")" ]
start a bash shell and return a REPLWrapper object.
train
true
53,397
def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''):
    if (not isinstance(string, (text_type, bytes, bytearray))):
        string = text_type(string)
    if isinstance(string, text_type):
        string = string.encode(charset, errors)
    if isinstance(safe, text_type):
        safe = safe.encode(charset, errors)
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode(charset, errors)
    safe = (frozenset((bytearray(safe) + _always_safe)) - frozenset(bytearray(unsafe)))
    rv = bytearray()
    for char in bytearray(string):
        if (char in safe):
            rv.append(char)
        else:
            rv.extend(('%%%02X' % char).encode('ascii'))
    return to_native(bytes(rv))
[ "def", "url_quote", "(", "string", ",", "charset", "=", "'utf-8'", ",", "errors", "=", "'strict'", ",", "safe", "=", "'/:'", ",", "unsafe", "=", "''", ")", ":", "if", "(", "not", "isinstance", "(", "string", ",", "(", "text_type", ",", "bytes", ",", "bytearray", ")", ")", ")", ":", "string", "=", "text_type", "(", "string", ")", "if", "isinstance", "(", "string", ",", "text_type", ")", ":", "string", "=", "string", ".", "encode", "(", "charset", ",", "errors", ")", "if", "isinstance", "(", "safe", ",", "text_type", ")", ":", "safe", "=", "safe", ".", "encode", "(", "charset", ",", "errors", ")", "if", "isinstance", "(", "unsafe", ",", "text_type", ")", ":", "unsafe", "=", "unsafe", ".", "encode", "(", "charset", ",", "errors", ")", "safe", "=", "(", "frozenset", "(", "(", "bytearray", "(", "safe", ")", "+", "_always_safe", ")", ")", "-", "frozenset", "(", "bytearray", "(", "unsafe", ")", ")", ")", "rv", "=", "bytearray", "(", ")", "for", "char", "in", "bytearray", "(", "string", ")", ":", "if", "(", "char", "in", "safe", ")", ":", "rv", ".", "append", "(", "char", ")", "else", ":", "rv", ".", "extend", "(", "(", "'%%%02X'", "%", "char", ")", ".", "encode", "(", "'ascii'", ")", ")", "return", "to_native", "(", "bytes", "(", "rv", ")", ")" ]
url encode a single string with a given encoding .
train
true
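A brief usage sketch for the url_quote snippet above (illustrative; assumes _always_safe contains the usual unreserved ASCII characters, as in Werkzeug):

# Letters and the default-safe '/' pass through; the space and the
# UTF-8 bytes of 'ä' (0xC3 0xA4) are percent-escaped:
# url_quote(u'hello world/ä')  ->  'hello%20world/%C3%A4'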
53,398
def dir_option(s):
    return s
[ "def", "dir_option", "(", "s", ")", ":", "return", "s" ]
same type as str .
train
false
53,399
def tauchen(rho, sigma_u, m=3, n=7):
    std_y = np.sqrt(((sigma_u ** 2) / (1 - (rho ** 2))))
    x_max = (m * std_y)
    x_min = (- x_max)
    x = np.linspace(x_min, x_max, n)
    step = ((x_max - x_min) / (n - 1))
    half_step = (0.5 * step)
    P = np.empty((n, n))
    _fill_tauchen(x, P, n, rho, sigma_u, half_step)
    mc = MarkovChain(P, state_values=x)
    return mc
[ "def", "tauchen", "(", "rho", ",", "sigma_u", ",", "m", "=", "3", ",", "n", "=", "7", ")", ":", "std_y", "=", "np", ".", "sqrt", "(", "(", "(", "sigma_u", "**", "2", ")", "/", "(", "1", "-", "(", "rho", "**", "2", ")", ")", ")", ")", "x_max", "=", "(", "m", "*", "std_y", ")", "x_min", "=", "(", "-", "x_max", ")", "x", "=", "np", ".", "linspace", "(", "x_min", ",", "x_max", ",", "n", ")", "step", "=", "(", "(", "x_max", "-", "x_min", ")", "/", "(", "n", "-", "1", ")", ")", "half_step", "=", "(", "0.5", "*", "step", ")", "P", "=", "np", ".", "empty", "(", "(", "n", ",", "n", ")", ")", "_fill_tauchen", "(", "x", ",", "P", ",", "n", ",", "rho", ",", "sigma_u", ",", "half_step", ")", "mc", "=", "MarkovChain", "(", "P", ",", "state_values", "=", "x", ")", "return", "mc" ]
computes a markov chain associated with a discretized version of the linear gaussian ar(1) process y_{t+1} = rho * y_t + u_{t+1} using tauchen's method.
train
true
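A small numeric sketch of the grid construction in the tauchen snippet above (hand-computed; the _fill_tauchen transition fill is assumed from context):

# rho = 0.9, sigma_u = 1, m = 3:
# std_y = sqrt(1 / (1 - 0.81)) = sqrt(1 / 0.19) ~ 2.294
# grid runs from -3 * 2.294 ~ -6.882 to +6.882, with n = 7 evenly spaced nodes.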
53,400
def getmro(cls):
    if hasattr(cls, '__mro__'):
        return cls.__mro__
    else:
        result = []
        _searchbases(cls, result)
        return tuple(result)
[ "def", "getmro", "(", "cls", ")", ":", "if", "hasattr", "(", "cls", ",", "'__mro__'", ")", ":", "return", "cls", ".", "__mro__", "else", ":", "result", "=", "[", "]", "_searchbases", "(", "cls", ",", "result", ")", "return", "tuple", "(", "result", ")" ]
return tuple of base classes in method resolution order .
train
true
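For reference, a minimal usage sketch of the getmro snippet above (hypothetical class names, for illustration only):

# A diamond hierarchy to exercise getmro:
class A(object): pass
class B(A): pass
class C(A): pass
class D(B, C): pass

# D defines __mro__, so getmro simply returns it: (D, B, C, A, object).
# The _searchbases fallback only runs for classes lacking __mro__.
print(getmro(D))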
53,402
def test_clock():
    data.clock()
[ "def", "test_clock", "(", ")", ":", "data", ".", "clock", "(", ")" ]
test that "clock" image can be loaded .
train
false
53,404
def convresblock(x, nfeats=8, ksize=3, nskipped=2):
    y0 = Convolution2D(nfeats, ksize, ksize, border_mode='same')(x)
    y = y0
    for i in range(nskipped):
        y = BatchNormalization(mode=0, axis=1)(y)
        y = Activation('relu')(y)
        y = Convolution2D(nfeats, ksize, ksize, border_mode='same')(y)
    return merge([y0, y], mode='sum')
[ "def", "convresblock", "(", "x", ",", "nfeats", "=", "8", ",", "ksize", "=", "3", ",", "nskipped", "=", "2", ")", ":", "y0", "=", "Convolution2D", "(", "nfeats", ",", "ksize", ",", "ksize", ",", "border_mode", "=", "'same'", ")", "(", "x", ")", "y", "=", "y0", "for", "i", "in", "range", "(", "nskipped", ")", ":", "y", "=", "BatchNormalization", "(", "mode", "=", "0", ",", "axis", "=", "1", ")", "(", "y", ")", "y", "=", "Activation", "(", "'relu'", ")", "(", "y", ")", "y", "=", "Convolution2D", "(", "nfeats", ",", "ksize", ",", "ksize", ",", "border_mode", "=", "'same'", ")", "(", "y", ")", "return", "merge", "(", "[", "y0", ",", "y", "]", ",", "mode", "=", "'sum'", ")" ]
the proposed residual block from [4] .
train
false
53,405
def cbrt(arg):
    return Pow(arg, Rational(1, 3))
[ "def", "cbrt", "(", "arg", ")", ":", "return", "Pow", "(", "arg", ",", "Rational", "(", "1", ",", "3", ")", ")" ]
this function computes the principal cube root of arg.
train
false
53,407
def pagingRequestType2(MobileId_presence=0):
    a = L2PseudoLength()
    b = TpPd(pd=6)
    c = MessageType(mesType=34)
    d = PageModeAndChannelNeeded()
    f = MobileId()
    g = MobileId()
    packet = (((((a / b) / c) / d) / f) / g)
    if (MobileId_presence is 1):
        h = MobileIdHdr(ieiMI=23, eightBitMI=0)
        packet = (packet / h)
    i = P2RestOctets()
    packet = (packet / i)
    return packet
[ "def", "pagingRequestType2", "(", "MobileId_presence", "=", "0", ")", ":", "a", "=", "L2PseudoLength", "(", ")", "b", "=", "TpPd", "(", "pd", "=", "6", ")", "c", "=", "MessageType", "(", "mesType", "=", "34", ")", "d", "=", "PageModeAndChannelNeeded", "(", ")", "f", "=", "MobileId", "(", ")", "g", "=", "MobileId", "(", ")", "packet", "=", "(", "(", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "/", "f", ")", "/", "g", ")", "if", "(", "MobileId_presence", "is", "1", ")", ":", "h", "=", "MobileIdHdr", "(", "ieiMI", "=", "23", ",", "eightBitMI", "=", "0", ")", "packet", "=", "(", "packet", "/", "h", ")", "i", "=", "P2RestOctets", "(", ")", "packet", "=", "(", "packet", "/", "i", ")", "return", "packet" ]
paging request type 2 section 9 .
train
true
53,408
def submit_reset_problem_attempts_for_all_students(request, usage_key):
    modulestore().get_item(usage_key)
    task_type = 'reset_problem_attempts'
    task_class = reset_problem_attempts
    (task_input, task_key) = encode_problem_and_student_input(usage_key)
    return submit_task(request, task_type, task_class, usage_key.course_key, task_input, task_key)
[ "def", "submit_reset_problem_attempts_for_all_students", "(", "request", ",", "usage_key", ")", ":", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", "task_type", "=", "'reset_problem_attempts'", "task_class", "=", "reset_problem_attempts", "(", "task_input", ",", "task_key", ")", "=", "encode_problem_and_student_input", "(", "usage_key", ")", "return", "submit_task", "(", "request", ",", "task_type", ",", "task_class", ",", "usage_key", ".", "course_key", ",", "task_input", ",", "task_key", ")" ]
request to have attempts reset for a problem as a background task .
train
false
53,409
def monitor_set_global(sock, name, value):
    return communicate(sock, ('__set_global__("%s")' % name), settings=[value])
[ "def", "monitor_set_global", "(", "sock", ",", "name", ",", "value", ")", ":", "return", "communicate", "(", "sock", ",", "(", "'__set_global__(\"%s\")'", "%", "name", ")", ",", "settings", "=", "[", "value", "]", ")" ]
set global variable *name* value to *value* .
train
false
53,410
def _at_origin(x):
    return (np.sum((x * x)) < 1e-08)
[ "def", "_at_origin", "(", "x", ")", ":", "return", "(", "np", ".", "sum", "(", "(", "x", "*", "x", ")", ")", "<", "1e-08", ")" ]
determine if a vector is at the origin .
train
false
53,412
def add_line_increment_for_dict(data, lineModified, diference, atLineStart=False):
    def _inner_increment(line):
        if (((not atLineStart) and (line <= lineModified)) or (lineModified == (line + diference))):
            return line
        newLine = (line + diference)
        summary = data.pop(line)
        data[newLine] = summary
        return newLine
    list(map(_inner_increment, list(data.keys())))
    return data
[ "def", "add_line_increment_for_dict", "(", "data", ",", "lineModified", ",", "diference", ",", "atLineStart", "=", "False", ")", ":", "def", "_inner_increment", "(", "line", ")", ":", "if", "(", "(", "(", "not", "atLineStart", ")", "and", "(", "line", "<=", "lineModified", ")", ")", "or", "(", "lineModified", "==", "(", "line", "+", "diference", ")", ")", ")", ":", "return", "line", "newLine", "=", "(", "line", "+", "diference", ")", "summary", "=", "data", ".", "pop", "(", "line", ")", "data", "[", "newLine", "]", "=", "summary", "return", "newLine", "list", "(", "map", "(", "_inner_increment", ",", "list", "(", "data", ".", "keys", "(", ")", ")", ")", ")", "return", "data" ]
increment the line number of the dict content when needed .
train
false
53,414
@add_handler('gpl')
def qute_gpl(_url):
    return ('text/html', utils.read_file('html/COPYING.html'))
[ "@", "add_handler", "(", "'gpl'", ")", "def", "qute_gpl", "(", "_url", ")", ":", "return", "(", "'text/html'", ",", "utils", ".", "read_file", "(", "'html/COPYING.html'", ")", ")" ]
handler for qute:gpl .
train
false
53,415
def getComplexPolygonByStartEnd(endAngle, radius, sides, startAngle=0.0):
    if (endAngle == startAngle):
        return getComplexPolygon(complex(), radius, sides, startAngle)
    angleExtent = (endAngle - startAngle)
    sideAngle = ((2.0 * math.pi) / float(sides))
    sides = math.ceil(abs((angleExtent / sideAngle)))
    sideAngle = (angleExtent / float(sides))
    complexPolygon = []
    for side in xrange((abs(sides) + 1)):
        unitPolar = getWiddershinsUnitPolar(startAngle)
        complexPolygon.append((unitPolar * radius))
        startAngle += sideAngle
    return complexPolygon
[ "def", "getComplexPolygonByStartEnd", "(", "endAngle", ",", "radius", ",", "sides", ",", "startAngle", "=", "0.0", ")", ":", "if", "(", "endAngle", "==", "startAngle", ")", ":", "return", "getComplexPolygon", "(", "complex", "(", ")", ",", "radius", ",", "sides", ",", "startAngle", ")", "angleExtent", "=", "(", "endAngle", "-", "startAngle", ")", "sideAngle", "=", "(", "(", "2.0", "*", "math", ".", "pi", ")", "/", "float", "(", "sides", ")", ")", "sides", "=", "math", ".", "ceil", "(", "abs", "(", "(", "angleExtent", "/", "sideAngle", ")", ")", ")", "sideAngle", "=", "(", "angleExtent", "/", "float", "(", "sides", ")", ")", "complexPolygon", "=", "[", "]", "for", "side", "in", "xrange", "(", "(", "abs", "(", "sides", ")", "+", "1", ")", ")", ":", "unitPolar", "=", "getWiddershinsUnitPolar", "(", "startAngle", ")", "complexPolygon", ".", "append", "(", "(", "unitPolar", "*", "radius", ")", ")", "startAngle", "+=", "sideAngle", "return", "complexPolygon" ]
get the complex polygon by start and end angle .
train
false
53,416
@contextmanager
def gc_state(state):
    orig_state = gc.isenabled()
    set_gc_state(state)
    (yield)
    set_gc_state(orig_state)
[ "@", "contextmanager", "def", "gc_state", "(", "state", ")", ":", "orig_state", "=", "gc", ".", "isenabled", "(", ")", "set_gc_state", "(", "state", ")", "(", "yield", ")", "set_gc_state", "(", "orig_state", ")" ]
context manager to set the state of the garbage collector. parameters: state (bool) - true for gc enabled, false for disabled.
train
false
53,417
def test_ast_expression_basics():
    code = can_compile(u'(foo bar)').body[0]
    tree = ast.Expr(value=ast.Call(func=ast.Name(id=u'foo', ctx=ast.Load()), args=[ast.Name(id=u'bar', ctx=ast.Load())], keywords=[], starargs=None, kwargs=None))
    _ast_spotcheck(u'value.func.id', code, tree)
[ "def", "test_ast_expression_basics", "(", ")", ":", "code", "=", "can_compile", "(", "u'(foo bar)'", ")", ".", "body", "[", "0", "]", "tree", "=", "ast", ".", "Expr", "(", "value", "=", "ast", ".", "Call", "(", "func", "=", "ast", ".", "Name", "(", "id", "=", "u'foo'", ",", "ctx", "=", "ast", ".", "Load", "(", ")", ")", ",", "args", "=", "[", "ast", ".", "Name", "(", "id", "=", "u'bar'", ",", "ctx", "=", "ast", ".", "Load", "(", ")", ")", "]", ",", "keywords", "=", "[", "]", ",", "starargs", "=", "None", ",", "kwargs", "=", "None", ")", ")", "_ast_spotcheck", "(", "u'value.func.id'", ",", "code", ",", "tree", ")" ]
ensure basic ast expression conversion works .
train
false
53,418
def _TestUnlinkIdentity(tester, user_cookie, request_dict):
    validator = tester.validator
    (user_id, device_id) = tester.GetIdsFromCookie(user_cookie)
    request_dict = deepcopy(request_dict)
    actual_dict = tester.SendRequest('unlink_identity', user_cookie, request_dict)
    op_dict = tester._DeriveNotificationOpDict(user_id, device_id, request_dict)
    validator.ValidateUnlinkIdentity(op_dict, request_dict['identity'])
    tester._CompareResponseDicts('unlink_identity', user_id, request_dict, {}, actual_dict)
    return actual_dict
[ "def", "_TestUnlinkIdentity", "(", "tester", ",", "user_cookie", ",", "request_dict", ")", ":", "validator", "=", "tester", ".", "validator", "(", "user_id", ",", "device_id", ")", "=", "tester", ".", "GetIdsFromCookie", "(", "user_cookie", ")", "request_dict", "=", "deepcopy", "(", "request_dict", ")", "actual_dict", "=", "tester", ".", "SendRequest", "(", "'unlink_identity'", ",", "user_cookie", ",", "request_dict", ")", "op_dict", "=", "tester", ".", "_DeriveNotificationOpDict", "(", "user_id", ",", "device_id", ",", "request_dict", ")", "validator", ".", "ValidateUnlinkIdentity", "(", "op_dict", ",", "request_dict", "[", "'identity'", "]", ")", "tester", ".", "_CompareResponseDicts", "(", "'unlink_identity'", ",", "user_id", ",", "request_dict", ",", "{", "}", ",", "actual_dict", ")", "return", "actual_dict" ]
called by the servicetester in order to test unlink_identity service api call .
train
false
53,420
def _unpack_zipfile(filename, extract_dir):
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')
    if (not zipfile.is_zipfile(filename)):
        raise ReadError(('%s is not a zip file' % filename))
    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename
            if (name.startswith('/') or ('..' in name)):
                continue
            target = os.path.join(extract_dir, *name.split('/'))
            if (not target):
                continue
            _ensure_directory(target)
            if (not name.endswith('/')):
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                del data
    finally:
        zip.close()
[ "def", "_unpack_zipfile", "(", "filename", ",", "extract_dir", ")", ":", "try", ":", "import", "zipfile", "except", "ImportError", ":", "raise", "ReadError", "(", "'zlib not supported, cannot unpack this archive.'", ")", "if", "(", "not", "zipfile", ".", "is_zipfile", "(", "filename", ")", ")", ":", "raise", "ReadError", "(", "(", "'%s is not a zip file'", "%", "filename", ")", ")", "zip", "=", "zipfile", ".", "ZipFile", "(", "filename", ")", "try", ":", "for", "info", "in", "zip", ".", "infolist", "(", ")", ":", "name", "=", "info", ".", "filename", "if", "(", "name", ".", "startswith", "(", "'/'", ")", "or", "(", "'..'", "in", "name", ")", ")", ":", "continue", "target", "=", "os", ".", "path", ".", "join", "(", "extract_dir", ",", "*", "name", ".", "split", "(", "'/'", ")", ")", "if", "(", "not", "target", ")", ":", "continue", "_ensure_directory", "(", "target", ")", "if", "(", "not", "name", ".", "endswith", "(", "'/'", ")", ")", ":", "data", "=", "zip", ".", "read", "(", "info", ".", "filename", ")", "f", "=", "open", "(", "target", ",", "'wb'", ")", "try", ":", "f", ".", "write", "(", "data", ")", "finally", ":", "f", ".", "close", "(", ")", "del", "data", "finally", ":", "zip", ".", "close", "(", ")" ]
unpack zip filename to extract_dir .
train
true
53,421
def _ValidateContacts(contact_dicts):
    for contact in contact_dicts:
        if ('identity' in contact):
            Identity.ValidateKey(contact['identity'])
[ "def", "_ValidateContacts", "(", "contact_dicts", ")", ":", "for", "contact", "in", "contact_dicts", ":", "if", "(", "'identity'", "in", "contact", ")", ":", "Identity", ".", "ValidateKey", "(", "contact", "[", "'identity'", "]", ")" ]
for each contact in "contact_dicts" that has an identity attribute, validate the identity key.
train
false
53,423
def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False):
    return random_partition_graph(([k] * l), p_in, p_out, seed, directed)
[ "def", "planted_partition_graph", "(", "l", ",", "k", ",", "p_in", ",", "p_out", ",", "seed", "=", "None", ",", "directed", "=", "False", ")", ":", "return", "random_partition_graph", "(", "(", "[", "k", "]", "*", "l", ")", ",", "p_in", ",", "p_out", ",", "seed", ",", "directed", ")" ]
return the planted l-partition graph .
train
false
53,424
@content_git_object_init.connect
def git_permalink(content, git_content):
    if (not content.settings['GIT_GENERATE_PERMALINK']):
        return
    if (not string_to_bool(content.metadata.get('git_permalink', 'yes'))):
        return
    if (not git_content.is_committed()):
        return
    permalink_hash = hashlib.sha1()
    permalink_hash.update(str(git_content.get_oldest_commit()))
    permalink_hash.update(str(git_content.get_oldest_filename()))
    git_permalink_id = base64.urlsafe_b64encode(permalink_hash.digest())
    permalink_id_metadata_key = content.settings['PERMALINK_ID_METADATA_KEY']
    if (permalink_id_metadata_key in content.metadata):
        content.metadata[permalink_id_metadata_key] = ','.join((content.metadata[permalink_id_metadata_key], git_permalink_id))
    else:
        content.metadata[permalink_id_metadata_key] = git_permalink_id
[ "@", "content_git_object_init", ".", "connect", "def", "git_permalink", "(", "content", ",", "git_content", ")", ":", "if", "(", "not", "content", ".", "settings", "[", "'GIT_GENERATE_PERMALINK'", "]", ")", ":", "return", "if", "(", "not", "string_to_bool", "(", "content", ".", "metadata", ".", "get", "(", "'git_permalink'", ",", "'yes'", ")", ")", ")", ":", "return", "if", "(", "not", "git_content", ".", "is_committed", "(", ")", ")", ":", "return", "permalink_hash", "=", "hashlib", ".", "sha1", "(", ")", "permalink_hash", ".", "update", "(", "str", "(", "git_content", ".", "get_oldest_commit", "(", ")", ")", ")", "permalink_hash", ".", "update", "(", "str", "(", "git_content", ".", "get_oldest_filename", "(", ")", ")", ")", "git_permalink_id", "=", "base64", ".", "urlsafe_b64encode", "(", "permalink_hash", ".", "digest", "(", ")", ")", "permalink_id_metadata_key", "=", "content", ".", "settings", "[", "'PERMALINK_ID_METADATA_KEY'", "]", "if", "(", "permalink_id_metadata_key", "in", "content", ".", "metadata", ")", ":", "content", ".", "metadata", "[", "permalink_id_metadata_key", "]", "=", "','", ".", "join", "(", "(", "content", ".", "metadata", "[", "permalink_id_metadata_key", "]", ",", "git_permalink_id", ")", ")", "else", ":", "content", ".", "metadata", "[", "permalink_id_metadata_key", "]", "=", "git_permalink_id" ]
add git based permalink id to content metadata .
train
false
53,426
def pefile_read_version(filename):
    vers = {'FileVersion': (0, 0, 0, 0), 'ProductVersion': (0, 0, 0, 0), 'translations': {'lang_id1': {'Comments': '', 'CompanyName': '', 'FileDescription': '', 'FileVersion': '', 'InternalName': '', 'LegalCopyright': '', 'LegalTrademarks': '', 'OriginalFilename': '', 'PrivateBuild': '', 'ProductName': '', 'ProductVersion': '', 'SpecialBuild': ''}}}
    pe = pefile.PE(filename)
    from pprint import pprint
    pprint(pe.VS_FIXEDFILEINFO)
    print dir(pe.VS_FIXEDFILEINFO)
    print repr(pe.VS_FIXEDFILEINFO)
    print pe.dump_info()
    pe.close()
    return vers
[ "def", "pefile_read_version", "(", "filename", ")", ":", "vers", "=", "{", "'FileVersion'", ":", "(", "0", ",", "0", ",", "0", ",", "0", ")", ",", "'ProductVersion'", ":", "(", "0", ",", "0", ",", "0", ",", "0", ")", ",", "'translations'", ":", "{", "'lang_id1'", ":", "{", "'Comments'", ":", "''", ",", "'CompanyName'", ":", "''", ",", "'FileDescription'", ":", "''", ",", "'FileVersion'", ":", "''", ",", "'InternalName'", ":", "''", ",", "'LegalCopyright'", ":", "''", ",", "'LegalTrademarks'", ":", "''", ",", "'OriginalFilename'", ":", "''", ",", "'PrivateBuild'", ":", "''", ",", "'ProductName'", ":", "''", ",", "'ProductVersion'", ":", "''", ",", "'SpecialBuild'", ":", "''", "}", "}", "}", "pe", "=", "pefile", ".", "PE", "(", "filename", ")", "from", "pprint", "import", "pprint", "pprint", "(", "pe", ".", "VS_FIXEDFILEINFO", ")", "print", "dir", "(", "pe", ".", "VS_FIXEDFILEINFO", ")", "print", "repr", "(", "pe", ".", "VS_FIXEDFILEINFO", ")", "print", "pe", ".", "dump_info", "(", ")", "pe", ".", "close", "(", ")", "return", "vers" ]
return a version structure like the vers dict above: translation independent information plus per-translation string fields.
train
false
53,427
def _unescape(text):
    pattern = '\\\\{1,2}u[0-9a-fA-F]{4}'
    decode = (lambda x: codecs.getdecoder('unicode_escape')(x.group())[0])
    return re.sub(pattern, decode, text)
[ "def", "_unescape", "(", "text", ")", ":", "pattern", "=", "'\\\\\\\\{1,2}u[0-9a-fA-F]{4}'", "decode", "=", "(", "lambda", "x", ":", "codecs", ".", "getdecoder", "(", "'unicode_escape'", ")", "(", "x", ".", "group", "(", ")", ")", "[", "0", "]", ")", "return", "re", ".", "sub", "(", "pattern", ",", "decode", ",", "text", ")" ]
unescape unicode character codes within a string .
train
false
53,428
def do_title(s):
    rv = []
    for item in re.compile('([-\\s]+)(?u)').split(s):
        if (not item):
            continue
        rv.append((item[0].upper() + item[1:].lower()))
    return ''.join(rv)
[ "def", "do_title", "(", "s", ")", ":", "rv", "=", "[", "]", "for", "item", "in", "re", ".", "compile", "(", "'([-\\\\s]+)(?u)'", ")", ".", "split", "(", "s", ")", ":", "if", "(", "not", "item", ")", ":", "continue", "rv", ".", "append", "(", "(", "item", "[", "0", "]", ".", "upper", "(", ")", "+", "item", "[", "1", ":", "]", ".", "lower", "(", ")", ")", ")", "return", "''", ".", "join", "(", "rv", ")" ]
return a titlecased version of the value .
train
true
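A quick usage sketch for the do_title snippet above (hypothetical inputs; assumes re is imported and do_title is defined as shown):

# Words are split on runs of hyphens/whitespace; the capturing group keeps
# the separators in the split output, so they survive titlecasing intact:
print(do_title('my first-post'))   # -> 'My First-Post'
print(do_title('HELLO  WORLD'))    # -> 'Hello  World'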
53,429
def cast_unicode(s, encoding='utf8', errors='strict'):
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    elif isinstance(s, unicode):
        return s
    else:
        raise TypeError(('Expected unicode or bytes, got %r' % s))
[ "def", "cast_unicode", "(", "s", ",", "encoding", "=", "'utf8'", ",", "errors", "=", "'strict'", ")", ":", "if", "isinstance", "(", "s", ",", "bytes", ")", ":", "return", "s", ".", "decode", "(", "encoding", ",", "errors", ")", "elif", "isinstance", "(", "s", ",", "unicode", ")", ":", "return", "s", "else", ":", "raise", "TypeError", "(", "(", "'Expected unicode or bytes, got %r'", "%", "s", ")", ")" ]
cast bytes or unicode to unicode .
train
false
53,430
def _process_emerge_err(stdout, stderr):
    ret = {}
    rexp = re.compile('^[<>=][^ ]+/[^ ]+ [^\\n]+', re.M)
    slot_conflicts = re.compile('^[^ \\n]+/[^ ]+:[^ ]', re.M).findall(stderr)
    if slot_conflicts:
        ret['slot conflicts'] = slot_conflicts
    blocked = re.compile('(?m)^\\[blocks .+\\] ([^ ]+/[^ ]+-[0-9]+[^ ]+).*$').findall(stdout)
    unsatisfied = re.compile('Error: The above package list contains').findall(stderr)
    if (blocked and unsatisfied):
        ret['blocked'] = blocked
    sections = re.split('\n\n', stderr)
    for section in sections:
        if ('The following keyword changes' in section):
            ret['keywords'] = rexp.findall(section)
        elif ('The following license changes' in section):
            ret['license'] = rexp.findall(section)
        elif ('The following USE changes' in section):
            ret['use'] = rexp.findall(section)
        elif ('The following mask changes' in section):
            ret['mask'] = rexp.findall(section)
    return ret
[ "def", "_process_emerge_err", "(", "stdout", ",", "stderr", ")", ":", "ret", "=", "{", "}", "rexp", "=", "re", ".", "compile", "(", "'^[<>=][^ ]+/[^ ]+ [^\\\\n]+'", ",", "re", ".", "M", ")", "slot_conflicts", "=", "re", ".", "compile", "(", "'^[^ \\\\n]+/[^ ]+:[^ ]'", ",", "re", ".", "M", ")", ".", "findall", "(", "stderr", ")", "if", "slot_conflicts", ":", "ret", "[", "'slot conflicts'", "]", "=", "slot_conflicts", "blocked", "=", "re", ".", "compile", "(", "'(?m)^\\\\[blocks .+\\\\] ([^ ]+/[^ ]+-[0-9]+[^ ]+).*$'", ")", ".", "findall", "(", "stdout", ")", "unsatisfied", "=", "re", ".", "compile", "(", "'Error: The above package list contains'", ")", ".", "findall", "(", "stderr", ")", "if", "(", "blocked", "and", "unsatisfied", ")", ":", "ret", "[", "'blocked'", "]", "=", "blocked", "sections", "=", "re", ".", "split", "(", "'\\n\\n'", ",", "stderr", ")", "for", "section", "in", "sections", ":", "if", "(", "'The following keyword changes'", "in", "section", ")", ":", "ret", "[", "'keywords'", "]", "=", "rexp", ".", "findall", "(", "section", ")", "elif", "(", "'The following license changes'", "in", "section", ")", ":", "ret", "[", "'license'", "]", "=", "rexp", ".", "findall", "(", "section", ")", "elif", "(", "'The following USE changes'", "in", "section", ")", ":", "ret", "[", "'use'", "]", "=", "rexp", ".", "findall", "(", "section", ")", "elif", "(", "'The following mask changes'", "in", "section", ")", ":", "ret", "[", "'mask'", "]", "=", "rexp", ".", "findall", "(", "section", ")", "return", "ret" ]
used to parse emerge output to provide meaningful output when emerge fails .
train
false
53,431
def iter_except(func, exception):
    try:
        while True:
            (yield func())
    except exception:
        pass
[ "def", "iter_except", "(", "func", ",", "exception", ")", ":", "try", ":", "while", "True", ":", "(", "yield", "func", "(", ")", ")", "except", "exception", ":", "pass" ]
iter_except calls func repeatedly until an exception is raised .
train
false
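A short usage sketch for the iter_except snippet above (hypothetical values; this mirrors the classic itertools-recipes pattern of converting a call-until-exception interface into an iterator):

s = {1, 2, 3}
# set.pop raises KeyError when the set is empty, which ends the iteration;
# element order is unspecified for sets.
drained = list(iter_except(s.pop, KeyError))
print(drained)  # e.g. [1, 2, 3]; s is now empty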
53,433
def _validate_autoscaling_params(params):
    if (not params['autoscaling']):
        return (True, '')
    if (not isinstance(params['autoscaling'], dict)):
        return (False, 'autoscaling: configuration expected to be a dictionary.')
    as_req_fields = [{'name': 'name', 'required': True, 'type': str}, {'name': 'enabled', 'required': True, 'type': bool}, {'name': 'policy', 'required': True, 'type': dict}]
    (as_req_valid, as_req_msg) = _check_params(params['autoscaling'], as_req_fields)
    if (not as_req_valid):
        return (False, as_req_msg)
    as_policy_fields = [{'name': 'max_instances', 'required': True, 'type': int}, {'name': 'min_instances', 'required': False, 'type': int}, {'name': 'cool_down_period', 'required': False, 'type': int}]
    (as_policy_valid, as_policy_msg) = _check_params(params['autoscaling']['policy'], as_policy_fields)
    if (not as_policy_valid):
        return (False, as_policy_msg)
    return (True, '')
[ "def", "_validate_autoscaling_params", "(", "params", ")", ":", "if", "(", "not", "params", "[", "'autoscaling'", "]", ")", ":", "return", "(", "True", ",", "''", ")", "if", "(", "not", "isinstance", "(", "params", "[", "'autoscaling'", "]", ",", "dict", ")", ")", ":", "return", "(", "False", ",", "'autoscaling: configuration expected to be a dictionary.'", ")", "as_req_fields", "=", "[", "{", "'name'", ":", "'name'", ",", "'required'", ":", "True", ",", "'type'", ":", "str", "}", ",", "{", "'name'", ":", "'enabled'", ",", "'required'", ":", "True", ",", "'type'", ":", "bool", "}", ",", "{", "'name'", ":", "'policy'", ",", "'required'", ":", "True", ",", "'type'", ":", "dict", "}", "]", "(", "as_req_valid", ",", "as_req_msg", ")", "=", "_check_params", "(", "params", "[", "'autoscaling'", "]", ",", "as_req_fields", ")", "if", "(", "not", "as_req_valid", ")", ":", "return", "(", "False", ",", "as_req_msg", ")", "as_policy_fields", "=", "[", "{", "'name'", ":", "'max_instances'", ",", "'required'", ":", "True", ",", "'type'", ":", "int", "}", ",", "{", "'name'", ":", "'min_instances'", ",", "'required'", ":", "False", ",", "'type'", ":", "int", "}", ",", "{", "'name'", ":", "'cool_down_period'", ",", "'required'", ":", "False", ",", "'type'", ":", "int", "}", "]", "(", "as_policy_valid", ",", "as_policy_msg", ")", "=", "_check_params", "(", "params", "[", "'autoscaling'", "]", "[", "'policy'", "]", ",", "as_policy_fields", ")", "if", "(", "not", "as_policy_valid", ")", ":", "return", "(", "False", ",", "as_policy_msg", ")", "return", "(", "True", ",", "''", ")" ]
validate that the minimum configuration is present for autoscaling .
train
false
53,434
def _get_proc_pid(proc): return proc.pid
[ "def", "_get_proc_pid", "(", "proc", ")", ":", "return", "proc", ".", "pid" ]
returns the pid of a process instance .
train
false
53,436
@validator def fi_business_id(business_id): if ((not business_id) or (not re.match(business_id_pattern, business_id))): return False factors = [7, 9, 10, 5, 8, 4, 2] numbers = map(int, business_id[:7]) checksum = int(business_id[8]) sum_ = sum(((f * n) for (f, n) in zip(factors, numbers))) modulo = (sum_ % 11) return (((11 - modulo) == checksum) or ((modulo == 0) and (checksum == 0)))
[ "@", "validator", "def", "fi_business_id", "(", "business_id", ")", ":", "if", "(", "(", "not", "business_id", ")", "or", "(", "not", "re", ".", "match", "(", "business_id_pattern", ",", "business_id", ")", ")", ")", ":", "return", "False", "factors", "=", "[", "7", ",", "9", ",", "10", ",", "5", ",", "8", ",", "4", ",", "2", "]", "numbers", "=", "map", "(", "int", ",", "business_id", "[", ":", "7", "]", ")", "checksum", "=", "int", "(", "business_id", "[", "8", "]", ")", "sum_", "=", "sum", "(", "(", "(", "f", "*", "n", ")", "for", "(", "f", ",", "n", ")", "in", "zip", "(", "factors", ",", "numbers", ")", ")", ")", "modulo", "=", "(", "sum_", "%", "11", ")", "return", "(", "(", "(", "11", "-", "modulo", ")", "==", "checksum", ")", "or", "(", "(", "modulo", "==", "0", ")", "and", "(", "checksum", "==", "0", ")", ")", ")" ]
validate a finnish business id .
train
true
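A worked check of the fi_business_id record above, assuming the unshown business_id_pattern accepts the usual NNNNNNN-N form and that the @validator decorator returns a falsy result on failure, as in the validators package. For '1572860-0' the weighted sum is 1*7 + 5*9 + 7*10 + 2*5 + 8*8 + 6*4 + 0*2 = 220, and 220 % 11 == 0 matches the trailing check digit 0.

assert fi_business_id('1572860-0')      # modulo == 0 and check digit == 0
assert not fi_business_id('1572860-9')  # same digits, wrong check digit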
53,437
def unpublicize_exploration(committer_id, exploration_id): _unpublicize_activity(committer_id, exploration_id, feconf.ACTIVITY_TYPE_EXPLORATION)
[ "def", "unpublicize_exploration", "(", "committer_id", ",", "exploration_id", ")", ":", "_unpublicize_activity", "(", "committer_id", ",", "exploration_id", ",", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ")" ]
unpublicizes an exploration .
train
false
53,438
def roots_sh_chebyt(n, mu=False): xw = roots_chebyt(n, mu) return ((((xw[0] + 1) / 2),) + xw[1:])
[ "def", "roots_sh_chebyt", "(", "n", ",", "mu", "=", "False", ")", ":", "xw", "=", "roots_chebyt", "(", "n", ",", "mu", ")", "return", "(", "(", "(", "(", "xw", "[", "0", "]", "+", "1", ")", "/", "2", ")", ",", ")", "+", "xw", "[", "1", ":", "]", ")" ]
gauss-chebyshev (first kind, shifted) quadrature .
train
false
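For the quadrature record above, scipy exposes this function as scipy.special.roots_sh_chebyt; a small sanity sketch of the shifted interval:

from scipy.special import roots_sh_chebyt

x, w = roots_sh_chebyt(5)
# the (x + 1) / 2 shift maps the Chebyshev nodes from (-1, 1) into (0, 1)
assert ((x > 0) & (x < 1)).all()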
53,439
def deep_merge(base, extra): for key in extra: if ((key in base) and isinstance(base[key], dict) and isinstance(extra[key], dict)): deep_merge(base[key], extra[key]) continue base[key] = extra[key]
[ "def", "deep_merge", "(", "base", ",", "extra", ")", ":", "for", "key", "in", "extra", ":", "if", "(", "(", "key", "in", "base", ")", "and", "isinstance", "(", "base", "[", "key", "]", ",", "dict", ")", "and", "isinstance", "(", "extra", "[", "key", "]", ",", "dict", ")", ")", ":", "deep_merge", "(", "base", "[", "key", "]", ",", "extra", "[", "key", "]", ")", "continue", "base", "[", "key", "]", "=", "extra", "[", "key", "]" ]
deeply merge two dictionaries .
train
false
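A quick trace of the deep_merge record above: nested dicts are merged recursively in place, while non-dict values from extra simply overwrite base.

base = {'a': {'x': 1}, 'b': 2}
deep_merge(base, {'a': {'y': 3}, 'b': 4})
assert base == {'a': {'x': 1, 'y': 3}, 'b': 4}  # mutated in place, returns None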
53,440
def rotate_90_clockwise(request, fileobjects): transpose_image(request, fileobjects, 4)
[ "def", "rotate_90_clockwise", "(", "request", ",", "fileobjects", ")", ":", "transpose_image", "(", "request", ",", "fileobjects", ",", "4", ")" ]
rotate image 90 degrees clockwise .
train
false
53,441
def IncludeUtilitiesInUserAgent(value): with _UTILITY_LOCK: _utility_registry.SetEnabled(value)
[ "def", "IncludeUtilitiesInUserAgent", "(", "value", ")", ":", "with", "_UTILITY_LOCK", ":", "_utility_registry", ".", "SetEnabled", "(", "value", ")" ]
configures the logging of utilities in the user-agent .
train
false
53,443
def _single_hsv_to_rgb(hsv): from matplotlib.colors import hsv_to_rgb return hsv_to_rgb(array(hsv).reshape(1, 1, 3)).reshape(3)
[ "def", "_single_hsv_to_rgb", "(", "hsv", ")", ":", "from", "matplotlib", ".", "colors", "import", "hsv_to_rgb", "return", "hsv_to_rgb", "(", "array", "(", "hsv", ")", ".", "reshape", "(", "1", ",", "1", ",", "3", ")", ")", ".", "reshape", "(", "3", ")" ]
transform a color from the hsv space to the rgb space .
train
false
53,444
def average_shortest_path_length(G, weight=None): n = len(G) if (n == 0): msg = 'the null graph has no paths, thus there is no averageshortest path length' raise nx.NetworkXPointlessConcept(msg) if (n == 1): return 0 if (G.is_directed() and (not nx.is_weakly_connected(G))): raise nx.NetworkXError('Graph is not weakly connected.') if ((not G.is_directed()) and (not nx.is_connected(G))): raise nx.NetworkXError('Graph is not connected.') if (weight is None): path_length = (lambda v: nx.single_source_shortest_path_length(G, v)) else: ssdpl = nx.single_source_dijkstra_path_length path_length = (lambda v: ssdpl(G, v, weight=weight)) s = sum((l for u in G for (v, l) in path_length(u))) return (s / (n * (n - 1)))
[ "def", "average_shortest_path_length", "(", "G", ",", "weight", "=", "None", ")", ":", "n", "=", "len", "(", "G", ")", "if", "(", "n", "==", "0", ")", ":", "msg", "=", "'the null graph has no paths, thus there is no averageshortest path length'", "raise", "nx", ".", "NetworkXPointlessConcept", "(", "msg", ")", "if", "(", "n", "==", "1", ")", ":", "return", "0", "if", "(", "G", ".", "is_directed", "(", ")", "and", "(", "not", "nx", ".", "is_weakly_connected", "(", "G", ")", ")", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'Graph is not weakly connected.'", ")", "if", "(", "(", "not", "G", ".", "is_directed", "(", ")", ")", "and", "(", "not", "nx", ".", "is_connected", "(", "G", ")", ")", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'Graph is not connected.'", ")", "if", "(", "weight", "is", "None", ")", ":", "path_length", "=", "(", "lambda", "v", ":", "nx", ".", "single_source_shortest_path_length", "(", "G", ",", "v", ")", ")", "else", ":", "ssdpl", "=", "nx", ".", "single_source_dijkstra_path_length", "path_length", "=", "(", "lambda", "v", ":", "ssdpl", "(", "G", ",", "v", ",", "weight", "=", "weight", ")", ")", "s", "=", "sum", "(", "(", "l", "for", "u", "in", "G", "for", "(", "v", ",", "l", ")", "in", "path_length", "(", "u", ")", ")", ")", "return", "(", "s", "/", "(", "n", "*", "(", "n", "-", "1", ")", ")", ")" ]
return the average shortest path length .
train
false
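A worked example for the record above, assuming Python 3 true division in the final s / (n * (n - 1)) step: on a five-node path graph the 20 ordered pairs have shortest-path distances summing to 40, giving 2.0.

import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
assert nx.average_shortest_path_length(G) == 2.0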
53,445
def getConnectedPaths(paths, pixelDictionary, width): if (len(paths) < 2): return paths connectedPaths = [] segments = [] for pathIndex in xrange(len(paths)): path = paths[pathIndex] segments.append(getSegmentFromPath(path, pathIndex)) for pathIndex in xrange(0, (len(paths) - 1)): concatenateRemovePath(connectedPaths, pathIndex, paths, pixelDictionary, segments, width) connectedPaths.append(paths[(-1)]) return connectedPaths
[ "def", "getConnectedPaths", "(", "paths", ",", "pixelDictionary", ",", "width", ")", ":", "if", "(", "len", "(", "paths", ")", "<", "2", ")", ":", "return", "paths", "connectedPaths", "=", "[", "]", "segments", "=", "[", "]", "for", "pathIndex", "in", "xrange", "(", "len", "(", "paths", ")", ")", ":", "path", "=", "paths", "[", "pathIndex", "]", "segments", ".", "append", "(", "getSegmentFromPath", "(", "path", ",", "pathIndex", ")", ")", "for", "pathIndex", "in", "xrange", "(", "0", ",", "(", "len", "(", "paths", ")", "-", "1", ")", ")", ":", "concatenateRemovePath", "(", "connectedPaths", ",", "pathIndex", ",", "paths", ",", "pixelDictionary", ",", "segments", ",", "width", ")", "connectedPaths", ".", "append", "(", "paths", "[", "(", "-", "1", ")", "]", ")", "return", "connectedPaths" ]
get connected paths from paths .
train
false
53,446
def generate_completions(event): b = event.current_buffer if b.complete_state: b.complete_next() else: event.cli.start_completion(insert_common_part=True, select_first=False)
[ "def", "generate_completions", "(", "event", ")", ":", "b", "=", "event", ".", "current_buffer", "if", "b", ".", "complete_state", ":", "b", ".", "complete_next", "(", ")", "else", ":", "event", ".", "cli", ".", "start_completion", "(", "insert_common_part", "=", "True", ",", "select_first", "=", "False", ")" ]
tab-completion: where the first tab completes the common suffix and the second tab lists all the completions .
train
true
53,452
def iter_http_requests(server, player): while ((not player) or player.running): try: (yield server.open(timeout=2.5)) except OSError: continue
[ "def", "iter_http_requests", "(", "server", ",", "player", ")", ":", "while", "(", "(", "not", "player", ")", "or", "player", ".", "running", ")", ":", "try", ":", "(", "yield", "server", ".", "open", "(", "timeout", "=", "2.5", ")", ")", "except", "OSError", ":", "continue" ]
repeatedly accept http connections on a server .
train
true
53,454
def getpackage(filename): src_file = src(filename) if ((os.path.isdir(src_file) or (not src_file.endswith('.py'))) and (not ispackage(src_file))): return None (base, ext) = os.path.splitext(os.path.basename(src_file)) if (base == '__init__'): mod_parts = [] else: mod_parts = [base] (path, part) = os.path.split(os.path.split(src_file)[0]) while part: if ispackage(os.path.join(path, part)): mod_parts.append(part) else: break (path, part) = os.path.split(path) mod_parts.reverse() return '.'.join(mod_parts)
[ "def", "getpackage", "(", "filename", ")", ":", "src_file", "=", "src", "(", "filename", ")", "if", "(", "(", "os", ".", "path", ".", "isdir", "(", "src_file", ")", "or", "(", "not", "src_file", ".", "endswith", "(", "'.py'", ")", ")", ")", "and", "(", "not", "ispackage", "(", "src_file", ")", ")", ")", ":", "return", "None", "(", "base", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "src_file", ")", ")", "if", "(", "base", "==", "'__init__'", ")", ":", "mod_parts", "=", "[", "]", "else", ":", "mod_parts", "=", "[", "base", "]", "(", "path", ",", "part", ")", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "split", "(", "src_file", ")", "[", "0", "]", ")", "while", "part", ":", "if", "ispackage", "(", "os", ".", "path", ".", "join", "(", "path", ",", "part", ")", ")", ":", "mod_parts", ".", "append", "(", "part", ")", "else", ":", "break", "(", "path", ",", "part", ")", "=", "os", ".", "path", ".", "split", "(", "path", ")", "mod_parts", ".", "reverse", "(", ")", "return", "'.'", ".", "join", "(", "mod_parts", ")" ]
find the full dotted package name for a given python source file name .
train
true
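A hypothetical trace of the getpackage record above, assuming the unshown src() and ispackage() helpers behave as in nose (compiled-to-source mapping and __init__ detection), with /src/foo a package but /src itself not:

getpackage('/src/foo/bar.py')       # -> 'foo.bar'
getpackage('/src/foo/__init__.py')  # -> 'foo'  (base '__init__' contributes no part)
getpackage('/src/notes.txt')        # -> None   (neither a .py file nor a package dir)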
53,455
def build_response(response, request): dict_obj = {} dict_obj['path'] = urlparse.unquote(request.path) dict_obj['status'] = response.status_code dict_obj['headers'] = dict(response.headers) body = '' if (request.method != 'HEAD'): try: body = response.json except ValueError: body = response.body dict_obj['body'] = body return dict_obj
[ "def", "build_response", "(", "response", ",", "request", ")", ":", "dict_obj", "=", "{", "}", "dict_obj", "[", "'path'", "]", "=", "urlparse", ".", "unquote", "(", "request", ".", "path", ")", "dict_obj", "[", "'status'", "]", "=", "response", ".", "status_code", "dict_obj", "[", "'headers'", "]", "=", "dict", "(", "response", ".", "headers", ")", "body", "=", "''", "if", "(", "request", ".", "method", "!=", "'HEAD'", ")", ":", "try", ":", "body", "=", "response", ".", "json", "except", "ValueError", ":", "body", "=", "response", ".", "body", "dict_obj", "[", "'body'", "]", "=", "body", "return", "dict_obj" ]
transform a :class:pyramid response and the originating request into a serializable dict .
train
false
53,457
def is_min_version(found, minversion): expected_version = version.LooseVersion(minversion) found_version = version.LooseVersion(found) return (found_version >= expected_version)
[ "def", "is_min_version", "(", "found", ",", "minversion", ")", ":", "expected_version", "=", "version", ".", "LooseVersion", "(", "minversion", ")", "found_version", "=", "version", ".", "LooseVersion", "(", "found", ")", "return", "(", "found_version", ">=", "expected_version", ")" ]
returns true if found is at least as high a version as minversion .
train
false
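The LooseVersion comparison above splits version strings into components rather than comparing them as plain text, so '1.10' correctly ranks above '1.9':

assert is_min_version('1.10.2', '1.9')  # numeric compare: 10 > 9
assert not is_min_version('0.9', '1.0')
assert is_min_version('1.9', '1.9')     # equality counts as "at least"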
53,458
def add_parents(paths): all_paths = set() for path in paths: while (u'//' in path): path = path.replace(u'//', u'/') all_paths.add(path) if (u'/' in path): parent_dir = dirname(path) while parent_dir: all_paths.add(parent_dir) parent_dir = dirname(parent_dir) return all_paths
[ "def", "add_parents", "(", "paths", ")", ":", "all_paths", "=", "set", "(", ")", "for", "path", "in", "paths", ":", "while", "(", "u'//'", "in", "path", ")", ":", "path", "=", "path", ".", "replace", "(", "u'//'", ",", "u'/'", ")", "all_paths", ".", "add", "(", "path", ")", "if", "(", "u'/'", "in", "path", ")", ":", "parent_dir", "=", "dirname", "(", "path", ")", "while", "parent_dir", ":", "all_paths", ".", "add", "(", "parent_dir", ")", "parent_dir", "=", "dirname", "(", "parent_dir", ")", "return", "all_paths" ]
iterate over each item in the set and add its parent directories .
train
false
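A short sketch of the add_parents record above, assuming dirname is the posixpath-style os.path.dirname applied to '/'-separated paths; doubled slashes are collapsed before the walk up to the root.

assert add_parents({'a/b/c.txt'}) == {'a/b/c.txt', 'a/b', 'a'}
assert add_parents({'a//b'}) == {'a/b', 'a'}  # '//' collapsed first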
53,459
def data_clustering(data, distance=Euclidean, linkage=AVERAGE): matrix = distance(data) return dist_matrix_clustering(matrix, linkage=linkage)
[ "def", "data_clustering", "(", "data", ",", "distance", "=", "Euclidean", ",", "linkage", "=", "AVERAGE", ")", ":", "matrix", "=", "distance", "(", "data", ")", "return", "dist_matrix_clustering", "(", "matrix", ",", "linkage", "=", "linkage", ")" ]
return the hierarchical clustering of the data set's rows .
train
false
53,460
def test_freeze_basic(script): script.scratch_path.join('initools-req.txt').write(textwrap.dedent(' simple==2.0\n # and something else to test out:\n simple2<=3.0\n ')) script.pip_install_local('-r', (script.scratch_path / 'initools-req.txt')) result = script.pip('freeze', expect_stderr=True) expected = textwrap.dedent(' ...simple==2.0\n simple2==3.0...\n <BLANKLINE>') _check_output(result.stdout, expected)
[ "def", "test_freeze_basic", "(", "script", ")", ":", "script", ".", "scratch_path", ".", "join", "(", "'initools-req.txt'", ")", ".", "write", "(", "textwrap", ".", "dedent", "(", "' simple==2.0\\n # and something else to test out:\\n simple2<=3.0\\n '", ")", ")", "script", ".", "pip_install_local", "(", "'-r'", ",", "(", "script", ".", "scratch_path", "/", "'initools-req.txt'", ")", ")", "result", "=", "script", ".", "pip", "(", "'freeze'", ",", "expect_stderr", "=", "True", ")", "expected", "=", "textwrap", ".", "dedent", "(", "' ...simple==2.0\\n simple2==3.0...\\n <BLANKLINE>'", ")", "_check_output", "(", "result", ".", "stdout", ",", "expected", ")" ]
basic test that pip freeze lists installed requirements with pinned versions .
train
false
53,461
def make_packing_list(doc): if (doc.get(u'_action') and (doc._action == u'update_after_submit')): return parent_items = [] for d in doc.get(u'items'): if frappe.db.get_value(u'Product Bundle', {u'new_item_code': d.item_code}): for i in get_product_bundle_items(d.item_code): update_packing_list_item(doc, i.item_code, (flt(i.qty) * flt(d.qty)), d, i.description) if ([d.item_code, d.name] not in parent_items): parent_items.append([d.item_code, d.name]) cleanup_packing_list(doc, parent_items)
[ "def", "make_packing_list", "(", "doc", ")", ":", "if", "(", "doc", ".", "get", "(", "u'_action'", ")", "and", "(", "doc", ".", "_action", "==", "u'update_after_submit'", ")", ")", ":", "return", "parent_items", "=", "[", "]", "for", "d", "in", "doc", ".", "get", "(", "u'items'", ")", ":", "if", "frappe", ".", "db", ".", "get_value", "(", "u'Product Bundle'", ",", "{", "u'new_item_code'", ":", "d", ".", "item_code", "}", ")", ":", "for", "i", "in", "get_product_bundle_items", "(", "d", ".", "item_code", ")", ":", "update_packing_list_item", "(", "doc", ",", "i", ".", "item_code", ",", "(", "flt", "(", "i", ".", "qty", ")", "*", "flt", "(", "d", ".", "qty", ")", ")", ",", "d", ",", "i", ".", "description", ")", "if", "(", "[", "d", ".", "item_code", ",", "d", ".", "name", "]", "not", "in", "parent_items", ")", ":", "parent_items", ".", "append", "(", "[", "d", ".", "item_code", ",", "d", ".", "name", "]", ")", "cleanup_packing_list", "(", "doc", ",", "parent_items", ")" ]
make packing list for product bundle item .
train
false
53,462
def record_chosen_plugins(config, plugins, auth, inst): cn = config.namespace cn.authenticator = (plugins.find_init(auth).name if auth else 'None') cn.installer = (plugins.find_init(inst).name if inst else 'None')
[ "def", "record_chosen_plugins", "(", "config", ",", "plugins", ",", "auth", ",", "inst", ")", ":", "cn", "=", "config", ".", "namespace", "cn", ".", "authenticator", "=", "(", "plugins", ".", "find_init", "(", "auth", ")", ".", "name", "if", "auth", "else", "'None'", ")", "cn", ".", "installer", "=", "(", "plugins", ".", "find_init", "(", "inst", ")", ".", "name", "if", "inst", "else", "'None'", ")" ]
update the config entries to reflect the plugins we actually selected .
train
false
53,463
def create_env(): searchpath = list(settings.JINJA2_TEMPLATE_DIRS) return Environment(loader=FileSystemLoader(searchpath), auto_reload=settings.TEMPLATE_DEBUG, cache_size=getattr(settings, 'JINJA2_CACHE_SIZE', 50), extensions=getattr(settings, 'JINJA2_EXTENSIONS', ()))
[ "def", "create_env", "(", ")", ":", "searchpath", "=", "list", "(", "settings", ".", "JINJA2_TEMPLATE_DIRS", ")", "return", "Environment", "(", "loader", "=", "FileSystemLoader", "(", "searchpath", ")", ",", "auto_reload", "=", "settings", ".", "TEMPLATE_DEBUG", ",", "cache_size", "=", "getattr", "(", "settings", ",", "'JINJA2_CACHE_SIZE'", ",", "50", ")", ",", "extensions", "=", "getattr", "(", "settings", ",", "'JINJA2_EXTENSIONS'", ",", "(", ")", ")", ")" ]
create a new jinja2 environment .
train
false
53,467
def numsplit(text): result = [] for group in re.split('(\\d+)', text): if group: try: group = int(group) except ValueError: pass result.append(group) return result
[ "def", "numsplit", "(", "text", ")", ":", "result", "=", "[", "]", "for", "group", "in", "re", ".", "split", "(", "'(\\\\d+)'", ",", "text", ")", ":", "if", "group", ":", "try", ":", "group", "=", "int", "(", "group", ")", "except", "ValueError", ":", "pass", "result", ".", "append", "(", "group", ")", "return", "result" ]
convert a string into a list of texts and numbers in order to support natural sorting .
train
false
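The numsplit record above makes natural sorting a one-liner, because the mixed text/number keys compare element-wise:

assert numsplit('file10b') == ['file', 10, 'b']
# plain lexicographic order would put 'a10' before 'a2'; natural order does not
assert sorted(['a10', 'a2'], key=numsplit) == ['a2', 'a10']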
53,468
def _qsympify_sequence(seq): return tuple(__qsympify_sequence_helper(seq))
[ "def", "_qsympify_sequence", "(", "seq", ")", ":", "return", "tuple", "(", "__qsympify_sequence_helper", "(", "seq", ")", ")" ]
convert elements of a sequence to standard form .
train
false
53,469
def engine(func): @functools.wraps(func) def wrapper(*args, **kwargs): runner = None def handle_exception(typ, value, tb): if (runner is not None): return runner.handle_exception(typ, value, tb) return False with ExceptionStackContext(handle_exception) as deactivate: try: result = func(*args, **kwargs) except (Return, StopIteration) as e: result = getattr(e, 'value', None) else: if isinstance(result, types.GeneratorType): def final_callback(value): if (value is not None): raise ReturnValueIgnoredError(('@gen.engine functions cannot return values: %r' % (value,))) assert (value is None) deactivate() runner = Runner(result, final_callback) runner.run() return if (result is not None): raise ReturnValueIgnoredError(('@gen.engine functions cannot return values: %r' % (result,))) deactivate() return wrapper
[ "def", "engine", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "runner", "=", "None", "def", "handle_exception", "(", "typ", ",", "value", ",", "tb", ")", ":", "if", "(", "runner", "is", "not", "None", ")", ":", "return", "runner", ".", "handle_exception", "(", "typ", ",", "value", ",", "tb", ")", "return", "False", "with", "ExceptionStackContext", "(", "handle_exception", ")", "as", "deactivate", ":", "try", ":", "result", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "except", "(", "Return", ",", "StopIteration", ")", "as", "e", ":", "result", "=", "getattr", "(", "e", ",", "'value'", ",", "None", ")", "else", ":", "if", "isinstance", "(", "result", ",", "types", ".", "GeneratorType", ")", ":", "def", "final_callback", "(", "value", ")", ":", "if", "(", "value", "is", "not", "None", ")", ":", "raise", "ReturnValueIgnoredError", "(", "(", "'@gen.engine functions cannot return values: %r'", "%", "(", "value", ",", ")", ")", ")", "assert", "(", "value", "is", "None", ")", "deactivate", "(", ")", "runner", "=", "Runner", "(", "result", ",", "final_callback", ")", "runner", ".", "run", "(", ")", "return", "if", "(", "result", "is", "not", "None", ")", ":", "raise", "ReturnValueIgnoredError", "(", "(", "'@gen.engine functions cannot return values: %r'", "%", "(", "result", ",", ")", ")", ")", "deactivate", "(", ")", "return", "wrapper" ]
callback-oriented decorator for asynchronous generators .
train
false
53,470
def isIPv6Address(addr): return isIPAddress(addr, AF_INET6)
[ "def", "isIPv6Address", "(", "addr", ")", ":", "return", "isIPAddress", "(", "addr", ",", "AF_INET6", ")" ]
determine whether the given string represents an ipv6 address .
train
false
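A couple of sanity checks for the record above, assuming the underlying isIPAddress helper validates with inet_pton as Twisted's does:

assert isIPv6Address('::1')
assert isIPv6Address('2001:db8::1')
assert not isIPv6Address('1.2.3.4')  # valid IPv4, but not IPv6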
53,471
def _img2rgba(im): assert (im.mode == 'RGBA') return [(_img2arr(band).astype(numpy.float32) / 255.0) for band in im.split()]
[ "def", "_img2rgba", "(", "im", ")", ":", "assert", "(", "im", ".", "mode", "==", "'RGBA'", ")", "return", "[", "(", "_img2arr", "(", "band", ")", ".", "astype", "(", "numpy", ".", "float32", ")", "/", "255.0", ")", "for", "band", "in", "im", ".", "split", "(", ")", "]" ]
convert pil image to four numeric array objects .
train
false
53,473
def _import_plugin(module_name, plugin_path, modnames, modlist): if (module_name in modnames): return try: mock = _ModuleMock() mock.LOCALEPATH = osp.join(plugin_path, module_name, 'locale') sys.modules[module_name] = mock if osp.isdir(osp.join(plugin_path, module_name)): module = _import_module_from_path(module_name, plugin_path) else: module = None if module: sys.modules[module_name] = module modlist.append(module) modnames.append(module_name) except Exception: sys.stderr.write('ERROR: 3rd party plugin import failed for `{0}`\n'.format(module_name)) traceback.print_exc(file=sys.stderr)
[ "def", "_import_plugin", "(", "module_name", ",", "plugin_path", ",", "modnames", ",", "modlist", ")", ":", "if", "(", "module_name", "in", "modnames", ")", ":", "return", "try", ":", "mock", "=", "_ModuleMock", "(", ")", "mock", ".", "LOCALEPATH", "=", "osp", ".", "join", "(", "plugin_path", ",", "module_name", ",", "'locale'", ")", "sys", ".", "modules", "[", "module_name", "]", "=", "mock", "if", "osp", ".", "isdir", "(", "osp", ".", "join", "(", "plugin_path", ",", "module_name", ")", ")", ":", "module", "=", "_import_module_from_path", "(", "module_name", ",", "plugin_path", ")", "else", ":", "module", "=", "None", "if", "module", ":", "sys", ".", "modules", "[", "module_name", "]", "=", "module", "modlist", ".", "append", "(", "module", ")", "modnames", ".", "append", "(", "module_name", ")", "except", "Exception", ":", "sys", ".", "stderr", ".", "write", "(", "'ERROR: 3rd party plugin import failed for `{0}`\\n'", ".", "format", "(", "module_name", ")", ")", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stderr", ")" ]
import the plugin module_name from plugin_path .
train
true
53,474
def post_delete_layer(instance, sender, **kwargs): from geonode.maps.models import MapLayer if instance.typename: logger.debug('Going to delete associated maplayers for [%s]', instance.typename.encode('utf-8')) MapLayer.objects.filter(name=instance.typename, ows_url=instance.ows_url).delete() if instance.is_remote: return if instance.typename: logger.debug('Going to delete the default style for [%s]', instance.typename.encode('utf-8')) if (instance.default_style and (Layer.objects.filter(default_style__id=instance.default_style.id).count() == 0)): instance.default_style.delete() try: if instance.upload_session: for lf in instance.upload_session.layerfile_set.all(): lf.file.delete() except UploadSession.DoesNotExist: pass
[ "def", "post_delete_layer", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "from", "geonode", ".", "maps", ".", "models", "import", "MapLayer", "if", "instance", ".", "typename", ":", "logger", ".", "debug", "(", "'Going to delete associated maplayers for [%s]'", ",", "instance", ".", "typename", ".", "encode", "(", "'utf-8'", ")", ")", "MapLayer", ".", "objects", ".", "filter", "(", "name", "=", "instance", ".", "typename", ",", "ows_url", "=", "instance", ".", "ows_url", ")", ".", "delete", "(", ")", "if", "instance", ".", "is_remote", ":", "return", "if", "instance", ".", "typename", ":", "logger", ".", "debug", "(", "'Going to delete the default style for [%s]'", ",", "instance", ".", "typename", ".", "encode", "(", "'utf-8'", ")", ")", "if", "(", "instance", ".", "default_style", "and", "(", "Layer", ".", "objects", ".", "filter", "(", "default_style__id", "=", "instance", ".", "default_style", ".", "id", ")", ".", "count", "(", ")", "==", "0", ")", ")", ":", "instance", ".", "default_style", ".", "delete", "(", ")", "try", ":", "if", "instance", ".", "upload_session", ":", "for", "lf", "in", "instance", ".", "upload_session", ".", "layerfile_set", ".", "all", "(", ")", ":", "lf", ".", "file", ".", "delete", "(", ")", "except", "UploadSession", ".", "DoesNotExist", ":", "pass" ]
remove the layer from any associated map .
train
false
53,477
def getBridgeDirection(belowLoops, layerLoops, radius): if (len(belowLoops) < 1): return None belowOutsetLoops = intercircle.getInsetLoopsFromLoops(belowLoops, (- radius)) bridgeRotation = complex() for loop in layerLoops: for (pointIndex, point) in enumerate(loop): previousIndex = (((pointIndex + len(loop)) - 1) % len(loop)) bridgeRotation += getOverhangDirection(belowOutsetLoops, loop[previousIndex], point) if (abs(bridgeRotation) < (0.75 * radius)): return None else: return cmath.sqrt((bridgeRotation / abs(bridgeRotation)))
[ "def", "getBridgeDirection", "(", "belowLoops", ",", "layerLoops", ",", "radius", ")", ":", "if", "(", "len", "(", "belowLoops", ")", "<", "1", ")", ":", "return", "None", "belowOutsetLoops", "=", "intercircle", ".", "getInsetLoopsFromLoops", "(", "belowLoops", ",", "(", "-", "radius", ")", ")", "bridgeRotation", "=", "complex", "(", ")", "for", "loop", "in", "layerLoops", ":", "for", "(", "pointIndex", ",", "point", ")", "in", "enumerate", "(", "loop", ")", ":", "previousIndex", "=", "(", "(", "(", "pointIndex", "+", "len", "(", "loop", ")", ")", "-", "1", ")", "%", "len", "(", "loop", ")", ")", "bridgeRotation", "+=", "getOverhangDirection", "(", "belowOutsetLoops", ",", "loop", "[", "previousIndex", "]", ",", "point", ")", "if", "(", "abs", "(", "bridgeRotation", ")", "<", "(", "0.75", "*", "radius", ")", ")", ":", "return", "None", "else", ":", "return", "cmath", ".", "sqrt", "(", "(", "bridgeRotation", "/", "abs", "(", "bridgeRotation", ")", ")", ")" ]
get span direction for the majority of the overhanging extrusion perimeter .
train
false
53,478
def suppressWarnings(f, *suppressedWarnings): def warningSuppressingWrapper(*a, **kw): return runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw) return tputil.mergeFunctionMetadata(f, warningSuppressingWrapper)
[ "def", "suppressWarnings", "(", "f", ",", "*", "suppressedWarnings", ")", ":", "def", "warningSuppressingWrapper", "(", "*", "a", ",", "**", "kw", ")", ":", "return", "runWithWarningsSuppressed", "(", "suppressedWarnings", ",", "f", ",", "*", "a", ",", "**", "kw", ")", "return", "tputil", ".", "mergeFunctionMetadata", "(", "f", ",", "warningSuppressingWrapper", ")" ]
wrap c{f} in a callable which suppresses the indicated warnings before invoking c{f} and unsuppresses them afterwards .
train
false
53,479
def clone_plugin(plugin): dirname = os.path.join(plugin_cache_dir(), os.path.basename(plugin)) print ('Cloning %s -> %s' % (plugin, dirname)) if os.path.exists(dirname): print ('Skip cloning of %s. Already there.' % plugin) return create_directory(dirname) subprocess.call(['git', 'clone', '--recursive', '--depth', '1', ('https://github.com/%s' % plugin), dirname]) if (plugin == 'Valloric/YouCompleteMe'): subprocess.call(os.path.join(dirname, './install.sh'), cwd=dirname)
[ "def", "clone_plugin", "(", "plugin", ")", ":", "dirname", "=", "os", ".", "path", ".", "join", "(", "plugin_cache_dir", "(", ")", ",", "os", ".", "path", ".", "basename", "(", "plugin", ")", ")", "print", "(", "'Cloning %s -> %s'", "%", "(", "plugin", ",", "dirname", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "dirname", ")", ":", "print", "(", "'Skip cloning of %s. Already there.'", "%", "plugin", ")", "return", "create_directory", "(", "dirname", ")", "subprocess", ".", "call", "(", "[", "'git'", ",", "'clone'", ",", "'--recursive'", ",", "'--depth'", ",", "'1'", ",", "(", "'https://github.com/%s'", "%", "plugin", ")", ",", "dirname", "]", ")", "if", "(", "plugin", "==", "'Valloric/YouCompleteMe'", ")", ":", "subprocess", ".", "call", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "'./install.sh'", ")", ",", "cwd", "=", "dirname", ")" ]
clone the given plugin into our plugin directory .
train
false
53,480
def _send_picklemetrics(metrics): metrics = [(metric_name, (timestamp, value)) for (metric_name, value, timestamp) in metrics] data = cPickle.dumps(metrics, (-1)) payload = (struct.pack('!L', len(data)) + data) return payload
[ "def", "_send_picklemetrics", "(", "metrics", ")", ":", "metrics", "=", "[", "(", "metric_name", ",", "(", "timestamp", ",", "value", ")", ")", "for", "(", "metric_name", ",", "value", ",", "timestamp", ")", "in", "metrics", "]", "data", "=", "cPickle", ".", "dumps", "(", "metrics", ",", "(", "-", "1", ")", ")", "payload", "=", "(", "struct", ".", "pack", "(", "'!L'", ",", "len", "(", "data", ")", ")", "+", "data", ")", "return", "payload" ]
format metrics for the carbon pickle protocol .
train
true
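The payload built above can be decoded back to verify the wire layout: a 4-byte big-endian length header followed by the re-nested (name, (timestamp, value)) list that carbon's pickle receiver expects (Python 2, matching the cPickle import in the snippet).

import cPickle
import struct

payload = _send_picklemetrics([('cpu.load', 0.5, 1400000000)])
(length,) = struct.unpack('!L', payload[:4])
assert length == len(payload) - 4
assert cPickle.loads(payload[4:]) == [('cpu.load', (1400000000, 0.5))]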
53,481
def _get_backend_tag(tag): pytest_marks = {'qtwebengine_todo': pytest.mark.qtwebengine_todo, 'qtwebengine_skip': pytest.mark.qtwebengine_skip, 'qtwebkit_skip': pytest.mark.qtwebkit_skip} if (not any((tag.startswith((t + ':')) for t in pytest_marks))): return None (name, desc) = tag.split(':', maxsplit=1) return pytest_marks[name](desc)
[ "def", "_get_backend_tag", "(", "tag", ")", ":", "pytest_marks", "=", "{", "'qtwebengine_todo'", ":", "pytest", ".", "mark", ".", "qtwebengine_todo", ",", "'qtwebengine_skip'", ":", "pytest", ".", "mark", ".", "qtwebengine_skip", ",", "'qtwebkit_skip'", ":", "pytest", ".", "mark", ".", "qtwebkit_skip", "}", "if", "(", "not", "any", "(", "(", "tag", ".", "startswith", "(", "(", "t", "+", "':'", ")", ")", "for", "t", "in", "pytest_marks", ")", ")", ")", ":", "return", "None", "(", "name", ",", "desc", ")", "=", "tag", ".", "split", "(", "':'", ",", "maxsplit", "=", "1", ")", "return", "pytest_marks", "[", "name", "]", "(", "desc", ")" ]
handle a @qtwebengine_*/@qtwebkit_skip tag .
train
false
53,482
@pytest.mark.network def test_search_should_exit_status_code_zero_when_find_packages(script): result = script.pip('search', 'pip') assert (result.returncode == SUCCESS)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_search_should_exit_status_code_zero_when_find_packages", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'search'", ",", "'pip'", ")", "assert", "(", "result", ".", "returncode", "==", "SUCCESS", ")" ]
test that pip search exits with status code zero when packages are found .
train
false
53,484
def make_totp(secret, skew=0, timestamp=None): timestamp = (timestamp or time.time()) counter = (timestamp // PERIOD) return make_hotp(secret, (counter - skew))
[ "def", "make_totp", "(", "secret", ",", "skew", "=", "0", ",", "timestamp", "=", "None", ")", ":", "timestamp", "=", "(", "timestamp", "or", "time", ".", "time", "(", ")", ")", "counter", "=", "(", "timestamp", "//", "PERIOD", ")", "return", "make_hotp", "(", "secret", ",", "(", "counter", "-", "skew", ")", ")" ]
generate an rfc-6238 time-based one time password .
train
false
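A usage sketch for the totp record above, assuming the unshown PERIOD constant is the rfc-6238 default of 30 seconds and make_hotp is an rfc-4226 counter-based helper:

secret = '12345678901234567890'
# timestamp 59 falls in the second 30-second window, so counter = 59 // 30 = 1
assert make_totp(secret, timestamp=59) == make_hotp(secret, 1)
# skew=1 reaches back one window, e.g. to tolerate client clock drift
assert make_totp(secret, skew=1, timestamp=59) == make_hotp(secret, 0)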
53,485
def publish_file(source=None, source_path=None, destination=None, destination_path=None, reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=False): (output, pub) = publish_programmatically(source_class=io.FileInput, source=source, source_path=source_path, destination_class=io.FileOutput, destination=destination, destination_path=destination_path, reader=reader, reader_name=reader_name, parser=parser, parser_name=parser_name, writer=writer, writer_name=writer_name, settings=settings, settings_spec=settings_spec, settings_overrides=settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status) return output
[ "def", "publish_file", "(", "source", "=", "None", ",", "source_path", "=", "None", ",", "destination", "=", "None", ",", "destination_path", "=", "None", ",", "reader", "=", "None", ",", "reader_name", "=", "'standalone'", ",", "parser", "=", "None", ",", "parser_name", "=", "'restructuredtext'", ",", "writer", "=", "None", ",", "writer_name", "=", "'pseudoxml'", ",", "settings", "=", "None", ",", "settings_spec", "=", "None", ",", "settings_overrides", "=", "None", ",", "config_section", "=", "None", ",", "enable_exit_status", "=", "False", ")", ":", "(", "output", ",", "pub", ")", "=", "publish_programmatically", "(", "source_class", "=", "io", ".", "FileInput", ",", "source", "=", "source", ",", "source_path", "=", "source_path", ",", "destination_class", "=", "io", ".", "FileOutput", ",", "destination", "=", "destination", ",", "destination_path", "=", "destination_path", ",", "reader", "=", "reader", ",", "reader_name", "=", "reader_name", ",", "parser", "=", "parser", ",", "parser_name", "=", "parser_name", ",", "writer", "=", "writer", ",", "writer_name", "=", "writer_name", ",", "settings", "=", "settings", ",", "settings_spec", "=", "settings_spec", ",", "settings_overrides", "=", "settings_overrides", ",", "config_section", "=", "config_section", ",", "enable_exit_status", "=", "enable_exit_status", ")", "return", "output" ]
set up & run a publisher for programmatic use with file-like i/o .
train
false
53,486
def set_creation_order(instance): global _creation_order instance._creation_order = _creation_order _creation_order += 1
[ "def", "set_creation_order", "(", "instance", ")", ":", "global", "_creation_order", "instance", ".", "_creation_order", "=", "_creation_order", "_creation_order", "+=", "1" ]
assign a _creation_order sequence to the given instance .
train
false
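A minimal sketch of the record above, assuming the module initializes the global _creation_order counter to an integer as sqlalchemy.util does; instances become sortable by construction order:

class Widget(object):
    def __init__(self):
        set_creation_order(self)

a, b = Widget(), Widget()
assert a._creation_order < b._creation_order
assert sorted([b, a], key=lambda w: w._creation_order) == [a, b]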
53,487
def get_enrolled(): return User.objects.raw('SELECT * FROM auth_user where id not in (SELECT user_id from student_usersignupsource)')
[ "def", "get_enrolled", "(", ")", ":", "return", "User", ".", "objects", ".", "raw", "(", "'SELECT * FROM auth_user where id not in (SELECT user_id from student_usersignupsource)'", ")" ]
filter out all users who signed up via a microsite .
train
false
53,489
def get_site_status_msg(course_key): try: if (not GlobalStatusMessage.current().enabled): return None return GlobalStatusMessage.current().full_message(course_key) except: log.exception('Error while getting a status message.') return None
[ "def", "get_site_status_msg", "(", "course_key", ")", ":", "try", ":", "if", "(", "not", "GlobalStatusMessage", ".", "current", "(", ")", ".", "enabled", ")", ":", "return", "None", "return", "GlobalStatusMessage", ".", "current", "(", ")", ".", "full_message", "(", "course_key", ")", "except", ":", "log", ".", "exception", "(", "'Error while getting a status message.'", ")", "return", "None" ]
pull the status message from the database .
train
false
53,491
def parse_basic_auth(src_ip_port, dst_ip_port, headers, authorization_header): if authorization_header: try: header_val = headers[authorization_header.group()] except KeyError: return b64_auth_re = re.match('basic (.+)', header_val, re.IGNORECASE) if (b64_auth_re != None): basic_auth_b64 = b64_auth_re.group(1) basic_auth_creds = base64.decodestring(basic_auth_b64) msg = ('Basic Authentication: %s' % basic_auth_creds) printer(src_ip_port, dst_ip_port, msg)
[ "def", "parse_basic_auth", "(", "src_ip_port", ",", "dst_ip_port", ",", "headers", ",", "authorization_header", ")", ":", "if", "authorization_header", ":", "try", ":", "header_val", "=", "headers", "[", "authorization_header", ".", "group", "(", ")", "]", "except", "KeyError", ":", "return", "b64_auth_re", "=", "re", ".", "match", "(", "'basic (.+)'", ",", "header_val", ",", "re", ".", "IGNORECASE", ")", "if", "(", "b64_auth_re", "!=", "None", ")", ":", "basic_auth_b64", "=", "b64_auth_re", ".", "group", "(", "1", ")", "basic_auth_creds", "=", "base64", ".", "decodestring", "(", "basic_auth_b64", ")", "msg", "=", "(", "'Basic Authentication: %s'", "%", "basic_auth_creds", ")", "printer", "(", "src_ip_port", ",", "dst_ip_port", ",", "msg", ")" ]
parse basic authentication over http .
train
false
53,492
def variable_not_found(name, candidates, msg=None, deco_braces=True): if (msg is None): msg = ("Variable '%s' not found." % name) candidates = _decorate_candidates(name[0], candidates, deco_braces) normalizer = partial(normalize, ignore='$@%&*{}_', caseless=True, spaceless=True) finder = RecommendationFinder(normalizer) recommendations = finder.find_recommendations(name, candidates) msg = finder.format_recommendations(msg, recommendations) raise VariableError(msg)
[ "def", "variable_not_found", "(", "name", ",", "candidates", ",", "msg", "=", "None", ",", "deco_braces", "=", "True", ")", ":", "if", "(", "msg", "is", "None", ")", ":", "msg", "=", "(", "\"Variable '%s' not found.\"", "%", "name", ")", "candidates", "=", "_decorate_candidates", "(", "name", "[", "0", "]", ",", "candidates", ",", "deco_braces", ")", "normalizer", "=", "partial", "(", "normalize", ",", "ignore", "=", "'$@%&*{}_'", ",", "caseless", "=", "True", ",", "spaceless", "=", "True", ")", "finder", "=", "RecommendationFinder", "(", "normalizer", ")", "recommendations", "=", "finder", ".", "find_recommendations", "(", "name", ",", "candidates", ")", "msg", "=", "finder", ".", "format_recommendations", "(", "msg", ",", "recommendations", ")", "raise", "VariableError", "(", "msg", ")" ]
raise variableerror for a missing variable name .
train
false
53,493
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None): if (io_loop is None): io_loop = IOLoop.current() request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout) request = httpclient._RequestProxy(request, httpclient.HTTPRequest._DEFAULTS) conn = WebSocketClientConnection(io_loop, request) if (callback is not None): io_loop.add_future(conn.connect_future, callback) return conn.connect_future
[ "def", "websocket_connect", "(", "url", ",", "io_loop", "=", "None", ",", "callback", "=", "None", ",", "connect_timeout", "=", "None", ")", ":", "if", "(", "io_loop", "is", "None", ")", ":", "io_loop", "=", "IOLoop", ".", "current", "(", ")", "request", "=", "httpclient", ".", "HTTPRequest", "(", "url", ",", "connect_timeout", "=", "connect_timeout", ")", "request", "=", "httpclient", ".", "_RequestProxy", "(", "request", ",", "httpclient", ".", "HTTPRequest", ".", "_DEFAULTS", ")", "conn", "=", "WebSocketClientConnection", "(", "io_loop", ",", "request", ")", "if", "(", "callback", "is", "not", "None", ")", ":", "io_loop", ".", "add_future", "(", "conn", ".", "connect_future", ",", "callback", ")", "return", "conn", ".", "connect_future" ]
client-side websocket support .
train
false