id_within_dataset: int64 (1 - 55.5k)
snippet: string (length 19 - 14.2k)
tokens: list (length 6 - 1.63k)
nl: string (length 6 - 352)
split_within_dataset: string (1 class)
is_duplicated: bool (2 classes)
49,510
def readable_directory(arg):
    arg = directory(arg)
    if (not os.access(arg, os.R_OK)):
        raise argparse.ArgumentTypeError('{0} exists but is not readable with its current permissions'.format(arg))
    return arg
[ "def", "readable_directory", "(", "arg", ")", ":", "arg", "=", "directory", "(", "arg", ")", "if", "(", "not", "os", ".", "access", "(", "arg", ",", "os", ".", "R_OK", ")", ")", ":", "raise", "argparse", ".", "ArgumentTypeError", "(", "'{0} exists but is not readable with its current permissions'", ".", "format", "(", "arg", ")", ")", "return", "arg" ]
an argument type (for use with the type= argument to argparse) .
train
false
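A brief usage sketch for the snippet above; the directory() helper and the os/argparse imports are assumed to come from the same module:
import argparse

parser = argparse.ArgumentParser()
# argparse calls readable_directory on the raw string and surfaces
# ArgumentTypeError as a clean command-line error
parser.add_argument('--data-dir', type=readable_directory)
args = parser.parse_args(['--data-dir', '/tmp'])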
49,511
def client_requests_text_rewrite(raw_text):
    def replace_to_real_domain(match_obj):
        scheme = get_group('scheme', match_obj)
        colon = match_obj.group('colon')
        scheme_slash = get_group('scheme_slash', match_obj)
        _is_https = bool(get_group('is_https', match_obj))
        real_domain = match_obj.group('real_domain')
        result = ''
        if scheme:
            if ('http' in scheme):
                if (_is_https or is_target_domain_use_https(real_domain)):
                    result += ('https' + colon)
                else:
                    result += ('http' + colon)
            result += (scheme_slash * 2)
        result += real_domain
        return result
    replaced = regex_request_rewriter_extdomains.sub(replace_to_real_domain, raw_text)
    if ((developer_string_trace is not None) and (developer_string_trace in replaced)):
        infoprint('StringTrace: appears client_requests_text_rewrite, code line no. ', current_line_number())
    replaced = regex_request_rewriter_main_domain.sub(target_domain, replaced)
    replaced = replaced.replace(my_host_name, target_domain)
    dbgprint('ClientRequestedUrl: ', raw_text, '<- Has Been Rewrited To ->', replaced)
    return replaced
[ "def", "client_requests_text_rewrite", "(", "raw_text", ")", ":", "def", "replace_to_real_domain", "(", "match_obj", ")", ":", "scheme", "=", "get_group", "(", "'scheme'", ",", "match_obj", ")", "colon", "=", "match_obj", ".", "group", "(", "'colon'", ")", "scheme_slash", "=", "get_group", "(", "'scheme_slash'", ",", "match_obj", ")", "_is_https", "=", "bool", "(", "get_group", "(", "'is_https'", ",", "match_obj", ")", ")", "real_domain", "=", "match_obj", ".", "group", "(", "'real_domain'", ")", "result", "=", "''", "if", "scheme", ":", "if", "(", "'http'", "in", "scheme", ")", ":", "if", "(", "_is_https", "or", "is_target_domain_use_https", "(", "real_domain", ")", ")", ":", "result", "+=", "(", "'https'", "+", "colon", ")", "else", ":", "result", "+=", "(", "'http'", "+", "colon", ")", "result", "+=", "(", "scheme_slash", "*", "2", ")", "result", "+=", "real_domain", "return", "result", "replaced", "=", "regex_request_rewriter_extdomains", ".", "sub", "(", "replace_to_real_domain", ",", "raw_text", ")", "if", "(", "(", "developer_string_trace", "is", "not", "None", ")", "and", "(", "developer_string_trace", "in", "replaced", ")", ")", ":", "infoprint", "(", "'StringTrace: appears client_requests_text_rewrite, code line no. '", ",", "current_line_number", "(", ")", ")", "replaced", "=", "regex_request_rewriter_main_domain", ".", "sub", "(", "target_domain", ",", "replaced", ")", "replaced", "=", "replaced", ".", "replace", "(", "my_host_name", ",", "target_domain", ")", "dbgprint", "(", "'ClientRequestedUrl: '", ",", "raw_text", ",", "'<- Has Been Rewrited To ->'", ",", "replaced", ")", "return", "replaced" ]
rewrite proxy domain to origin domain .
train
false
49,514
def add_cache_bypass(url):
    if (not cache_installed()):
        return url
    hash_str = u''
    if (u'#' in url):
        (url, hash_str) = url.split(u'#', 1)
        hash_str = (u'#' + hash_str)
    url += (u'?' if (u'?' not in url) else u'&')
    return (((url + u't=') + str(time()).replace(u'.', u'')) + hash_str)
[ "def", "add_cache_bypass", "(", "url", ")", ":", "if", "(", "not", "cache_installed", "(", ")", ")", ":", "return", "url", "hash_str", "=", "u''", "if", "(", "u'#'", "in", "url", ")", ":", "(", "url", ",", "hash_str", ")", "=", "url", ".", "split", "(", "u'#'", ",", "1", ")", "hash_str", "=", "(", "u'#'", "+", "hash_str", ")", "url", "+=", "(", "u'?'", "if", "(", "u'?'", "not", "in", "url", ")", "else", "u'&'", ")", "return", "(", "(", "(", "url", "+", "u't='", ")", "+", "str", "(", "time", "(", ")", ")", ".", "replace", "(", "u'.'", ",", "u''", ")", ")", "+", "hash_str", ")" ]
adds the current time to the querystring of the url to force a cache reload .
train
false
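For intuition, assuming cache_installed() returns True, the function keeps the fragment and appends a t= parameter built from the current time (digits vary per call):
add_cache_bypass(u'http://example.com/a?x=1#top')
# -> u'http://example.com/a?x=1&t=<current time digits>#top'
add_cache_bypass(u'http://example.com/a')
# -> u'http://example.com/a?t=<current time digits>'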
49,515
@public
def to_number_field(extension, theta=None, **args):
    gen = args.get('gen')
    if hasattr(extension, '__iter__'):
        extension = list(extension)
    else:
        extension = [extension]
    if ((len(extension) == 1) and (type(extension[0]) is tuple)):
        return AlgebraicNumber(extension[0])
    (minpoly, coeffs) = primitive_element(extension, gen, polys=True)
    root = sum([(coeff * ext) for (coeff, ext) in zip(coeffs, extension)])
    if (theta is None):
        return AlgebraicNumber((minpoly, root))
    else:
        theta = sympify(theta)
        if (not theta.is_AlgebraicNumber):
            theta = AlgebraicNumber(theta, gen=gen)
        coeffs = field_isomorphism(root, theta)
        if (coeffs is not None):
            return AlgebraicNumber(theta, coeffs)
        else:
            raise IsomorphismFailed(('%s is not in a subfield of %s' % (root, theta.root)))
[ "@", "public", "def", "to_number_field", "(", "extension", ",", "theta", "=", "None", ",", "**", "args", ")", ":", "gen", "=", "args", ".", "get", "(", "'gen'", ")", "if", "hasattr", "(", "extension", ",", "'__iter__'", ")", ":", "extension", "=", "list", "(", "extension", ")", "else", ":", "extension", "=", "[", "extension", "]", "if", "(", "(", "len", "(", "extension", ")", "==", "1", ")", "and", "(", "type", "(", "extension", "[", "0", "]", ")", "is", "tuple", ")", ")", ":", "return", "AlgebraicNumber", "(", "extension", "[", "0", "]", ")", "(", "minpoly", ",", "coeffs", ")", "=", "primitive_element", "(", "extension", ",", "gen", ",", "polys", "=", "True", ")", "root", "=", "sum", "(", "[", "(", "coeff", "*", "ext", ")", "for", "(", "coeff", ",", "ext", ")", "in", "zip", "(", "coeffs", ",", "extension", ")", "]", ")", "if", "(", "theta", "is", "None", ")", ":", "return", "AlgebraicNumber", "(", "(", "minpoly", ",", "root", ")", ")", "else", ":", "theta", "=", "sympify", "(", "theta", ")", "if", "(", "not", "theta", ".", "is_AlgebraicNumber", ")", ":", "theta", "=", "AlgebraicNumber", "(", "theta", ",", "gen", "=", "gen", ")", "coeffs", "=", "field_isomorphism", "(", "root", ",", "theta", ")", "if", "(", "coeffs", "is", "not", "None", ")", ":", "return", "AlgebraicNumber", "(", "theta", ",", "coeffs", ")", "else", ":", "raise", "IsomorphismFailed", "(", "(", "'%s is not in a subfield of %s'", "%", "(", "root", ",", "theta", ".", "root", ")", ")", ")" ]
express extension in the field generated by theta .
train
false
49,519
def get_tensor_children(tensor):
    children_list = []
    children_list.append(tensor)
    if tensor.op:
        for t in tensor.op.outputs:
            if (not ('read:0' in t.name)):
                children_list += get_tensor_children(t)
    return list(set(children_list))
[ "def", "get_tensor_children", "(", "tensor", ")", ":", "children_list", "=", "[", "]", "children_list", ".", "append", "(", "tensor", ")", "if", "tensor", ".", "op", ":", "for", "t", "in", "tensor", ".", "op", ".", "outputs", ":", "if", "(", "not", "(", "'read:0'", "in", "t", ".", "name", ")", ")", ":", "children_list", "+=", "get_tensor_children", "(", "t", ")", "return", "list", "(", "set", "(", "children_list", ")", ")" ]
get all calculation and data parent tensors .
train
false
49,520
def test_variable_varname():
    g = compile(u'((?P<varname>hello|world)|test)')
    m = g.match(u'hello')
    variables = m.variables()
    assert isinstance(variables, Variables)
    assert (variables.get(u'varname') == u'hello')
    assert (variables[u'varname'] == u'hello')
    m = g.match(u'world')
    variables = m.variables()
    assert isinstance(variables, Variables)
    assert (variables.get(u'varname') == u'world')
    assert (variables[u'varname'] == u'world')
    m = g.match(u'test')
    variables = m.variables()
    assert isinstance(variables, Variables)
    assert (variables.get(u'varname') is None)
    assert (variables[u'varname'] is None)
[ "def", "test_variable_varname", "(", ")", ":", "g", "=", "compile", "(", "u'((?P<varname>hello|world)|test)'", ")", "m", "=", "g", ".", "match", "(", "u'hello'", ")", "variables", "=", "m", ".", "variables", "(", ")", "assert", "isinstance", "(", "variables", ",", "Variables", ")", "assert", "(", "variables", ".", "get", "(", "u'varname'", ")", "==", "u'hello'", ")", "assert", "(", "variables", "[", "u'varname'", "]", "==", "u'hello'", ")", "m", "=", "g", ".", "match", "(", "u'world'", ")", "variables", "=", "m", ".", "variables", "(", ")", "assert", "isinstance", "(", "variables", ",", "Variables", ")", "assert", "(", "variables", ".", "get", "(", "u'varname'", ")", "==", "u'world'", ")", "assert", "(", "variables", "[", "u'varname'", "]", "==", "u'world'", ")", "m", "=", "g", ".", "match", "(", "u'test'", ")", "variables", "=", "m", ".", "variables", "(", ")", "assert", "isinstance", "(", "variables", ",", "Variables", ")", "assert", "(", "variables", ".", "get", "(", "u'varname'", ")", "is", "None", ")", "assert", "(", "variables", "[", "u'varname'", "]", "is", "None", ")" ]
test variable with varname .
train
false
49,524
def test_divmod():
    (x, y) = fscalars('xy')
    (d, r) = divmod(x, y)
    fn = gof.DualLinker().accept(gof.FunctionGraph([x, y], [d, r])).make_function()
    for (a, b) in ((0, 1), (1, 1), (0, (-1)), (1, (-1)), ((-1), (-1)), (1, 2), ((-1), 2), (1, (-2)), ((-1), (-2)), (5, 3), ((-5), 3), (5, (-3)), ((-5), (-3))):
        (d_v, r_v) = fn(a, b)
        (d_vp, r_vp) = divmod(a, b)
        assert ((d_v == d_vp) and (r_v == r_vp)), (a,)
[ "def", "test_divmod", "(", ")", ":", "(", "x", ",", "y", ")", "=", "fscalars", "(", "'xy'", ")", "(", "d", ",", "r", ")", "=", "divmod", "(", "x", ",", "y", ")", "fn", "=", "gof", ".", "DualLinker", "(", ")", ".", "accept", "(", "gof", ".", "FunctionGraph", "(", "[", "x", ",", "y", "]", ",", "[", "d", ",", "r", "]", ")", ")", ".", "make_function", "(", ")", "for", "(", "a", ",", "b", ")", "in", "(", "(", "0", ",", "1", ")", ",", "(", "1", ",", "1", ")", ",", "(", "0", ",", "(", "-", "1", ")", ")", ",", "(", "1", ",", "(", "-", "1", ")", ")", ",", "(", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ",", "(", "1", ",", "2", ")", ",", "(", "(", "-", "1", ")", ",", "2", ")", ",", "(", "1", ",", "(", "-", "2", ")", ")", ",", "(", "(", "-", "1", ")", ",", "(", "-", "2", ")", ")", ",", "(", "5", ",", "3", ")", ",", "(", "(", "-", "5", ")", ",", "3", ")", ",", "(", "5", ",", "(", "-", "3", ")", ")", ",", "(", "(", "-", "5", ")", ",", "(", "-", "3", ")", ")", ")", ":", "(", "d_v", ",", "r_v", ")", "=", "fn", "(", "a", ",", "b", ")", "(", "d_vp", ",", "r_vp", ")", "=", "divmod", "(", "a", ",", "b", ")", "assert", "(", "(", "d_v", "==", "d_vp", ")", "and", "(", "r_v", "==", "r_vp", ")", ")", ",", "(", "a", ",", ")" ]
confirm that divmod is equivalent to the python version .
train
false
49,525
def pollscm(registry, xml_parent, data):
    try:
        cron = data['cron']
        ipch = str(data.get('ignore-post-commit-hooks', False)).lower()
    except KeyError as e:
        raise MissingAttributeError(e)
    except TypeError:
        logger.warning('Your pollscm usage is deprecated, please use the syntax described in the documentation instead')
        cron = data
        ipch = 'false'
    if ((not cron) and (cron != '')):
        raise InvalidAttributeError('cron', cron)
    scmtrig = XML.SubElement(xml_parent, 'hudson.triggers.SCMTrigger')
    XML.SubElement(scmtrig, 'spec').text = cron
    XML.SubElement(scmtrig, 'ignorePostCommitHooks').text = ipch
[ "def", "pollscm", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "try", ":", "cron", "=", "data", "[", "'cron'", "]", "ipch", "=", "str", "(", "data", ".", "get", "(", "'ignore-post-commit-hooks'", ",", "False", ")", ")", ".", "lower", "(", ")", "except", "KeyError", "as", "e", ":", "raise", "MissingAttributeError", "(", "e", ")", "except", "TypeError", ":", "logger", ".", "warning", "(", "'Your pollscm usage is deprecated, please use the syntax described in the documentation instead'", ")", "cron", "=", "data", "ipch", "=", "'false'", "if", "(", "(", "not", "cron", ")", "and", "(", "cron", "!=", "''", ")", ")", ":", "raise", "InvalidAttributeError", "(", "'cron'", ",", "cron", ")", "scmtrig", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.triggers.SCMTrigger'", ")", "XML", ".", "SubElement", "(", "scmtrig", ",", "'spec'", ")", ".", "text", "=", "cron", "XML", ".", "SubElement", "(", "scmtrig", ",", "'ignorePostCommitHooks'", ")", ".", "text", "=", "ipch" ]
yaml: pollscm poll the scm to determine if there has been a change .
train
false
49,526
def evac_route(): return s3_rest_controller()
[ "def", "evac_route", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
rest controller .
train
false
49,527
def serialize_stats(model):
    data = model_to_dict(model)
    del data['id']
    return json.dumps(data)
[ "def", "serialize_stats", "(", "model", ")", ":", "data", "=", "model_to_dict", "(", "model", ")", "del", "data", "[", "'id'", "]", "return", "json", ".", "dumps", "(", "data", ")" ]
return the stats from the model ready to write to a file .
train
false
49,528
def is_api(auth_entry): return ((auth_entry == AUTH_ENTRY_LOGIN_API) or (auth_entry == AUTH_ENTRY_REGISTER_API))
[ "def", "is_api", "(", "auth_entry", ")", ":", "return", "(", "(", "auth_entry", "==", "AUTH_ENTRY_LOGIN_API", ")", "or", "(", "auth_entry", "==", "AUTH_ENTRY_REGISTER_API", ")", ")" ]
returns whether the auth entry point is via an api call .
train
false
49,529
def DEFINE_enum(name, default, enum_values, help, flag_values=FLAGS, **args): DEFINE_flag(EnumFlag(name, default, help, enum_values, **args), flag_values)
[ "def", "DEFINE_enum", "(", "name", ",", "default", ",", "enum_values", ",", "help", ",", "flag_values", "=", "FLAGS", ",", "**", "args", ")", ":", "DEFINE_flag", "(", "EnumFlag", "(", "name", ",", "default", ",", "help", ",", "enum_values", ",", "**", "args", ")", ",", "flag_values", ")" ]
registers a flag whose value can be any string from enum_values .
train
true
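A hedged sketch of how a gflags-style enum flag is typically declared; the FLAGS parsing step is assumed to follow the library's usual pattern:
DEFINE_enum('color', 'red', ['red', 'green', 'blue'], 'Thread color.')
# after FLAGS(sys.argv) parses the command line, FLAGS.color holds one
# of the three allowed strings; any other value is rejected at parse time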
49,531
def _decrease_indent():
    global _INDENT
    _INDENT = _INDENT[4:]
[ "def", "_decrease_indent", "(", ")", ":", "global", "_INDENT", "_INDENT", "=", "_INDENT", "[", "4", ":", "]" ]
decreases the indentation level .
train
false
49,532
def _evaluate_ground(f, i, a):
    ring = f.ring.clone(domain=f.ring.domain.ring.drop(i))
    fa = ring.zero
    for (monom, coeff) in f.iterterms():
        fa[monom] = coeff.evaluate(i, a)
    return fa
[ "def", "_evaluate_ground", "(", "f", ",", "i", ",", "a", ")", ":", "ring", "=", "f", ".", "ring", ".", "clone", "(", "domain", "=", "f", ".", "ring", ".", "domain", ".", "ring", ".", "drop", "(", "i", ")", ")", "fa", "=", "ring", ".", "zero", "for", "(", "monom", ",", "coeff", ")", "in", "f", ".", "iterterms", "(", ")", ":", "fa", "[", "monom", "]", "=", "coeff", ".", "evaluate", "(", "i", ",", "a", ")", "return", "fa" ]
evaluate a polynomial f at a in the i-th variable of the ground domain .
train
false
49,533
def json_response(response, has_trans=False, status_code=200):
    if has_trans:
        response = json.dumps(response, cls=AMOJSONEncoder)
    else:
        response = json.dumps(response)
    return http.HttpResponse(response, content_type='application/json', status=status_code)
[ "def", "json_response", "(", "response", ",", "has_trans", "=", "False", ",", "status_code", "=", "200", ")", ":", "if", "has_trans", ":", "response", "=", "json", ".", "dumps", "(", "response", ",", "cls", "=", "AMOJSONEncoder", ")", "else", ":", "response", "=", "json", ".", "dumps", "(", "response", ")", "return", "http", ".", "HttpResponse", "(", "response", ",", "content_type", "=", "'application/json'", ",", "status", "=", "status_code", ")" ]
return a response as json .
train
false
49,534
def project_data_dir(project='default'):
    if (not inside_project()):
        raise NotConfigured('Not inside a project')
    cfg = get_config()
    if cfg.has_option(DATADIR_CFG_SECTION, project):
        d = cfg.get(DATADIR_CFG_SECTION, project)
    else:
        scrapy_cfg = closest_scrapy_cfg()
        if (not scrapy_cfg):
            raise NotConfigured('Unable to find scrapy.cfg file to infer project data dir')
        d = abspath(join(dirname(scrapy_cfg), '.scrapy'))
    if (not exists(d)):
        os.makedirs(d)
    return d
[ "def", "project_data_dir", "(", "project", "=", "'default'", ")", ":", "if", "(", "not", "inside_project", "(", ")", ")", ":", "raise", "NotConfigured", "(", "'Not inside a project'", ")", "cfg", "=", "get_config", "(", ")", "if", "cfg", ".", "has_option", "(", "DATADIR_CFG_SECTION", ",", "project", ")", ":", "d", "=", "cfg", ".", "get", "(", "DATADIR_CFG_SECTION", ",", "project", ")", "else", ":", "scrapy_cfg", "=", "closest_scrapy_cfg", "(", ")", "if", "(", "not", "scrapy_cfg", ")", ":", "raise", "NotConfigured", "(", "'Unable to find scrapy.cfg file to infer project data dir'", ")", "d", "=", "abspath", "(", "join", "(", "dirname", "(", "scrapy_cfg", ")", ",", "'.scrapy'", ")", ")", "if", "(", "not", "exists", "(", "d", ")", ")", ":", "os", ".", "makedirs", "(", "d", ")", "return", "d" ]
return the current project data dir .
train
false
49,535
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid', sort_dir='asc', limit=None, marker=None): return IMPL.flavor_get_all(context, inactive=inactive, filters=filters, sort_key=sort_key, sort_dir=sort_dir, limit=limit, marker=marker)
[ "def", "flavor_get_all", "(", "context", ",", "inactive", "=", "False", ",", "filters", "=", "None", ",", "sort_key", "=", "'flavorid'", ",", "sort_dir", "=", "'asc'", ",", "limit", "=", "None", ",", "marker", "=", "None", ")", ":", "return", "IMPL", ".", "flavor_get_all", "(", "context", ",", "inactive", "=", "inactive", ",", "filters", "=", "filters", ",", "sort_key", "=", "sort_key", ",", "sort_dir", "=", "sort_dir", ",", "limit", "=", "limit", ",", "marker", "=", "marker", ")" ]
get all instance flavors .
train
false
49,536
def can_ignore(field):
    full_name = ('%s.%s' % (field.__class__.__module__, field.__class__.__name__))
    for regex in ignored_fields:
        if re.match(regex, full_name):
            return True
    return False
[ "def", "can_ignore", "(", "field", ")", ":", "full_name", "=", "(", "'%s.%s'", "%", "(", "field", ".", "__class__", ".", "__module__", ",", "field", ".", "__class__", ".", "__name__", ")", ")", "for", "regex", "in", "ignored_fields", ":", "if", "re", ".", "match", "(", "regex", ",", "full_name", ")", ":", "return", "True", "return", "False" ]
returns true if we know for certain that we can ignore this field .
train
false
49,538
def check_file(option, opt, value):
    if exists(value):
        return value
    msg = 'option %s: file %r does not exist'
    raise OptionValueError((msg % (opt, value)))
[ "def", "check_file", "(", "option", ",", "opt", ",", "value", ")", ":", "if", "exists", "(", "value", ")", ":", "return", "value", "msg", "=", "'option %s: file %r does not exist'", "raise", "OptionValueError", "(", "(", "msg", "%", "(", "opt", ",", "value", ")", ")", ")" ]
check a file value and return the filepath .
train
false
49,540
def create_transfer_config_from_runtime_config(runtime_config):
    translation_map = {'max_concurrent_requests': 'max_request_concurrency', 'max_queue_size': 'max_request_queue_size', 'multipart_threshold': 'multipart_threshold', 'multipart_chunksize': 'multipart_chunksize'}
    kwargs = {}
    for (key, value) in runtime_config.items():
        if (key not in translation_map):
            continue
        kwargs[translation_map[key]] = value
    return TransferConfig(**kwargs)
[ "def", "create_transfer_config_from_runtime_config", "(", "runtime_config", ")", ":", "translation_map", "=", "{", "'max_concurrent_requests'", ":", "'max_request_concurrency'", ",", "'max_queue_size'", ":", "'max_request_queue_size'", ",", "'multipart_threshold'", ":", "'multipart_threshold'", ",", "'multipart_chunksize'", ":", "'multipart_chunksize'", "}", "kwargs", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "runtime_config", ".", "items", "(", ")", ":", "if", "(", "key", "not", "in", "translation_map", ")", ":", "continue", "kwargs", "[", "translation_map", "[", "key", "]", "]", "=", "value", "return", "TransferConfig", "(", "**", "kwargs", ")" ]
creates an equivalent s3transfer transferconfig from a valid runtimeconfig-generated dict .
train
false
49,541
def make_timestamp_query(func, start=None, start_op=None, end=None, end_op=None, bounds_only=False, **kwargs):
    (rts_start, rts_end) = get_start_end_rts(start, end)
    (start_row, end_row) = func(rts_start, rts_end, **kwargs)
    if bounds_only:
        return (start_row, end_row)
    q = []
    start_op = (start_op or 'ge')
    end_op = (end_op or 'lt')
    if rts_start:
        q.append(("SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')" % (OP_SIGN_REV[start_op], rts_start)))
    if rts_end:
        q.append(("SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')" % (OP_SIGN_REV[end_op], rts_end)))
    res_q = None
    if len(q):
        res_q = ' AND '.join(q)
    return (start_row, end_row, res_q)
[ "def", "make_timestamp_query", "(", "func", ",", "start", "=", "None", ",", "start_op", "=", "None", ",", "end", "=", "None", ",", "end_op", "=", "None", ",", "bounds_only", "=", "False", ",", "**", "kwargs", ")", ":", "(", "rts_start", ",", "rts_end", ")", "=", "get_start_end_rts", "(", "start", ",", "end", ")", "(", "start_row", ",", "end_row", ")", "=", "func", "(", "rts_start", ",", "rts_end", ",", "**", "kwargs", ")", "if", "bounds_only", ":", "return", "(", "start_row", ",", "end_row", ")", "q", "=", "[", "]", "start_op", "=", "(", "start_op", "or", "'ge'", ")", "end_op", "=", "(", "end_op", "or", "'lt'", ")", "if", "rts_start", ":", "q", ".", "append", "(", "(", "\"SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')\"", "%", "(", "OP_SIGN_REV", "[", "start_op", "]", ",", "rts_start", ")", ")", ")", "if", "rts_end", ":", "q", ".", "append", "(", "(", "\"SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')\"", "%", "(", "OP_SIGN_REV", "[", "end_op", "]", ",", "rts_end", ")", ")", ")", "res_q", "=", "None", "if", "len", "(", "q", ")", ":", "res_q", "=", "' AND '", ".", "join", "(", "q", ")", "return", "(", "start_row", ",", "end_row", ",", "res_q", ")" ]
return a filter start and stop row for filtering and a query .
train
false
49,542
def check_root_user():
    if (os.name == 'posix'):
        import pwd
        if (pwd.getpwuid(os.getuid())[0] == 'root'):
            sys.stderr.write("Running as user 'root' is a security risk.\n")
[ "def", "check_root_user", "(", ")", ":", "if", "(", "os", ".", "name", "==", "'posix'", ")", ":", "import", "pwd", "if", "(", "pwd", ".", "getpwuid", "(", "os", ".", "getuid", "(", ")", ")", "[", "0", "]", "==", "'root'", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"Running as user 'root' is a security risk.\\n\"", ")" ]
warn if the process's user is root .
train
false
49,543
def register_validation(form):
    form_vars = form.vars
    mobile = form_vars.get('mobile')
    if mobile:
        import re
        regex = re.compile(single_phone_number_pattern)
        if (not regex.match(mobile)):
            form.errors.mobile = T('Invalid phone number')
    elif settings.get_auth_registration_mobile_phone_mandatory():
        form.errors.mobile = T('Phone number is required')
    home = form_vars.get('home')
    if home:
        import re
        regex = re.compile(single_phone_number_pattern)
        if (not regex.match(home)):
            form.errors.home = T('Invalid phone number')
    org = settings.get_auth_registration_organisation_id_default()
    if org:
        form_vars.organisation_id = org
    return
[ "def", "register_validation", "(", "form", ")", ":", "form_vars", "=", "form", ".", "vars", "mobile", "=", "form_vars", ".", "get", "(", "'mobile'", ")", "if", "mobile", ":", "import", "re", "regex", "=", "re", ".", "compile", "(", "single_phone_number_pattern", ")", "if", "(", "not", "regex", ".", "match", "(", "mobile", ")", ")", ":", "form", ".", "errors", ".", "mobile", "=", "T", "(", "'Invalid phone number'", ")", "elif", "settings", ".", "get_auth_registration_mobile_phone_mandatory", "(", ")", ":", "form", ".", "errors", ".", "mobile", "=", "T", "(", "'Phone number is required'", ")", "home", "=", "form_vars", ".", "get", "(", "'home'", ")", "if", "home", ":", "import", "re", "regex", "=", "re", ".", "compile", "(", "single_phone_number_pattern", ")", "if", "(", "not", "regex", ".", "match", "(", "home", ")", ")", ":", "form", ".", "errors", ".", "home", "=", "T", "(", "'Invalid phone number'", ")", "org", "=", "settings", ".", "get_auth_registration_organisation_id_default", "(", ")", "if", "org", ":", "form_vars", ".", "organisation_id", "=", "org", "return" ]
validate the fields in registration form .
train
false
49,545
def getGeometryOutputByFacesVertexes(faces, vertexes): return {'trianglemesh': {'vertex': vertexes, 'face': faces}}
[ "def", "getGeometryOutputByFacesVertexes", "(", "faces", ",", "vertexes", ")", ":", "return", "{", "'trianglemesh'", ":", "{", "'vertex'", ":", "vertexes", ",", "'face'", ":", "faces", "}", "}" ]
get geometry output dictionary by faces and vertexes .
train
false
49,547
def get_overlapping_samples(map_rows, otu_table):
    map_sample_ids = zip(*map_rows)[0]
    shared_ids = (set(map_sample_ids) & set(otu_table.ids()))
    otu_table = filter_samples_from_otu_table(otu_table, shared_ids, (- np.inf), np.inf)
    new_map = []
    for sam_id in map_sample_ids:
        if (sam_id in shared_ids):
            ix = map_sample_ids.index(sam_id)
            new_map.append(map_rows[ix])
    return (new_map, otu_table)
[ "def", "get_overlapping_samples", "(", "map_rows", ",", "otu_table", ")", ":", "map_sample_ids", "=", "zip", "(", "*", "map_rows", ")", "[", "0", "]", "shared_ids", "=", "(", "set", "(", "map_sample_ids", ")", "&", "set", "(", "otu_table", ".", "ids", "(", ")", ")", ")", "otu_table", "=", "filter_samples_from_otu_table", "(", "otu_table", ",", "shared_ids", ",", "(", "-", "np", ".", "inf", ")", ",", "np", ".", "inf", ")", "new_map", "=", "[", "]", "for", "sam_id", "in", "map_sample_ids", ":", "if", "(", "sam_id", "in", "shared_ids", ")", ":", "ix", "=", "map_sample_ids", ".", "index", "(", "sam_id", ")", "new_map", ".", "append", "(", "map_rows", "[", "ix", "]", ")", "return", "(", "new_map", ",", "otu_table", ")" ]
extracts only samples contained in otu table and mapping file .
train
false
49,548
def Resample(xs, n=None):
    if (n is None):
        n = len(xs)
    return np.random.choice(xs, n, replace=True)
[ "def", "Resample", "(", "xs", ",", "n", "=", "None", ")", ":", "if", "(", "n", "is", "None", ")", ":", "n", "=", "len", "(", "xs", ")", "return", "np", ".", "random", ".", "choice", "(", "xs", ",", "n", ",", "replace", "=", "True", ")" ]
draw a sample from xs with the same length as xs .
train
false
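One common use of Resample is bootstrapping; a minimal sketch assuming numpy is imported as np:
xs = np.array([2.0, 3.5, 4.1, 5.6, 7.2])
boot_means = [Resample(xs).mean() for _ in range(1000)]
# boot_means approximates the sampling distribution of the mean,
# since each Resample draws len(xs) values with replacement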
49,550
def run_ghostscript_command(args, stdout=__sentinel__, stderr=__sentinel__):
    if (not isinstance(args, list)):
        raise TypeError('args must be a list')
    args.insert(0, _get_gs_command())
    args.insert(1, '-q')
    args.insert(1, '-dQUIET')
    args.insert(1, '-dNOPROMPT')
    args.insert(1, '-dNOPAUSE')
    args.insert(1, '-dBATCH')
    args.insert(1, '-dSAFER')
    return execute_command(args, shell=(sublime.platform() == 'windows'), stdout=stdout, stderr=stderr)
[ "def", "run_ghostscript_command", "(", "args", ",", "stdout", "=", "__sentinel__", ",", "stderr", "=", "__sentinel__", ")", ":", "if", "(", "not", "isinstance", "(", "args", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'args must be a list'", ")", "args", ".", "insert", "(", "0", ",", "_get_gs_command", "(", ")", ")", "args", ".", "insert", "(", "1", ",", "'-q'", ")", "args", ".", "insert", "(", "1", ",", "'-dQUIET'", ")", "args", ".", "insert", "(", "1", ",", "'-dNOPROMPT'", ")", "args", ".", "insert", "(", "1", ",", "'-dNOPAUSE'", ")", "args", ".", "insert", "(", "1", ",", "'-dBATCH'", ")", "args", ".", "insert", "(", "1", ",", "'-dSAFER'", ")", "return", "execute_command", "(", "args", ",", "shell", "=", "(", "sublime", ".", "platform", "(", ")", "==", "'windows'", ")", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ")" ]
executes a ghostscript command with the given args .
train
false
49,551
@receiver(update_creator_state, sender=CourseCreator)
def update_creator_group_callback(sender, **kwargs):
    user = kwargs['user']
    updated_state = kwargs['state']
    update_course_creator_group(kwargs['caller'], user, (updated_state == CourseCreator.GRANTED))
[ "@", "receiver", "(", "update_creator_state", ",", "sender", "=", "CourseCreator", ")", "def", "update_creator_group_callback", "(", "sender", ",", "**", "kwargs", ")", ":", "user", "=", "kwargs", "[", "'user'", "]", "updated_state", "=", "kwargs", "[", "'state'", "]", "update_course_creator_group", "(", "kwargs", "[", "'caller'", "]", ",", "user", ",", "(", "updated_state", "==", "CourseCreator", ".", "GRANTED", ")", ")" ]
callback for when the model's creator status has changed .
train
false
49,553
@pytest.mark.network
def test_finder_priority_page_over_deplink():
    req = InstallRequirement.from_line('pip==1.5.6', None)
    finder = PackageFinder([], ['https://pypi.python.org/simple'], process_dependency_links=True, session=PipSession())
    finder.add_dependency_links(['https://warehouse.python.org/packages/source/p/pip/pip-1.5.6.tar.gz'])
    all_versions = finder.find_all_candidates(req.name)
    assert all_versions[(-1)].location.url.startswith('https://warehouse')
    link = finder.find_requirement(req, False)
    assert link.url.startswith('https://pypi'), link
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_finder_priority_page_over_deplink", "(", ")", ":", "req", "=", "InstallRequirement", ".", "from_line", "(", "'pip==1.5.6'", ",", "None", ")", "finder", "=", "PackageFinder", "(", "[", "]", ",", "[", "'https://pypi.python.org/simple'", "]", ",", "process_dependency_links", "=", "True", ",", "session", "=", "PipSession", "(", ")", ")", "finder", ".", "add_dependency_links", "(", "[", "'https://warehouse.python.org/packages/source/p/pip/pip-1.5.6.tar.gz'", "]", ")", "all_versions", "=", "finder", ".", "find_all_candidates", "(", "req", ".", "name", ")", "assert", "all_versions", "[", "(", "-", "1", ")", "]", ".", "location", ".", "url", ".", "startswith", "(", "'https://warehouse'", ")", "link", "=", "finder", ".", "find_requirement", "(", "req", ",", "False", ")", "assert", "link", ".", "url", ".", "startswith", "(", "'https://pypi'", ")", ",", "link" ]
test packagefinder prefers page links over equivalent dependency links .
train
false
49,555
def apply_rules(page, response=None):
    if (response is None):
        response = {}
    for rule in RULESET:
        response = rule(page, response)
    return response
[ "def", "apply_rules", "(", "page", ",", "response", "=", "None", ")", ":", "if", "(", "response", "is", "None", ")", ":", "response", "=", "{", "}", "for", "rule", "in", "RULESET", ":", "response", "=", "rule", "(", "page", ",", "response", ")", "return", "response" ]
applies all rules .
train
false
49,557
def aggregate_update(context, aggregate_id, values): return IMPL.aggregate_update(context, aggregate_id, values)
[ "def", "aggregate_update", "(", "context", ",", "aggregate_id", ",", "values", ")", ":", "return", "IMPL", ".", "aggregate_update", "(", "context", ",", "aggregate_id", ",", "values", ")" ]
update the attributes of an aggregate .
train
false
49,559
def get_conv_shape_1axis(image_shape, kernel_shape, border_mode, subsample, dilation=1):
    if (None in [image_shape, kernel_shape, border_mode, subsample, dilation]):
        return None
    dil_kernel_shape = (((kernel_shape - 1) * dilation) + 1)
    if (border_mode == 'half'):
        pad = (dil_kernel_shape // 2)
    elif (border_mode == 'full'):
        pad = (dil_kernel_shape - 1)
    elif (border_mode == 'valid'):
        pad = 0
    else:
        pad = border_mode
        if (pad < 0):
            raise ValueError('border_mode must be >= 0')
    if (pad == 0):
        out_shp = (image_shape - dil_kernel_shape)
    else:
        out_shp = ((image_shape + (2 * pad)) - dil_kernel_shape)
    if (subsample != 1):
        out_shp = (out_shp // subsample)
    out_shp = (out_shp + 1)
    return out_shp
[ "def", "get_conv_shape_1axis", "(", "image_shape", ",", "kernel_shape", ",", "border_mode", ",", "subsample", ",", "dilation", "=", "1", ")", ":", "if", "(", "None", "in", "[", "image_shape", ",", "kernel_shape", ",", "border_mode", ",", "subsample", ",", "dilation", "]", ")", ":", "return", "None", "dil_kernel_shape", "=", "(", "(", "(", "kernel_shape", "-", "1", ")", "*", "dilation", ")", "+", "1", ")", "if", "(", "border_mode", "==", "'half'", ")", ":", "pad", "=", "(", "dil_kernel_shape", "//", "2", ")", "elif", "(", "border_mode", "==", "'full'", ")", ":", "pad", "=", "(", "dil_kernel_shape", "-", "1", ")", "elif", "(", "border_mode", "==", "'valid'", ")", ":", "pad", "=", "0", "else", ":", "pad", "=", "border_mode", "if", "(", "pad", "<", "0", ")", ":", "raise", "ValueError", "(", "'border_mode must be >= 0'", ")", "if", "(", "pad", "==", "0", ")", ":", "out_shp", "=", "(", "image_shape", "-", "dil_kernel_shape", ")", "else", ":", "out_shp", "=", "(", "(", "image_shape", "+", "(", "2", "*", "pad", ")", ")", "-", "dil_kernel_shape", ")", "if", "(", "subsample", "!=", "1", ")", ":", "out_shp", "=", "(", "out_shp", "//", "subsample", ")", "out_shp", "=", "(", "out_shp", "+", "1", ")", "return", "out_shp" ]
this function computes the output shape of a convolution operation .
train
false
49,560
def any_schema(schemas):
    schemas = list(schemas)
    if (len(schemas) == 1):
        return schemas[0]
    else:
        return {u'anyOf': schemas}
[ "def", "any_schema", "(", "schemas", ")", ":", "schemas", "=", "list", "(", "schemas", ")", "if", "(", "len", "(", "schemas", ")", "==", "1", ")", ":", "return", "schemas", "[", "0", "]", "else", ":", "return", "{", "u'anyOf'", ":", "schemas", "}" ]
creates a schema that will match any of the given schemas .
train
false
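Both branches are easy to see with concrete inputs:
any_schema([{u'type': u'string'}])
# -> {u'type': u'string'}  (a single schema is returned unwrapped)
any_schema([{u'type': u'string'}, {u'type': u'null'}])
# -> {u'anyOf': [{u'type': u'string'}, {u'type': u'null'}]}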
49,562
def get_server_url(local_site_name=None, local_site=None, request=None):
    site = Site.objects.get_current()
    siteconfig = SiteConfiguration.objects.get_current()
    root = local_site_reverse(u'root', local_site_name=local_site_name, local_site=local_site, request=request)
    return (u'%s://%s%s' % (siteconfig.get(u'site_domain_method'), site.domain, root))
[ "def", "get_server_url", "(", "local_site_name", "=", "None", ",", "local_site", "=", "None", ",", "request", "=", "None", ")", ":", "site", "=", "Site", ".", "objects", ".", "get_current", "(", ")", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "root", "=", "local_site_reverse", "(", "u'root'", ",", "local_site_name", "=", "local_site_name", ",", "local_site", "=", "local_site", ",", "request", "=", "request", ")", "return", "(", "u'%s://%s%s'", "%", "(", "siteconfig", ".", "get", "(", "u'site_domain_method'", ")", ",", "site", ".", "domain", ",", "root", ")", ")" ]
return the url for the root of the server .
train
false
49,564
@frappe.whitelist()
def get_student_batch_students(student_batch):
    students = frappe.get_list(u'Student Batch Student', fields=[u'student', u'student_name', u'idx'], filters={u'parent': student_batch, u'active': 1}, order_by=u'idx')
    return students
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_student_batch_students", "(", "student_batch", ")", ":", "students", "=", "frappe", ".", "get_list", "(", "u'Student Batch Student'", ",", "fields", "=", "[", "u'student'", ",", "u'student_name'", ",", "u'idx'", "]", ",", "filters", "=", "{", "u'parent'", ":", "student_batch", ",", "u'active'", ":", "1", "}", ",", "order_by", "=", "u'idx'", ")", "return", "students" ]
returns list of students .
train
false
49,565
def get_profile_url(): return get_client().get_profile_url()
[ "def", "get_profile_url", "(", ")", ":", "return", "get_client", "(", ")", ".", "get_profile_url", "(", ")" ]
convenience function for getting a profile url for a user .
train
false
49,567
def is_abstract_method(attr): return (hasattr(attr, '__isabstractmethod__') and getattr(attr, '__isabstractmethod__'))
[ "def", "is_abstract_method", "(", "attr", ")", ":", "return", "(", "hasattr", "(", "attr", ",", "'__isabstractmethod__'", ")", "and", "getattr", "(", "attr", ",", "'__isabstractmethod__'", ")", ")" ]
returns true if the given object has __isabstractmethod__ == true .
train
false
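A small self-contained check, using abc from the standard library (Python 2 metaclass syntax to match the era of these snippets):
import abc

class Base(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def run(self):
        pass

is_abstract_method(Base.__dict__['run'])  # True: the decorator set the attribute
is_abstract_method(len)                   # False: no __isabstractmethod__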
49,569
def mimic_for_test(test_case):
    mimic_config = {'realtime': True, 'listen': '0', 'verbose': True}
    mimic_service = mimic_make_service(mimic_config)
    mimic_service.startService()
    test_case.addCleanup(mimic_service.stopService)
    [site_service] = mimic_service.services
    waiting_for_port = site_service._waitingForPort
    def stop_the_port(listening_port):
        test_case.addCleanup((lambda : listening_port.stopListening()))
        return listening_port
    listening = waiting_for_port.addCallback(stop_the_port)
    return listening
[ "def", "mimic_for_test", "(", "test_case", ")", ":", "mimic_config", "=", "{", "'realtime'", ":", "True", ",", "'listen'", ":", "'0'", ",", "'verbose'", ":", "True", "}", "mimic_service", "=", "mimic_make_service", "(", "mimic_config", ")", "mimic_service", ".", "startService", "(", ")", "test_case", ".", "addCleanup", "(", "mimic_service", ".", "stopService", ")", "[", "site_service", "]", "=", "mimic_service", ".", "services", "waiting_for_port", "=", "site_service", ".", "_waitingForPort", "def", "stop_the_port", "(", "listening_port", ")", ":", "test_case", ".", "addCleanup", "(", "(", "lambda", ":", "listening_port", ".", "stopListening", "(", ")", ")", ")", "return", "listening_port", "listening", "=", "waiting_for_port", ".", "addCallback", "(", "stop_the_port", ")", "return", "listening" ]
start a mimic server in the background on an ephemeral port and return the port number .
train
false
49,570
def delete_account(account_id):
    _query(action='accounts', command=account_id, method='DELETE')
    return True
[ "def", "delete_account", "(", "account_id", ")", ":", "_query", "(", "action", "=", "'accounts'", ",", "command", "=", "account_id", ",", "method", "=", "'DELETE'", ")", "return", "True" ]
delete an account .
train
false
49,571
def _sessionCounter(counter=itertools.count()): return next(counter)
[ "def", "_sessionCounter", "(", "counter", "=", "itertools", ".", "count", "(", ")", ")", ":", "return", "next", "(", "counter", ")" ]
private - shared between all opensslcertificateoptions .
train
false
49,572
def as_property(fact): return ('is_%s' % fact)
[ "def", "as_property", "(", "fact", ")", ":", "return", "(", "'is_%s'", "%", "fact", ")" ]
convert a fact name to the name of the corresponding property .
train
false
49,573
def oo_prepend_strings_in_list(data, prepend):
    if (not isinstance(data, list)):
        raise errors.AnsibleFilterError('|failed expects first param is a list')
    if (not all((isinstance(x, string_types) for x in data))):
        raise errors.AnsibleFilterError('|failed expects first param is a list of strings')
    retval = [(prepend + s) for s in data]
    return retval
[ "def", "oo_prepend_strings_in_list", "(", "data", ",", "prepend", ")", ":", "if", "(", "not", "isinstance", "(", "data", ",", "list", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects first param is a list'", ")", "if", "(", "not", "all", "(", "(", "isinstance", "(", "x", ",", "string_types", ")", "for", "x", "in", "data", ")", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects first param is a list of strings'", ")", "retval", "=", "[", "(", "prepend", "+", "s", ")", "for", "s", "in", "data", "]", "return", "retval" ]
this takes a list of strings and prepends a string to each item in the list .
train
false
49,574
def change_name(command_table, session, **kwargs): utils.rename_command(command_table, 'config', 'configservice')
[ "def", "change_name", "(", "command_table", ",", "session", ",", "**", "kwargs", ")", ":", "utils", ".", "rename_command", "(", "command_table", ",", "'config'", ",", "'configservice'", ")" ]
change all existing aws config commands to aws configservice commands .
train
false
49,575
def _get_valid_format(mode, cls, path, fileobj, args, kwargs):
    valid_formats = identify_format(mode, cls, path, fileobj, args, kwargs)
    if (len(valid_formats) == 0):
        format_table_str = _get_format_table_str(cls, mode.capitalize())
        raise IORegistryError(u'Format could not be identified.\nThe available formats are:\n{0}'.format(format_table_str))
    elif (len(valid_formats) > 1):
        raise IORegistryError(u'Format is ambiguous - options are: {0}'.format(u', '.join(sorted(valid_formats, key=itemgetter(0)))))
    return valid_formats[0]
[ "def", "_get_valid_format", "(", "mode", ",", "cls", ",", "path", ",", "fileobj", ",", "args", ",", "kwargs", ")", ":", "valid_formats", "=", "identify_format", "(", "mode", ",", "cls", ",", "path", ",", "fileobj", ",", "args", ",", "kwargs", ")", "if", "(", "len", "(", "valid_formats", ")", "==", "0", ")", ":", "format_table_str", "=", "_get_format_table_str", "(", "cls", ",", "mode", ".", "capitalize", "(", ")", ")", "raise", "IORegistryError", "(", "u'Format could not be identified.\\nThe available formats are:\\n{0}'", ".", "format", "(", "format_table_str", ")", ")", "elif", "(", "len", "(", "valid_formats", ")", ">", "1", ")", ":", "raise", "IORegistryError", "(", "u'Format is ambiguous - options are: {0}'", ".", "format", "(", "u', '", ".", "join", "(", "sorted", "(", "valid_formats", ",", "key", "=", "itemgetter", "(", "0", ")", ")", ")", ")", ")", "return", "valid_formats", "[", "0", "]" ]
returns the first valid format that can be used to read/write the data in question .
train
false
49,576
def crt2(m, v, mm, e, s, symmetric=False):
    result = gf_crt2(v, m, mm, e, s, ZZ)
    if symmetric:
        return (symmetric_residue(result, mm), mm)
    return (result, mm)
[ "def", "crt2", "(", "m", ",", "v", ",", "mm", ",", "e", ",", "s", ",", "symmetric", "=", "False", ")", ":", "result", "=", "gf_crt2", "(", "v", ",", "m", ",", "mm", ",", "e", ",", "s", ",", "ZZ", ")", "if", "symmetric", ":", "return", "(", "symmetric_residue", "(", "result", ",", "mm", ")", ",", "mm", ")", "return", "(", "result", ",", "mm", ")" ]
second part of chinese remainder theorem .
train
false
49,578
def generate_aes_key(token, secret): return hashlib.sha256((token + secret)).digest()
[ "def", "generate_aes_key", "(", "token", ",", "secret", ")", ":", "return", "hashlib", ".", "sha256", "(", "(", "token", "+", "secret", ")", ")", ".", "digest", "(", ")" ]
generates and returns a 256 bit aes key .
train
false
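The key derivation is deterministic; a sketch assuming both arguments are byte strings so the concatenation is valid:
key = generate_aes_key(b'session-token', b'shared-secret')
len(key)  # 32 -- sha256 digests are 32 bytes, i.e. a 256-bit AES key
# the same (token, secret) pair always yields the same key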
49,579
def p_statement_expr2(t): pass
[ "def", "p_statement_expr2", "(", "t", ")", ":", "pass" ]
statement : bad&rule .
train
false
49,581
def unbox_usecase4(x):
    (a, b) = x
    res = a
    for v in b:
        res += len(v)
    return res
[ "def", "unbox_usecase4", "(", "x", ")", ":", "(", "a", ",", "b", ")", "=", "x", "res", "=", "a", "for", "v", "in", "b", ":", "res", "+=", "len", "(", "v", ")", "return", "res" ]
expect a tuple .
train
false
49,582
def loadLocalVariables(filename):
    with open(filename, 'r') as f:
        lines = [f.readline(), f.readline()]
    for line in lines:
        try:
            return _parseLocalVariables(line)
        except ValueError:
            pass
    return {}
[ "def", "loadLocalVariables", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "lines", "=", "[", "f", ".", "readline", "(", ")", ",", "f", ".", "readline", "(", ")", "]", "for", "line", "in", "lines", ":", "try", ":", "return", "_parseLocalVariables", "(", "line", ")", "except", "ValueError", ":", "pass", "return", "{", "}" ]
accepts a filename and attempts to load the emacs variable declarations from that file .
train
false
49,583
def getLoopConvexCentroid(polygonComplex): return getLoopCentroid(getLoopConvex(polygonComplex))
[ "def", "getLoopConvexCentroid", "(", "polygonComplex", ")", ":", "return", "getLoopCentroid", "(", "getLoopConvex", "(", "polygonComplex", ")", ")" ]
get centroid of the convex hull of a complex polygon .
train
false
49,585
def _error_msg_iface(iface, option, expected):
    msg = 'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'
    return msg.format(iface, option, '|'.join(expected))
[ "def", "_error_msg_iface", "(", "iface", ",", "option", ",", "expected", ")", ":", "msg", "=", "'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'", "return", "msg", ".", "format", "(", "iface", ",", "option", ",", "'|'", ".", "join", "(", "expected", ")", ")" ]
build an appropriate error message from a given option and a list of expected values .
train
false
49,586
def float_is_zero(value, precision_digits=None, precision_rounding=None):
    epsilon = _float_check_precision(precision_digits=precision_digits, precision_rounding=precision_rounding)
    return (abs(float_round(value, precision_rounding=epsilon)) < epsilon)
[ "def", "float_is_zero", "(", "value", ",", "precision_digits", "=", "None", ",", "precision_rounding", "=", "None", ")", ":", "epsilon", "=", "_float_check_precision", "(", "precision_digits", "=", "precision_digits", ",", "precision_rounding", "=", "precision_rounding", ")", "return", "(", "abs", "(", "float_round", "(", "value", ",", "precision_rounding", "=", "epsilon", ")", ")", "<", "epsilon", ")" ]
returns true if value is small enough to be treated as zero at the given precision .
train
false
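For intuition, assuming the odoo-style helpers resolve precision_digits=2 to an epsilon of 0.01 and float_round rounds to that step:
float_is_zero(0.004, precision_digits=2)  # True: rounds to 0.00, below epsilon
float_is_zero(0.04, precision_digits=2)   # False: rounds to 0.04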
49,587
def resolve_document_etag(documents, resource):
    if config.IF_MATCH:
        ignore_fields = config.DOMAIN[resource]['etag_ignore_fields']
        if (not isinstance(documents, list)):
            documents = [documents]
        for document in documents:
            document[config.ETAG] = document_etag(document, ignore_fields=ignore_fields)
[ "def", "resolve_document_etag", "(", "documents", ",", "resource", ")", ":", "if", "config", ".", "IF_MATCH", ":", "ignore_fields", "=", "config", ".", "DOMAIN", "[", "resource", "]", "[", "'etag_ignore_fields'", "]", "if", "(", "not", "isinstance", "(", "documents", ",", "list", ")", ")", ":", "documents", "=", "[", "documents", "]", "for", "document", "in", "documents", ":", "document", "[", "config", ".", "ETAG", "]", "=", "document_etag", "(", "document", ",", "ignore_fields", "=", "ignore_fields", ")" ]
adds etags to documents .
train
false
49,588
def families(root=None):
    if (not root):
        root = Tkinter._default_root
    return root.tk.splitlist(root.tk.call('font', 'families'))
[ "def", "families", "(", "root", "=", "None", ")", ":", "if", "(", "not", "root", ")", ":", "root", "=", "Tkinter", ".", "_default_root", "return", "root", ".", "tk", ".", "splitlist", "(", "root", ".", "tk", ".", "call", "(", "'font'", ",", "'families'", ")", ")" ]
get font families .
train
false
49,590
def add_dns(ip, interface='Local Area Connection', index=1):
    servers = get_dns_servers(interface)
    if (servers is False):
        return False
    try:
        if (servers[(index - 1)] == ip):
            return True
    except IndexError:
        pass
    if (ip in servers):
        rm_dns(ip, interface)
    cmd = ['netsh', 'interface', 'ip', 'add', 'dns', interface, ip, 'index={0}'.format(index), 'validate=no']
    return (__salt__['cmd.retcode'](cmd, python_shell=False) == 0)
[ "def", "add_dns", "(", "ip", ",", "interface", "=", "'Local Area Connection'", ",", "index", "=", "1", ")", ":", "servers", "=", "get_dns_servers", "(", "interface", ")", "if", "(", "servers", "is", "False", ")", ":", "return", "False", "try", ":", "if", "(", "servers", "[", "(", "index", "-", "1", ")", "]", "==", "ip", ")", ":", "return", "True", "except", "IndexError", ":", "pass", "if", "(", "ip", "in", "servers", ")", ":", "rm_dns", "(", "ip", ",", "interface", ")", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'add'", ",", "'dns'", ",", "interface", ",", "ip", ",", "'index={0}'", ".", "format", "(", "index", ")", ",", "'validate=no'", "]", "return", "(", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "==", "0", ")" ]
add the dns server to the network interface . note: if the interface dns is configured by dhcp .
train
true
49,593
def _validate_methods(plugin_base_class, plugin_klass):
    expected_methods = plugin_base_class.__abstractmethods__
    plugin_methods = _get_plugin_methods(plugin_klass)
    for method in expected_methods:
        if (method not in plugin_methods):
            message = 'Class "%s" doesn\'t implement required "%s" method from the base class'
            raise IncompatiblePluginException((message % (plugin_klass.__name__, method)))
[ "def", "_validate_methods", "(", "plugin_base_class", ",", "plugin_klass", ")", ":", "expected_methods", "=", "plugin_base_class", ".", "__abstractmethods__", "plugin_methods", "=", "_get_plugin_methods", "(", "plugin_klass", ")", "for", "method", "in", "expected_methods", ":", "if", "(", "method", "not", "in", "plugin_methods", ")", ":", "message", "=", "'Class \"%s\" doesn\\'t implement required \"%s\" method from the base class'", "raise", "IncompatiblePluginException", "(", "(", "message", "%", "(", "plugin_klass", ".", "__name__", ",", "method", ")", ")", ")" ]
xxx: this is hacky but we'd like to validate that plugin_impl at least has all the *abstract* methods in plugin_base_class .
train
false
49,598
def new_codename(client, session):
    with client as c:
        c.get('/generate')
        codename = session['codename']
        c.post('/create')
    return codename
[ "def", "new_codename", "(", "client", ",", "session", ")", ":", "with", "client", "as", "c", ":", "c", ".", "get", "(", "'/generate'", ")", "codename", "=", "session", "[", "'codename'", "]", "c", ".", "post", "(", "'/create'", ")", "return", "codename" ]
helper function to go through the "generate codename" flow .
train
false
49,599
def maybe_future(x):
    if is_future(x):
        return x
    else:
        fut = Future()
        fut.set_result(x)
        return fut
[ "def", "maybe_future", "(", "x", ")", ":", "if", "is_future", "(", "x", ")", ":", "return", "x", "else", ":", "fut", "=", "Future", "(", ")", "fut", ".", "set_result", "(", "x", ")", "return", "fut" ]
converts x into a future .
train
true
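A tornado-style usage sketch; compute() is a hypothetical function that may return either a plain value or a Future:
from tornado import gen

@gen.coroutine
def caller():
    # yield works uniformly because maybe_future wraps plain values
    result = yield maybe_future(compute())
    raise gen.Return(result)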
49,600
def _safe_urlsplit(s):
    rv = urlparse.urlsplit(s)
    if (type(rv[2]) is not type(s)):
        assert hasattr(urlparse, 'clear_cache')
        urlparse.clear_cache()
        rv = urlparse.urlsplit(s)
        assert (type(rv[2]) is type(s))
    return rv
[ "def", "_safe_urlsplit", "(", "s", ")", ":", "rv", "=", "urlparse", ".", "urlsplit", "(", "s", ")", "if", "(", "type", "(", "rv", "[", "2", "]", ")", "is", "not", "type", "(", "s", ")", ")", ":", "assert", "hasattr", "(", "urlparse", ",", "'clear_cache'", ")", "urlparse", ".", "clear_cache", "(", ")", "rv", "=", "urlparse", ".", "urlsplit", "(", "s", ")", "assert", "(", "type", "(", "rv", "[", "2", "]", ")", "is", "type", "(", "s", ")", ")", "return", "rv" ]
a wrapper around urlparse.urlsplit that clears the urlparse cache and retries when the result has the wrong string type .
train
true
49,601
def p_iteration_statement_1(t): pass
[ "def", "p_iteration_statement_1", "(", "t", ")", ":", "pass" ]
iteration_statement : while lparen expression rparen statement .
train
false
49,602
def _n_colors(n, bytes_=False, cmap='hsv'):
    n_max = (2 ** 10)
    if (n > n_max):
        raise NotImplementedError(("Can't produce more than %i unique colors" % n_max))
    from matplotlib.cm import get_cmap
    cm = get_cmap(cmap, n_max)
    pos = np.linspace(0, 1, n, False)
    colors = cm(pos, bytes=bytes_)
    if bytes_:
        for (ii, c) in enumerate(colors):
            if np.any(np.all((colors[:ii] == c), 1)):
                raise RuntimeError(('Could not get %d unique colors from %s colormap. Try using a different colormap.' % (n, cmap)))
    return colors
[ "def", "_n_colors", "(", "n", ",", "bytes_", "=", "False", ",", "cmap", "=", "'hsv'", ")", ":", "n_max", "=", "(", "2", "**", "10", ")", "if", "(", "n", ">", "n_max", ")", ":", "raise", "NotImplementedError", "(", "(", "\"Can't produce more than %i unique colors\"", "%", "n_max", ")", ")", "from", "matplotlib", ".", "cm", "import", "get_cmap", "cm", "=", "get_cmap", "(", "cmap", ",", "n_max", ")", "pos", "=", "np", ".", "linspace", "(", "0", ",", "1", ",", "n", ",", "False", ")", "colors", "=", "cm", "(", "pos", ",", "bytes", "=", "bytes_", ")", "if", "bytes_", ":", "for", "(", "ii", ",", "c", ")", "in", "enumerate", "(", "colors", ")", ":", "if", "np", ".", "any", "(", "np", ".", "all", "(", "(", "colors", "[", ":", "ii", "]", "==", "c", ")", ",", "1", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Could not get %d unique colors from %s colormap. Try using a different colormap.'", "%", "(", "n", ",", "cmap", ")", ")", ")", "return", "colors" ]
produce a list of n unique rgba color tuples based on a colormap .
train
false
49,604
def require_device_memory(obj):
    if (not is_device_memory(obj)):
        raise Exception('Not a CUDA memory object.')
[ "def", "require_device_memory", "(", "obj", ")", ":", "if", "(", "not", "is_device_memory", "(", "obj", ")", ")", ":", "raise", "Exception", "(", "'Not a CUDA memory object.'", ")" ]
a sentry for methods that accept cuda memory object .
train
false
49,605
@login_required
@require_GET
def program_listing(request):
    programs_config = ProgramsApiConfig.current()
    if (not programs_config.show_program_listing):
        raise Http404
    use_catalog = waffle.switch_is_active('get_programs_from_catalog')
    meter = utils.ProgramProgressMeter(request.user, use_catalog=use_catalog)
    context = {'credentials': get_programs_credentials(request.user), 'disable_courseware_js': True, 'marketing_url': utils.get_program_marketing_url(programs_config), 'nav_hidden': True, 'programs': meter.engaged_programs(), 'progress': meter.progress, 'show_program_listing': programs_config.show_program_listing, 'uses_pattern_library': True}
    return render_to_response('learner_dashboard/programs.html', context)
[ "@", "login_required", "@", "require_GET", "def", "program_listing", "(", "request", ")", ":", "programs_config", "=", "ProgramsApiConfig", ".", "current", "(", ")", "if", "(", "not", "programs_config", ".", "show_program_listing", ")", ":", "raise", "Http404", "use_catalog", "=", "waffle", ".", "switch_is_active", "(", "'get_programs_from_catalog'", ")", "meter", "=", "utils", ".", "ProgramProgressMeter", "(", "request", ".", "user", ",", "use_catalog", "=", "use_catalog", ")", "context", "=", "{", "'credentials'", ":", "get_programs_credentials", "(", "request", ".", "user", ")", ",", "'disable_courseware_js'", ":", "True", ",", "'marketing_url'", ":", "utils", ".", "get_program_marketing_url", "(", "programs_config", ")", ",", "'nav_hidden'", ":", "True", ",", "'programs'", ":", "meter", ".", "engaged_programs", "(", ")", ",", "'progress'", ":", "meter", ".", "progress", ",", "'show_program_listing'", ":", "programs_config", ".", "show_program_listing", ",", "'uses_pattern_library'", ":", "True", "}", "return", "render_to_response", "(", "'learner_dashboard/programs.html'", ",", "context", ")" ]
view a list of programs in which the user is engaged .
train
false
49,606
def isIntersectingWithinLists(loop, loopLists):
    for loopList in loopLists:
        if getIsIntersectingWithinList(loop, loopList):
            return True
    return False
[ "def", "isIntersectingWithinLists", "(", "loop", ",", "loopLists", ")", ":", "for", "loopList", "in", "loopLists", ":", "if", "getIsIntersectingWithinList", "(", "loop", ",", "loopList", ")", ":", "return", "True", "return", "False" ]
determine if the loop is intersecting or is within the loop lists .
train
false
49,607
def get_dialect(self):
    try:
        module = getattr(__import__(('sqlalchemy.databases.%s' % self.drivername)).databases, self.drivername)
        return module.dialect
    except ImportError:
        if True:
            import pkg_resources
            for res in pkg_resources.iter_entry_points('sqlalchemy.databases'):
                if (res.name == self.drivername):
                    return res.load()
        raise
[ "def", "get_dialect", "(", "self", ")", ":", "try", ":", "module", "=", "getattr", "(", "__import__", "(", "(", "'sqlalchemy.databases.%s'", "%", "self", ".", "drivername", ")", ")", ".", "databases", ",", "self", ".", "drivername", ")", "return", "module", ".", "dialect", "except", "ImportError", ":", "if", "True", ":", "import", "pkg_resources", "for", "res", "in", "pkg_resources", ".", "iter_entry_points", "(", "'sqlalchemy.databases'", ")", ":", "if", "(", "res", ".", "name", "==", "self", ".", "drivername", ")", ":", "return", "res", ".", "load", "(", ")", "raise" ]
return the sqlalchemy database dialect class corresponding to this url's driver name .
train
false
49,608
def Shutdown(): ioloop.IOLoop.current().stop()
[ "def", "Shutdown", "(", ")", ":", "ioloop", ".", "IOLoop", ".", "current", "(", ")", ".", "stop", "(", ")" ]
shut down the watchdog process .
train
false
49,609
def get_or_create_hosted_zone(zone_name):
    zone = r53.get_hosted_zone_by_name(zone_name)
    parent_zone_name = '.'.join(zone_name.split('.')[1:])
    parent_zone = r53.get_hosted_zone_by_name(parent_zone_name)
    if args.noop:
        if parent_zone:
            print 'Would have created/updated zone: {} parent: {}'.format(zone_name, parent_zone_name)
        else:
            print 'Would have created/updated zone: {}'.format(zone_name, parent_zone_name)
        return zone
    if (not zone):
        print 'zone {} does not exist, creating'.format(zone_name)
        ts = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%SUTC')
        zone = r53.create_hosted_zone(zone_name, comment='Created by vpc_dns script - {}'.format(ts))
    if parent_zone:
        print 'Updating parent zone {}'.format(parent_zone_name)
        dns_records = set()
        dns_records.add(DNSRecord(parent_zone, zone_name, 'NS', 900, zone.NameServers))
        add_or_update_record(dns_records)
    return zone
[ "def", "get_or_create_hosted_zone", "(", "zone_name", ")", ":", "zone", "=", "r53", ".", "get_hosted_zone_by_name", "(", "zone_name", ")", "parent_zone_name", "=", "'.'", ".", "join", "(", "zone_name", ".", "split", "(", "'.'", ")", "[", "1", ":", "]", ")", "parent_zone", "=", "r53", ".", "get_hosted_zone_by_name", "(", "parent_zone_name", ")", "if", "args", ".", "noop", ":", "if", "parent_zone", ":", "print", "'Would have created/updated zone: {} parent: {}'", ".", "format", "(", "zone_name", ",", "parent_zone_name", ")", "else", ":", "print", "'Would have created/updated zone: {}'", ".", "format", "(", "zone_name", ",", "parent_zone_name", ")", "return", "zone", "if", "(", "not", "zone", ")", ":", "print", "'zone {} does not exist, creating'", ".", "format", "(", "zone_name", ")", "ts", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "'%Y-%m-%d-%H:%M:%SUTC'", ")", "zone", "=", "r53", ".", "create_hosted_zone", "(", "zone_name", ",", "comment", "=", "'Created by vpc_dns script - {}'", ".", "format", "(", "ts", ")", ")", "if", "parent_zone", ":", "print", "'Updating parent zone {}'", ".", "format", "(", "parent_zone_name", ")", "dns_records", "=", "set", "(", ")", "dns_records", ".", "add", "(", "DNSRecord", "(", "parent_zone", ",", "zone_name", ",", "'NS'", ",", "900", ",", "zone", ".", "NameServers", ")", ")", "add_or_update_record", "(", "dns_records", ")", "return", "zone" ]
creates the zone and updates the parent with the ns information in the zone . returns the created zone .
train
false
49,610
def delete_multi_async(keys, **ctx_options): return [key.delete_async(**ctx_options) for key in keys]
[ "def", "delete_multi_async", "(", "keys", ",", "**", "ctx_options", ")", ":", "return", "[", "key", ".", "delete_async", "(", "**", "ctx_options", ")", "for", "key", "in", "keys", "]" ]
deletes a sequence of keys .
train
false
49,611
def dup_cancel(f, g, K, include=True): return dmp_cancel(f, g, 0, K, include=include)
[ "def", "dup_cancel", "(", "f", ",", "g", ",", "K", ",", "include", "=", "True", ")", ":", "return", "dmp_cancel", "(", "f", ",", "g", ",", "0", ",", "K", ",", "include", "=", "include", ")" ]
cancel common factors in a rational function f/g .
train
false
49,612
def _get_serialization_name(element_name): known = _KNOWN_SERIALIZATION_XFORMS.get(element_name) if (known is not None): return known if element_name.startswith('x_ms_'): return element_name.replace('_', '-') if element_name.endswith('_id'): element_name = element_name.replace('_id', 'ID') for name in ['content_', 'last_modified', 'if_', 'cache_control']: if element_name.startswith(name): element_name = element_name.replace('_', '-_') return ''.join((name.capitalize() for name in element_name.split('_')))
[ "def", "_get_serialization_name", "(", "element_name", ")", ":", "known", "=", "_KNOWN_SERIALIZATION_XFORMS", ".", "get", "(", "element_name", ")", "if", "(", "known", "is", "not", "None", ")", ":", "return", "known", "if", "element_name", ".", "startswith", "(", "'x_ms_'", ")", ":", "return", "element_name", ".", "replace", "(", "'_'", ",", "'-'", ")", "if", "element_name", ".", "endswith", "(", "'_id'", ")", ":", "element_name", "=", "element_name", ".", "replace", "(", "'_id'", ",", "'ID'", ")", "for", "name", "in", "[", "'content_'", ",", "'last_modified'", ",", "'if_'", ",", "'cache_control'", "]", ":", "if", "element_name", ".", "startswith", "(", "name", ")", ":", "element_name", "=", "element_name", ".", "replace", "(", "'_'", ",", "'-_'", ")", "return", "''", ".", "join", "(", "(", "name", ".", "capitalize", "(", ")", "for", "name", "in", "element_name", ".", "split", "(", "'_'", ")", ")", ")" ]
converts a python name into a serializable name .
train
true
49,613
def make_analysator(f): def text_analyse(text): rv = f(text) if (not rv): return 0.0 return min(1.0, max(0.0, float(rv))) text_analyse.__doc__ = f.__doc__ return staticmethod(text_analyse)
[ "def", "make_analysator", "(", "f", ")", ":", "def", "text_analyse", "(", "text", ")", ":", "rv", "=", "f", "(", "text", ")", "if", "(", "not", "rv", ")", ":", "return", "0.0", "return", "min", "(", "1.0", ",", "max", "(", "0.0", ",", "float", "(", "rv", ")", ")", ")", "text_analyse", ".", "__doc__", "=", "f", ".", "__doc__", "return", "staticmethod", "(", "text_analyse", ")" ]
return a static text analysation function that returns float values .
train
false
49,614
def valid_sensor(value): return _valid_device(value, 'sensor')
[ "def", "valid_sensor", "(", "value", ")", ":", "return", "_valid_device", "(", "value", ",", "'sensor'", ")" ]
validate sensor configuration .
train
false
49,615
def signal_pid(pid, sig): try: os.kill(pid, sig) except OSError: pass for i in range(5): if (not pid_is_alive(pid)): return True time.sleep(1) return False
[ "def", "signal_pid", "(", "pid", ",", "sig", ")", ":", "try", ":", "os", ".", "kill", "(", "pid", ",", "sig", ")", "except", "OSError", ":", "pass", "for", "i", "in", "range", "(", "5", ")", ":", "if", "(", "not", "pid_is_alive", "(", "pid", ")", ")", ":", "return", "True", "time", ".", "sleep", "(", "1", ")", "return", "False" ]
sends a signal to a process id .
train
false
49,617
def random_uniform_variable(shape, low, high, dtype=None, name=None, seed=None): if (dtype is None): dtype = floatx() shape = tuple(map(int, shape)) tf_dtype = _convert_string_dtype(dtype) if (seed is None): seed = np.random.randint(1000000000.0) value = tf.random_uniform_initializer(low, high, dtype=tf_dtype, seed=seed)(shape) return variable(value, dtype=dtype, name=name)
[ "def", "random_uniform_variable", "(", "shape", ",", "low", ",", "high", ",", "dtype", "=", "None", ",", "name", "=", "None", ",", "seed", "=", "None", ")", ":", "if", "(", "dtype", "is", "None", ")", ":", "dtype", "=", "floatx", "(", ")", "shape", "=", "tuple", "(", "map", "(", "int", ",", "shape", ")", ")", "tf_dtype", "=", "_convert_string_dtype", "(", "dtype", ")", "if", "(", "seed", "is", "None", ")", ":", "seed", "=", "np", ".", "random", ".", "randint", "(", "1000000000.0", ")", "value", "=", "tf", ".", "random_uniform_initializer", "(", "low", ",", "high", ",", "dtype", "=", "tf_dtype", ",", "seed", "=", "seed", ")", "(", "shape", ")", "return", "variable", "(", "value", ",", "dtype", "=", "dtype", ",", "name", "=", "name", ")" ]
instantiates a keras variable filled with samples drawn from a uniform distribution and returns it .
train
false
49,620
def fix_unicode_array(arr): new_arr = [] for ind in xrange(len(arr)): if isinstance(arr[ind], (str, unicode)): new_arr.append(arr[ind].decode('utf-8', 'ignore')) elif isinstance(arr[ind], dict): new_arr.append(fix_unicode_dict(arr[ind])) else: new_arr.append(arr[ind]) return new_arr
[ "def", "fix_unicode_array", "(", "arr", ")", ":", "new_arr", "=", "[", "]", "for", "ind", "in", "xrange", "(", "len", "(", "arr", ")", ")", ":", "if", "isinstance", "(", "arr", "[", "ind", "]", ",", "(", "str", ",", "unicode", ")", ")", ":", "new_arr", ".", "append", "(", "arr", "[", "ind", "]", ".", "decode", "(", "'utf-8'", ",", "'ignore'", ")", ")", "elif", "isinstance", "(", "arr", "[", "ind", "]", ",", "dict", ")", ":", "new_arr", ".", "append", "(", "fix_unicode_dict", "(", "arr", "[", "ind", "]", ")", ")", "else", ":", "new_arr", ".", "append", "(", "arr", "[", "ind", "]", ")", "return", "new_arr" ]
iterate over the items of the array and remove invalid unicode characters .
train
false
49,621
def macro(name, filename, source, **identifiers): if (not identifiers.has_key('name')): identifiers['name'] = name source = (source % identifiers) codeplace = ('<%s (macro)>' % filename) code = compile(source, codeplace, 'exec') sm = sys.modules tprm = 'twisted.python.reflect.macros' if (not sm.has_key(tprm)): macros = new.module(tprm) sm[tprm] = macros macros.count = 0 macros = sm[tprm] macros.count += 1 macroname = ('macro_' + str(macros.count)) tprmm = ((tprm + '.') + macroname) mymod = new.module(tprmm) sys.modules[tprmm] = mymod setattr(macros, macroname, mymod) dict = mymod.__dict__ exec code in dict, dict return dict[name]
[ "def", "macro", "(", "name", ",", "filename", ",", "source", ",", "**", "identifiers", ")", ":", "if", "(", "not", "identifiers", ".", "has_key", "(", "'name'", ")", ")", ":", "identifiers", "[", "'name'", "]", "=", "name", "source", "=", "(", "source", "%", "identifiers", ")", "codeplace", "=", "(", "'<%s (macro)>'", "%", "filename", ")", "code", "=", "compile", "(", "source", ",", "codeplace", ",", "'exec'", ")", "sm", "=", "sys", ".", "modules", "tprm", "=", "'twisted.python.reflect.macros'", "if", "(", "not", "sm", ".", "has_key", "(", "tprm", ")", ")", ":", "macros", "=", "new", ".", "module", "(", "tprm", ")", "sm", "[", "tprm", "]", "=", "macros", "macros", ".", "count", "=", "0", "macros", "=", "sm", "[", "tprm", "]", "macros", ".", "count", "+=", "1", "macroname", "=", "(", "'macro_'", "+", "str", "(", "macros", ".", "count", ")", ")", "tprmm", "=", "(", "(", "tprm", "+", "'.'", ")", "+", "macroname", ")", "mymod", "=", "new", ".", "module", "(", "tprmm", ")", "sys", ".", "modules", "[", "tprmm", "]", "=", "mymod", "setattr", "(", "macros", ",", "macroname", ",", "mymod", ")", "dict", "=", "mymod", ".", "__dict__", "exec", "code", "in", "dict", ",", "dict", "return", "dict", "[", "name", "]" ]
macro: this allows you to create macro-like behaviors in python .
train
false
49,622
@pytest.mark.network def test_pip_wheel_fail_cause_of_previous_build_dir(script, data): script.pip('install', 'wheel') build = ((script.venv_path / 'build') / 'simple') os.makedirs(build) write_delete_marker_file((script.venv_path / 'build')) build.join('setup.py').write('#') result = script.pip('wheel', '--no-index', ('--find-links=%s' % data.find_links), '--build', (script.venv_path / 'build'), 'simple==3.0', expect_error=True) assert (result.returncode == PREVIOUS_BUILD_DIR_ERROR), result
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_pip_wheel_fail_cause_of_previous_build_dir", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'wheel'", ")", "build", "=", "(", "(", "script", ".", "venv_path", "/", "'build'", ")", "/", "'simple'", ")", "os", ".", "makedirs", "(", "build", ")", "write_delete_marker_file", "(", "(", "script", ".", "venv_path", "/", "'build'", ")", ")", "build", ".", "join", "(", "'setup.py'", ")", ".", "write", "(", "'#'", ")", "result", "=", "script", ".", "pip", "(", "'wheel'", ",", "'--no-index'", ",", "(", "'--find-links=%s'", "%", "data", ".", "find_links", ")", ",", "'--build'", ",", "(", "script", ".", "venv_path", "/", "'build'", ")", ",", "'simple==3.0'", ",", "expect_error", "=", "True", ")", "assert", "(", "result", ".", "returncode", "==", "PREVIOUS_BUILD_DIR_ERROR", ")", ",", "result" ]
test that pip wheel fails when building a package that has a previous build directory .
train
false
49,623
def alias_get_collections(alias_name, **kwargs): if (not isinstance(alias_name, six.string_types)): raise ValueError('Alias name must be a string') collection_aliases = [(k_v[0], k_v[1]['aliases']) for k_v in six.iteritems(cluster_status(**kwargs)['collections'])] aliases = [k_v1[0] for k_v1 in [k_v for k_v in collection_aliases if (alias_name in k_v[1])]] return aliases
[ "def", "alias_get_collections", "(", "alias_name", ",", "**", "kwargs", ")", ":", "if", "(", "not", "isinstance", "(", "alias_name", ",", "six", ".", "string_types", ")", ")", ":", "raise", "ValueError", "(", "'Alias name must be a string'", ")", "collection_aliases", "=", "[", "(", "k_v", "[", "0", "]", ",", "k_v", "[", "1", "]", "[", "'aliases'", "]", ")", "for", "k_v", "in", "six", ".", "iteritems", "(", "cluster_status", "(", "**", "kwargs", ")", "[", "'collections'", "]", ")", "]", "aliases", "=", "[", "k_v1", "[", "0", "]", "for", "k_v1", "in", "[", "k_v", "for", "k_v", "in", "collection_aliases", "if", "(", "alias_name", "in", "k_v", "[", "1", "]", ")", "]", "]", "return", "aliases" ]
get collection list for an alias . additional parameters may be passed .
train
true
49,624
def find_redirect(key): rv = (get_url(session.pop(key.lower(), None)) or get_url((current_app.config[key.upper()] or None)) or '/') return rv
[ "def", "find_redirect", "(", "key", ")", ":", "rv", "=", "(", "get_url", "(", "session", ".", "pop", "(", "key", ".", "lower", "(", ")", ",", "None", ")", ")", "or", "get_url", "(", "(", "current_app", ".", "config", "[", "key", ".", "upper", "(", ")", "]", "or", "None", ")", ")", "or", "'/'", ")", "return", "rv" ]
returns the url to redirect to after a user logs in successfully .
train
true
49,625
@pytest.mark.parametrize('inp, exp', [('x = 42', 'x = 42'), ('!!', 'ls')]) def test_preproc(inp, exp, xonsh_builtins): from xontrib.bashisms import bash_preproc xonsh_builtins.__xonsh_history__.inps = ['ls\n'] obs = bash_preproc(inp) assert (exp == obs)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'inp, exp'", ",", "[", "(", "'x = 42'", ",", "'x = 42'", ")", ",", "(", "'!!'", ",", "'ls'", ")", "]", ")", "def", "test_preproc", "(", "inp", ",", "exp", ",", "xonsh_builtins", ")", ":", "from", "xontrib", ".", "bashisms", "import", "bash_preproc", "xonsh_builtins", ".", "__xonsh_history__", ".", "inps", "=", "[", "'ls\\n'", "]", "obs", "=", "bash_preproc", "(", "inp", ")", "assert", "(", "exp", "==", "obs", ")" ]
test the bash preprocessor .
train
false
49,628
def get_index_kv_from_tuple(tuple_list, reverse=False): all_rows = [] for (prefix, e) in tuple_list: for p in e.property_list(): val = str(encode_index_pb(p.value())) if reverse: val = helper_functions.reverse_lex(val) params = [prefix, get_entity_kind(e), p.name(), val, str(encode_index_pb(e.key().path()))] index_key = get_index_key_from_params(params) p_vals = [index_key, (buffer((prefix + dbconstants.KEY_DELIMITER)) + encode_index_pb(e.key().path()))] all_rows.append(p_vals) return tuple((ii for ii in all_rows))
[ "def", "get_index_kv_from_tuple", "(", "tuple_list", ",", "reverse", "=", "False", ")", ":", "all_rows", "=", "[", "]", "for", "(", "prefix", ",", "e", ")", "in", "tuple_list", ":", "for", "p", "in", "e", ".", "property_list", "(", ")", ":", "val", "=", "str", "(", "encode_index_pb", "(", "p", ".", "value", "(", ")", ")", ")", "if", "reverse", ":", "val", "=", "helper_functions", ".", "reverse_lex", "(", "val", ")", "params", "=", "[", "prefix", ",", "get_entity_kind", "(", "e", ")", ",", "p", ".", "name", "(", ")", ",", "val", ",", "str", "(", "encode_index_pb", "(", "e", ".", "key", "(", ")", ".", "path", "(", ")", ")", ")", "]", "index_key", "=", "get_index_key_from_params", "(", "params", ")", "p_vals", "=", "[", "index_key", ",", "(", "buffer", "(", "(", "prefix", "+", "dbconstants", ".", "KEY_DELIMITER", ")", ")", "+", "encode_index_pb", "(", "e", ".", "key", "(", ")", ".", "path", "(", ")", ")", ")", "]", "all_rows", ".", "append", "(", "p_vals", ")", "return", "tuple", "(", "(", "ii", "for", "ii", "in", "all_rows", ")", ")" ]
returns key/value pairs of indexes for a set of entities .
train
false
49,629
def split_on_feat(artist): regex = re.compile(plugins.feat_tokens(), re.IGNORECASE) parts = [s.strip() for s in regex.split(artist, 1)] if (len(parts) == 1): return (parts[0], None) else: return tuple(parts)
[ "def", "split_on_feat", "(", "artist", ")", ":", "regex", "=", "re", ".", "compile", "(", "plugins", ".", "feat_tokens", "(", ")", ",", "re", ".", "IGNORECASE", ")", "parts", "=", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "regex", ".", "split", "(", "artist", ",", "1", ")", "]", "if", "(", "len", "(", "parts", ")", "==", "1", ")", ":", "return", "(", "parts", "[", "0", "]", ",", "None", ")", "else", ":", "return", "tuple", "(", "parts", ")" ]
given an artist string , split the main artist from any featured artist and return both as a tuple .
train
false
49,630
def resize_mig(mig, size): changed = False return_data = [] actions_filter = ['CREATING', 'DELETING'] if mig.resize(size): changed = True return_data = _get_instance_list(mig, filter_list=actions_filter) return (changed, return_data)
[ "def", "resize_mig", "(", "mig", ",", "size", ")", ":", "changed", "=", "False", "return_data", "=", "[", "]", "actions_filter", "=", "[", "'CREATING'", ",", "'DELETING'", "]", "if", "mig", ".", "resize", "(", "size", ")", ":", "changed", "=", "True", "return_data", "=", "_get_instance_list", "(", "mig", ",", "filter_list", "=", "actions_filter", ")", "return", "(", "changed", ",", "return_data", ")" ]
resize a managed instance group .
train
false
49,632
def is_import_error(handler): names = None if isinstance(handler.type, astroid.Tuple): names = [name for name in handler.type.elts if isinstance(name, astroid.Name)] elif isinstance(handler.type, astroid.Name): names = [handler.type] else: return for name in names: try: for infered in name.infer(): if (isinstance(infered, astroid.Class) and inherit_from_std_ex(infered) and (infered.name == 'ImportError')): return True except astroid.InferenceError: continue
[ "def", "is_import_error", "(", "handler", ")", ":", "names", "=", "None", "if", "isinstance", "(", "handler", ".", "type", ",", "astroid", ".", "Tuple", ")", ":", "names", "=", "[", "name", "for", "name", "in", "handler", ".", "type", ".", "elts", "if", "isinstance", "(", "name", ",", "astroid", ".", "Name", ")", "]", "elif", "isinstance", "(", "handler", ".", "type", ",", "astroid", ".", "Name", ")", ":", "names", "=", "[", "handler", ".", "type", "]", "else", ":", "return", "for", "name", "in", "names", ":", "try", ":", "for", "infered", "in", "name", ".", "infer", "(", ")", ":", "if", "(", "isinstance", "(", "infered", ",", "astroid", ".", "Class", ")", "and", "inherit_from_std_ex", "(", "infered", ")", "and", "(", "infered", ".", "name", "==", "'ImportError'", ")", ")", ":", "return", "True", "except", "astroid", ".", "InferenceError", ":", "continue" ]
check if the given exception handler catches importerror .
train
false
49,633
@_memoize_get_file_map def get_file_map(node, file_map): for (key, value) in file_map: (yield (key, value, node._id)) for child in node.nodes_primary: for (key, value, node_id) in get_file_map(child): (yield (key, value, node_id))
[ "@", "_memoize_get_file_map", "def", "get_file_map", "(", "node", ",", "file_map", ")", ":", "for", "(", "key", ",", "value", ")", "in", "file_map", ":", "(", "yield", "(", "key", ",", "value", ",", "node", ".", "_id", ")", ")", "for", "child", "in", "node", ".", "nodes_primary", ":", "for", "(", "key", ",", "value", ",", "node_id", ")", "in", "get_file_map", "(", "child", ")", ":", "(", "yield", "(", "key", ",", "value", ",", "node_id", ")", ")" ]
note:: file_map is injected implicitly by the decorator; this method is called like: get_file_map(node) .
train
false
49,634
@pytest.mark.parametrize('nplurals, decompressed_value', [(1, [None]), (2, [None, None]), (3, [None, None, None]), (4, [None, None, None, None])]) def test_multistringwidget_decompress_none(nplurals, decompressed_value): widget = MultiStringWidget(nplurals=nplurals) assert (widget.decompress(None) == decompressed_value)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'nplurals, decompressed_value'", ",", "[", "(", "1", ",", "[", "None", "]", ")", ",", "(", "2", ",", "[", "None", ",", "None", "]", ")", ",", "(", "3", ",", "[", "None", ",", "None", ",", "None", "]", ")", ",", "(", "4", ",", "[", "None", ",", "None", ",", "None", ",", "None", "]", ")", "]", ")", "def", "test_multistringwidget_decompress_none", "(", "nplurals", ",", "decompressed_value", ")", ":", "widget", "=", "MultiStringWidget", "(", "nplurals", "=", "nplurals", ")", "assert", "(", "widget", ".", "decompress", "(", "None", ")", "==", "decompressed_value", ")" ]
tests that multistringwidget decompresses none values .
train
false
49,635
def readFromFile(fileName, length=(-1)): global memory_files data = None if (not (fileName in memory_files)): log.debug(("Memory File `%s' does not exist (yet?)." % fileName)) return None log.debug(("Opening memory file `%s' for reading." % fileName)) memory_files[fileName].seek(0) data = memory_files[fileName].read(length) return data
[ "def", "readFromFile", "(", "fileName", ",", "length", "=", "(", "-", "1", ")", ")", ":", "global", "memory_files", "data", "=", "None", "if", "(", "not", "(", "fileName", "in", "memory_files", ")", ")", ":", "log", ".", "debug", "(", "(", "\"Memory File `%s' does not exist (yet?).\"", "%", "fileName", ")", ")", "return", "None", "log", ".", "debug", "(", "(", "\"Opening memory file `%s' for reading.\"", "%", "fileName", ")", ")", "memory_files", "[", "fileName", "]", ".", "seek", "(", "0", ")", "data", "=", "memory_files", "[", "fileName", "]", ".", "read", "(", "length", ")", "return", "data" ]
read length amount of bytes from the given filename ; if length equals -1 , read the whole file .
train
false
49,637
def re_show(regexp, string, left='{', right='}'): print(re.compile(regexp, re.M).sub(((left + '\\g<0>') + right), string.rstrip()))
[ "def", "re_show", "(", "regexp", ",", "string", ",", "left", "=", "'{'", ",", "right", "=", "'}'", ")", ":", "print", "(", "re", ".", "compile", "(", "regexp", ",", "re", ".", "M", ")", ".", "sub", "(", "(", "(", "left", "+", "'\\\\g<0>'", ")", "+", "right", ")", ",", "string", ".", "rstrip", "(", ")", ")", ")" ]
print a string with markers surrounding the matched substrings .
train
false
49,638
def _build_nodes_inner_for_one_menu(nodes, menu_class_name): done_nodes = {} final_nodes = [] list_total_length = len(nodes) while nodes: should_add_to_final_list = True node = nodes.pop(0) node._counter = (getattr(node, '_counter', 0) + 1) if (not node.namespace): node.namespace = menu_class_name if (node.namespace not in done_nodes): done_nodes[node.namespace] = {} if (node.parent_id in done_nodes[node.namespace]): if (not node.parent_namespace): node.parent_namespace = menu_class_name parent = done_nodes[node.namespace][node.parent_id] parent.children.append(node) node.parent = parent elif node.parent_id: if (node._counter < list_total_length): nodes.append(node) should_add_to_final_list = False if should_add_to_final_list: final_nodes.append(node) done_nodes[node.namespace][node.id] = node return final_nodes
[ "def", "_build_nodes_inner_for_one_menu", "(", "nodes", ",", "menu_class_name", ")", ":", "done_nodes", "=", "{", "}", "final_nodes", "=", "[", "]", "list_total_length", "=", "len", "(", "nodes", ")", "while", "nodes", ":", "should_add_to_final_list", "=", "True", "node", "=", "nodes", ".", "pop", "(", "0", ")", "node", ".", "_counter", "=", "(", "getattr", "(", "node", ",", "'_counter'", ",", "0", ")", "+", "1", ")", "if", "(", "not", "node", ".", "namespace", ")", ":", "node", ".", "namespace", "=", "menu_class_name", "if", "(", "node", ".", "namespace", "not", "in", "done_nodes", ")", ":", "done_nodes", "[", "node", ".", "namespace", "]", "=", "{", "}", "if", "(", "node", ".", "parent_id", "in", "done_nodes", "[", "node", ".", "namespace", "]", ")", ":", "if", "(", "not", "node", ".", "parent_namespace", ")", ":", "node", ".", "parent_namespace", "=", "menu_class_name", "parent", "=", "done_nodes", "[", "node", ".", "namespace", "]", "[", "node", ".", "parent_id", "]", "parent", ".", "children", ".", "append", "(", "node", ")", "node", ".", "parent", "=", "parent", "elif", "node", ".", "parent_id", ":", "if", "(", "node", ".", "_counter", "<", "list_total_length", ")", ":", "nodes", ".", "append", "(", "node", ")", "should_add_to_final_list", "=", "False", "if", "should_add_to_final_list", ":", "final_nodes", ".", "append", "(", "node", ")", "done_nodes", "[", "node", ".", "namespace", "]", "[", "node", ".", "id", "]", "=", "node", "return", "final_nodes" ]
this is an easier-to-test "inner loop" that builds the menu tree structure for one menu .
train
false
49,639
def _increment_provider_generation(conn, rp): rp_gen = rp.generation new_generation = (rp_gen + 1) upd_stmt = _RP_TBL.update().where(sa.and_((_RP_TBL.c.id == rp.id), (_RP_TBL.c.generation == rp_gen))).values(generation=new_generation) res = conn.execute(upd_stmt) if (res.rowcount != 1): raise exception.ConcurrentUpdateDetected return new_generation
[ "def", "_increment_provider_generation", "(", "conn", ",", "rp", ")", ":", "rp_gen", "=", "rp", ".", "generation", "new_generation", "=", "(", "rp_gen", "+", "1", ")", "upd_stmt", "=", "_RP_TBL", ".", "update", "(", ")", ".", "where", "(", "sa", ".", "and_", "(", "(", "_RP_TBL", ".", "c", ".", "id", "==", "rp", ".", "id", ")", ",", "(", "_RP_TBL", ".", "c", ".", "generation", "==", "rp_gen", ")", ")", ")", ".", "values", "(", "generation", "=", "new_generation", ")", "res", "=", "conn", ".", "execute", "(", "upd_stmt", ")", "if", "(", "res", ".", "rowcount", "!=", "1", ")", ":", "raise", "exception", ".", "ConcurrentUpdateDetected", "return", "new_generation" ]
increments the supplied provider's generation value .
train
false
49,640
def mklabel(device, label_type): if (label_type not in set(['aix', 'amiga', 'bsd', 'dvh', 'gpt', 'loop', 'mac', 'msdos', 'pc98', 'sun'])): raise CommandExecutionError('Invalid label_type passed to partition.mklabel') cmd = ('parted', '-m', '-s', device, 'mklabel', label_type) out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() return out
[ "def", "mklabel", "(", "device", ",", "label_type", ")", ":", "if", "(", "label_type", "not", "in", "set", "(", "[", "'aix'", ",", "'amiga'", ",", "'bsd'", ",", "'dvh'", ",", "'gpt'", ",", "'loop'", ",", "'mac'", ",", "'msdos'", ",", "'pc98'", ",", "'sun'", "]", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Invalid label_type passed to partition.mklabel'", ")", "cmd", "=", "(", "'parted'", ",", "'-m'", ",", "'-s'", ",", "device", ",", "'mklabel'", ",", "label_type", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "return", "out" ]
create a new disklabel of label_type .
train
true
49,642
@requires_application() def test_colorbar_draw(): with TestingCanvas() as c: colorbar_top = create_colorbar(pos=(50, 50), size=(60, 4), orientation='top') c.draw_visual(colorbar_top) assert_image_approved(c.render(), 'visuals/colorbar/top.png') colorbar_top.parent = None colorbar_bottom = create_colorbar(pos=(50, 50), size=(60, 4), orientation='bottom') c.draw_visual(colorbar_bottom) assert_image_approved(c.render(), 'visuals/colorbar/bottom.png') colorbar_bottom.parent = None colorbar_left = create_colorbar(pos=(50, 50), size=(60, 4), orientation='left') c.draw_visual(colorbar_left) assert_image_approved(c.render(), 'visuals/colorbar/left.png') colorbar_left.parent = None colorbar_right = create_colorbar(pos=(50, 50), size=(60, 4), orientation='right') c.draw_visual(colorbar_right) assert_image_approved(c.render(), 'visuals/colorbar/right.png')
[ "@", "requires_application", "(", ")", "def", "test_colorbar_draw", "(", ")", ":", "with", "TestingCanvas", "(", ")", "as", "c", ":", "colorbar_top", "=", "create_colorbar", "(", "pos", "=", "(", "50", ",", "50", ")", ",", "size", "=", "(", "60", ",", "4", ")", ",", "orientation", "=", "'top'", ")", "c", ".", "draw_visual", "(", "colorbar_top", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/colorbar/top.png'", ")", "colorbar_top", ".", "parent", "=", "None", "colorbar_bottom", "=", "create_colorbar", "(", "pos", "=", "(", "50", ",", "50", ")", ",", "size", "=", "(", "60", ",", "4", ")", ",", "orientation", "=", "'bottom'", ")", "c", ".", "draw_visual", "(", "colorbar_bottom", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/colorbar/bottom.png'", ")", "colorbar_bottom", ".", "parent", "=", "None", "colorbar_left", "=", "create_colorbar", "(", "pos", "=", "(", "50", ",", "50", ")", ",", "size", "=", "(", "60", ",", "4", ")", ",", "orientation", "=", "'left'", ")", "c", ".", "draw_visual", "(", "colorbar_left", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/colorbar/left.png'", ")", "colorbar_left", ".", "parent", "=", "None", "colorbar_right", "=", "create_colorbar", "(", "pos", "=", "(", "50", ",", "50", ")", ",", "size", "=", "(", "60", ",", "4", ")", ",", "orientation", "=", "'right'", ")", "c", ".", "draw_visual", "(", "colorbar_right", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/colorbar/right.png'", ")" ]
test drawing colorbar without transform using colorbarvisual .
train
false
49,644
@open_file(0, mode='rb') def read_gml(path, label='label', destringizer=None): def filter_lines(lines): for line in lines: try: line = line.decode('ascii') except UnicodeDecodeError: raise NetworkXError('input is not ASCII-encoded') if (not isinstance(line, str)): lines = str(lines) if (line and (line[(-1)] == '\n')): line = line[:(-1)] (yield line) G = parse_gml_lines(filter_lines(path), label, destringizer) return G
[ "@", "open_file", "(", "0", ",", "mode", "=", "'rb'", ")", "def", "read_gml", "(", "path", ",", "label", "=", "'label'", ",", "destringizer", "=", "None", ")", ":", "def", "filter_lines", "(", "lines", ")", ":", "for", "line", "in", "lines", ":", "try", ":", "line", "=", "line", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "raise", "NetworkXError", "(", "'input is not ASCII-encoded'", ")", "if", "(", "not", "isinstance", "(", "line", ",", "str", ")", ")", ":", "lines", "=", "str", "(", "lines", ")", "if", "(", "line", "and", "(", "line", "[", "(", "-", "1", ")", "]", "==", "'\\n'", ")", ")", ":", "line", "=", "line", "[", ":", "(", "-", "1", ")", "]", "(", "yield", "line", ")", "G", "=", "parse_gml_lines", "(", "filter_lines", "(", "path", ")", ",", "label", ",", "destringizer", ")", "return", "G" ]
read graph in gml format from path .
train
false
49,645
def makeRadialMatrix(matrixSize): oneStep = (2.0 / (matrixSize - 1)) (xx, yy) = (numpy.mgrid[0:(2 + oneStep):oneStep, 0:(2 + oneStep):oneStep] - 1.0) rad = numpy.sqrt(((xx ** 2) + (yy ** 2))) return rad
[ "def", "makeRadialMatrix", "(", "matrixSize", ")", ":", "oneStep", "=", "(", "2.0", "/", "(", "matrixSize", "-", "1", ")", ")", "(", "xx", ",", "yy", ")", "=", "(", "numpy", ".", "mgrid", "[", "0", ":", "(", "2", "+", "oneStep", ")", ":", "oneStep", ",", "0", ":", "(", "2", "+", "oneStep", ")", ":", "oneStep", "]", "-", "1.0", ")", "rad", "=", "numpy", ".", "sqrt", "(", "(", "(", "xx", "**", "2", ")", "+", "(", "yy", "**", "2", ")", ")", ")", "return", "rad" ]
generate a square matrix where each element's value is its distance from the centre of the matrix .
train
false
49,647
def create_inapp_receipt(contrib): if contrib.is_inapp_simulation(): storedata = {'id': 0, 'contrib': int(contrib.pk), 'inapp_id': contrib.inapp_product.guid} return create_test_receipt(settings.SITE_URL, 'ok', storedata=storedata) return create_receipt(contrib.addon, None, 'anonymous-user', flavour='inapp', contrib=contrib)
[ "def", "create_inapp_receipt", "(", "contrib", ")", ":", "if", "contrib", ".", "is_inapp_simulation", "(", ")", ":", "storedata", "=", "{", "'id'", ":", "0", ",", "'contrib'", ":", "int", "(", "contrib", ".", "pk", ")", ",", "'inapp_id'", ":", "contrib", ".", "inapp_product", ".", "guid", "}", "return", "create_test_receipt", "(", "settings", ".", "SITE_URL", ",", "'ok'", ",", "storedata", "=", "storedata", ")", "return", "create_receipt", "(", "contrib", ".", "addon", ",", "None", ",", "'anonymous-user'", ",", "flavour", "=", "'inapp'", ",", "contrib", "=", "contrib", ")" ]
creates a receipt for an in-app purchase .
train
false
49,649
def inverse_sigmoid_numpy(x): return np.log((x / (1.0 - x)))
[ "def", "inverse_sigmoid_numpy", "(", "x", ")", ":", "return", "np", ".", "log", "(", "(", "x", "/", "(", "1.0", "-", "x", ")", ")", ")" ]
numpy implementation of the inverse of the logistic sigmoid function .
train
false
49,652
def test_error_on_directory_to_FileLink(): td = mkdtemp() nt.assert_raises(ValueError, display.FileLink, td)
[ "def", "test_error_on_directory_to_FileLink", "(", ")", ":", "td", "=", "mkdtemp", "(", ")", "nt", ".", "assert_raises", "(", "ValueError", ",", "display", ".", "FileLink", ",", "td", ")" ]
filelink: raises error when passed directory .
train
false
49,653
def remove_package(package, image=None, restart=False): cmd = ['DISM', '/Quiet', ('/Image:{0}'.format(image) if image else '/Online'), '/Remove-Package'] if (not restart): cmd.append('/NoRestart') if ('~' in package): cmd.append('/PackageName:{0}'.format(package)) else: cmd.append('/PackagePath:{0}'.format(package)) return __salt__['cmd.run_all'](cmd)
[ "def", "remove_package", "(", "package", ",", "image", "=", "None", ",", "restart", "=", "False", ")", ":", "cmd", "=", "[", "'DISM'", ",", "'/Quiet'", ",", "(", "'/Image:{0}'", ".", "format", "(", "image", ")", "if", "image", "else", "'/Online'", ")", ",", "'/Remove-Package'", "]", "if", "(", "not", "restart", ")", ":", "cmd", ".", "append", "(", "'/NoRestart'", ")", "if", "(", "'~'", "in", "package", ")", ":", "cmd", ".", "append", "(", "'/PackageName:{0}'", ".", "format", "(", "package", ")", ")", "else", ":", "cmd", ".", "append", "(", "'/PackagePath:{0}'", ".", "format", "(", "package", ")", ")", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")" ]
uninstall a package . args: package : the full path to the package .
train
true
49,654
def upload_students_csv(_xmodule_instance_args, _entry_id, course_id, task_input, action_name): start_time = time() start_date = datetime.now(UTC) enrolled_students = CourseEnrollment.objects.users_enrolled_in(course_id) task_progress = TaskProgress(action_name, enrolled_students.count(), start_time) current_step = {'step': 'Calculating Profile Info'} task_progress.update_task_state(extra_meta=current_step) query_features = task_input student_data = enrolled_students_features(course_id, query_features) (header, rows) = format_dictlist(student_data, query_features) task_progress.attempted = task_progress.succeeded = len(rows) task_progress.skipped = (task_progress.total - task_progress.attempted) rows.insert(0, header) current_step = {'step': 'Uploading CSV'} task_progress.update_task_state(extra_meta=current_step) upload_csv_to_report_store(rows, 'student_profile_info', course_id, start_date) return task_progress.update_task_state(extra_meta=current_step)
[ "def", "upload_students_csv", "(", "_xmodule_instance_args", ",", "_entry_id", ",", "course_id", ",", "task_input", ",", "action_name", ")", ":", "start_time", "=", "time", "(", ")", "start_date", "=", "datetime", ".", "now", "(", "UTC", ")", "enrolled_students", "=", "CourseEnrollment", ".", "objects", ".", "users_enrolled_in", "(", "course_id", ")", "task_progress", "=", "TaskProgress", "(", "action_name", ",", "enrolled_students", ".", "count", "(", ")", ",", "start_time", ")", "current_step", "=", "{", "'step'", ":", "'Calculating Profile Info'", "}", "task_progress", ".", "update_task_state", "(", "extra_meta", "=", "current_step", ")", "query_features", "=", "task_input", "student_data", "=", "enrolled_students_features", "(", "course_id", ",", "query_features", ")", "(", "header", ",", "rows", ")", "=", "format_dictlist", "(", "student_data", ",", "query_features", ")", "task_progress", ".", "attempted", "=", "task_progress", ".", "succeeded", "=", "len", "(", "rows", ")", "task_progress", ".", "skipped", "=", "(", "task_progress", ".", "total", "-", "task_progress", ".", "attempted", ")", "rows", ".", "insert", "(", "0", ",", "header", ")", "current_step", "=", "{", "'step'", ":", "'Uploading CSV'", "}", "task_progress", ".", "update_task_state", "(", "extra_meta", "=", "current_step", ")", "upload_csv_to_report_store", "(", "rows", ",", "'student_profile_info'", ",", "course_id", ",", "start_date", ")", "return", "task_progress", ".", "update_task_state", "(", "extra_meta", "=", "current_step", ")" ]
for a given course_id , generate a csv file containing profile information for all enrolled students and store it using a report store .
train
false
49,656
def ensure_osf_files(settings): settings.COPY_GIT_REPOS = True if ('osffiles' not in settings.ADDONS_REQUESTED): settings.ADDONS_REQUESTED.append('osffiles')
[ "def", "ensure_osf_files", "(", "settings", ")", ":", "settings", ".", "COPY_GIT_REPOS", "=", "True", "if", "(", "'osffiles'", "not", "in", "settings", ".", "ADDONS_REQUESTED", ")", ":", "settings", ".", "ADDONS_REQUESTED", ".", "append", "(", "'osffiles'", ")" ]
ensure osffiles is enabled for access to legacy models .
train
false