id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
45,412
def load_tool_elements_from_path(path, load_exception_handler=load_exception_handler, recursive=False, register_load_errors=False): return _load_tools_from_path(path, load_exception_handler=load_exception_handler, recursive=recursive, register_load_errors=register_load_errors, loader_func=loader.load_tool, enable_beta_formats=False)
[ "def", "load_tool_elements_from_path", "(", "path", ",", "load_exception_handler", "=", "load_exception_handler", ",", "recursive", "=", "False", ",", "register_load_errors", "=", "False", ")", ":", "return", "_load_tools_from_path", "(", "path", ",", "load_exception_handler", "=", "load_exception_handler", ",", "recursive", "=", "recursive", ",", "register_load_errors", "=", "register_load_errors", ",", "loader_func", "=", "loader", ".", "load_tool", ",", "enable_beta_formats", "=", "False", ")" ]
walk a directory and load tool xml elements .
train
false
45,413
def _permute_observations(x, y, num_perms): vals = hstack([array(x), array(y)]) lenx = len(x) vals.sort() inds = arange(vals.size) (xs, ys) = ([], []) for i in range(num_perms): shuffle(inds) xs.append(vals[inds[:lenx]]) ys.append(vals[inds[lenx:]]) return (xs, ys)
[ "def", "_permute_observations", "(", "x", ",", "y", ",", "num_perms", ")", ":", "vals", "=", "hstack", "(", "[", "array", "(", "x", ")", ",", "array", "(", "y", ")", "]", ")", "lenx", "=", "len", "(", "x", ")", "vals", ".", "sort", "(", ")", "inds", "=", "arange", "(", "vals", ".", "size", ")", "(", "xs", ",", "ys", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", "i", "in", "range", "(", "num_perms", ")", ":", "shuffle", "(", "inds", ")", "xs", ".", "append", "(", "vals", "[", "inds", "[", ":", "lenx", "]", "]", ")", "ys", ".", "append", "(", "vals", "[", "inds", "[", "lenx", ":", "]", "]", ")", "return", "(", "xs", ",", "ys", ")" ]
return num_perms pairs of permuted vectors x .
train
false
45,414
def stream_list(lst): def sublist_iterator(start, stop): return iter(lst[start:stop]) start = 0 while True: stop = len(lst) (yield sublist_iterator(start, stop)) start = stop
[ "def", "stream_list", "(", "lst", ")", ":", "def", "sublist_iterator", "(", "start", ",", "stop", ")", ":", "return", "iter", "(", "lst", "[", "start", ":", "stop", "]", ")", "start", "=", "0", "while", "True", ":", "stop", "=", "len", "(", "lst", ")", "(", "yield", "sublist_iterator", "(", "start", ",", "stop", ")", ")", "start", "=", "stop" ]
given a list .
train
false
45,417
def getLoopsFromLoopsDirection(isWiddershins, loops): directionalLoops = [] for loop in loops: if (euclidean.isWiddershins(loop) == isWiddershins): directionalLoops.append(loop) return directionalLoops
[ "def", "getLoopsFromLoopsDirection", "(", "isWiddershins", ",", "loops", ")", ":", "directionalLoops", "=", "[", "]", "for", "loop", "in", "loops", ":", "if", "(", "euclidean", ".", "isWiddershins", "(", "loop", ")", "==", "isWiddershins", ")", ":", "directionalLoops", ".", "append", "(", "loop", ")", "return", "directionalLoops" ]
get the loops going round in a given direction .
train
false
45,418
def write_trunc_qual(trunc_qual_scores, qual_out_fp, seq_order): qual_line_size = 60 qual_out = open(qual_out_fp, 'w') for label in seq_order: trunc_label = label.split()[0].strip() current_trunc_qual_scores = trunc_qual_scores[trunc_label] qual_out.write(('>%s\n' % label)) current_qual_scores_lines = [] for slice in range(0, len(trunc_qual_scores[trunc_label]), qual_line_size): current_segment = current_trunc_qual_scores[slice:(slice + qual_line_size)] current_qual_scores_lines.append(' '.join(current_segment)) qual_out.write('\n'.join(current_qual_scores_lines)) qual_out.write('\n')
[ "def", "write_trunc_qual", "(", "trunc_qual_scores", ",", "qual_out_fp", ",", "seq_order", ")", ":", "qual_line_size", "=", "60", "qual_out", "=", "open", "(", "qual_out_fp", ",", "'w'", ")", "for", "label", "in", "seq_order", ":", "trunc_label", "=", "label", ".", "split", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "current_trunc_qual_scores", "=", "trunc_qual_scores", "[", "trunc_label", "]", "qual_out", ".", "write", "(", "(", "'>%s\\n'", "%", "label", ")", ")", "current_qual_scores_lines", "=", "[", "]", "for", "slice", "in", "range", "(", "0", ",", "len", "(", "trunc_qual_scores", "[", "trunc_label", "]", ")", ",", "qual_line_size", ")", ":", "current_segment", "=", "current_trunc_qual_scores", "[", "slice", ":", "(", "slice", "+", "qual_line_size", ")", "]", "current_qual_scores_lines", ".", "append", "(", "' '", ".", "join", "(", "current_segment", ")", ")", "qual_out", ".", "write", "(", "'\\n'", ".", "join", "(", "current_qual_scores_lines", ")", ")", "qual_out", ".", "write", "(", "'\\n'", ")" ]
writes truncated quality score files out in proper format trunc_qual_scores: dict of seq label: numpy array of scores as ints qual_out_fp: output filepath to write truncated quality scores to seq_order: list of full fasta labels to write to output filepath and maintain the same order as input quality file .
train
false
45,419
def _resize(masked, new_size): new_array = ma.zeros((new_size,), dtype=masked.dtype) length = min(len(masked), new_size) try: new_array.data[:length] = masked.data[:length] except TypeError: new_array[:length] = masked[:length] else: if (length != 0): new_array.mask[:length] = masked.mask[:length] return new_array
[ "def", "_resize", "(", "masked", ",", "new_size", ")", ":", "new_array", "=", "ma", ".", "zeros", "(", "(", "new_size", ",", ")", ",", "dtype", "=", "masked", ".", "dtype", ")", "length", "=", "min", "(", "len", "(", "masked", ")", ",", "new_size", ")", "try", ":", "new_array", ".", "data", "[", ":", "length", "]", "=", "masked", ".", "data", "[", ":", "length", "]", "except", "TypeError", ":", "new_array", "[", ":", "length", "]", "=", "masked", "[", ":", "length", "]", "else", ":", "if", "(", "length", "!=", "0", ")", ":", "new_array", ".", "mask", "[", ":", "length", "]", "=", "masked", ".", "mask", "[", ":", "length", "]", "return", "new_array" ]
masked arrays can not be resized inplace .
train
false
45,420
def _compute_multivariate_sample_pacf(endog, maxlag): sample_autocovariances = _compute_multivariate_sample_acovf(endog, maxlag) return _compute_multivariate_pacf_from_autocovariances(sample_autocovariances)
[ "def", "_compute_multivariate_sample_pacf", "(", "endog", ",", "maxlag", ")", ":", "sample_autocovariances", "=", "_compute_multivariate_sample_acovf", "(", "endog", ",", "maxlag", ")", "return", "_compute_multivariate_pacf_from_autocovariances", "(", "sample_autocovariances", ")" ]
computer multivariate sample partial autocorrelations parameters endog : array_like sample data on which to compute sample autocovariances .
train
false
45,421
def search_collections(query, limit, sort=None, cursor=None): return search_services.search(query, SEARCH_INDEX_COLLECTIONS, cursor, limit, sort, ids_only=True)
[ "def", "search_collections", "(", "query", ",", "limit", ",", "sort", "=", "None", ",", "cursor", "=", "None", ")", ":", "return", "search_services", ".", "search", "(", "query", ",", "SEARCH_INDEX_COLLECTIONS", ",", "cursor", ",", "limit", ",", "sort", ",", "ids_only", "=", "True", ")" ]
searches through the available collections .
train
false
45,422
def truncate_week(dt, measure): return truncate_day(dt, (measure * 7))
[ "def", "truncate_week", "(", "dt", ",", "measure", ")", ":", "return", "truncate_day", "(", "dt", ",", "(", "measure", "*", "7", ")", ")" ]
truncate by weeks .
train
false
45,424
def makeColor(color): return make_color(color)
[ "def", "makeColor", "(", "color", ")", ":", "return", "make_color", "(", "color", ")" ]
an old name for the make_color function .
train
false
45,425
def _get_plural_forms(js_translations): plural = None n_plural = 2 if ('' in js_translations._catalog): for l in js_translations._catalog[''].split('\n'): if l.startswith('Plural-Forms:'): plural = l.split(':', 1)[1].strip() print ('plural is %s' % plural) if (plural is not None): for raw_element in plural.split(';'): element = raw_element.strip() if element.startswith('nplurals='): n_plural = int(element.split('=', 1)[1]) elif element.startswith('plural='): plural = element.split('=', 1)[1] print ('plural is now %s' % plural) else: n_plural = 2 plural = '(n == 1) ? 0 : 1' return (plural, n_plural)
[ "def", "_get_plural_forms", "(", "js_translations", ")", ":", "plural", "=", "None", "n_plural", "=", "2", "if", "(", "''", "in", "js_translations", ".", "_catalog", ")", ":", "for", "l", "in", "js_translations", ".", "_catalog", "[", "''", "]", ".", "split", "(", "'\\n'", ")", ":", "if", "l", ".", "startswith", "(", "'Plural-Forms:'", ")", ":", "plural", "=", "l", ".", "split", "(", "':'", ",", "1", ")", "[", "1", "]", ".", "strip", "(", ")", "print", "(", "'plural is %s'", "%", "plural", ")", "if", "(", "plural", "is", "not", "None", ")", ":", "for", "raw_element", "in", "plural", ".", "split", "(", "';'", ")", ":", "element", "=", "raw_element", ".", "strip", "(", ")", "if", "element", ".", "startswith", "(", "'nplurals='", ")", ":", "n_plural", "=", "int", "(", "element", ".", "split", "(", "'='", ",", "1", ")", "[", "1", "]", ")", "elif", "element", ".", "startswith", "(", "'plural='", ")", ":", "plural", "=", "element", ".", "split", "(", "'='", ",", "1", ")", "[", "1", "]", "print", "(", "'plural is now %s'", "%", "plural", ")", "else", ":", "n_plural", "=", "2", "plural", "=", "'(n == 1) ? 0 : 1'", "return", "(", "plural", ",", "n_plural", ")" ]
extracts the parameters for what constitutes a plural .
train
false
45,426
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
45,427
def split_name_version(name): prefix = '' if (name[0] in ['-', '~', '+']): prefix = name[0] name = name[1:] version_check = re.compile('^(.*?)((?:<|>|<=|>=|=)[0-9.-]*)?$') try: reres = version_check.match(name) (name, version) = reres.groups() return (prefix, name, version) except: return (prefix, name, None)
[ "def", "split_name_version", "(", "name", ")", ":", "prefix", "=", "''", "if", "(", "name", "[", "0", "]", "in", "[", "'-'", ",", "'~'", ",", "'+'", "]", ")", ":", "prefix", "=", "name", "[", "0", "]", "name", "=", "name", "[", "1", ":", "]", "version_check", "=", "re", ".", "compile", "(", "'^(.*?)((?:<|>|<=|>=|=)[0-9.-]*)?$'", ")", "try", ":", "reres", "=", "version_check", ".", "match", "(", "name", ")", "(", "name", ",", "version", ")", "=", "reres", ".", "groups", "(", ")", "return", "(", "prefix", ",", "name", ",", "version", ")", "except", ":", "return", "(", "prefix", ",", "name", ",", "None", ")" ]
splits of the package name and desired version example formats: - docker>=1 .
train
false
45,428
def test_quantize_api(method, prec, exprange, restricted_range, itr, stat): for op in all_binary(prec, restricted_range, itr): for rounding in RoundModes: c = randcontext(exprange) quantizeop = (op[0], op[1], rounding, c) t = TestSet(method, quantizeop) try: if (not convert(t)): continue callfuncs(t) verify(t, stat) except VerifyError as err: log(err)
[ "def", "test_quantize_api", "(", "method", ",", "prec", ",", "exprange", ",", "restricted_range", ",", "itr", ",", "stat", ")", ":", "for", "op", "in", "all_binary", "(", "prec", ",", "restricted_range", ",", "itr", ")", ":", "for", "rounding", "in", "RoundModes", ":", "c", "=", "randcontext", "(", "exprange", ")", "quantizeop", "=", "(", "op", "[", "0", "]", ",", "op", "[", "1", "]", ",", "rounding", ",", "c", ")", "t", "=", "TestSet", "(", "method", ",", "quantizeop", ")", "try", ":", "if", "(", "not", "convert", "(", "t", ")", ")", ":", "continue", "callfuncs", "(", "t", ")", "verify", "(", "t", ",", "stat", ")", "except", "VerifyError", "as", "err", ":", "log", "(", "err", ")" ]
iterate the quantize method through many test cases .
train
false
45,431
def _get_flags(**kwargs): flags = kwargs.get('flags', __salt__['config.option']('service.flags', default='')) return flags
[ "def", "_get_flags", "(", "**", "kwargs", ")", ":", "flags", "=", "kwargs", ".", "get", "(", "'flags'", ",", "__salt__", "[", "'config.option'", "]", "(", "'service.flags'", ",", "default", "=", "''", ")", ")", "return", "flags" ]
return an integer appropriate for use as a flag for the re module from a list of human-readable strings .
train
false
45,433
def chunks_from_arrays(arrays): if (not arrays): return () result = [] dim = 0 def shape(x): try: return x.shape except AttributeError: return (1,) while isinstance(arrays, (list, tuple)): result.append(tuple((shape(deepfirst(a))[dim] for a in arrays))) arrays = arrays[0] dim += 1 return tuple(result)
[ "def", "chunks_from_arrays", "(", "arrays", ")", ":", "if", "(", "not", "arrays", ")", ":", "return", "(", ")", "result", "=", "[", "]", "dim", "=", "0", "def", "shape", "(", "x", ")", ":", "try", ":", "return", "x", ".", "shape", "except", "AttributeError", ":", "return", "(", "1", ",", ")", "while", "isinstance", "(", "arrays", ",", "(", "list", ",", "tuple", ")", ")", ":", "result", ".", "append", "(", "tuple", "(", "(", "shape", "(", "deepfirst", "(", "a", ")", ")", "[", "dim", "]", "for", "a", "in", "arrays", ")", ")", ")", "arrays", "=", "arrays", "[", "0", "]", "dim", "+=", "1", "return", "tuple", "(", "result", ")" ]
chunks tuple from nested list of arrays .
train
false
45,435
def update_firmware(filename, host=None, admin_username=None, admin_password=None): if os.path.exists(filename): return _update_firmware('update -f {0}'.format(filename), host=None, admin_username=None, admin_password=None) else: raise CommandExecutionError('Unable to find firmware file {0}'.format(filename))
[ "def", "update_firmware", "(", "filename", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "return", "_update_firmware", "(", "'update -f {0}'", ".", "format", "(", "filename", ")", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", "else", ":", "raise", "CommandExecutionError", "(", "'Unable to find firmware file {0}'", ".", "format", "(", "filename", ")", ")" ]
updates firmware using local firmware file .
train
true
45,436
def _get_textbox(text, renderer): projected_xs = [] projected_ys = [] theta = np.deg2rad(text.get_rotation()) tr = mtransforms.Affine2D().rotate((- theta)) (_, parts, d) = text._get_layout(renderer) for (t, wh, x, y) in parts: (w, h) = wh (xt1, yt1) = tr.transform_point((x, y)) yt1 -= d (xt2, yt2) = ((xt1 + w), (yt1 + h)) projected_xs.extend([xt1, xt2]) projected_ys.extend([yt1, yt2]) (xt_box, yt_box) = (min(projected_xs), min(projected_ys)) (w_box, h_box) = ((max(projected_xs) - xt_box), (max(projected_ys) - yt_box)) tr = mtransforms.Affine2D().rotate(theta) (x_box, y_box) = tr.transform_point((xt_box, yt_box)) return (x_box, y_box, w_box, h_box)
[ "def", "_get_textbox", "(", "text", ",", "renderer", ")", ":", "projected_xs", "=", "[", "]", "projected_ys", "=", "[", "]", "theta", "=", "np", ".", "deg2rad", "(", "text", ".", "get_rotation", "(", ")", ")", "tr", "=", "mtransforms", ".", "Affine2D", "(", ")", ".", "rotate", "(", "(", "-", "theta", ")", ")", "(", "_", ",", "parts", ",", "d", ")", "=", "text", ".", "_get_layout", "(", "renderer", ")", "for", "(", "t", ",", "wh", ",", "x", ",", "y", ")", "in", "parts", ":", "(", "w", ",", "h", ")", "=", "wh", "(", "xt1", ",", "yt1", ")", "=", "tr", ".", "transform_point", "(", "(", "x", ",", "y", ")", ")", "yt1", "-=", "d", "(", "xt2", ",", "yt2", ")", "=", "(", "(", "xt1", "+", "w", ")", ",", "(", "yt1", "+", "h", ")", ")", "projected_xs", ".", "extend", "(", "[", "xt1", ",", "xt2", "]", ")", "projected_ys", ".", "extend", "(", "[", "yt1", ",", "yt2", "]", ")", "(", "xt_box", ",", "yt_box", ")", "=", "(", "min", "(", "projected_xs", ")", ",", "min", "(", "projected_ys", ")", ")", "(", "w_box", ",", "h_box", ")", "=", "(", "(", "max", "(", "projected_xs", ")", "-", "xt_box", ")", ",", "(", "max", "(", "projected_ys", ")", "-", "yt_box", ")", ")", "tr", "=", "mtransforms", ".", "Affine2D", "(", ")", ".", "rotate", "(", "theta", ")", "(", "x_box", ",", "y_box", ")", "=", "tr", ".", "transform_point", "(", "(", "xt_box", ",", "yt_box", ")", ")", "return", "(", "x_box", ",", "y_box", ",", "w_box", ",", "h_box", ")" ]
calculate the bounding box of the text .
train
false
45,437
def rq_workers_are_running(): if redis_is_running(): queue = get_queue() if len(queue.connection.smembers(Worker.redis_workers_keys)): return True return False
[ "def", "rq_workers_are_running", "(", ")", ":", "if", "redis_is_running", "(", ")", ":", "queue", "=", "get_queue", "(", ")", "if", "len", "(", "queue", ".", "connection", ".", "smembers", "(", "Worker", ".", "redis_workers_keys", ")", ")", ":", "return", "True", "return", "False" ]
checks if there are any rq workers running :returns: true if there are rq workers running .
train
false
45,440
def getTransferredPaths(insides, loop): transferredPaths = [] for insideIndex in xrange((len(insides) - 1), (-1), (-1)): inside = insides[insideIndex] if isPathInsideLoop(loop, inside): transferredPaths.append(inside) del insides[insideIndex] return transferredPaths
[ "def", "getTransferredPaths", "(", "insides", ",", "loop", ")", ":", "transferredPaths", "=", "[", "]", "for", "insideIndex", "in", "xrange", "(", "(", "len", "(", "insides", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "inside", "=", "insides", "[", "insideIndex", "]", "if", "isPathInsideLoop", "(", "loop", ",", "inside", ")", ":", "transferredPaths", ".", "append", "(", "inside", ")", "del", "insides", "[", "insideIndex", "]", "return", "transferredPaths" ]
get transferred paths from inside paths .
train
false
45,444
def get_programs_for_credentials(user, programs_credentials): certified_programs = [] programs = get_programs(user) for program in programs: for credential in programs_credentials: if (program[u'uuid'] == credential[u'credential'][u'program_uuid']): program[u'credential_url'] = credential[u'certificate_url'] certified_programs.append(program) return certified_programs
[ "def", "get_programs_for_credentials", "(", "user", ",", "programs_credentials", ")", ":", "certified_programs", "=", "[", "]", "programs", "=", "get_programs", "(", "user", ")", "for", "program", "in", "programs", ":", "for", "credential", "in", "programs_credentials", ":", "if", "(", "program", "[", "u'uuid'", "]", "==", "credential", "[", "u'credential'", "]", "[", "u'program_uuid'", "]", ")", ":", "program", "[", "u'credential_url'", "]", "=", "credential", "[", "u'certificate_url'", "]", "certified_programs", ".", "append", "(", "program", ")", "return", "certified_programs" ]
given a user and an iterable of credentials .
train
false
45,446
def dup_gcd(f, g, K): return dup_inner_gcd(f, g, K)[0]
[ "def", "dup_gcd", "(", "f", ",", "g", ",", "K", ")", ":", "return", "dup_inner_gcd", "(", "f", ",", "g", ",", "K", ")", "[", "0", "]" ]
computes polynomial gcd of f and g in k[x] .
train
false
45,448
def _authn_context_decl_ref(decl_ref, authn_auth=None): return factory(saml.AuthnContext, authn_context_decl_ref=decl_ref, authenticating_authority=factory(saml.AuthenticatingAuthority, text=authn_auth))
[ "def", "_authn_context_decl_ref", "(", "decl_ref", ",", "authn_auth", "=", "None", ")", ":", "return", "factory", "(", "saml", ".", "AuthnContext", ",", "authn_context_decl_ref", "=", "decl_ref", ",", "authenticating_authority", "=", "factory", "(", "saml", ".", "AuthenticatingAuthority", ",", "text", "=", "authn_auth", ")", ")" ]
construct the authn context with a authn context declaration reference .
train
true
45,449
def to36(q): if (q < 0): raise ValueError('must supply a positive integer') letters = '0123456789abcdefghijklmnopqrstuvwxyz' converted = [] while (q != 0): (q, r) = divmod(q, 36) converted.insert(0, letters[r]) return (''.join(converted) or '0')
[ "def", "to36", "(", "q", ")", ":", "if", "(", "q", "<", "0", ")", ":", "raise", "ValueError", "(", "'must supply a positive integer'", ")", "letters", "=", "'0123456789abcdefghijklmnopqrstuvwxyz'", "converted", "=", "[", "]", "while", "(", "q", "!=", "0", ")", ":", "(", "q", ",", "r", ")", "=", "divmod", "(", "q", ",", "36", ")", "converted", ".", "insert", "(", "0", ",", "letters", "[", "r", "]", ")", "return", "(", "''", ".", "join", "(", "converted", ")", "or", "'0'", ")" ]
converts an integer to base 36 .
train
false
45,450
@hook.command('steamcalc', 'steamdb') def steamcalc(text): user = text.strip().lower() try: data = get_data(user) except SteamError as e: return '{}'.format(e) data['short_url'] = web.try_shorten(data['url']) return '\x02{name}\x02 has \x02{count:,}\x02 games with a total value of \x02{value}\x02 (\x02{value_sales}\x02 during sales). \x02{count_unplayed:,}\x02 (\x02{percent_unplayed}%\x02) have never been played - {short_url}'.format(**data)
[ "@", "hook", ".", "command", "(", "'steamcalc'", ",", "'steamdb'", ")", "def", "steamcalc", "(", "text", ")", ":", "user", "=", "text", ".", "strip", "(", ")", ".", "lower", "(", ")", "try", ":", "data", "=", "get_data", "(", "user", ")", "except", "SteamError", "as", "e", ":", "return", "'{}'", ".", "format", "(", "e", ")", "data", "[", "'short_url'", "]", "=", "web", ".", "try_shorten", "(", "data", "[", "'url'", "]", ")", "return", "'\\x02{name}\\x02 has \\x02{count:,}\\x02 games with a total value of \\x02{value}\\x02 (\\x02{value_sales}\\x02 during sales). \\x02{count_unplayed:,}\\x02 (\\x02{percent_unplayed}%\\x02) have never been played - {short_url}'", ".", "format", "(", "**", "data", ")" ]
steamcalc <username> - gets value of steam account .
train
false
45,451
def show_most_common_types(limit=10, objects=None, shortnames=True): stats = most_common_types(limit, objects, shortnames=shortnames) width = max((len(name) for (name, count) in stats)) for (name, count) in stats: print ('%-*s %i' % (width, name, count))
[ "def", "show_most_common_types", "(", "limit", "=", "10", ",", "objects", "=", "None", ",", "shortnames", "=", "True", ")", ":", "stats", "=", "most_common_types", "(", "limit", ",", "objects", ",", "shortnames", "=", "shortnames", ")", "width", "=", "max", "(", "(", "len", "(", "name", ")", "for", "(", "name", ",", "count", ")", "in", "stats", ")", ")", "for", "(", "name", ",", "count", ")", "in", "stats", ":", "print", "(", "'%-*s %i'", "%", "(", "width", ",", "name", ",", "count", ")", ")" ]
print the table of types of most common instances .
train
false
45,452
def get_answer(aphorism): time.sleep(0.0) return aphorisms.get(aphorism, 'Error: unknown aphorism.')
[ "def", "get_answer", "(", "aphorism", ")", ":", "time", ".", "sleep", "(", "0.0", ")", "return", "aphorisms", ".", "get", "(", "aphorism", ",", "'Error: unknown aphorism.'", ")" ]
return the string response to a particular zen-of-python aphorism .
train
false
45,453
def image_comparison(baseline_images=None, extensions=None, tol=0, freetype_version=None, remove_text=False, savefig_kwarg=None, style=u'classic'): if (baseline_images is None): raise ValueError(u'baseline_images must be specified') if (extensions is None): extensions = [u'png', u'pdf', u'svg'] if (savefig_kwarg is None): savefig_kwarg = dict() return ImageComparisonDecorator(baseline_images=baseline_images, extensions=extensions, tol=tol, freetype_version=freetype_version, remove_text=remove_text, savefig_kwargs=savefig_kwarg, style=style)
[ "def", "image_comparison", "(", "baseline_images", "=", "None", ",", "extensions", "=", "None", ",", "tol", "=", "0", ",", "freetype_version", "=", "None", ",", "remove_text", "=", "False", ",", "savefig_kwarg", "=", "None", ",", "style", "=", "u'classic'", ")", ":", "if", "(", "baseline_images", "is", "None", ")", ":", "raise", "ValueError", "(", "u'baseline_images must be specified'", ")", "if", "(", "extensions", "is", "None", ")", ":", "extensions", "=", "[", "u'png'", ",", "u'pdf'", ",", "u'svg'", "]", "if", "(", "savefig_kwarg", "is", "None", ")", ":", "savefig_kwarg", "=", "dict", "(", ")", "return", "ImageComparisonDecorator", "(", "baseline_images", "=", "baseline_images", ",", "extensions", "=", "extensions", ",", "tol", "=", "tol", ",", "freetype_version", "=", "freetype_version", ",", "remove_text", "=", "remove_text", ",", "savefig_kwargs", "=", "savefig_kwarg", ",", "style", "=", "style", ")" ]
compare images generated by the test with those specified in *baseline_images* .
train
false
45,454
def p_type_name(t): pass
[ "def", "p_type_name", "(", "t", ")", ":", "pass" ]
type_name : specifier_qualifier_list abstract_declarator_opt .
train
false
45,455
def create_option_values(option_values): return _FakeOptionValues(option_values)
[ "def", "create_option_values", "(", "option_values", ")", ":", "return", "_FakeOptionValues", "(", "option_values", ")" ]
create a fake optionvaluecontainer object for testing .
train
false
45,456
def metrop_select(mr, q, q0): if (np.isfinite(mr) and (np.log(uniform()) < mr)): return q else: return q0
[ "def", "metrop_select", "(", "mr", ",", "q", ",", "q0", ")", ":", "if", "(", "np", ".", "isfinite", "(", "mr", ")", "and", "(", "np", ".", "log", "(", "uniform", "(", ")", ")", "<", "mr", ")", ")", ":", "return", "q", "else", ":", "return", "q0" ]
perform rejection/acceptance step for metropolis class samplers .
train
false
45,457
def is_trusted_ip(request): ip_addr = get_real_ip(request) return (ip_addr and any(((ip_addr in trusted_network) for trusted_network in request.app[KEY_TRUSTED_NETWORKS])))
[ "def", "is_trusted_ip", "(", "request", ")", ":", "ip_addr", "=", "get_real_ip", "(", "request", ")", "return", "(", "ip_addr", "and", "any", "(", "(", "(", "ip_addr", "in", "trusted_network", ")", "for", "trusted_network", "in", "request", ".", "app", "[", "KEY_TRUSTED_NETWORKS", "]", ")", ")", ")" ]
test if request is from a trusted ip .
train
false
45,458
def read_raw_edf(input_fname, montage=None, eog=None, misc=None, stim_channel=(-1), annot=None, annotmap=None, exclude=(), preload=False, verbose=None): return RawEDF(input_fname=input_fname, montage=montage, eog=eog, misc=misc, stim_channel=stim_channel, annot=annot, annotmap=annotmap, exclude=exclude, preload=preload, verbose=verbose)
[ "def", "read_raw_edf", "(", "input_fname", ",", "montage", "=", "None", ",", "eog", "=", "None", ",", "misc", "=", "None", ",", "stim_channel", "=", "(", "-", "1", ")", ",", "annot", "=", "None", ",", "annotmap", "=", "None", ",", "exclude", "=", "(", ")", ",", "preload", "=", "False", ",", "verbose", "=", "None", ")", ":", "return", "RawEDF", "(", "input_fname", "=", "input_fname", ",", "montage", "=", "montage", ",", "eog", "=", "eog", ",", "misc", "=", "misc", ",", "stim_channel", "=", "stim_channel", ",", "annot", "=", "annot", ",", "annotmap", "=", "annotmap", ",", "exclude", "=", "exclude", ",", "preload", "=", "preload", ",", "verbose", "=", "verbose", ")" ]
reader function for edf+ .
train
false
45,459
def getNewMouseTool(): return ViewpointRotate()
[ "def", "getNewMouseTool", "(", ")", ":", "return", "ViewpointRotate", "(", ")" ]
get a new mouse tool .
train
false
45,460
def recursive_repr(func): repr_running = set() @wraps(func) def wrapper(self): key = (id(self), get_ident()) if (key in repr_running): return '...' repr_running.add(key) try: return func(self) finally: repr_running.discard(key) return wrapper
[ "def", "recursive_repr", "(", "func", ")", ":", "repr_running", "=", "set", "(", ")", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ")", ":", "key", "=", "(", "id", "(", "self", ")", ",", "get_ident", "(", ")", ")", "if", "(", "key", "in", "repr_running", ")", ":", "return", "'...'", "repr_running", ".", "add", "(", "key", ")", "try", ":", "return", "func", "(", "self", ")", "finally", ":", "repr_running", ".", "discard", "(", "key", ")", "return", "wrapper" ]
decorator to make a repr function return fillvalue for a recursive call .
train
true
45,461
def get_cache(): return requests.Session().cache
[ "def", "get_cache", "(", ")", ":", "return", "requests", ".", "Session", "(", ")", ".", "cache" ]
returns internal cache object .
train
false
45,462
def get_declared_fields(bases, attrs, with_base_fields=True): fields = [(field_name, attrs.pop(field_name)) for (field_name, obj) in list(six.iteritems(attrs)) if isinstance(obj, Field)] fields.sort(key=(lambda x: x[1].creation_counter)) if with_base_fields: for base in bases[::(-1)]: if hasattr(base, u'base_fields'): fields = (list(six.iteritems(base.base_fields)) + fields) else: for base in bases[::(-1)]: if hasattr(base, u'declared_fields'): fields = (list(six.iteritems(base.declared_fields)) + fields) return SortedDict(fields)
[ "def", "get_declared_fields", "(", "bases", ",", "attrs", ",", "with_base_fields", "=", "True", ")", ":", "fields", "=", "[", "(", "field_name", ",", "attrs", ".", "pop", "(", "field_name", ")", ")", "for", "(", "field_name", ",", "obj", ")", "in", "list", "(", "six", ".", "iteritems", "(", "attrs", ")", ")", "if", "isinstance", "(", "obj", ",", "Field", ")", "]", "fields", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", "[", "1", "]", ".", "creation_counter", ")", ")", "if", "with_base_fields", ":", "for", "base", "in", "bases", "[", ":", ":", "(", "-", "1", ")", "]", ":", "if", "hasattr", "(", "base", ",", "u'base_fields'", ")", ":", "fields", "=", "(", "list", "(", "six", ".", "iteritems", "(", "base", ".", "base_fields", ")", ")", "+", "fields", ")", "else", ":", "for", "base", "in", "bases", "[", ":", ":", "(", "-", "1", ")", "]", ":", "if", "hasattr", "(", "base", ",", "u'declared_fields'", ")", ":", "fields", "=", "(", "list", "(", "six", ".", "iteritems", "(", "base", ".", "declared_fields", ")", ")", "+", "fields", ")", "return", "SortedDict", "(", "fields", ")" ]
create a list of form field instances from the passed in attrs .
train
false
45,466
def mac_address(mac, for_item=True): if (not isinstance(mac, basestring)): raise CX('Invalid input, mac must be a string') else: mac = mac.lower().strip() if (for_item is True): if (mac == 'random'): return mac if (not netaddr.valid_mac(mac)): raise CX(('Invalid mac address format (%s)' % mac)) return mac
[ "def", "mac_address", "(", "mac", ",", "for_item", "=", "True", ")", ":", "if", "(", "not", "isinstance", "(", "mac", ",", "basestring", ")", ")", ":", "raise", "CX", "(", "'Invalid input, mac must be a string'", ")", "else", ":", "mac", "=", "mac", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "(", "for_item", "is", "True", ")", ":", "if", "(", "mac", "==", "'random'", ")", ":", "return", "mac", "if", "(", "not", "netaddr", ".", "valid_mac", "(", "mac", ")", ")", ":", "raise", "CX", "(", "(", "'Invalid mac address format (%s)'", "%", "mac", ")", ")", "return", "mac" ]
validate as an eternet mac address .
train
false
45,467
def DrillURL(urlstats): drill = Drill() drill.reqsummary = Summary(urlstats) drill.rpcsummaries = SortedRPCSummaries(urlstats, 0.9) drill.rpc_variation = RPCVariation(drill.reqsummary, drill.rpcsummaries) groupcounts = urlstats.EntityGroupCount() (drill.groupcounts, drill.maxgroupcount) = SplitByKind(groupcounts) entitycounts = urlstats.EntityCount() (drill.entitycounts, drill.maxentitycount) = SplitByKind(entitycounts) drill_json = drill._ToJsonDrill() return drill_json
[ "def", "DrillURL", "(", "urlstats", ")", ":", "drill", "=", "Drill", "(", ")", "drill", ".", "reqsummary", "=", "Summary", "(", "urlstats", ")", "drill", ".", "rpcsummaries", "=", "SortedRPCSummaries", "(", "urlstats", ",", "0.9", ")", "drill", ".", "rpc_variation", "=", "RPCVariation", "(", "drill", ".", "reqsummary", ",", "drill", ".", "rpcsummaries", ")", "groupcounts", "=", "urlstats", ".", "EntityGroupCount", "(", ")", "(", "drill", ".", "groupcounts", ",", "drill", ".", "maxgroupcount", ")", "=", "SplitByKind", "(", "groupcounts", ")", "entitycounts", "=", "urlstats", ".", "EntityCount", "(", ")", "(", "drill", ".", "entitycounts", ",", "drill", ".", "maxentitycount", ")", "=", "SplitByKind", "(", "entitycounts", ")", "drill_json", "=", "drill", ".", "_ToJsonDrill", "(", ")", "return", "drill_json" ]
analyzes url statistics and generates data for drill page .
train
false
45,468
def get_cdrom_attach_config_spec(client_factory, datastore, file_path, controller_key, cdrom_unit_number): config_spec = client_factory.create('ns0:VirtualMachineConfigSpec') device_config_spec = [] virtual_device_config_spec = create_virtual_cdrom_spec(client_factory, datastore, controller_key, file_path, cdrom_unit_number) device_config_spec.append(virtual_device_config_spec) config_spec.deviceChange = device_config_spec return config_spec
[ "def", "get_cdrom_attach_config_spec", "(", "client_factory", ",", "datastore", ",", "file_path", ",", "controller_key", ",", "cdrom_unit_number", ")", ":", "config_spec", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineConfigSpec'", ")", "device_config_spec", "=", "[", "]", "virtual_device_config_spec", "=", "create_virtual_cdrom_spec", "(", "client_factory", ",", "datastore", ",", "controller_key", ",", "file_path", ",", "cdrom_unit_number", ")", "device_config_spec", ".", "append", "(", "virtual_device_config_spec", ")", "config_spec", ".", "deviceChange", "=", "device_config_spec", "return", "config_spec" ]
builds and returns the cdrom attach config spec .
train
false
45,469
def on_failure(exc_info): pass
[ "def", "on_failure", "(", "exc_info", ")", ":", "pass" ]
called if the function fails .
train
false
45,471
def describe_enum_value(enum_value): enum_value_descriptor = EnumValueDescriptor() enum_value_descriptor.name = unicode(enum_value.name) enum_value_descriptor.number = enum_value.number return enum_value_descriptor
[ "def", "describe_enum_value", "(", "enum_value", ")", ":", "enum_value_descriptor", "=", "EnumValueDescriptor", "(", ")", "enum_value_descriptor", ".", "name", "=", "unicode", "(", "enum_value", ".", "name", ")", "enum_value_descriptor", ".", "number", "=", "enum_value", ".", "number", "return", "enum_value_descriptor" ]
build descriptor for enum instance .
train
false
45,472
def score_from_local(directory): results = gym.monitoring.monitor.load_results(directory) if (results is None): return None episode_lengths = results['episode_lengths'] episode_rewards = results['episode_rewards'] episode_types = results['episode_types'] timestamps = results['timestamps'] initial_reset_timestamp = results['initial_reset_timestamp'] spec = gym.spec(results['env_info']['env_id']) return score_from_merged(episode_lengths, episode_rewards, episode_types, timestamps, initial_reset_timestamp, spec.trials, spec.reward_threshold)
[ "def", "score_from_local", "(", "directory", ")", ":", "results", "=", "gym", ".", "monitoring", ".", "monitor", ".", "load_results", "(", "directory", ")", "if", "(", "results", "is", "None", ")", ":", "return", "None", "episode_lengths", "=", "results", "[", "'episode_lengths'", "]", "episode_rewards", "=", "results", "[", "'episode_rewards'", "]", "episode_types", "=", "results", "[", "'episode_types'", "]", "timestamps", "=", "results", "[", "'timestamps'", "]", "initial_reset_timestamp", "=", "results", "[", "'initial_reset_timestamp'", "]", "spec", "=", "gym", ".", "spec", "(", "results", "[", "'env_info'", "]", "[", "'env_id'", "]", ")", "return", "score_from_merged", "(", "episode_lengths", ",", "episode_rewards", ",", "episode_types", ",", "timestamps", ",", "initial_reset_timestamp", ",", "spec", ".", "trials", ",", "spec", ".", "reward_threshold", ")" ]
calculate score from a local results directory .
train
false
45,473
def equateCylindrical(point, returnValue): point = evaluate.getVector3ByFloatList(returnValue, point) azimuthComplex = (euclidean.getWiddershinsUnitPolar(math.radians(point.y)) * point.x) point.x = azimuthComplex.real point.y = azimuthComplex.imag
[ "def", "equateCylindrical", "(", "point", ",", "returnValue", ")", ":", "point", "=", "evaluate", ".", "getVector3ByFloatList", "(", "returnValue", ",", "point", ")", "azimuthComplex", "=", "(", "euclidean", ".", "getWiddershinsUnitPolar", "(", "math", ".", "radians", "(", "point", ".", "y", ")", ")", "*", "point", ".", "x", ")", "point", ".", "x", "=", "azimuthComplex", ".", "real", "point", ".", "y", "=", "azimuthComplex", ".", "imag" ]
get equation for cylindrical .
train
false
45,474
def from_class_path(cls_path): (module_path, cls_name) = cls_path.split(':') module = __import__(module_path, fromlist=[cls_name]) return getattr(module, cls_name)
[ "def", "from_class_path", "(", "cls_path", ")", ":", "(", "module_path", ",", "cls_name", ")", "=", "cls_path", ".", "split", "(", "':'", ")", "module", "=", "__import__", "(", "module_path", ",", "fromlist", "=", "[", "cls_name", "]", ")", "return", "getattr", "(", "module", ",", "cls_name", ")" ]
returns the class takes a class path and returns the class for it .
train
false
45,475
def make_message_identifier(command_id): return (MESSAGE_START + (command_id,))
[ "def", "make_message_identifier", "(", "command_id", ")", ":", "return", "(", "MESSAGE_START", "+", "(", "command_id", ",", ")", ")" ]
return the unique initial part of the sysex message without any arguments .
train
false
45,476
def make_group_frame(master, name=None, label=None, fill=Y, side=None, expand=None, font=None): font = (font or '-*-helvetica-medium-r-normal-*-*-100-*-*-*-*-*-*') outer = Frame(master, borderwidth=2, relief=GROOVE) outer.pack(expand=expand, fill=fill, side=side) if label: Label(outer, text=label, font=font, anchor=W).pack(fill=X) inner = Frame(master, borderwidth='1m', name=name) inner.pack(expand=1, fill=BOTH, in_=outer) inner.forget = outer.forget return inner
[ "def", "make_group_frame", "(", "master", ",", "name", "=", "None", ",", "label", "=", "None", ",", "fill", "=", "Y", ",", "side", "=", "None", ",", "expand", "=", "None", ",", "font", "=", "None", ")", ":", "font", "=", "(", "font", "or", "'-*-helvetica-medium-r-normal-*-*-100-*-*-*-*-*-*'", ")", "outer", "=", "Frame", "(", "master", ",", "borderwidth", "=", "2", ",", "relief", "=", "GROOVE", ")", "outer", ".", "pack", "(", "expand", "=", "expand", ",", "fill", "=", "fill", ",", "side", "=", "side", ")", "if", "label", ":", "Label", "(", "outer", ",", "text", "=", "label", ",", "font", "=", "font", ",", "anchor", "=", "W", ")", ".", "pack", "(", "fill", "=", "X", ")", "inner", "=", "Frame", "(", "master", ",", "borderwidth", "=", "'1m'", ",", "name", "=", "name", ")", "inner", ".", "pack", "(", "expand", "=", "1", ",", "fill", "=", "BOTH", ",", "in_", "=", "outer", ")", "inner", ".", "forget", "=", "outer", ".", "forget", "return", "inner" ]
create nested frames with a border and optional label .
train
false
45,477
def _reverse_and_conj(x): reverse = ([slice(None, None, (-1))] * x.ndim) return x[reverse].conj()
[ "def", "_reverse_and_conj", "(", "x", ")", ":", "reverse", "=", "(", "[", "slice", "(", "None", ",", "None", ",", "(", "-", "1", ")", ")", "]", "*", "x", ".", "ndim", ")", "return", "x", "[", "reverse", "]", ".", "conj", "(", ")" ]
reverse array x in all dimensions and perform the complex conjugate .
train
false
45,478
@receiver(SUBSECTION_SCORE_CHANGED) def recalculate_course_grade(sender, course, course_structure, user, **kwargs): CourseGradeFactory().update(user, course, course_structure)
[ "@", "receiver", "(", "SUBSECTION_SCORE_CHANGED", ")", "def", "recalculate_course_grade", "(", "sender", ",", "course", ",", "course_structure", ",", "user", ",", "**", "kwargs", ")", ":", "CourseGradeFactory", "(", ")", ".", "update", "(", "user", ",", "course", ",", "course_structure", ")" ]
updates a saved course grade .
train
false
45,481
def test_import_vispy_app2(): allmodnames = loaded_vispy_modules('vispy.app', 2, True) assert_not_in('PySide', allmodnames) assert_not_in('PyQt4', allmodnames) assert_not_in('pyglet', allmodnames)
[ "def", "test_import_vispy_app2", "(", ")", ":", "allmodnames", "=", "loaded_vispy_modules", "(", "'vispy.app'", ",", "2", ",", "True", ")", "assert_not_in", "(", "'PySide'", ",", "allmodnames", ")", "assert_not_in", "(", "'PyQt4'", ",", "allmodnames", ")", "assert_not_in", "(", "'pyglet'", ",", "allmodnames", ")" ]
importing vispy .
train
false
45,482
@pytest.mark.parametrize(u'testframe', totest_frames) def test_icrs_altaz_moonish(testframe): (earth_pv_helio, earth_pv_bary) = epv00(*get_jd12(testframe.obstime, u'tdb')) earth_icrs_xyz = (earth_pv_bary[0] * u.au) moonoffset = ([0, 0, MOONDIST.value] * MOONDIST.unit) moonish_icrs = ICRS(CartesianRepresentation((earth_icrs_xyz + moonoffset))) moonaa = moonish_icrs.transform_to(testframe) assert ((1000 * u.km) < np.abs((moonaa.distance - MOONDIST)).to(u.au) < (7000 * u.km))
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'testframe'", ",", "totest_frames", ")", "def", "test_icrs_altaz_moonish", "(", "testframe", ")", ":", "(", "earth_pv_helio", ",", "earth_pv_bary", ")", "=", "epv00", "(", "*", "get_jd12", "(", "testframe", ".", "obstime", ",", "u'tdb'", ")", ")", "earth_icrs_xyz", "=", "(", "earth_pv_bary", "[", "0", "]", "*", "u", ".", "au", ")", "moonoffset", "=", "(", "[", "0", ",", "0", ",", "MOONDIST", ".", "value", "]", "*", "MOONDIST", ".", "unit", ")", "moonish_icrs", "=", "ICRS", "(", "CartesianRepresentation", "(", "(", "earth_icrs_xyz", "+", "moonoffset", ")", ")", ")", "moonaa", "=", "moonish_icrs", ".", "transform_to", "(", "testframe", ")", "assert", "(", "(", "1000", "*", "u", ".", "km", ")", "<", "np", ".", "abs", "(", "(", "moonaa", ".", "distance", "-", "MOONDIST", ")", ")", ".", "to", "(", "u", ".", "au", ")", "<", "(", "7000", "*", "u", ".", "km", ")", ")" ]
check that something expressed in *icrs* as being moon-like goes to the right altaz distance .
train
false
45,483
def comp_matrix(coefs): (p, k, k2) = coefs.shape assert (k == k2) kp = (k * p) result = np.zeros((kp, kp)) result[:k] = np.concatenate(coefs, axis=1) if (p > 1): result[(np.arange(k, kp), np.arange((kp - k)))] = 1 return result
[ "def", "comp_matrix", "(", "coefs", ")", ":", "(", "p", ",", "k", ",", "k2", ")", "=", "coefs", ".", "shape", "assert", "(", "k", "==", "k2", ")", "kp", "=", "(", "k", "*", "p", ")", "result", "=", "np", ".", "zeros", "(", "(", "kp", ",", "kp", ")", ")", "result", "[", ":", "k", "]", "=", "np", ".", "concatenate", "(", "coefs", ",", "axis", "=", "1", ")", "if", "(", "p", ">", "1", ")", ":", "result", "[", "(", "np", ".", "arange", "(", "k", ",", "kp", ")", ",", "np", ".", "arange", "(", "(", "kp", "-", "k", ")", ")", ")", "]", "=", "1", "return", "result" ]
return compansion matrix for the var(1) representation for a var(p) process a = [a_1 a_2 .
train
false
45,484
def p_type_qualifier_list_1(t): pass
[ "def", "p_type_qualifier_list_1", "(", "t", ")", ":", "pass" ]
type_qualifier_list : type_qualifier .
train
false
45,487
def refusing_port(): (server_socket, port) = bind_unused_port() server_socket.setblocking(1) client_socket = socket.socket() client_socket.connect(('127.0.0.1', port)) (conn, client_addr) = server_socket.accept() conn.close() server_socket.close() return (client_socket.close, client_addr[1])
[ "def", "refusing_port", "(", ")", ":", "(", "server_socket", ",", "port", ")", "=", "bind_unused_port", "(", ")", "server_socket", ".", "setblocking", "(", "1", ")", "client_socket", "=", "socket", ".", "socket", "(", ")", "client_socket", ".", "connect", "(", "(", "'127.0.0.1'", ",", "port", ")", ")", "(", "conn", ",", "client_addr", ")", "=", "server_socket", ".", "accept", "(", ")", "conn", ".", "close", "(", ")", "server_socket", ".", "close", "(", ")", "return", "(", "client_socket", ".", "close", ",", "client_addr", "[", "1", "]", ")" ]
returns a local port number that will refuse all connections .
train
false
45,488
@decorators.api_view(['GET']) @decorators.permission_classes((permissions.AllowAny,)) @decorators.renderer_classes((JSONRenderer,)) def docurl(request): project = request.GET.get('project') version = request.GET.get('version', LATEST) doc = request.GET.get('doc', 'index') if (project is None): return Response({'error': 'Need project and doc'}, status=status.HTTP_400_BAD_REQUEST) project = get_object_or_404(Project, slug=project) version = get_object_or_404(Version.objects.public(request.user, project=project, only_active=False), slug=version) return Response({'url': make_document_url(project=project, version=version.slug, page=doc)})
[ "@", "decorators", ".", "api_view", "(", "[", "'GET'", "]", ")", "@", "decorators", ".", "permission_classes", "(", "(", "permissions", ".", "AllowAny", ",", ")", ")", "@", "decorators", ".", "renderer_classes", "(", "(", "JSONRenderer", ",", ")", ")", "def", "docurl", "(", "request", ")", ":", "project", "=", "request", ".", "GET", ".", "get", "(", "'project'", ")", "version", "=", "request", ".", "GET", ".", "get", "(", "'version'", ",", "LATEST", ")", "doc", "=", "request", ".", "GET", ".", "get", "(", "'doc'", ",", "'index'", ")", "if", "(", "project", "is", "None", ")", ":", "return", "Response", "(", "{", "'error'", ":", "'Need project and doc'", "}", ",", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ")", "project", "=", "get_object_or_404", "(", "Project", ",", "slug", "=", "project", ")", "version", "=", "get_object_or_404", "(", "Version", ".", "objects", ".", "public", "(", "request", ".", "user", ",", "project", "=", "project", ",", "only_active", "=", "False", ")", ",", "slug", "=", "version", ")", "return", "Response", "(", "{", "'url'", ":", "make_document_url", "(", "project", "=", "project", ",", "version", "=", "version", ".", "slug", ",", "page", "=", "doc", ")", "}", ")" ]
get the url that a slug resolves to .
train
false
45,489
def test_loads_sum_steps(): world.ran = False @before.each_step def assert_is_fine(step): world.ran = True runner = Runner(join(abspath(dirname(__file__)), 'simple_features', '2nd_feature_dir'), verbosity=0) runner.run() assert world.ran
[ "def", "test_loads_sum_steps", "(", ")", ":", "world", ".", "ran", "=", "False", "@", "before", ".", "each_step", "def", "assert_is_fine", "(", "step", ")", ":", "world", ".", "ran", "=", "True", "runner", "=", "Runner", "(", "join", "(", "abspath", "(", "dirname", "(", "__file__", ")", ")", ",", "'simple_features'", ",", "'2nd_feature_dir'", ")", ",", "verbosity", "=", "0", ")", "runner", ".", "run", "(", ")", "assert", "world", ".", "ran" ]
can load step definitions from step_definitions folder .
train
false
45,490
def create_secret_key(): import random import string secret_key = list(((string.letters + string.digits) + string.punctuation).replace('\\', '').replace("'", '"').replace('{', '_').replace('}', '-')) random.shuffle(secret_key) secret_key = ''.join(secret_key[:40]) return secret_key
[ "def", "create_secret_key", "(", ")", ":", "import", "random", "import", "string", "secret_key", "=", "list", "(", "(", "(", "string", ".", "letters", "+", "string", ".", "digits", ")", "+", "string", ".", "punctuation", ")", ".", "replace", "(", "'\\\\'", ",", "''", ")", ".", "replace", "(", "\"'\"", ",", "'\"'", ")", ".", "replace", "(", "'{'", ",", "'_'", ")", ".", "replace", "(", "'}'", ",", "'-'", ")", ")", "random", ".", "shuffle", "(", "secret_key", ")", "secret_key", "=", "''", ".", "join", "(", "secret_key", "[", ":", "40", "]", ")", "return", "secret_key" ]
randomly create the secret key for the settings file .
train
false
45,492
def getMinimumRadius(beginComplexSegmentLength, endComplexSegmentLength, radius): return min(abs(radius), (0.5 * min(beginComplexSegmentLength, endComplexSegmentLength)))
[ "def", "getMinimumRadius", "(", "beginComplexSegmentLength", ",", "endComplexSegmentLength", ",", "radius", ")", ":", "return", "min", "(", "abs", "(", "radius", ")", ",", "(", "0.5", "*", "min", "(", "beginComplexSegmentLength", ",", "endComplexSegmentLength", ")", ")", ")" ]
get minimum radius .
train
false
45,495
def create_zipfile(directory, files, zipname='test'): zipfile_path = ((directory / zipname) + '.zip') with zipfile.ZipFile(str(zipfile_path), 'w') as new_zipfile: for file_path in files: new_zipfile.write(str(file_path), arcname=os.path.basename(str(file_path))) return str(zipfile_path)
[ "def", "create_zipfile", "(", "directory", ",", "files", ",", "zipname", "=", "'test'", ")", ":", "zipfile_path", "=", "(", "(", "directory", "/", "zipname", ")", "+", "'.zip'", ")", "with", "zipfile", ".", "ZipFile", "(", "str", "(", "zipfile_path", ")", ",", "'w'", ")", "as", "new_zipfile", ":", "for", "file_path", "in", "files", ":", "new_zipfile", ".", "write", "(", "str", "(", "file_path", ")", ",", "arcname", "=", "os", ".", "path", ".", "basename", "(", "str", "(", "file_path", ")", ")", ")", "return", "str", "(", "zipfile_path", ")" ]
return a path to a newly created zip file .
train
false
45,496
def _load_properties(property_name, config_option, set_default=False, default=None): if (not property_name): log.debug('No property specified in function, trying to load from salt configuration') try: options = __salt__['config.option']('cassandra') except BaseException as e: log.error('Failed to get cassandra config options. Reason: {0}'.format(str(e))) raise loaded_property = options.get(config_option) if (not loaded_property): if set_default: log.debug('Setting default Cassandra {0} to {1}'.format(config_option, default)) loaded_property = default else: log.error('No cassandra {0} specified in the configuration or passed to the module.'.format(config_option)) raise CommandExecutionError('ERROR: Cassandra {0} cannot be empty.'.format(config_option)) return loaded_property return property_name
[ "def", "_load_properties", "(", "property_name", ",", "config_option", ",", "set_default", "=", "False", ",", "default", "=", "None", ")", ":", "if", "(", "not", "property_name", ")", ":", "log", ".", "debug", "(", "'No property specified in function, trying to load from salt configuration'", ")", "try", ":", "options", "=", "__salt__", "[", "'config.option'", "]", "(", "'cassandra'", ")", "except", "BaseException", "as", "e", ":", "log", ".", "error", "(", "'Failed to get cassandra config options. Reason: {0}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "raise", "loaded_property", "=", "options", ".", "get", "(", "config_option", ")", "if", "(", "not", "loaded_property", ")", ":", "if", "set_default", ":", "log", ".", "debug", "(", "'Setting default Cassandra {0} to {1}'", ".", "format", "(", "config_option", ",", "default", ")", ")", "loaded_property", "=", "default", "else", ":", "log", ".", "error", "(", "'No cassandra {0} specified in the configuration or passed to the module.'", ".", "format", "(", "config_option", ")", ")", "raise", "CommandExecutionError", "(", "'ERROR: Cassandra {0} cannot be empty.'", ".", "format", "(", "config_option", ")", ")", "return", "loaded_property", "return", "property_name" ]
load properties for the cassandra module from config or pillar .
train
true
45,497
def linear_congruence(a, b, m): from sympy.polys.polytools import gcdex if ((a % m) == 0): if ((b % m) == 0): return list(range(m)) else: return [] (r, _, g) = gcdex(a, m) if ((b % g) != 0): return [] return [((((r * b) // g) + ((t * m) // g)) % m) for t in range(g)]
[ "def", "linear_congruence", "(", "a", ",", "b", ",", "m", ")", ":", "from", "sympy", ".", "polys", ".", "polytools", "import", "gcdex", "if", "(", "(", "a", "%", "m", ")", "==", "0", ")", ":", "if", "(", "(", "b", "%", "m", ")", "==", "0", ")", ":", "return", "list", "(", "range", "(", "m", ")", ")", "else", ":", "return", "[", "]", "(", "r", ",", "_", ",", "g", ")", "=", "gcdex", "(", "a", ",", "m", ")", "if", "(", "(", "b", "%", "g", ")", "!=", "0", ")", ":", "return", "[", "]", "return", "[", "(", "(", "(", "(", "r", "*", "b", ")", "//", "g", ")", "+", "(", "(", "t", "*", "m", ")", "//", "g", ")", ")", "%", "m", ")", "for", "t", "in", "range", "(", "g", ")", "]" ]
returns the values of x satisfying a*x congruent b mod(m) here m is positive integer and a .
train
false
45,500
def serve_download(path, name=None): return serve_file(path, 'application/x-download', 'attachment', name)
[ "def", "serve_download", "(", "path", ",", "name", "=", "None", ")", ":", "return", "serve_file", "(", "path", ",", "'application/x-download'", ",", "'attachment'", ",", "name", ")" ]
serve path as an application/x-download attachment .
train
false
45,501
def quote(string, safe=u'/'): if (sys.version_info.major < 3): if isinstance(string, unicode): string = string.encode(u'utf8') string = urllib.quote(string, safe.encode(u'utf8')) else: string = urllib.parse.quote(str(string), safe) return string
[ "def", "quote", "(", "string", ",", "safe", "=", "u'/'", ")", ":", "if", "(", "sys", ".", "version_info", ".", "major", "<", "3", ")", ":", "if", "isinstance", "(", "string", ",", "unicode", ")", ":", "string", "=", "string", ".", "encode", "(", "u'utf8'", ")", "string", "=", "urllib", ".", "quote", "(", "string", ",", "safe", ".", "encode", "(", "u'utf8'", ")", ")", "else", ":", "string", "=", "urllib", ".", "parse", ".", "quote", "(", "str", "(", "string", ")", ",", "safe", ")", "return", "string" ]
returns quoted po term string .
train
false
45,502
def getchunks(im, **params): class collector(object, ): data = [] def write(self, data): pass def append(self, chunk): self.data.append(chunk) def append(fp, cid, *data): data = ''.join(data) (hi, lo) = Image.core.crc32(data, Image.core.crc32(cid)) crc = (o16(hi) + o16(lo)) fp.append((cid, data, crc)) fp = collector() try: im.encoderinfo = params _save(im, fp, None, append) finally: del im.encoderinfo return fp.data
[ "def", "getchunks", "(", "im", ",", "**", "params", ")", ":", "class", "collector", "(", "object", ",", ")", ":", "data", "=", "[", "]", "def", "write", "(", "self", ",", "data", ")", ":", "pass", "def", "append", "(", "self", ",", "chunk", ")", ":", "self", ".", "data", ".", "append", "(", "chunk", ")", "def", "append", "(", "fp", ",", "cid", ",", "*", "data", ")", ":", "data", "=", "''", ".", "join", "(", "data", ")", "(", "hi", ",", "lo", ")", "=", "Image", ".", "core", ".", "crc32", "(", "data", ",", "Image", ".", "core", ".", "crc32", "(", "cid", ")", ")", "crc", "=", "(", "o16", "(", "hi", ")", "+", "o16", "(", "lo", ")", ")", "fp", ".", "append", "(", "(", "cid", ",", "data", ",", "crc", ")", ")", "fp", "=", "collector", "(", ")", "try", ":", "im", ".", "encoderinfo", "=", "params", "_save", "(", "im", ",", "fp", ",", "None", ",", "append", ")", "finally", ":", "del", "im", ".", "encoderinfo", "return", "fp", ".", "data" ]
return a list of png chunks representing this image .
train
false
45,503
def search_ctr(start_date, end_date): ctr = {} request = _build_request() date = start_date while (date <= end_date): date_str = str(date) metric_name = 'ga:goal11ConversionRate' ctr_str = request.get(ids=('ga:' + profile_id), start_date=date_str, end_date=date_str, metrics=metric_name).execute()['rows'][0][0] ctr[date_str] = float(ctr_str) date += timedelta(days=1) return ctr
[ "def", "search_ctr", "(", "start_date", ",", "end_date", ")", ":", "ctr", "=", "{", "}", "request", "=", "_build_request", "(", ")", "date", "=", "start_date", "while", "(", "date", "<=", "end_date", ")", ":", "date_str", "=", "str", "(", "date", ")", "metric_name", "=", "'ga:goal11ConversionRate'", "ctr_str", "=", "request", ".", "get", "(", "ids", "=", "(", "'ga:'", "+", "profile_id", ")", ",", "start_date", "=", "date_str", ",", "end_date", "=", "date_str", ",", "metrics", "=", "metric_name", ")", ".", "execute", "(", ")", "[", "'rows'", "]", "[", "0", "]", "[", "0", "]", "ctr", "[", "date_str", "]", "=", "float", "(", "ctr_str", ")", "date", "+=", "timedelta", "(", "days", "=", "1", ")", "return", "ctr" ]
return search click through rate based on goal 11 in google analytics .
train
false
45,504
def _parse_task_stderr(lines): task_error = None stack_trace_start_line = None for (line_num, line) in enumerate(lines): line = line.rstrip('\r\n') if _SUBPROCESS_FAILED_STACK_TRACE_START.match(line): stack_trace_start_line = line_num continue if (stack_trace_start_line is not None): if (line.lstrip() != line): continue else: stack_trace_start_line = None if _TASK_STDERR_IGNORE_RE.match(line): if (task_error and ('num_lines' not in task_error)): task_error['num_lines'] = (line_num - task_error['start_line']) continue elif ((not task_error) or line.startswith('+ ')): task_error = dict(message=line, start_line=line_num) else: task_error['message'] += ('\n' + line) if task_error: if ('num_lines' not in task_error): end_line = (stack_trace_start_line or (line_num + 1)) task_error['num_lines'] = (end_line - task_error['start_line']) return task_error else: return None
[ "def", "_parse_task_stderr", "(", "lines", ")", ":", "task_error", "=", "None", "stack_trace_start_line", "=", "None", "for", "(", "line_num", ",", "line", ")", "in", "enumerate", "(", "lines", ")", ":", "line", "=", "line", ".", "rstrip", "(", "'\\r\\n'", ")", "if", "_SUBPROCESS_FAILED_STACK_TRACE_START", ".", "match", "(", "line", ")", ":", "stack_trace_start_line", "=", "line_num", "continue", "if", "(", "stack_trace_start_line", "is", "not", "None", ")", ":", "if", "(", "line", ".", "lstrip", "(", ")", "!=", "line", ")", ":", "continue", "else", ":", "stack_trace_start_line", "=", "None", "if", "_TASK_STDERR_IGNORE_RE", ".", "match", "(", "line", ")", ":", "if", "(", "task_error", "and", "(", "'num_lines'", "not", "in", "task_error", ")", ")", ":", "task_error", "[", "'num_lines'", "]", "=", "(", "line_num", "-", "task_error", "[", "'start_line'", "]", ")", "continue", "elif", "(", "(", "not", "task_error", ")", "or", "line", ".", "startswith", "(", "'+ '", ")", ")", ":", "task_error", "=", "dict", "(", "message", "=", "line", ",", "start_line", "=", "line_num", ")", "else", ":", "task_error", "[", "'message'", "]", "+=", "(", "'\\n'", "+", "line", ")", "if", "task_error", ":", "if", "(", "'num_lines'", "not", "in", "task_error", ")", ":", "end_line", "=", "(", "stack_trace_start_line", "or", "(", "line_num", "+", "1", ")", ")", "task_error", "[", "'num_lines'", "]", "=", "(", "end_line", "-", "task_error", "[", "'start_line'", "]", ")", "return", "task_error", "else", ":", "return", "None" ]
attempt to explain any error in task stderr .
train
false
45,505
def get_link_suffix(symlink): with settings(warn_only=True): if (not exists(('~/%s' % symlink))): return None target = run(('readlink ~/%s' % symlink)) if (target.return_code != 0): return None suffix = get_file_suffix(symlink, target) assert (suffix is not None), ('Could not determine suffix from filename %s' % target) return suffix
[ "def", "get_link_suffix", "(", "symlink", ")", ":", "with", "settings", "(", "warn_only", "=", "True", ")", ":", "if", "(", "not", "exists", "(", "(", "'~/%s'", "%", "symlink", ")", ")", ")", ":", "return", "None", "target", "=", "run", "(", "(", "'readlink ~/%s'", "%", "symlink", ")", ")", "if", "(", "target", ".", "return_code", "!=", "0", ")", ":", "return", "None", "suffix", "=", "get_file_suffix", "(", "symlink", ",", "target", ")", "assert", "(", "suffix", "is", "not", "None", ")", ",", "(", "'Could not determine suffix from filename %s'", "%", "target", ")", "return", "suffix" ]
follow symlink in ~/ and determine the suffix for the target (of the form <symlink> .
train
false
45,508
def decode_file_iter(file_obj, codec_options=DEFAULT_CODEC_OPTIONS): while True: size_data = file_obj.read(4) if (len(size_data) == 0): break elif (len(size_data) != 4): raise InvalidBSON('cut off in middle of objsize') obj_size = (_UNPACK_INT(size_data)[0] - 4) elements = (size_data + file_obj.read(obj_size)) (yield _bson_to_dict(elements, codec_options))
[ "def", "decode_file_iter", "(", "file_obj", ",", "codec_options", "=", "DEFAULT_CODEC_OPTIONS", ")", ":", "while", "True", ":", "size_data", "=", "file_obj", ".", "read", "(", "4", ")", "if", "(", "len", "(", "size_data", ")", "==", "0", ")", ":", "break", "elif", "(", "len", "(", "size_data", ")", "!=", "4", ")", ":", "raise", "InvalidBSON", "(", "'cut off in middle of objsize'", ")", "obj_size", "=", "(", "_UNPACK_INT", "(", "size_data", ")", "[", "0", "]", "-", "4", ")", "elements", "=", "(", "size_data", "+", "file_obj", ".", "read", "(", "obj_size", ")", ")", "(", "yield", "_bson_to_dict", "(", "elements", ",", "codec_options", ")", ")" ]
decode bson data from a file to multiple documents as a generator .
train
true
45,509
def hashable(a): if isinstance(a, dict): return hashable(a.items()) try: return tuple(map(hashable, a)) except: return a
[ "def", "hashable", "(", "a", ")", ":", "if", "isinstance", "(", "a", ",", "dict", ")", ":", "return", "hashable", "(", "a", ".", "items", "(", ")", ")", "try", ":", "return", "tuple", "(", "map", "(", "hashable", ",", "a", ")", ")", "except", ":", "return", "a" ]
turn some unhashable objects into hashable ones .
train
false
45,510
def container_configuration_response(application, node): result = {'node_uuid': unicode(node), 'name': application.name} result.update(ApplicationMarshaller(application).convert()) if (u'volume' in result): volume = result.pop(u'volume') result[u'volumes'] = [{u'dataset_id': volume[u'dataset_id'], u'mountpoint': volume[u'mountpoint']}] if (application.cpu_shares is not None): result['cpu_shares'] = application.cpu_shares if (application.memory_limit is not None): result['memory_limit'] = application.memory_limit if (application.command_line is not None): result['command_line'] = list(application.command_line) return result
[ "def", "container_configuration_response", "(", "application", ",", "node", ")", ":", "result", "=", "{", "'node_uuid'", ":", "unicode", "(", "node", ")", ",", "'name'", ":", "application", ".", "name", "}", "result", ".", "update", "(", "ApplicationMarshaller", "(", "application", ")", ".", "convert", "(", ")", ")", "if", "(", "u'volume'", "in", "result", ")", ":", "volume", "=", "result", ".", "pop", "(", "u'volume'", ")", "result", "[", "u'volumes'", "]", "=", "[", "{", "u'dataset_id'", ":", "volume", "[", "u'dataset_id'", "]", ",", "u'mountpoint'", ":", "volume", "[", "u'mountpoint'", "]", "}", "]", "if", "(", "application", ".", "cpu_shares", "is", "not", "None", ")", ":", "result", "[", "'cpu_shares'", "]", "=", "application", ".", "cpu_shares", "if", "(", "application", ".", "memory_limit", "is", "not", "None", ")", ":", "result", "[", "'memory_limit'", "]", "=", "application", ".", "memory_limit", "if", "(", "application", ".", "command_line", "is", "not", "None", ")", ":", "result", "[", "'command_line'", "]", "=", "list", "(", "application", ".", "command_line", ")", "return", "result" ]
return a container dict which confirms to /v1/endpoints .
train
false
45,512
def get_logout_url(*args, **kwargs): return get_client().get_logout_url(*args, **kwargs)
[ "def", "get_logout_url", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "get_client", "(", ")", ".", "get_logout_url", "(", "*", "args", ",", "**", "kwargs", ")" ]
convenience function for getting a logout url for a service .
train
false
45,515
def debug_error_message(msg): action = config.compute_test_value assert (action != 'off') if (action in ['raise', 'ignore']): raise ValueError(msg) else: assert (action == 'warn') warnings.warn(msg, stacklevel=2)
[ "def", "debug_error_message", "(", "msg", ")", ":", "action", "=", "config", ".", "compute_test_value", "assert", "(", "action", "!=", "'off'", ")", "if", "(", "action", "in", "[", "'raise'", ",", "'ignore'", "]", ")", ":", "raise", "ValueError", "(", "msg", ")", "else", ":", "assert", "(", "action", "==", "'warn'", ")", "warnings", ".", "warn", "(", "msg", ",", "stacklevel", "=", "2", ")" ]
displays a message saying that an error was found in some test_values .
train
false
45,516
def CDLMORNINGSTAR(barDs, count, penetration=(-4e+37)): return call_talib_with_ohlc(barDs, count, talib.CDLMORNINGSTAR, penetration)
[ "def", "CDLMORNINGSTAR", "(", "barDs", ",", "count", ",", "penetration", "=", "(", "-", "4e+37", ")", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLMORNINGSTAR", ",", "penetration", ")" ]
morning star .
train
false
45,517
def get_cohort(user, course_key, assign=True, use_cached=False): request_cache = RequestCache.get_request_cache() cache_key = u'cohorts.get_cohort.{}.{}'.format(user.id, course_key) if (use_cached and (cache_key in request_cache.data)): return request_cache.data[cache_key] request_cache.data.pop(cache_key, None) course_cohort_settings = get_course_cohort_settings(course_key) if (not course_cohort_settings.is_cohorted): return request_cache.data.setdefault(cache_key, None) try: membership = CohortMembership.objects.get(course_id=course_key, user_id=user.id) return request_cache.data.setdefault(cache_key, membership.course_user_group) except CohortMembership.DoesNotExist: if (not assign): return None membership = CohortMembership.objects.create(user=user, course_user_group=get_random_cohort(course_key)) return request_cache.data.setdefault(cache_key, membership.course_user_group)
[ "def", "get_cohort", "(", "user", ",", "course_key", ",", "assign", "=", "True", ",", "use_cached", "=", "False", ")", ":", "request_cache", "=", "RequestCache", ".", "get_request_cache", "(", ")", "cache_key", "=", "u'cohorts.get_cohort.{}.{}'", ".", "format", "(", "user", ".", "id", ",", "course_key", ")", "if", "(", "use_cached", "and", "(", "cache_key", "in", "request_cache", ".", "data", ")", ")", ":", "return", "request_cache", ".", "data", "[", "cache_key", "]", "request_cache", ".", "data", ".", "pop", "(", "cache_key", ",", "None", ")", "course_cohort_settings", "=", "get_course_cohort_settings", "(", "course_key", ")", "if", "(", "not", "course_cohort_settings", ".", "is_cohorted", ")", ":", "return", "request_cache", ".", "data", ".", "setdefault", "(", "cache_key", ",", "None", ")", "try", ":", "membership", "=", "CohortMembership", ".", "objects", ".", "get", "(", "course_id", "=", "course_key", ",", "user_id", "=", "user", ".", "id", ")", "return", "request_cache", ".", "data", ".", "setdefault", "(", "cache_key", ",", "membership", ".", "course_user_group", ")", "except", "CohortMembership", ".", "DoesNotExist", ":", "if", "(", "not", "assign", ")", ":", "return", "None", "membership", "=", "CohortMembership", ".", "objects", ".", "create", "(", "user", "=", "user", ",", "course_user_group", "=", "get_random_cohort", "(", "course_key", ")", ")", "return", "request_cache", ".", "data", ".", "setdefault", "(", "cache_key", ",", "membership", ".", "course_user_group", ")" ]
returns the users cohort for the specified course .
train
false
45,518
def apt_get_update(sudo=False): return _from_args(sudo)(['apt-get', 'update'])
[ "def", "apt_get_update", "(", "sudo", "=", "False", ")", ":", "return", "_from_args", "(", "sudo", ")", "(", "[", "'apt-get'", ",", "'update'", "]", ")" ]
update apts package metadata cache .
train
false
45,519
def sqlquote(a): if isinstance(a, list): return _sqllist(a) else: return sqlparam(a).sqlquery()
[ "def", "sqlquote", "(", "a", ")", ":", "if", "isinstance", "(", "a", ",", "list", ")", ":", "return", "_sqllist", "(", "a", ")", "else", ":", "return", "sqlparam", "(", "a", ")", ".", "sqlquery", "(", ")" ]
ensures a is quoted properly for use in a sql query .
train
false
45,521
@with_device def proc_exe(pid): with context.quiet: io = process(['realpath', ('/proc/%d/exe' % pid)]) data = io.recvall().strip() return data
[ "@", "with_device", "def", "proc_exe", "(", "pid", ")", ":", "with", "context", ".", "quiet", ":", "io", "=", "process", "(", "[", "'realpath'", ",", "(", "'/proc/%d/exe'", "%", "pid", ")", "]", ")", "data", "=", "io", ".", "recvall", "(", ")", ".", "strip", "(", ")", "return", "data" ]
returns the full path of the executable for the provided pid .
train
false
45,522
def _get_reason(cluster_or_step): return getattr(getattr(cluster_or_step.status, 'statechangereason', ''), 'message', '').rstrip()
[ "def", "_get_reason", "(", "cluster_or_step", ")", ":", "return", "getattr", "(", "getattr", "(", "cluster_or_step", ".", "status", ",", "'statechangereason'", ",", "''", ")", ",", "'message'", ",", "''", ")", ".", "rstrip", "(", ")" ]
extract statechangereason .
train
false
45,523
def build_from_document(service, base, future=None, http=None, developerKey=None, model=None, requestBuilder=HttpRequest): service = simplejson.loads(service) base = urlparse.urljoin(base, service['basePath']) if future: future = simplejson.loads(future) auth_discovery = future.get('auth', {}) else: future = {} auth_discovery = {} schema = Schemas(service) if (model is None): features = service.get('features', []) model = JsonModel(('dataWrapper' in features)) resource = createResource(http, base, model, requestBuilder, developerKey, service, future, schema) def auth_method(): 'Discovery information about the authentication the API uses.' return auth_discovery setattr(resource, 'auth_discovery', auth_method) return resource
[ "def", "build_from_document", "(", "service", ",", "base", ",", "future", "=", "None", ",", "http", "=", "None", ",", "developerKey", "=", "None", ",", "model", "=", "None", ",", "requestBuilder", "=", "HttpRequest", ")", ":", "service", "=", "simplejson", ".", "loads", "(", "service", ")", "base", "=", "urlparse", ".", "urljoin", "(", "base", ",", "service", "[", "'basePath'", "]", ")", "if", "future", ":", "future", "=", "simplejson", ".", "loads", "(", "future", ")", "auth_discovery", "=", "future", ".", "get", "(", "'auth'", ",", "{", "}", ")", "else", ":", "future", "=", "{", "}", "auth_discovery", "=", "{", "}", "schema", "=", "Schemas", "(", "service", ")", "if", "(", "model", "is", "None", ")", ":", "features", "=", "service", ".", "get", "(", "'features'", ",", "[", "]", ")", "model", "=", "JsonModel", "(", "(", "'dataWrapper'", "in", "features", ")", ")", "resource", "=", "createResource", "(", "http", ",", "base", ",", "model", ",", "requestBuilder", ",", "developerKey", ",", "service", ",", "future", ",", "schema", ")", "def", "auth_method", "(", ")", ":", "return", "auth_discovery", "setattr", "(", "resource", ",", "'auth_discovery'", ",", "auth_method", ")", "return", "resource" ]
create a resource for interacting with an api .
train
false
45,524
def _set_vhostuser_settings(vif, obj): obj.mode = vif['details'].get(model.VIF_DETAILS_VHOSTUSER_MODE, 'server') path = vif['details'].get(model.VIF_DETAILS_VHOSTUSER_SOCKET, None) if path: obj.path = path else: raise exception.VifDetailsMissingVhostuserSockPath(vif_id=vif['id'])
[ "def", "_set_vhostuser_settings", "(", "vif", ",", "obj", ")", ":", "obj", ".", "mode", "=", "vif", "[", "'details'", "]", ".", "get", "(", "model", ".", "VIF_DETAILS_VHOSTUSER_MODE", ",", "'server'", ")", "path", "=", "vif", "[", "'details'", "]", ".", "get", "(", "model", ".", "VIF_DETAILS_VHOSTUSER_SOCKET", ",", "None", ")", "if", "path", ":", "obj", ".", "path", "=", "path", "else", ":", "raise", "exception", ".", "VifDetailsMissingVhostuserSockPath", "(", "vif_id", "=", "vif", "[", "'id'", "]", ")" ]
set vhostuser socket mode and path .
train
false
45,525
def getPillarsOutput(loopLists): pillarsOutput = [] for loopList in loopLists: pillarsOutput.append(getPillarOutput(loopList)) return getUnifiedOutput(pillarsOutput)
[ "def", "getPillarsOutput", "(", "loopLists", ")", ":", "pillarsOutput", "=", "[", "]", "for", "loopList", "in", "loopLists", ":", "pillarsOutput", ".", "append", "(", "getPillarOutput", "(", "loopList", ")", ")", "return", "getUnifiedOutput", "(", "pillarsOutput", ")" ]
get pillars output .
train
false
45,527
def mt_services(request): machine_services = list(MACHINE_TRANSLATION_SERVICES.keys()) return JsonResponse(data=machine_services, safe=False)
[ "def", "mt_services", "(", "request", ")", ":", "machine_services", "=", "list", "(", "MACHINE_TRANSLATION_SERVICES", ".", "keys", "(", ")", ")", "return", "JsonResponse", "(", "data", "=", "machine_services", ",", "safe", "=", "False", ")" ]
generates list of installed machine translation services in json .
train
false
45,528
def _get_distset(tgt): tgtattrs = tgt.split('-') if (tgtattrs[0] == 'amzn'): distset = '--define "dist .{0}1"'.format(tgtattrs[0]) elif (tgtattrs[1] in ['5', '6', '7']): distset = '--define "dist .el{0}"'.format(tgtattrs[1]) else: distset = '' return distset
[ "def", "_get_distset", "(", "tgt", ")", ":", "tgtattrs", "=", "tgt", ".", "split", "(", "'-'", ")", "if", "(", "tgtattrs", "[", "0", "]", "==", "'amzn'", ")", ":", "distset", "=", "'--define \"dist .{0}1\"'", ".", "format", "(", "tgtattrs", "[", "0", "]", ")", "elif", "(", "tgtattrs", "[", "1", "]", "in", "[", "'5'", ",", "'6'", ",", "'7'", "]", ")", ":", "distset", "=", "'--define \"dist .el{0}\"'", ".", "format", "(", "tgtattrs", "[", "1", "]", ")", "else", ":", "distset", "=", "''", "return", "distset" ]
get the distribution string for use with rpmbuild and mock .
train
true
45,529
def _mminion(): global MMINION if (MMINION is None): MMINION = salt.minion.MasterMinion(__opts__) return MMINION
[ "def", "_mminion", "(", ")", ":", "global", "MMINION", "if", "(", "MMINION", "is", "None", ")", ":", "MMINION", "=", "salt", ".", "minion", ".", "MasterMinion", "(", "__opts__", ")", "return", "MMINION" ]
create a single mminion for this module to use .
train
false
45,530
def categorical_sample(prob_n, np_random): prob_n = np.asarray(prob_n) csprob_n = np.cumsum(prob_n) return (csprob_n > np_random.rand()).argmax()
[ "def", "categorical_sample", "(", "prob_n", ",", "np_random", ")", ":", "prob_n", "=", "np", ".", "asarray", "(", "prob_n", ")", "csprob_n", "=", "np", ".", "cumsum", "(", "prob_n", ")", "return", "(", "csprob_n", ">", "np_random", ".", "rand", "(", ")", ")", ".", "argmax", "(", ")" ]
sample from categorical distribution each row specifies class probabilities .
train
false
45,532
def add_null_handler_to_root_logger(): log = logging.getLogger() if (NULL_HANDLER not in log.handlers): log.addHandler(NULL_HANDLER)
[ "def", "add_null_handler_to_root_logger", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", ")", "if", "(", "NULL_HANDLER", "not", "in", "log", ".", "handlers", ")", ":", "log", ".", "addHandler", "(", "NULL_HANDLER", ")" ]
add a nullhandler to the root logger .
train
false
45,533
def monitor_copy_global(sock, orig_name, new_name): return communicate(sock, ('__copy_global__("%s", "%s")' % (orig_name, new_name)))
[ "def", "monitor_copy_global", "(", "sock", ",", "orig_name", ",", "new_name", ")", ":", "return", "communicate", "(", "sock", ",", "(", "'__copy_global__(\"%s\", \"%s\")'", "%", "(", "orig_name", ",", "new_name", ")", ")", ")" ]
copy global variable *orig_name* to *new_name* .
train
false
45,534
def unique_resource_id(delimiter='_'): build_id = os.getenv('TRAVIS_BUILD_ID', os.getenv('CIRCLE_BUILD_NUM', '')) if (build_id == ''): return ('%s%d' % (delimiter, (1000 * time.time()))) else: return ('%s%s%s%d' % (delimiter, build_id, delimiter, time.time()))
[ "def", "unique_resource_id", "(", "delimiter", "=", "'_'", ")", ":", "build_id", "=", "os", ".", "getenv", "(", "'TRAVIS_BUILD_ID'", ",", "os", ".", "getenv", "(", "'CIRCLE_BUILD_NUM'", ",", "''", ")", ")", "if", "(", "build_id", "==", "''", ")", ":", "return", "(", "'%s%d'", "%", "(", "delimiter", ",", "(", "1000", "*", "time", ".", "time", "(", ")", ")", ")", ")", "else", ":", "return", "(", "'%s%s%s%d'", "%", "(", "delimiter", ",", "build_id", ",", "delimiter", ",", "time", ".", "time", "(", ")", ")", ")" ]
a unique identifier for a resource .
train
false
45,535
def dup_to_raw_dict(f, K=None, zero=False): if ((not f) and zero): return {0: K.zero} (n, result) = ((len(f) - 1), {}) for k in range(0, (n + 1)): if f[(n - k)]: result[k] = f[(n - k)] return result
[ "def", "dup_to_raw_dict", "(", "f", ",", "K", "=", "None", ",", "zero", "=", "False", ")", ":", "if", "(", "(", "not", "f", ")", "and", "zero", ")", ":", "return", "{", "0", ":", "K", ".", "zero", "}", "(", "n", ",", "result", ")", "=", "(", "(", "len", "(", "f", ")", "-", "1", ")", ",", "{", "}", ")", "for", "k", "in", "range", "(", "0", ",", "(", "n", "+", "1", ")", ")", ":", "if", "f", "[", "(", "n", "-", "k", ")", "]", ":", "result", "[", "k", "]", "=", "f", "[", "(", "n", "-", "k", ")", "]", "return", "result" ]
convert a k[x] polynomial to a raw dict .
train
false
45,537
def makeGauss(x, mean=0.0, sd=1.0, gain=1.0, base=0.0): simpleGauss = numpy.exp(((- numpy.power((mean - x), 2)) / (2 * (sd ** 2)))) return (base + (gain * simpleGauss))
[ "def", "makeGauss", "(", "x", ",", "mean", "=", "0.0", ",", "sd", "=", "1.0", ",", "gain", "=", "1.0", ",", "base", "=", "0.0", ")", ":", "simpleGauss", "=", "numpy", ".", "exp", "(", "(", "(", "-", "numpy", ".", "power", "(", "(", "mean", "-", "x", ")", ",", "2", ")", ")", "/", "(", "2", "*", "(", "sd", "**", "2", ")", ")", ")", ")", "return", "(", "base", "+", "(", "gain", "*", "simpleGauss", ")", ")" ]
return the gaussian distribution for a given set of x-vals .
train
false
45,538
def libvlc_media_list_player_new(p_instance): f = (_Cfunctions.get('libvlc_media_list_player_new', None) or _Cfunction('libvlc_media_list_player_new', ((1,),), class_result(MediaListPlayer), ctypes.c_void_p, Instance)) return f(p_instance)
[ "def", "libvlc_media_list_player_new", "(", "p_instance", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_new'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_new'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "class_result", "(", "MediaListPlayer", ")", ",", "ctypes", ".", "c_void_p", ",", "Instance", ")", ")", "return", "f", "(", "p_instance", ")" ]
create new media_list_player .
train
true
45,541
def build_treeprocessors(md_instance, **kwargs): treeprocessors = odict.OrderedDict() treeprocessors[u'inline'] = InlineProcessor(md_instance) treeprocessors[u'prettify'] = PrettifyTreeprocessor(md_instance) return treeprocessors
[ "def", "build_treeprocessors", "(", "md_instance", ",", "**", "kwargs", ")", ":", "treeprocessors", "=", "odict", ".", "OrderedDict", "(", ")", "treeprocessors", "[", "u'inline'", "]", "=", "InlineProcessor", "(", "md_instance", ")", "treeprocessors", "[", "u'prettify'", "]", "=", "PrettifyTreeprocessor", "(", "md_instance", ")", "return", "treeprocessors" ]
build the default treeprocessors for markdown .
train
false
45,542
def getScreen(screen): screens = getScreens() if (screen > (len(screens) - 1)): msg = 'Requested refresh rate of screen %i, but only have %i screens.' raise IndexError((msg % (screen, len(screens)))) return getScreens()[screen]
[ "def", "getScreen", "(", "screen", ")", ":", "screens", "=", "getScreens", "(", ")", "if", "(", "screen", ">", "(", "len", "(", "screens", ")", "-", "1", ")", ")", ":", "msg", "=", "'Requested refresh rate of screen %i, but only have %i screens.'", "raise", "IndexError", "(", "(", "msg", "%", "(", "screen", ",", "len", "(", "screens", ")", ")", ")", ")", "return", "getScreens", "(", ")", "[", "screen", "]" ]
select screen from getscreens() .
train
false
45,543
def _get_last_modified(last_modified=None): if (last_modified is None): dt = datetime.datetime.utcnow() else: dt = datetime.datetime.utcfromtimestamp((last_modified / 1000.0)) dt = dt.replace(microsecond=0) return (u'%sZ' % dt.isoformat())
[ "def", "_get_last_modified", "(", "last_modified", "=", "None", ")", ":", "if", "(", "last_modified", "is", "None", ")", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "else", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "(", "last_modified", "/", "1000.0", ")", ")", "dt", "=", "dt", ".", "replace", "(", "microsecond", "=", "0", ")", "return", "(", "u'%sZ'", "%", "dt", ".", "isoformat", "(", ")", ")" ]
formats last modified timestamp of a playlist for mpd .
train
false
45,545
def _validate_rpc_ip(rpc_server_ip): if (not is_valid_ipv4(rpc_server_ip)): raise NetworkControllerError(desc='Invalid rpc ip address.') return rpc_server_ip
[ "def", "_validate_rpc_ip", "(", "rpc_server_ip", ")", ":", "if", "(", "not", "is_valid_ipv4", "(", "rpc_server_ip", ")", ")", ":", "raise", "NetworkControllerError", "(", "desc", "=", "'Invalid rpc ip address.'", ")", "return", "rpc_server_ip" ]
validates given ip for use as rpc host bind address .
train
false
45,549
def loaddata(settings_module, fixtures, bin_env=None, database=None, pythonpath=None, env=None): kwargs = {} if database: kwargs['database'] = database return command(settings_module, 'loaddata', bin_env, pythonpath, env, *fixtures.split(','), **kwargs)
[ "def", "loaddata", "(", "settings_module", ",", "fixtures", ",", "bin_env", "=", "None", ",", "database", "=", "None", ",", "pythonpath", "=", "None", ",", "env", "=", "None", ")", ":", "kwargs", "=", "{", "}", "if", "database", ":", "kwargs", "[", "'database'", "]", "=", "database", "return", "command", "(", "settings_module", ",", "'loaddata'", ",", "bin_env", ",", "pythonpath", ",", "env", ",", "*", "fixtures", ".", "split", "(", "','", ")", ",", "**", "kwargs", ")" ]
load fixture data fixtures: comma separated list of fixtures to load cli example: .
train
false
45,551
def stream_decode_response_unicode(iterator, r): if (r.encoding is None): for item in iterator: (yield item) return decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') for chunk in iterator: rv = decoder.decode(chunk) if rv: (yield rv) rv = decoder.decode('', final=True) if rv: (yield rv)
[ "def", "stream_decode_response_unicode", "(", "iterator", ",", "r", ")", ":", "if", "(", "r", ".", "encoding", "is", "None", ")", ":", "for", "item", "in", "iterator", ":", "(", "yield", "item", ")", "return", "decoder", "=", "codecs", ".", "getincrementaldecoder", "(", "r", ".", "encoding", ")", "(", "errors", "=", "'replace'", ")", "for", "chunk", "in", "iterator", ":", "rv", "=", "decoder", ".", "decode", "(", "chunk", ")", "if", "rv", ":", "(", "yield", "rv", ")", "rv", "=", "decoder", ".", "decode", "(", "''", ",", "final", "=", "True", ")", "if", "rv", ":", "(", "yield", "rv", ")" ]
stream decodes a iterator .
train
true
45,552
def delete_minion_cachedir(minion_id, provider, opts, base=None): if isinstance(opts, dict): __opts__.update(opts) if (__opts__.get('update_cachedir', False) is False): return if (base is None): base = __opts__['cachedir'] driver = next(six.iterkeys(__opts__['providers'][provider])) fname = '{0}.p'.format(minion_id) for cachedir in ('requested', 'active'): path = os.path.join(base, cachedir, driver, provider, fname) log.debug('path: {0}'.format(path)) if os.path.exists(path): os.remove(path)
[ "def", "delete_minion_cachedir", "(", "minion_id", ",", "provider", ",", "opts", ",", "base", "=", "None", ")", ":", "if", "isinstance", "(", "opts", ",", "dict", ")", ":", "__opts__", ".", "update", "(", "opts", ")", "if", "(", "__opts__", ".", "get", "(", "'update_cachedir'", ",", "False", ")", "is", "False", ")", ":", "return", "if", "(", "base", "is", "None", ")", ":", "base", "=", "__opts__", "[", "'cachedir'", "]", "driver", "=", "next", "(", "six", ".", "iterkeys", "(", "__opts__", "[", "'providers'", "]", "[", "provider", "]", ")", ")", "fname", "=", "'{0}.p'", ".", "format", "(", "minion_id", ")", "for", "cachedir", "in", "(", "'requested'", ",", "'active'", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "base", ",", "cachedir", ",", "driver", ",", "provider", ",", "fname", ")", "log", ".", "debug", "(", "'path: {0}'", ".", "format", "(", "path", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "os", ".", "remove", "(", "path", ")" ]
deletes a minions entry from the cloud cachedir .
train
true
45,554
def clean_image_extension(form_field): if form_field: if ('.' not in form_field.name): raise ValidationError(MSG_IMAGE_EXTENSION) (_, ext) = form_field.name.rsplit('.', 1) if (ext.lower() not in ALLOWED_IMAGE_EXTENSIONS): raise ValidationError(MSG_IMAGE_EXTENSION)
[ "def", "clean_image_extension", "(", "form_field", ")", ":", "if", "form_field", ":", "if", "(", "'.'", "not", "in", "form_field", ".", "name", ")", ":", "raise", "ValidationError", "(", "MSG_IMAGE_EXTENSION", ")", "(", "_", ",", "ext", ")", "=", "form_field", ".", "name", ".", "rsplit", "(", "'.'", ",", "1", ")", "if", "(", "ext", ".", "lower", "(", ")", "not", "in", "ALLOWED_IMAGE_EXTENSIONS", ")", ":", "raise", "ValidationError", "(", "MSG_IMAGE_EXTENSION", ")" ]
ensure only images of certain extensions can be uploaded .
train
false
45,555
def proxyconnect_sso(request): if request.user.is_authenticated(): return mysite.base.decorators.as_view(request, 'vanilla-proxy-connect-sso.txt', {}, 'proxyconnect-sso') return HttpResponse('')
[ "def", "proxyconnect_sso", "(", "request", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "mysite", ".", "base", ".", "decorators", ".", "as_view", "(", "request", ",", "'vanilla-proxy-connect-sso.txt'", ",", "{", "}", ",", "'proxyconnect-sso'", ")", "return", "HttpResponse", "(", "''", ")" ]
this function implements the proxyconnect single sign-on api described by vanilla forums .
train
false