id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
37,597
@task @write def addon_grouped_rating(*addons, **kw): log.info(('[%s@%s] Updating addon grouped ratings.' % (len(addons), addon_grouped_rating.rate_limit))) for addon in addons: GroupedRating.set(addon, using='default')
[ "@", "task", "@", "write", "def", "addon_grouped_rating", "(", "*", "addons", ",", "**", "kw", ")", ":", "log", ".", "info", "(", "(", "'[%s@%s] Updating addon grouped ratings.'", "%", "(", "len", "(", "addons", ")", ",", "addon_grouped_rating", ".", "rate_limit", ")", ")", ")", "for", "addon", "in", "addons", ":", "GroupedRating", ".", "set", "(", "addon", ",", "using", "=", "'default'", ")" ]
roll up add-on ratings for the bar chart .
train
false
37,598
@interruptable def _fork_posix(args, cwd=None): encoded_args = [encode(arg) for arg in args] return subprocess.Popen(encoded_args, cwd=cwd).pid
[ "@", "interruptable", "def", "_fork_posix", "(", "args", ",", "cwd", "=", "None", ")", ":", "encoded_args", "=", "[", "encode", "(", "arg", ")", "for", "arg", "in", "args", "]", "return", "subprocess", ".", "Popen", "(", "encoded_args", ",", "cwd", "=", "cwd", ")", ".", "pid" ]
launch a process in the background .
train
false
37,599
def encode_signing_format(data): return '.'.join(map(str, data))
[ "def", "encode_signing_format", "(", "data", ")", ":", "return", "'.'", ".", "join", "(", "map", "(", "str", ",", "data", ")", ")" ]
prepare a iterable for singing .
train
false
37,600
def compute_hashes_from_fileobj(fileobj, chunk_size=(1024 * 1024)): if (six.PY3 and hasattr(fileobj, 'mode') and ('b' not in fileobj.mode)): raise ValueError('File-like object must be opened in binary mode!') linear_hash = hashlib.sha256() chunks = [] chunk = fileobj.read(chunk_size) while chunk: if (not isinstance(chunk, bytes)): chunk = chunk.encode((getattr(fileobj, 'encoding', '') or 'utf-8')) linear_hash.update(chunk) chunks.append(hashlib.sha256(chunk).digest()) chunk = fileobj.read(chunk_size) if (not chunks): chunks = [hashlib.sha256('').digest()] return (linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks)))
[ "def", "compute_hashes_from_fileobj", "(", "fileobj", ",", "chunk_size", "=", "(", "1024", "*", "1024", ")", ")", ":", "if", "(", "six", ".", "PY3", "and", "hasattr", "(", "fileobj", ",", "'mode'", ")", "and", "(", "'b'", "not", "in", "fileobj", ".", "mode", ")", ")", ":", "raise", "ValueError", "(", "'File-like object must be opened in binary mode!'", ")", "linear_hash", "=", "hashlib", ".", "sha256", "(", ")", "chunks", "=", "[", "]", "chunk", "=", "fileobj", ".", "read", "(", "chunk_size", ")", "while", "chunk", ":", "if", "(", "not", "isinstance", "(", "chunk", ",", "bytes", ")", ")", ":", "chunk", "=", "chunk", ".", "encode", "(", "(", "getattr", "(", "fileobj", ",", "'encoding'", ",", "''", ")", "or", "'utf-8'", ")", ")", "linear_hash", ".", "update", "(", "chunk", ")", "chunks", ".", "append", "(", "hashlib", ".", "sha256", "(", "chunk", ")", ".", "digest", "(", ")", ")", "chunk", "=", "fileobj", ".", "read", "(", "chunk_size", ")", "if", "(", "not", "chunks", ")", ":", "chunks", "=", "[", "hashlib", ".", "sha256", "(", "''", ")", ".", "digest", "(", ")", "]", "return", "(", "linear_hash", ".", "hexdigest", "(", ")", ",", "bytes_to_hex", "(", "tree_hash", "(", "chunks", ")", ")", ")" ]
compute the linear and tree hash from a fileobj .
train
false
37,602
def vi_pos_matching(line, index=0): anchor = None target = None delta = 1 count = 0 try: while 1: if (anchor is None): try: (target, delta) = _vi_dct_matching[line[index]] anchor = line[index] count = 1 except KeyError: index += 1 continue else: if (index < 0): return (-1) if (line[index] == anchor): count += 1 elif (line[index] == target): count -= 1 if (count == 0): return index index += delta except IndexError: return (-1)
[ "def", "vi_pos_matching", "(", "line", ",", "index", "=", "0", ")", ":", "anchor", "=", "None", "target", "=", "None", "delta", "=", "1", "count", "=", "0", "try", ":", "while", "1", ":", "if", "(", "anchor", "is", "None", ")", ":", "try", ":", "(", "target", ",", "delta", ")", "=", "_vi_dct_matching", "[", "line", "[", "index", "]", "]", "anchor", "=", "line", "[", "index", "]", "count", "=", "1", "except", "KeyError", ":", "index", "+=", "1", "continue", "else", ":", "if", "(", "index", "<", "0", ")", ":", "return", "(", "-", "1", ")", "if", "(", "line", "[", "index", "]", "==", "anchor", ")", ":", "count", "+=", "1", "elif", "(", "line", "[", "index", "]", "==", "target", ")", ":", "count", "-=", "1", "if", "(", "count", "==", "0", ")", ":", "return", "index", "index", "+=", "delta", "except", "IndexError", ":", "return", "(", "-", "1", ")" ]
find matching <> .
train
true
37,604
def generate_hashes(peaks, fan_value=DEFAULT_FAN_VALUE): if PEAK_SORT: peaks.sort(key=itemgetter(1)) for i in range(len(peaks)): for j in range(1, fan_value): if ((i + j) < len(peaks)): freq1 = peaks[i][IDX_FREQ_I] freq2 = peaks[(i + j)][IDX_FREQ_I] t1 = peaks[i][IDX_TIME_J] t2 = peaks[(i + j)][IDX_TIME_J] t_delta = (t2 - t1) if ((t_delta >= MIN_HASH_TIME_DELTA) and (t_delta <= MAX_HASH_TIME_DELTA)): h = hashlib.sha1(('%s|%s|%s' % (str(freq1), str(freq2), str(t_delta)))) (yield (h.hexdigest()[0:FINGERPRINT_REDUCTION], t1))
[ "def", "generate_hashes", "(", "peaks", ",", "fan_value", "=", "DEFAULT_FAN_VALUE", ")", ":", "if", "PEAK_SORT", ":", "peaks", ".", "sort", "(", "key", "=", "itemgetter", "(", "1", ")", ")", "for", "i", "in", "range", "(", "len", "(", "peaks", ")", ")", ":", "for", "j", "in", "range", "(", "1", ",", "fan_value", ")", ":", "if", "(", "(", "i", "+", "j", ")", "<", "len", "(", "peaks", ")", ")", ":", "freq1", "=", "peaks", "[", "i", "]", "[", "IDX_FREQ_I", "]", "freq2", "=", "peaks", "[", "(", "i", "+", "j", ")", "]", "[", "IDX_FREQ_I", "]", "t1", "=", "peaks", "[", "i", "]", "[", "IDX_TIME_J", "]", "t2", "=", "peaks", "[", "(", "i", "+", "j", ")", "]", "[", "IDX_TIME_J", "]", "t_delta", "=", "(", "t2", "-", "t1", ")", "if", "(", "(", "t_delta", ">=", "MIN_HASH_TIME_DELTA", ")", "and", "(", "t_delta", "<=", "MAX_HASH_TIME_DELTA", ")", ")", ":", "h", "=", "hashlib", ".", "sha1", "(", "(", "'%s|%s|%s'", "%", "(", "str", "(", "freq1", ")", ",", "str", "(", "freq2", ")", ",", "str", "(", "t_delta", ")", ")", ")", ")", "(", "yield", "(", "h", ".", "hexdigest", "(", ")", "[", "0", ":", "FINGERPRINT_REDUCTION", "]", ",", "t1", ")", ")" ]
hash list structure: sha1_hash[0:20] time_offset [ .
train
false
37,605
def cspline1d(signal, lamb=0.0): if (lamb != 0.0): return _cubic_smooth_coeff(signal, lamb) else: return _cubic_coeff(signal)
[ "def", "cspline1d", "(", "signal", ",", "lamb", "=", "0.0", ")", ":", "if", "(", "lamb", "!=", "0.0", ")", ":", "return", "_cubic_smooth_coeff", "(", "signal", ",", "lamb", ")", "else", ":", "return", "_cubic_coeff", "(", "signal", ")" ]
compute cubic spline coefficients for rank-1 array .
train
false
37,606
def _get_skip_method(obj): if inspect.isclass(obj): if (not _is_test_cls(obj)): raise ValueError(NOT_TEST_OBJECT_ERROR_MSG) return _mark_class_skipped else: if (not _is_test_method_name(obj.__name__)): raise ValueError(NOT_TEST_OBJECT_ERROR_MSG) return _mark_method_skipped
[ "def", "_get_skip_method", "(", "obj", ")", ":", "if", "inspect", ".", "isclass", "(", "obj", ")", ":", "if", "(", "not", "_is_test_cls", "(", "obj", ")", ")", ":", "raise", "ValueError", "(", "NOT_TEST_OBJECT_ERROR_MSG", ")", "return", "_mark_class_skipped", "else", ":", "if", "(", "not", "_is_test_method_name", "(", "obj", ".", "__name__", ")", ")", ":", "raise", "ValueError", "(", "NOT_TEST_OBJECT_ERROR_MSG", ")", "return", "_mark_method_skipped" ]
make sure that we can decorate both methods and classes .
train
false
37,607
def akismet_data_as_dl(akismet_data): favorites = ('comment_content', 'permalink', 'comment_author', 'comment_author_email') def moderator_sort(key): 'Sort data by 1) favorites, 2) data values, 3) headers' try: fav_order = favorites.index(key) except ValueError: fav_order = len(favorites) is_data = (key and (key[0] in ascii_lowercase)) return (fav_order, (not is_data), key) if (not akismet_data): return SUBMISSION_NOT_AVAILABLE data = json.loads(akismet_data) keys = sorted(data.keys(), key=moderator_sort) out = format_html(u'<dl>\n {}\n</dl>', format_html_join(u'\n ', u'<dt>{}</dt><dd>{}</dd>', ((key, data[key]) for key in keys))) return out
[ "def", "akismet_data_as_dl", "(", "akismet_data", ")", ":", "favorites", "=", "(", "'comment_content'", ",", "'permalink'", ",", "'comment_author'", ",", "'comment_author_email'", ")", "def", "moderator_sort", "(", "key", ")", ":", "try", ":", "fav_order", "=", "favorites", ".", "index", "(", "key", ")", "except", "ValueError", ":", "fav_order", "=", "len", "(", "favorites", ")", "is_data", "=", "(", "key", "and", "(", "key", "[", "0", "]", "in", "ascii_lowercase", ")", ")", "return", "(", "fav_order", ",", "(", "not", "is_data", ")", ",", "key", ")", "if", "(", "not", "akismet_data", ")", ":", "return", "SUBMISSION_NOT_AVAILABLE", "data", "=", "json", ".", "loads", "(", "akismet_data", ")", "keys", "=", "sorted", "(", "data", ".", "keys", "(", ")", ",", "key", "=", "moderator_sort", ")", "out", "=", "format_html", "(", "u'<dl>\\n {}\\n</dl>'", ",", "format_html_join", "(", "u'\\n '", ",", "u'<dt>{}</dt><dd>{}</dd>'", ",", "(", "(", "key", ",", "data", "[", "key", "]", ")", "for", "key", "in", "keys", ")", ")", ")", "return", "out" ]
format akismet data as a definition list .
train
false
37,608
def deserialize_request_string(string, app): server = ServerBase(app) initial_ctx = MethodContext(server, MethodContext.SERVER) initial_ctx.in_string = [string] ctx = server.generate_contexts(initial_ctx)[0] server.get_in_object(ctx) return ctx.in_object
[ "def", "deserialize_request_string", "(", "string", ",", "app", ")", ":", "server", "=", "ServerBase", "(", "app", ")", "initial_ctx", "=", "MethodContext", "(", "server", ",", "MethodContext", ".", "SERVER", ")", "initial_ctx", ".", "in_string", "=", "[", "string", "]", "ctx", "=", "server", ".", "generate_contexts", "(", "initial_ctx", ")", "[", "0", "]", "server", ".", "get_in_object", "(", "ctx", ")", "return", "ctx", ".", "in_object" ]
deserialize request string using in_protocol in application definition .
train
false
37,610
def subplot2grid(shape, loc, rowspan=1, colspan=1, fig=None, **kwargs): if (fig is None): fig = gcf() (s1, s2) = shape subplotspec = GridSpec(s1, s2).new_subplotspec(loc, rowspan=rowspan, colspan=colspan) a = fig.add_subplot(subplotspec, **kwargs) bbox = a.bbox byebye = [] for other in fig.axes: if (other == a): continue if bbox.fully_overlaps(other.bbox): byebye.append(other) for ax in byebye: delaxes(ax) return a
[ "def", "subplot2grid", "(", "shape", ",", "loc", ",", "rowspan", "=", "1", ",", "colspan", "=", "1", ",", "fig", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "fig", "is", "None", ")", ":", "fig", "=", "gcf", "(", ")", "(", "s1", ",", "s2", ")", "=", "shape", "subplotspec", "=", "GridSpec", "(", "s1", ",", "s2", ")", ".", "new_subplotspec", "(", "loc", ",", "rowspan", "=", "rowspan", ",", "colspan", "=", "colspan", ")", "a", "=", "fig", ".", "add_subplot", "(", "subplotspec", ",", "**", "kwargs", ")", "bbox", "=", "a", ".", "bbox", "byebye", "=", "[", "]", "for", "other", "in", "fig", ".", "axes", ":", "if", "(", "other", "==", "a", ")", ":", "continue", "if", "bbox", ".", "fully_overlaps", "(", "other", ".", "bbox", ")", ":", "byebye", ".", "append", "(", "other", ")", "for", "ax", "in", "byebye", ":", "delaxes", "(", "ax", ")", "return", "a" ]
create a subplot in a grid .
train
false
37,611
def _equal_chance_permutation(objs, field='albumartist'): key = attrgetter(field) objs.sort(key=key) objs_by_artists = {} for (artist, v) in groupby(objs, key): objs_by_artists[artist] = list(v) while objs_by_artists: artist = random.choice(list(objs_by_artists.keys())) objs_from_artist = objs_by_artists[artist] i = random.randint(0, (len(objs_from_artist) - 1)) (yield objs_from_artist.pop(i)) if (not objs_from_artist): del objs_by_artists[artist]
[ "def", "_equal_chance_permutation", "(", "objs", ",", "field", "=", "'albumartist'", ")", ":", "key", "=", "attrgetter", "(", "field", ")", "objs", ".", "sort", "(", "key", "=", "key", ")", "objs_by_artists", "=", "{", "}", "for", "(", "artist", ",", "v", ")", "in", "groupby", "(", "objs", ",", "key", ")", ":", "objs_by_artists", "[", "artist", "]", "=", "list", "(", "v", ")", "while", "objs_by_artists", ":", "artist", "=", "random", ".", "choice", "(", "list", "(", "objs_by_artists", ".", "keys", "(", ")", ")", ")", "objs_from_artist", "=", "objs_by_artists", "[", "artist", "]", "i", "=", "random", ".", "randint", "(", "0", ",", "(", "len", "(", "objs_from_artist", ")", "-", "1", ")", ")", "(", "yield", "objs_from_artist", ".", "pop", "(", "i", ")", ")", "if", "(", "not", "objs_from_artist", ")", ":", "del", "objs_by_artists", "[", "artist", "]" ]
generate a permutation of the objects where every group with equal values for field have an equal chance of appearing in any given position .
train
false
37,613
def license_absent(name): ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} if (not __salt__['powerpath.has_powerpath']()): ret['result'] = False ret['comment'] = 'PowerPath is not installed.' return ret licenses = [l['key'] for l in __salt__['powerpath.list_licenses']()] if (name not in licenses): ret['result'] = True ret['comment'] = 'License key {0} not present'.format(name) return ret if __opts__['test']: ret['result'] = None ret['comment'] = 'License key {0} is set to be removed'.format(name) return ret data = __salt__['powerpath.remove_license'](name) if data['result']: ret['changes'] = {name: 'removed'} ret['result'] = True ret['comment'] = data['output'] return ret else: ret['result'] = False ret['comment'] = data['output'] return ret
[ "def", "license_absent", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "if", "(", "not", "__salt__", "[", "'powerpath.has_powerpath'", "]", "(", ")", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'PowerPath is not installed.'", "return", "ret", "licenses", "=", "[", "l", "[", "'key'", "]", "for", "l", "in", "__salt__", "[", "'powerpath.list_licenses'", "]", "(", ")", "]", "if", "(", "name", "not", "in", "licenses", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'License key {0} not present'", ".", "format", "(", "name", ")", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'License key {0} is set to be removed'", ".", "format", "(", "name", ")", "return", "ret", "data", "=", "__salt__", "[", "'powerpath.remove_license'", "]", "(", "name", ")", "if", "data", "[", "'result'", "]", ":", "ret", "[", "'changes'", "]", "=", "{", "name", ":", "'removed'", "}", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "data", "[", "'output'", "]", "return", "ret", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "data", "[", "'output'", "]", "return", "ret" ]
ensures that the specified powerpath license key is absent on the host .
train
true
37,614
def transaction_retry(max_retries=1): def _outer(fun): @wraps(fun) def _inner(*args, **kwargs): _max_retries = kwargs.pop(u'exception_retry_count', max_retries) for retries in count(0): try: return fun(*args, **kwargs) except Exception: if (retries >= _max_retries): raise try: rollback_unless_managed() except Exception: pass return _inner return _outer
[ "def", "transaction_retry", "(", "max_retries", "=", "1", ")", ":", "def", "_outer", "(", "fun", ")", ":", "@", "wraps", "(", "fun", ")", "def", "_inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "_max_retries", "=", "kwargs", ".", "pop", "(", "u'exception_retry_count'", ",", "max_retries", ")", "for", "retries", "in", "count", "(", "0", ")", ":", "try", ":", "return", "fun", "(", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", ":", "if", "(", "retries", ">=", "_max_retries", ")", ":", "raise", "try", ":", "rollback_unless_managed", "(", ")", "except", "Exception", ":", "pass", "return", "_inner", "return", "_outer" ]
decorator for methods doing database operations .
train
true
37,615
def EvalNormalCdfInverse(p, mu=0, sigma=1): return stats.norm.ppf(p, loc=mu, scale=sigma)
[ "def", "EvalNormalCdfInverse", "(", "p", ",", "mu", "=", "0", ",", "sigma", "=", "1", ")", ":", "return", "stats", ".", "norm", ".", "ppf", "(", "p", ",", "loc", "=", "mu", ",", "scale", "=", "sigma", ")" ]
evaluates the inverse cdf of the normal distribution .
train
false
37,616
def _get_Q(pi, k, w, codon_lst, codon_table): import numpy as np codon_num = len(codon_lst) Q = np.zeros((codon_num, codon_num)) for i in range(codon_num): for j in range(codon_num): if (i != j): Q[(i, j)] = _q(codon_lst[i], codon_lst[j], pi, k, w, codon_table=codon_table) nucl_substitutions = 0 for i in range(codon_num): Q[(i, i)] = (- sum(Q[i, :])) try: nucl_substitutions += (pi[codon_lst[i]] * (- Q[(i, i)])) except KeyError: pass Q = (Q / nucl_substitutions) return Q
[ "def", "_get_Q", "(", "pi", ",", "k", ",", "w", ",", "codon_lst", ",", "codon_table", ")", ":", "import", "numpy", "as", "np", "codon_num", "=", "len", "(", "codon_lst", ")", "Q", "=", "np", ".", "zeros", "(", "(", "codon_num", ",", "codon_num", ")", ")", "for", "i", "in", "range", "(", "codon_num", ")", ":", "for", "j", "in", "range", "(", "codon_num", ")", ":", "if", "(", "i", "!=", "j", ")", ":", "Q", "[", "(", "i", ",", "j", ")", "]", "=", "_q", "(", "codon_lst", "[", "i", "]", ",", "codon_lst", "[", "j", "]", ",", "pi", ",", "k", ",", "w", ",", "codon_table", "=", "codon_table", ")", "nucl_substitutions", "=", "0", "for", "i", "in", "range", "(", "codon_num", ")", ":", "Q", "[", "(", "i", ",", "i", ")", "]", "=", "(", "-", "sum", "(", "Q", "[", "i", ",", ":", "]", ")", ")", "try", ":", "nucl_substitutions", "+=", "(", "pi", "[", "codon_lst", "[", "i", "]", "]", "*", "(", "-", "Q", "[", "(", "i", ",", "i", ")", "]", ")", ")", "except", "KeyError", ":", "pass", "Q", "=", "(", "Q", "/", "nucl_substitutions", ")", "return", "Q" ]
q matrix for codon substitution .
train
false
37,618
def find_errors(project, resource): pymodule = project.get_pymodule(resource) finder = _BadAccessFinder(pymodule) ast.walk(pymodule.get_ast(), finder) return finder.errors
[ "def", "find_errors", "(", "project", ",", "resource", ")", ":", "pymodule", "=", "project", ".", "get_pymodule", "(", "resource", ")", "finder", "=", "_BadAccessFinder", "(", "pymodule", ")", "ast", ".", "walk", "(", "pymodule", ".", "get_ast", "(", ")", ",", "finder", ")", "return", "finder", ".", "errors" ]
find possible bad name and attribute accesses it returns a list of errors .
train
true
37,620
def get_importer(path_item): try: importer = sys.path_importer_cache[path_item] except KeyError: for hook in sys.path_hooks: try: importer = hook(path_item) except ImportError: pass else: break else: importer = None sys.path_importer_cache.setdefault(path_item, importer) if (importer is None): try: importer = ImpWrapper(path_item) except ImportError: pass return importer
[ "def", "get_importer", "(", "path_item", ")", ":", "try", ":", "importer", "=", "sys", ".", "path_importer_cache", "[", "path_item", "]", "except", "KeyError", ":", "for", "hook", "in", "sys", ".", "path_hooks", ":", "try", ":", "importer", "=", "hook", "(", "path_item", ")", "except", "ImportError", ":", "pass", "else", ":", "break", "else", ":", "importer", "=", "None", "sys", ".", "path_importer_cache", ".", "setdefault", "(", "path_item", ",", "importer", ")", "if", "(", "importer", "is", "None", ")", ":", "try", ":", "importer", "=", "ImpWrapper", "(", "path_item", ")", "except", "ImportError", ":", "pass", "return", "importer" ]
retrieve a pep 302 "importer" for the given path item if there is no importer .
train
true
37,621
def latex_to_png(s, encode=False, backend=None, wrap=False): s = cast_unicode(s) allowed_backends = LaTeXTool.instance().backends if (backend is None): backend = allowed_backends[0] if (backend not in allowed_backends): return None if (backend == 'matplotlib'): f = latex_to_png_mpl elif (backend == 'dvipng'): f = latex_to_png_dvipng else: raise ValueError('No such backend {0}'.format(backend)) bin_data = f(s, wrap) if (encode and bin_data): bin_data = encodebytes(bin_data) return bin_data
[ "def", "latex_to_png", "(", "s", ",", "encode", "=", "False", ",", "backend", "=", "None", ",", "wrap", "=", "False", ")", ":", "s", "=", "cast_unicode", "(", "s", ")", "allowed_backends", "=", "LaTeXTool", ".", "instance", "(", ")", ".", "backends", "if", "(", "backend", "is", "None", ")", ":", "backend", "=", "allowed_backends", "[", "0", "]", "if", "(", "backend", "not", "in", "allowed_backends", ")", ":", "return", "None", "if", "(", "backend", "==", "'matplotlib'", ")", ":", "f", "=", "latex_to_png_mpl", "elif", "(", "backend", "==", "'dvipng'", ")", ":", "f", "=", "latex_to_png_dvipng", "else", ":", "raise", "ValueError", "(", "'No such backend {0}'", ".", "format", "(", "backend", ")", ")", "bin_data", "=", "f", "(", "s", ",", "wrap", ")", "if", "(", "encode", "and", "bin_data", ")", ":", "bin_data", "=", "encodebytes", "(", "bin_data", ")", "return", "bin_data" ]
render a latex string to png .
train
false
37,622
def show_sls(mods, saltenv='base', test=None, **kwargs): __pillar__.update(kwargs.get('pillar', {})) __opts__['grains'] = __grains__ opts = copy.copy(__opts__) if salt.utils.test_mode(test=test, **kwargs): opts['test'] = True else: opts['test'] = __opts__.get('test', None) st_ = salt.client.ssh.state.SSHHighState(__opts__, __pillar__, __salt__, __context__['fileclient']) if isinstance(mods, string_types): mods = mods.split(',') (high_data, errors) = st_.render_highstate({saltenv: mods}) (high_data, ext_errors) = st_.state.reconcile_extend(high_data) errors += ext_errors errors += st_.state.verify_high(high_data) if errors: return errors (high_data, req_in_errors) = st_.state.requisite_in(high_data) errors += req_in_errors high_data = st_.state.apply_exclude(high_data) if errors: return errors return high_data
[ "def", "show_sls", "(", "mods", ",", "saltenv", "=", "'base'", ",", "test", "=", "None", ",", "**", "kwargs", ")", ":", "__pillar__", ".", "update", "(", "kwargs", ".", "get", "(", "'pillar'", ",", "{", "}", ")", ")", "__opts__", "[", "'grains'", "]", "=", "__grains__", "opts", "=", "copy", ".", "copy", "(", "__opts__", ")", "if", "salt", ".", "utils", ".", "test_mode", "(", "test", "=", "test", ",", "**", "kwargs", ")", ":", "opts", "[", "'test'", "]", "=", "True", "else", ":", "opts", "[", "'test'", "]", "=", "__opts__", ".", "get", "(", "'test'", ",", "None", ")", "st_", "=", "salt", ".", "client", ".", "ssh", ".", "state", ".", "SSHHighState", "(", "__opts__", ",", "__pillar__", ",", "__salt__", ",", "__context__", "[", "'fileclient'", "]", ")", "if", "isinstance", "(", "mods", ",", "string_types", ")", ":", "mods", "=", "mods", ".", "split", "(", "','", ")", "(", "high_data", ",", "errors", ")", "=", "st_", ".", "render_highstate", "(", "{", "saltenv", ":", "mods", "}", ")", "(", "high_data", ",", "ext_errors", ")", "=", "st_", ".", "state", ".", "reconcile_extend", "(", "high_data", ")", "errors", "+=", "ext_errors", "errors", "+=", "st_", ".", "state", ".", "verify_high", "(", "high_data", ")", "if", "errors", ":", "return", "errors", "(", "high_data", ",", "req_in_errors", ")", "=", "st_", ".", "state", ".", "requisite_in", "(", "high_data", ")", "errors", "+=", "req_in_errors", "high_data", "=", "st_", ".", "state", ".", "apply_exclude", "(", "high_data", ")", "if", "errors", ":", "return", "errors", "return", "high_data" ]
display the state data from a specific sls or list of sls files on the master cli example: .
train
false
37,625
def validate_string_or_None(s): if (s is None): return None try: return six.text_type(s) except ValueError: raise ValueError((u'Could not convert "%s" to string' % s))
[ "def", "validate_string_or_None", "(", "s", ")", ":", "if", "(", "s", "is", "None", ")", ":", "return", "None", "try", ":", "return", "six", ".", "text_type", "(", "s", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "(", "u'Could not convert \"%s\" to string'", "%", "s", ")", ")" ]
convert s to string or raise .
train
false
37,627
def extract_attrs(attr_string): attributes = {} for (name, val) in FIND_ATTRS.findall(attr_string): attributes[name] = val return attributes
[ "def", "extract_attrs", "(", "attr_string", ")", ":", "attributes", "=", "{", "}", "for", "(", "name", ",", "val", ")", "in", "FIND_ATTRS", ".", "findall", "(", "attr_string", ")", ":", "attributes", "[", "name", "]", "=", "val", "return", "attributes" ]
helper method to extract tag attributes as a dict .
train
false
37,628
def _consume_decimal(seq): return (int(seq[0:2], 10), seq[2:])
[ "def", "_consume_decimal", "(", "seq", ")", ":", "return", "(", "int", "(", "seq", "[", "0", ":", "2", "]", ",", "10", ")", ",", "seq", "[", "2", ":", "]", ")" ]
read 2 chars as a decimal .
train
false
37,631
def filter_limit_query(model, query, hints): if (hints is None): return query query = _filter(model, query, hints) if hints.cannot_match: return [] if (not hints.filters): return _limit(query, hints) else: return query
[ "def", "filter_limit_query", "(", "model", ",", "query", ",", "hints", ")", ":", "if", "(", "hints", "is", "None", ")", ":", "return", "query", "query", "=", "_filter", "(", "model", ",", "query", ",", "hints", ")", "if", "hints", ".", "cannot_match", ":", "return", "[", "]", "if", "(", "not", "hints", ".", "filters", ")", ":", "return", "_limit", "(", "query", ",", "hints", ")", "else", ":", "return", "query" ]
apply filtering and limit to a query .
train
false
37,632
def build_text_response(request, data, code): return build_response(request, data, code, u'ascii')
[ "def", "build_text_response", "(", "request", ",", "data", ",", "code", ")", ":", "return", "build_response", "(", "request", ",", "data", ",", "code", ",", "u'ascii'", ")" ]
build a response for textual data .
train
false
37,633
def call_ping(*args, **kwargs): errors = dict() for (dev_id, dev_status) in call_blink().items(): if (not dev_status['result']): errors[dev_id] = False return (errors or True)
[ "def", "call_ping", "(", "*", "args", ",", "**", "kwargs", ")", ":", "errors", "=", "dict", "(", ")", "for", "(", "dev_id", ",", "dev_status", ")", "in", "call_blink", "(", ")", ".", "items", "(", ")", ":", "if", "(", "not", "dev_status", "[", "'result'", "]", ")", ":", "errors", "[", "dev_id", "]", "=", "False", "return", "(", "errors", "or", "True", ")" ]
ping the lamps by issuing a short inversion blink to all available devices .
train
true
37,634
def find_dimension(name, locale=None, provider=None, namespace=None): templates = {} missing = [name] while missing: dimension = None name = missing.pop() dimension = None required_template = None try: dimension = _lookup_dimension(name, templates, namespace, provider) except TemplateRequired as e: required_template = e.template if (required_template in templates): raise BackendError(("Some model provider didn't make use of dimension template '%s' for '%s'" % (required_template, name))) if required_template: missing.append(name) if (required_template in missing): raise ModelError(("Dimension templates cycle in '%s'" % required_template)) missing.append(required_template) if dimension: templates[name] = dimension if namespace: lookup = namespace.translation_lookup(locale) if lookup: context = LocalizationContext(lookup[0]) trans = context.object_localization('dimensions', 'inner') dimension = dimension.localized(trans) return dimension
[ "def", "find_dimension", "(", "name", ",", "locale", "=", "None", ",", "provider", "=", "None", ",", "namespace", "=", "None", ")", ":", "templates", "=", "{", "}", "missing", "=", "[", "name", "]", "while", "missing", ":", "dimension", "=", "None", "name", "=", "missing", ".", "pop", "(", ")", "dimension", "=", "None", "required_template", "=", "None", "try", ":", "dimension", "=", "_lookup_dimension", "(", "name", ",", "templates", ",", "namespace", ",", "provider", ")", "except", "TemplateRequired", "as", "e", ":", "required_template", "=", "e", ".", "template", "if", "(", "required_template", "in", "templates", ")", ":", "raise", "BackendError", "(", "(", "\"Some model provider didn't make use of dimension template '%s' for '%s'\"", "%", "(", "required_template", ",", "name", ")", ")", ")", "if", "required_template", ":", "missing", ".", "append", "(", "name", ")", "if", "(", "required_template", "in", "missing", ")", ":", "raise", "ModelError", "(", "(", "\"Dimension templates cycle in '%s'\"", "%", "required_template", ")", ")", "missing", ".", "append", "(", "required_template", ")", "if", "dimension", ":", "templates", "[", "name", "]", "=", "dimension", "if", "namespace", ":", "lookup", "=", "namespace", ".", "translation_lookup", "(", "locale", ")", "if", "lookup", ":", "context", "=", "LocalizationContext", "(", "lookup", "[", "0", "]", ")", "trans", "=", "context", ".", "object_localization", "(", "'dimensions'", ",", "'inner'", ")", "dimension", "=", "dimension", ".", "localized", "(", "trans", ")", "return", "dimension" ]
returns a localized dimension with name .
train
false
37,635
def utf8_text(text): if (text and text.strip()): text = text.strip() if (not isinstance(text, unicode)): text = text.decode(u'utf-8', u'replace') text = normalize(text).encode(u'utf-8') else: text = _(u'Unknown').encode(u'utf-8') return text
[ "def", "utf8_text", "(", "text", ")", ":", "if", "(", "text", "and", "text", ".", "strip", "(", ")", ")", ":", "text", "=", "text", ".", "strip", "(", ")", "if", "(", "not", "isinstance", "(", "text", ",", "unicode", ")", ")", ":", "text", "=", "text", ".", "decode", "(", "u'utf-8'", ",", "u'replace'", ")", "text", "=", "normalize", "(", "text", ")", ".", "encode", "(", "u'utf-8'", ")", "else", ":", "text", "=", "_", "(", "u'Unknown'", ")", ".", "encode", "(", "u'utf-8'", ")", "return", "text" ]
convert a possibly null string to utf-8 bytes .
train
false
37,636
@handle_response_format @treeio_login_required def task_status_delete(request, status_id, response_format='html'): status = get_object_or_404(TaskStatus, pk=status_id) if (not request.user.profile.has_permission(status, mode='w')): return user_denied(request, message="You don't have access to this Task Status") if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): status.trash = True status.save() else: status.delete() return HttpResponseRedirect(reverse('projects_index')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('projects_index_by_status', args=[status.id])) milestones = Object.filter_by_request(request, Milestone.objects) context = _get_default_context(request) context.update({'status': status, 'milestones': milestones}) return render_to_response('projects/status_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "task_status_delete", "(", "request", ",", "status_id", ",", "response_format", "=", "'html'", ")", ":", "status", "=", "get_object_or_404", "(", "TaskStatus", ",", "pk", "=", "status_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "status", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Task Status\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "status", ".", "trash", "=", "True", "status", ".", "save", "(", ")", "else", ":", "status", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_index'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_index_by_status'", ",", "args", "=", "[", "status", ".", "id", "]", ")", ")", "milestones", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Milestone", ".", "objects", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'status'", ":", "status", ",", "'milestones'", ":", "milestones", "}", ")", "return", "render_to_response", "(", "'projects/status_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
delete a task status .
train
false
37,640
def is_sync_file(filename): if isinstance(filename, (str, unicode)): extension = filename.rpartition(u'.')[2].lower() return ((extension in sickbeard.SYNC_FILES.split(u',')) or filename.startswith(u'.syncthing') or any((fnmatch(filename, match) for match in sickbeard.SYNC_FILES.split(u',')))) return False
[ "def", "is_sync_file", "(", "filename", ")", ":", "if", "isinstance", "(", "filename", ",", "(", "str", ",", "unicode", ")", ")", ":", "extension", "=", "filename", ".", "rpartition", "(", "u'.'", ")", "[", "2", "]", ".", "lower", "(", ")", "return", "(", "(", "extension", "in", "sickbeard", ".", "SYNC_FILES", ".", "split", "(", "u','", ")", ")", "or", "filename", ".", "startswith", "(", "u'.syncthing'", ")", "or", "any", "(", "(", "fnmatch", "(", "filename", ",", "match", ")", "for", "match", "in", "sickbeard", ".", "SYNC_FILES", ".", "split", "(", "u','", ")", ")", ")", ")", "return", "False" ]
check if the provided filename is a sync file .
train
false
37,642
def test_get_call_line(): @verbose def foo(verbose=None): return _get_call_line(in_verbose=True) for v in (None, True): my_line = foo(verbose=v) assert_equal(my_line, 'my_line = foo(verbose=v) # testing') def bar(): return _get_call_line(in_verbose=False) my_line = bar() assert_equal(my_line, 'my_line = bar() # testing more')
[ "def", "test_get_call_line", "(", ")", ":", "@", "verbose", "def", "foo", "(", "verbose", "=", "None", ")", ":", "return", "_get_call_line", "(", "in_verbose", "=", "True", ")", "for", "v", "in", "(", "None", ",", "True", ")", ":", "my_line", "=", "foo", "(", "verbose", "=", "v", ")", "assert_equal", "(", "my_line", ",", "'my_line = foo(verbose=v) # testing'", ")", "def", "bar", "(", ")", ":", "return", "_get_call_line", "(", "in_verbose", "=", "False", ")", "my_line", "=", "bar", "(", ")", "assert_equal", "(", "my_line", ",", "'my_line = bar() # testing more'", ")" ]
test getting a call line .
train
false
37,643
@pytest.mark.parametrize('create_file, create_dir, filterfunc, expected', [(True, False, os.path.isfile, True), (True, False, os.path.isdir, False), (False, True, os.path.isfile, False), (False, True, os.path.isdir, True), (False, False, os.path.isfile, False), (False, False, os.path.isdir, False)]) def test_get_file_list(tmpdir, create_file, create_dir, filterfunc, expected): path = (tmpdir / 'foo') if (create_file or create_dir): path.ensure(dir=create_dir) all_files = os.listdir(str(tmpdir)) result = filescheme.get_file_list(str(tmpdir), all_files, filterfunc) item = {'name': 'foo', 'absname': str(path)} assert ((item in result) == expected)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'create_file, create_dir, filterfunc, expected'", ",", "[", "(", "True", ",", "False", ",", "os", ".", "path", ".", "isfile", ",", "True", ")", ",", "(", "True", ",", "False", ",", "os", ".", "path", ".", "isdir", ",", "False", ")", ",", "(", "False", ",", "True", ",", "os", ".", "path", ".", "isfile", ",", "False", ")", ",", "(", "False", ",", "True", ",", "os", ".", "path", ".", "isdir", ",", "True", ")", ",", "(", "False", ",", "False", ",", "os", ".", "path", ".", "isfile", ",", "False", ")", ",", "(", "False", ",", "False", ",", "os", ".", "path", ".", "isdir", ",", "False", ")", "]", ")", "def", "test_get_file_list", "(", "tmpdir", ",", "create_file", ",", "create_dir", ",", "filterfunc", ",", "expected", ")", ":", "path", "=", "(", "tmpdir", "/", "'foo'", ")", "if", "(", "create_file", "or", "create_dir", ")", ":", "path", ".", "ensure", "(", "dir", "=", "create_dir", ")", "all_files", "=", "os", ".", "listdir", "(", "str", "(", "tmpdir", ")", ")", "result", "=", "filescheme", ".", "get_file_list", "(", "str", "(", "tmpdir", ")", ",", "all_files", ",", "filterfunc", ")", "item", "=", "{", "'name'", ":", "'foo'", ",", "'absname'", ":", "str", "(", "path", ")", "}", "assert", "(", "(", "item", "in", "result", ")", "==", "expected", ")" ]
test get_file_list .
train
false
37,644
def aes_decrypt(data, expanded_key): rounds = ((len(expanded_key) // BLOCK_SIZE_BYTES) - 1) for i in range(rounds, 0, (-1)): data = xor(data, expanded_key[(i * BLOCK_SIZE_BYTES):((i + 1) * BLOCK_SIZE_BYTES)]) if (i != rounds): data = mix_columns_inv(data) data = shift_rows_inv(data) data = sub_bytes_inv(data) data = xor(data, expanded_key[:BLOCK_SIZE_BYTES]) return data
[ "def", "aes_decrypt", "(", "data", ",", "expanded_key", ")", ":", "rounds", "=", "(", "(", "len", "(", "expanded_key", ")", "//", "BLOCK_SIZE_BYTES", ")", "-", "1", ")", "for", "i", "in", "range", "(", "rounds", ",", "0", ",", "(", "-", "1", ")", ")", ":", "data", "=", "xor", "(", "data", ",", "expanded_key", "[", "(", "i", "*", "BLOCK_SIZE_BYTES", ")", ":", "(", "(", "i", "+", "1", ")", "*", "BLOCK_SIZE_BYTES", ")", "]", ")", "if", "(", "i", "!=", "rounds", ")", ":", "data", "=", "mix_columns_inv", "(", "data", ")", "data", "=", "shift_rows_inv", "(", "data", ")", "data", "=", "sub_bytes_inv", "(", "data", ")", "data", "=", "xor", "(", "data", ",", "expanded_key", "[", ":", "BLOCK_SIZE_BYTES", "]", ")", "return", "data" ]
decrypt one block with aes .
train
false
37,645
def tensor_mul(*a): if (not a): return TensMul.from_data(S.One, [], [], []) t = a[0] for tx in a[1:]: t = (t * tx) return t
[ "def", "tensor_mul", "(", "*", "a", ")", ":", "if", "(", "not", "a", ")", ":", "return", "TensMul", ".", "from_data", "(", "S", ".", "One", ",", "[", "]", ",", "[", "]", ",", "[", "]", ")", "t", "=", "a", "[", "0", "]", "for", "tx", "in", "a", "[", "1", ":", "]", ":", "t", "=", "(", "t", "*", "tx", ")", "return", "t" ]
product of tensors .
train
false
37,647
def check_uuid4(logical_line): msg = 'N357: Use oslo_utils.uuidutils or uuidsentinel(in case of test cases) to generate UUID instead of uuid4().' if ('uuid4().' in logical_line): return if ('uuid4()' in logical_line): (yield (0, msg))
[ "def", "check_uuid4", "(", "logical_line", ")", ":", "msg", "=", "'N357: Use oslo_utils.uuidutils or uuidsentinel(in case of test cases) to generate UUID instead of uuid4().'", "if", "(", "'uuid4().'", "in", "logical_line", ")", ":", "return", "if", "(", "'uuid4()'", "in", "logical_line", ")", ":", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
generating uuid use oslo_utils .
train
false
37,652
def onlyOnce(fn): def wrap(*args, **kwargs): if hasattr(fn, 'called'): return fn.called = 1 return fn(*args, **kwargs) util.mergeFunctionMetadata(fn, wrap) return wrap
[ "def", "onlyOnce", "(", "fn", ")", ":", "def", "wrap", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "hasattr", "(", "fn", ",", "'called'", ")", ":", "return", "fn", ".", "called", "=", "1", "return", "fn", "(", "*", "args", ",", "**", "kwargs", ")", "util", ".", "mergeFunctionMetadata", "(", "fn", ",", "wrap", ")", "return", "wrap" ]
set up fn to only run once within an interpreter instance .
train
true
37,653
def empty_like(array): warnings.warn('chainer.cuda.empty_like is deprecated. Use cupy.empty_like instead.', DeprecationWarning) check_cuda_available() if isinstance(array, cupy.ndarray): return cupy.empty_like(array) return cupy.empty(array.shape, dtype=array.dtype)
[ "def", "empty_like", "(", "array", ")", ":", "warnings", ".", "warn", "(", "'chainer.cuda.empty_like is deprecated. Use cupy.empty_like instead.'", ",", "DeprecationWarning", ")", "check_cuda_available", "(", ")", "if", "isinstance", "(", "array", ",", "cupy", ".", "ndarray", ")", ":", "return", "cupy", ".", "empty_like", "(", "array", ")", "return", "cupy", ".", "empty", "(", "array", ".", "shape", ",", "dtype", "=", "array", ".", "dtype", ")" ]
creates an uninitialized gpu array like the given one .
train
false
37,654
def dtype_in_elemwise_supported(op): def get_all_basic_scalar(composite_op): l = [] for i in composite_op.fgraph.toposort(): if isinstance(i, theano.scalar.Composite): l += get_all_basic_scalar(i) else: l.append(i) return l if (isinstance(op, GpuElemwise) or isinstance(op, tensor.Elemwise)): if isinstance(op.scalar_op, theano.scalar.Composite): scals = get_all_basic_scalar(op.scalar_op) for s in scals: if any([(i.type.dtype not in elemwise_cuda_dtype_supported) for i in (s.inputs + s.outputs)]): return False return True
[ "def", "dtype_in_elemwise_supported", "(", "op", ")", ":", "def", "get_all_basic_scalar", "(", "composite_op", ")", ":", "l", "=", "[", "]", "for", "i", "in", "composite_op", ".", "fgraph", ".", "toposort", "(", ")", ":", "if", "isinstance", "(", "i", ",", "theano", ".", "scalar", ".", "Composite", ")", ":", "l", "+=", "get_all_basic_scalar", "(", "i", ")", "else", ":", "l", ".", "append", "(", "i", ")", "return", "l", "if", "(", "isinstance", "(", "op", ",", "GpuElemwise", ")", "or", "isinstance", "(", "op", ",", "tensor", ".", "Elemwise", ")", ")", ":", "if", "isinstance", "(", "op", ".", "scalar_op", ",", "theano", ".", "scalar", ".", "Composite", ")", ":", "scals", "=", "get_all_basic_scalar", "(", "op", ".", "scalar_op", ")", "for", "s", "in", "scals", ":", "if", "any", "(", "[", "(", "i", ".", "type", ".", "dtype", "not", "in", "elemwise_cuda_dtype_supported", ")", "for", "i", "in", "(", "s", ".", "inputs", "+", "s", ".", "outputs", ")", "]", ")", ":", "return", "False", "return", "True" ]
return true of the elemwise op is supported on the gpu .
train
false
37,656
def decompose_fullname(fullname): from r2.lib.db.thing import Thing, Relation if (fullname[0] == 't'): type_class = Thing elif (fullname[0] == 'r'): type_class = Relation (type_id36, thing_id36) = fullname[1:].split('_') type_id = int(type_id36, 36) id = int(thing_id36, 36) return (type_class, type_id, id)
[ "def", "decompose_fullname", "(", "fullname", ")", ":", "from", "r2", ".", "lib", ".", "db", ".", "thing", "import", "Thing", ",", "Relation", "if", "(", "fullname", "[", "0", "]", "==", "'t'", ")", ":", "type_class", "=", "Thing", "elif", "(", "fullname", "[", "0", "]", "==", "'r'", ")", ":", "type_class", "=", "Relation", "(", "type_id36", ",", "thing_id36", ")", "=", "fullname", "[", "1", ":", "]", ".", "split", "(", "'_'", ")", "type_id", "=", "int", "(", "type_id36", ",", "36", ")", "id", "=", "int", "(", "thing_id36", ",", "36", ")", "return", "(", "type_class", ",", "type_id", ",", "id", ")" ]
decompose_fullname -> .
train
false
37,657
def system_load(pl, format=u'{avg:.1f}', threshold_good=1, threshold_bad=2, track_cpu_count=False, short=False): global cpu_count try: cpu_num = cpu_count = (_cpu_count() if ((cpu_count is None) or track_cpu_count) else cpu_count) except NotImplementedError: pl.warn(u'Unable to get CPU count: method is not implemented') return None ret = [] for avg in os.getloadavg(): normalized = (avg / cpu_num) if (normalized < threshold_good): gradient_level = 0 elif (normalized < threshold_bad): gradient_level = (((normalized - threshold_good) * 100.0) / (threshold_bad - threshold_good)) else: gradient_level = 100 ret.append({u'contents': format.format(avg=avg), u'highlight_groups': [u'system_load_gradient', u'system_load'], u'divider_highlight_group': u'background:divider', u'gradient_level': gradient_level}) if short: return ret ret[0][u'contents'] += u' ' ret[1][u'contents'] += u' ' return ret
[ "def", "system_load", "(", "pl", ",", "format", "=", "u'{avg:.1f}'", ",", "threshold_good", "=", "1", ",", "threshold_bad", "=", "2", ",", "track_cpu_count", "=", "False", ",", "short", "=", "False", ")", ":", "global", "cpu_count", "try", ":", "cpu_num", "=", "cpu_count", "=", "(", "_cpu_count", "(", ")", "if", "(", "(", "cpu_count", "is", "None", ")", "or", "track_cpu_count", ")", "else", "cpu_count", ")", "except", "NotImplementedError", ":", "pl", ".", "warn", "(", "u'Unable to get CPU count: method is not implemented'", ")", "return", "None", "ret", "=", "[", "]", "for", "avg", "in", "os", ".", "getloadavg", "(", ")", ":", "normalized", "=", "(", "avg", "/", "cpu_num", ")", "if", "(", "normalized", "<", "threshold_good", ")", ":", "gradient_level", "=", "0", "elif", "(", "normalized", "<", "threshold_bad", ")", ":", "gradient_level", "=", "(", "(", "(", "normalized", "-", "threshold_good", ")", "*", "100.0", ")", "/", "(", "threshold_bad", "-", "threshold_good", ")", ")", "else", ":", "gradient_level", "=", "100", "ret", ".", "append", "(", "{", "u'contents'", ":", "format", ".", "format", "(", "avg", "=", "avg", ")", ",", "u'highlight_groups'", ":", "[", "u'system_load_gradient'", ",", "u'system_load'", "]", ",", "u'divider_highlight_group'", ":", "u'background:divider'", ",", "u'gradient_level'", ":", "gradient_level", "}", ")", "if", "short", ":", "return", "ret", "ret", "[", "0", "]", "[", "u'contents'", "]", "+=", "u' '", "ret", "[", "1", "]", "[", "u'contents'", "]", "+=", "u' '", "return", "ret" ]
return system load average .
train
false
37,659
def test_stacked_line_log(): stacked = StackedLine(logarithmic=True) stacked.add('one_two', [1, 2]) stacked.add('ten_twelve', [10, 12]) q = stacked.render_pyquery() assert (set([v.text for v in q('desc.value')]) == set(('1', '2', '11 (+10)', '14 (+12)')))
[ "def", "test_stacked_line_log", "(", ")", ":", "stacked", "=", "StackedLine", "(", "logarithmic", "=", "True", ")", "stacked", ".", "add", "(", "'one_two'", ",", "[", "1", ",", "2", "]", ")", "stacked", ".", "add", "(", "'ten_twelve'", ",", "[", "10", ",", "12", "]", ")", "q", "=", "stacked", ".", "render_pyquery", "(", ")", "assert", "(", "set", "(", "[", "v", ".", "text", "for", "v", "in", "q", "(", "'desc.value'", ")", "]", ")", "==", "set", "(", "(", "'1'", ",", "'2'", ",", "'11 (+10)'", ",", "'14 (+12)'", ")", ")", ")" ]
test logarithmic stacked line .
train
false
37,660
def get_characteristic_subpattern(subpatterns): if (not isinstance(subpatterns, list)): return subpatterns if (len(subpatterns) == 1): return subpatterns[0] subpatterns_with_names = [] subpatterns_with_common_names = [] common_names = ['in', 'for', 'if', 'not', 'None'] subpatterns_with_common_chars = [] common_chars = '[]().,:' for subpattern in subpatterns: if any(rec_test(subpattern, (lambda x: (type(x) is str)))): if any(rec_test(subpattern, (lambda x: (isinstance(x, str) and (x in common_chars))))): subpatterns_with_common_chars.append(subpattern) elif any(rec_test(subpattern, (lambda x: (isinstance(x, str) and (x in common_names))))): subpatterns_with_common_names.append(subpattern) else: subpatterns_with_names.append(subpattern) if subpatterns_with_names: subpatterns = subpatterns_with_names elif subpatterns_with_common_names: subpatterns = subpatterns_with_common_names elif subpatterns_with_common_chars: subpatterns = subpatterns_with_common_chars return max(subpatterns, key=len)
[ "def", "get_characteristic_subpattern", "(", "subpatterns", ")", ":", "if", "(", "not", "isinstance", "(", "subpatterns", ",", "list", ")", ")", ":", "return", "subpatterns", "if", "(", "len", "(", "subpatterns", ")", "==", "1", ")", ":", "return", "subpatterns", "[", "0", "]", "subpatterns_with_names", "=", "[", "]", "subpatterns_with_common_names", "=", "[", "]", "common_names", "=", "[", "'in'", ",", "'for'", ",", "'if'", ",", "'not'", ",", "'None'", "]", "subpatterns_with_common_chars", "=", "[", "]", "common_chars", "=", "'[]().,:'", "for", "subpattern", "in", "subpatterns", ":", "if", "any", "(", "rec_test", "(", "subpattern", ",", "(", "lambda", "x", ":", "(", "type", "(", "x", ")", "is", "str", ")", ")", ")", ")", ":", "if", "any", "(", "rec_test", "(", "subpattern", ",", "(", "lambda", "x", ":", "(", "isinstance", "(", "x", ",", "str", ")", "and", "(", "x", "in", "common_chars", ")", ")", ")", ")", ")", ":", "subpatterns_with_common_chars", ".", "append", "(", "subpattern", ")", "elif", "any", "(", "rec_test", "(", "subpattern", ",", "(", "lambda", "x", ":", "(", "isinstance", "(", "x", ",", "str", ")", "and", "(", "x", "in", "common_names", ")", ")", ")", ")", ")", ":", "subpatterns_with_common_names", ".", "append", "(", "subpattern", ")", "else", ":", "subpatterns_with_names", ".", "append", "(", "subpattern", ")", "if", "subpatterns_with_names", ":", "subpatterns", "=", "subpatterns_with_names", "elif", "subpatterns_with_common_names", ":", "subpatterns", "=", "subpatterns_with_common_names", "elif", "subpatterns_with_common_chars", ":", "subpatterns", "=", "subpatterns_with_common_chars", "return", "max", "(", "subpatterns", ",", "key", "=", "len", ")" ]
picks the most characteristic from a list of linear patterns current order used is: names > common_names > common_chars .
train
true
37,661
def backend(comment, content_object, request): return (len(comment.comment.split()) < COMMENT_MIN_WORDS)
[ "def", "backend", "(", "comment", ",", "content_object", ",", "request", ")", ":", "return", "(", "len", "(", "comment", ".", "comment", ".", "split", "(", ")", ")", "<", "COMMENT_MIN_WORDS", ")" ]
backend for setting all comments to spam .
train
false
37,665
def isIntentSupported(profile, intent, direction): try: if (not isinstance(profile, ImageCmsProfile)): profile = ImageCmsProfile(profile) if profile.profile.is_intent_supported(intent, direction): return 1 else: return (-1) except (AttributeError, IOError, TypeError, ValueError) as v: raise PyCMSError(v)
[ "def", "isIntentSupported", "(", "profile", ",", "intent", ",", "direction", ")", ":", "try", ":", "if", "(", "not", "isinstance", "(", "profile", ",", "ImageCmsProfile", ")", ")", ":", "profile", "=", "ImageCmsProfile", "(", "profile", ")", "if", "profile", ".", "profile", ".", "is_intent_supported", "(", "intent", ",", "direction", ")", ":", "return", "1", "else", ":", "return", "(", "-", "1", ")", "except", "(", "AttributeError", ",", "IOError", ",", "TypeError", ",", "ValueError", ")", "as", "v", ":", "raise", "PyCMSError", "(", "v", ")" ]
checks if a given intent is supported .
train
false
37,667
def dmp_exquo(f, g, u, K): (q, r) = dmp_div(f, g, u, K) if dmp_zero_p(r, u): return q else: raise ExactQuotientFailed(f, g)
[ "def", "dmp_exquo", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "(", "q", ",", "r", ")", "=", "dmp_div", "(", "f", ",", "g", ",", "u", ",", "K", ")", "if", "dmp_zero_p", "(", "r", ",", "u", ")", ":", "return", "q", "else", ":", "raise", "ExactQuotientFailed", "(", "f", ",", "g", ")" ]
returns polynomial quotient in k[x] .
train
false
37,668
def page_to_html_path(page): source_path = page.sources_relative_to_buildroot()[0] return (os.path.splitext(source_path)[0] + u'.html')
[ "def", "page_to_html_path", "(", "page", ")", ":", "source_path", "=", "page", ".", "sources_relative_to_buildroot", "(", ")", "[", "0", "]", "return", "(", "os", ".", "path", ".", "splitext", "(", "source_path", ")", "[", "0", "]", "+", "u'.html'", ")" ]
given a page target .
train
false
37,669
def html_output_graph(self, node): graph = node['graph'] parts = node['parts'] graph_hash = get_graph_hash(node) name = ('inheritance%s' % graph_hash) path = '_images' dest_path = os.path.join(setup.app.builder.outdir, path) if (not os.path.exists(dest_path)): os.makedirs(dest_path) png_path = os.path.join(dest_path, (name + '.png')) path = setup.app.builder.imgpath urls = {} for child in node: if (child.get('refuri') is not None): urls[child['reftitle']] = child.get('refuri') elif (child.get('refid') is not None): urls[child['reftitle']] = ('#' + child.get('refid')) image_map = graph.run_dot(['-Tpng', ('-o%s' % png_path), '-Tcmapx'], name, parts, urls) return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' % (path, name, name, image_map))
[ "def", "html_output_graph", "(", "self", ",", "node", ")", ":", "graph", "=", "node", "[", "'graph'", "]", "parts", "=", "node", "[", "'parts'", "]", "graph_hash", "=", "get_graph_hash", "(", "node", ")", "name", "=", "(", "'inheritance%s'", "%", "graph_hash", ")", "path", "=", "'_images'", "dest_path", "=", "os", ".", "path", ".", "join", "(", "setup", ".", "app", ".", "builder", ".", "outdir", ",", "path", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "dest_path", ")", ")", ":", "os", ".", "makedirs", "(", "dest_path", ")", "png_path", "=", "os", ".", "path", ".", "join", "(", "dest_path", ",", "(", "name", "+", "'.png'", ")", ")", "path", "=", "setup", ".", "app", ".", "builder", ".", "imgpath", "urls", "=", "{", "}", "for", "child", "in", "node", ":", "if", "(", "child", ".", "get", "(", "'refuri'", ")", "is", "not", "None", ")", ":", "urls", "[", "child", "[", "'reftitle'", "]", "]", "=", "child", ".", "get", "(", "'refuri'", ")", "elif", "(", "child", ".", "get", "(", "'refid'", ")", "is", "not", "None", ")", ":", "urls", "[", "child", "[", "'reftitle'", "]", "]", "=", "(", "'#'", "+", "child", ".", "get", "(", "'refid'", ")", ")", "image_map", "=", "graph", ".", "run_dot", "(", "[", "'-Tpng'", ",", "(", "'-o%s'", "%", "png_path", ")", ",", "'-Tcmapx'", "]", ",", "name", ",", "parts", ",", "urls", ")", "return", "(", "'<img src=\"%s/%s.png\" usemap=\"#%s\" class=\"inheritance\"/>%s'", "%", "(", "path", ",", "name", ",", "name", ",", "image_map", ")", ")" ]
output the graph for html .
train
true
37,670
def discoverYadis(uri): response = yadisDiscover(uri) yadis_url = response.normalized_uri body = response.response_text try: openid_services = OpenIDServiceEndpoint.fromXRDS(yadis_url, body) except XRDSError: openid_services = [] if (not openid_services): if response.isXRDS(): return discoverNoYadis(uri) openid_services = OpenIDServiceEndpoint.fromHTML(yadis_url, body) return (yadis_url, getOPOrUserServices(openid_services))
[ "def", "discoverYadis", "(", "uri", ")", ":", "response", "=", "yadisDiscover", "(", "uri", ")", "yadis_url", "=", "response", ".", "normalized_uri", "body", "=", "response", ".", "response_text", "try", ":", "openid_services", "=", "OpenIDServiceEndpoint", ".", "fromXRDS", "(", "yadis_url", ",", "body", ")", "except", "XRDSError", ":", "openid_services", "=", "[", "]", "if", "(", "not", "openid_services", ")", ":", "if", "response", ".", "isXRDS", "(", ")", ":", "return", "discoverNoYadis", "(", "uri", ")", "openid_services", "=", "OpenIDServiceEndpoint", ".", "fromHTML", "(", "yadis_url", ",", "body", ")", "return", "(", "yadis_url", ",", "getOPOrUserServices", "(", "openid_services", ")", ")" ]
discover openid services for a uri .
train
true
37,672
def nbsp(x): return x.replace(u' ', u'\xa0')
[ "def", "nbsp", "(", "x", ")", ":", "return", "x", ".", "replace", "(", "u' '", ",", "u'\\xa0'", ")" ]
convert space to non-breaking space .
train
false
37,673
def set_cors_headers_for_response(response): if (config.get(u'ckan.cors.origin_allow_all') and request.headers.get(u'Origin')): cors_origin_allowed = None if asbool(config.get(u'ckan.cors.origin_allow_all')): cors_origin_allowed = u'*' elif (config.get(u'ckan.cors.origin_whitelist') and (request.headers.get(u'Origin') in config[u'ckan.cors.origin_whitelist'].split(u' '))): cors_origin_allowed = request.headers.get(u'Origin') if (cors_origin_allowed is not None): response.headers[u'Access-Control-Allow-Origin'] = cors_origin_allowed response.headers[u'Access-Control-Allow-Methods'] = u'POST, PUT, GET, DELETE, OPTIONS' response.headers[u'Access-Control-Allow-Headers'] = u'X-CKAN-API-KEY, Authorization, Content-Type' return response
[ "def", "set_cors_headers_for_response", "(", "response", ")", ":", "if", "(", "config", ".", "get", "(", "u'ckan.cors.origin_allow_all'", ")", "and", "request", ".", "headers", ".", "get", "(", "u'Origin'", ")", ")", ":", "cors_origin_allowed", "=", "None", "if", "asbool", "(", "config", ".", "get", "(", "u'ckan.cors.origin_allow_all'", ")", ")", ":", "cors_origin_allowed", "=", "u'*'", "elif", "(", "config", ".", "get", "(", "u'ckan.cors.origin_whitelist'", ")", "and", "(", "request", ".", "headers", ".", "get", "(", "u'Origin'", ")", "in", "config", "[", "u'ckan.cors.origin_whitelist'", "]", ".", "split", "(", "u' '", ")", ")", ")", ":", "cors_origin_allowed", "=", "request", ".", "headers", ".", "get", "(", "u'Origin'", ")", "if", "(", "cors_origin_allowed", "is", "not", "None", ")", ":", "response", ".", "headers", "[", "u'Access-Control-Allow-Origin'", "]", "=", "cors_origin_allowed", "response", ".", "headers", "[", "u'Access-Control-Allow-Methods'", "]", "=", "u'POST, PUT, GET, DELETE, OPTIONS'", "response", ".", "headers", "[", "u'Access-Control-Allow-Headers'", "]", "=", "u'X-CKAN-API-KEY, Authorization, Content-Type'", "return", "response" ]
set up access control allow headers if either origin_allow_all is true .
train
false
37,676
def test_pprint_break_repr(): output = pretty.pretty(BreakingReprParent()) expected = 'TG: Breaking(\n ):' nt.assert_equal(output, expected)
[ "def", "test_pprint_break_repr", "(", ")", ":", "output", "=", "pretty", ".", "pretty", "(", "BreakingReprParent", "(", ")", ")", "expected", "=", "'TG: Breaking(\\n ):'", "nt", ".", "assert_equal", "(", "output", ",", "expected", ")" ]
test that p .
train
false
37,677
def nos_unknown_host_cb(host, fingerprint): return True
[ "def", "nos_unknown_host_cb", "(", "host", ",", "fingerprint", ")", ":", "return", "True" ]
an unknown host callback .
train
false
37,678
def int32_as_f32(builder, val): assert (val.type == Type.int(32)) return builder.bitcast(val, Type.float())
[ "def", "int32_as_f32", "(", "builder", ",", "val", ")", ":", "assert", "(", "val", ".", "type", "==", "Type", ".", "int", "(", "32", ")", ")", "return", "builder", ".", "bitcast", "(", "val", ",", "Type", ".", "float", "(", ")", ")" ]
bitcast a 32-bit integer into a float .
train
false
37,683
def format_valid_streams(plugin, streams): delimiter = ', ' validstreams = [] for (name, stream) in sorted(streams.items(), key=(lambda stream: plugin.stream_weight(stream[0]))): if (name in STREAM_SYNONYMS): continue synonymfilter = (lambda n: ((stream is streams[n]) and (n is not name))) synonyms = list(filter(synonymfilter, streams.keys())) if (len(synonyms) > 0): joined = delimiter.join(synonyms) name = '{0} ({1})'.format(name, joined) validstreams.append(name) return delimiter.join(validstreams)
[ "def", "format_valid_streams", "(", "plugin", ",", "streams", ")", ":", "delimiter", "=", "', '", "validstreams", "=", "[", "]", "for", "(", "name", ",", "stream", ")", "in", "sorted", "(", "streams", ".", "items", "(", ")", ",", "key", "=", "(", "lambda", "stream", ":", "plugin", ".", "stream_weight", "(", "stream", "[", "0", "]", ")", ")", ")", ":", "if", "(", "name", "in", "STREAM_SYNONYMS", ")", ":", "continue", "synonymfilter", "=", "(", "lambda", "n", ":", "(", "(", "stream", "is", "streams", "[", "n", "]", ")", "and", "(", "n", "is", "not", "name", ")", ")", ")", "synonyms", "=", "list", "(", "filter", "(", "synonymfilter", ",", "streams", ".", "keys", "(", ")", ")", ")", "if", "(", "len", "(", "synonyms", ")", ">", "0", ")", ":", "joined", "=", "delimiter", ".", "join", "(", "synonyms", ")", "name", "=", "'{0} ({1})'", ".", "format", "(", "name", ",", "joined", ")", "validstreams", ".", "append", "(", "name", ")", "return", "delimiter", ".", "join", "(", "validstreams", ")" ]
formats a dict of streams .
train
true
37,684
def get_properties_of_kind(kind, start=None, end=None): q = Property.all(keys_only=True) q.ancestor(Property.key_for_kind(kind)) if ((start is not None) and (start != '')): q.filter('__key__ >=', Property.key_for_property(kind, start)) if (end is not None): if (end == ''): return [] q.filter('__key__ <', Property.key_for_property(kind, end)) return [Property.key_to_property(x) for x in q.run()]
[ "def", "get_properties_of_kind", "(", "kind", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "q", "=", "Property", ".", "all", "(", "keys_only", "=", "True", ")", "q", ".", "ancestor", "(", "Property", ".", "key_for_kind", "(", "kind", ")", ")", "if", "(", "(", "start", "is", "not", "None", ")", "and", "(", "start", "!=", "''", ")", ")", ":", "q", ".", "filter", "(", "'__key__ >='", ",", "Property", ".", "key_for_property", "(", "kind", ",", "start", ")", ")", "if", "(", "end", "is", "not", "None", ")", ":", "if", "(", "end", "==", "''", ")", ":", "return", "[", "]", "q", ".", "filter", "(", "'__key__ <'", ",", "Property", ".", "key_for_property", "(", "kind", ",", "end", ")", ")", "return", "[", "Property", ".", "key_to_property", "(", "x", ")", "for", "x", "in", "q", ".", "run", "(", ")", "]" ]
return all properties of kind in the specified range .
train
false
37,685
def eye(n, cls=None): if (cls is None): from sympy.matrices import Matrix as cls return cls.eye(n)
[ "def", "eye", "(", "n", ",", "cls", "=", "None", ")", ":", "if", "(", "cls", "is", "None", ")", ":", "from", "sympy", ".", "matrices", "import", "Matrix", "as", "cls", "return", "cls", ".", "eye", "(", "n", ")" ]
return a 2-d array with ones on the diagonal and zeros elsewhere .
train
false
37,687
def test_sobel_mask(): np.random.seed(0) result = filters.sobel(np.random.uniform(size=(10, 10)), np.zeros((10, 10), bool)) assert np.all((result == 0))
[ "def", "test_sobel_mask", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "0", ")", "result", "=", "filters", ".", "sobel", "(", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "10", ",", "10", ")", ")", ",", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert", "np", ".", "all", "(", "(", "result", "==", "0", ")", ")" ]
sobel on a masked array should be zero .
train
false
37,688
def update_in(d, keys, func, default=None, factory=dict): assert (len(keys) > 0) (k, ks) = (keys[0], keys[1:]) if ks: return assoc(d, k, update_in((d[k] if (k in d) else factory()), ks, func, default, factory), factory) else: innermost = (func(d[k]) if (k in d) else func(default)) return assoc(d, k, innermost, factory)
[ "def", "update_in", "(", "d", ",", "keys", ",", "func", ",", "default", "=", "None", ",", "factory", "=", "dict", ")", ":", "assert", "(", "len", "(", "keys", ")", ">", "0", ")", "(", "k", ",", "ks", ")", "=", "(", "keys", "[", "0", "]", ",", "keys", "[", "1", ":", "]", ")", "if", "ks", ":", "return", "assoc", "(", "d", ",", "k", ",", "update_in", "(", "(", "d", "[", "k", "]", "if", "(", "k", "in", "d", ")", "else", "factory", "(", ")", ")", ",", "ks", ",", "func", ",", "default", ",", "factory", ")", ",", "factory", ")", "else", ":", "innermost", "=", "(", "func", "(", "d", "[", "k", "]", ")", "if", "(", "k", "in", "d", ")", "else", "func", "(", "default", ")", ")", "return", "assoc", "(", "d", ",", "k", ",", "innermost", ",", "factory", ")" ]
update value in a nested dictionary inputs: d - dictionary on which to operate keys - list or tuple giving the location of the value to be changed in d func - function to operate on that value if keys == [k0 .
train
false
37,689
def _mkv_screen_size(filename): try: if filename.endswith(u'.mkv'): with io.open(filename, u'rb') as f: mkv = MKV(f) return (mkv.video_tracks[0].width, mkv.video_tracks[0].height) except Exception: pass return (None, None)
[ "def", "_mkv_screen_size", "(", "filename", ")", ":", "try", ":", "if", "filename", ".", "endswith", "(", "u'.mkv'", ")", ":", "with", "io", ".", "open", "(", "filename", ",", "u'rb'", ")", "as", "f", ":", "mkv", "=", "MKV", "(", "f", ")", "return", "(", "mkv", ".", "video_tracks", "[", "0", "]", ".", "width", ",", "mkv", ".", "video_tracks", "[", "0", "]", ".", "height", ")", "except", "Exception", ":", "pass", "return", "(", "None", ",", "None", ")" ]
parses mkv file for width and height .
train
false
37,691
def program_is_alive(program_name, pid_files_dir=None): pid = get_pid_from_file(program_name, pid_files_dir) if (pid is None): return False return pid_is_alive(pid)
[ "def", "program_is_alive", "(", "program_name", ",", "pid_files_dir", "=", "None", ")", ":", "pid", "=", "get_pid_from_file", "(", "program_name", ",", "pid_files_dir", ")", "if", "(", "pid", "is", "None", ")", ":", "return", "False", "return", "pid_is_alive", "(", "pid", ")" ]
checks if the process is alive and not in zombie state .
train
false
37,692
def structure_tensor_eigvals(Axx, Axy, Ayy): return _image_orthogonal_matrix22_eigvals(Axx, Axy, Ayy)
[ "def", "structure_tensor_eigvals", "(", "Axx", ",", "Axy", ",", "Ayy", ")", ":", "return", "_image_orthogonal_matrix22_eigvals", "(", "Axx", ",", "Axy", ",", "Ayy", ")" ]
compute eigen values of structure tensor .
train
false
37,693
def setup_desktop_icons(data): from frappe.desk.doctype.desktop_icon.desktop_icon import set_desktop_icons if data.desktop_icons: set_desktop_icons(data.desktop_icons)
[ "def", "setup_desktop_icons", "(", "data", ")", ":", "from", "frappe", ".", "desk", ".", "doctype", ".", "desktop_icon", ".", "desktop_icon", "import", "set_desktop_icons", "if", "data", ".", "desktop_icons", ":", "set_desktop_icons", "(", "data", ".", "desktop_icons", ")" ]
set desktop icons form data .
train
false
37,694
def get_exp_with_draft_applied(exp_id, user_id): exp_user_data = user_models.ExplorationUserDataModel.get(user_id, exp_id) exploration = get_exploration_by_id(exp_id) return (apply_change_list(exp_id, exp_user_data.draft_change_list) if (exp_user_data and exp_user_data.draft_change_list and is_version_of_draft_valid(exp_id, exp_user_data.draft_change_list_exp_version)) else exploration)
[ "def", "get_exp_with_draft_applied", "(", "exp_id", ",", "user_id", ")", ":", "exp_user_data", "=", "user_models", ".", "ExplorationUserDataModel", ".", "get", "(", "user_id", ",", "exp_id", ")", "exploration", "=", "get_exploration_by_id", "(", "exp_id", ")", "return", "(", "apply_change_list", "(", "exp_id", ",", "exp_user_data", ".", "draft_change_list", ")", "if", "(", "exp_user_data", "and", "exp_user_data", ".", "draft_change_list", "and", "is_version_of_draft_valid", "(", "exp_id", ",", "exp_user_data", ".", "draft_change_list_exp_version", ")", ")", "else", "exploration", ")" ]
if a draft exists for the given user and exploration .
train
false
37,695
def test_testing(): assert_raises(AssertionError, assert_in, 'foo', 'bar') assert_in('foo', 'foobar') assert_raises(AssertionError, assert_not_in, 'foo', 'foobar') assert_not_in('foo', 'bar') assert_raises(AssertionError, assert_is, None, 0) assert_is(None, None)
[ "def", "test_testing", "(", ")", ":", "assert_raises", "(", "AssertionError", ",", "assert_in", ",", "'foo'", ",", "'bar'", ")", "assert_in", "(", "'foo'", ",", "'foobar'", ")", "assert_raises", "(", "AssertionError", ",", "assert_not_in", ",", "'foo'", ",", "'foobar'", ")", "assert_not_in", "(", "'foo'", ",", "'bar'", ")", "assert_raises", "(", "AssertionError", ",", "assert_is", ",", "None", ",", "0", ")", "assert_is", "(", "None", ",", "None", ")" ]
test testing ports .
train
false
37,698
def _FSpecialGauss(size, sigma): radius = (size // 2) offset = 0.0 (start, stop) = ((- radius), (radius + 1)) if ((size % 2) == 0): offset = 0.5 stop -= 1 (x, y) = np.mgrid[(offset + start):stop, (offset + start):stop] assert (len(x) == size) g = np.exp((- (((x ** 2) + (y ** 2)) / (2.0 * (sigma ** 2))))) return (g / g.sum())
[ "def", "_FSpecialGauss", "(", "size", ",", "sigma", ")", ":", "radius", "=", "(", "size", "//", "2", ")", "offset", "=", "0.0", "(", "start", ",", "stop", ")", "=", "(", "(", "-", "radius", ")", ",", "(", "radius", "+", "1", ")", ")", "if", "(", "(", "size", "%", "2", ")", "==", "0", ")", ":", "offset", "=", "0.5", "stop", "-=", "1", "(", "x", ",", "y", ")", "=", "np", ".", "mgrid", "[", "(", "offset", "+", "start", ")", ":", "stop", ",", "(", "offset", "+", "start", ")", ":", "stop", "]", "assert", "(", "len", "(", "x", ")", "==", "size", ")", "g", "=", "np", ".", "exp", "(", "(", "-", "(", "(", "(", "x", "**", "2", ")", "+", "(", "y", "**", "2", ")", ")", "/", "(", "2.0", "*", "(", "sigma", "**", "2", ")", ")", ")", ")", ")", "return", "(", "g", "/", "g", ".", "sum", "(", ")", ")" ]
function to mimic the fspecial gaussian matlab function .
train
false
37,699
def BuildFileTargets(target_list, build_file): return [p for p in target_list if (BuildFile(p) == build_file)]
[ "def", "BuildFileTargets", "(", "target_list", ",", "build_file", ")", ":", "return", "[", "p", "for", "p", "in", "target_list", "if", "(", "BuildFile", "(", "p", ")", "==", "build_file", ")", "]" ]
from a target_list .
train
false
37,700
def _appendArgs(url, args): if hasattr(args, 'items'): args = args.items() args.sort() if (len(args) == 0): return url if ('?' in url.rstrip('?')): sep = '&' else: sep = '?' return ('%s%s%s' % (url, sep, urlencode(args)))
[ "def", "_appendArgs", "(", "url", ",", "args", ")", ":", "if", "hasattr", "(", "args", ",", "'items'", ")", ":", "args", "=", "args", ".", "items", "(", ")", "args", ".", "sort", "(", ")", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "return", "url", "if", "(", "'?'", "in", "url", ".", "rstrip", "(", "'?'", ")", ")", ":", "sep", "=", "'&'", "else", ":", "sep", "=", "'?'", "return", "(", "'%s%s%s'", "%", "(", "url", ",", "sep", ",", "urlencode", "(", "args", ")", ")", ")" ]
append some arguments to an http query .
train
true
37,701
def make_config(): global _BASE_CONFIG _BASE_CONFIG = tools.make_example_config(AIO_CONFIG_FILE, CONFD) tools.write_example_config(USER_CONFIG_FILE, _BASE_CONFIG)
[ "def", "make_config", "(", ")", ":", "global", "_BASE_CONFIG", "_BASE_CONFIG", "=", "tools", ".", "make_example_config", "(", "AIO_CONFIG_FILE", ",", "CONFD", ")", "tools", ".", "write_example_config", "(", "USER_CONFIG_FILE", ",", "_BASE_CONFIG", ")" ]
build an inventory configuration from the sample aio files .
train
false
37,703
def _TruncDelta(timedelta): return datetime.timedelta(days=timedelta.days, seconds=timedelta.seconds)
[ "def", "_TruncDelta", "(", "timedelta", ")", ":", "return", "datetime", ".", "timedelta", "(", "days", "=", "timedelta", ".", "days", ",", "seconds", "=", "timedelta", ".", "seconds", ")" ]
strips the microseconds field from a timedelta .
train
false
37,704
def render_field_and_label(field, label, field_class=u'', label_for=None, label_class=u'', layout=u'', **kwargs): if (layout == u'horizontal'): if (not label_class): label_class = get_bootstrap_setting(u'horizontal_label_class') if (not field_class): field_class = get_bootstrap_setting(u'horizontal_field_class') if (not label): label = mark_safe(u'&#160;') label_class = add_css_class(label_class, u'control-label') html = field if field_class: html = u'<div class="{klass}">{html}</div>'.format(klass=field_class, html=html) if label: html = (render_label(label, label_for=label_for, label_class=label_class) + html) return html
[ "def", "render_field_and_label", "(", "field", ",", "label", ",", "field_class", "=", "u''", ",", "label_for", "=", "None", ",", "label_class", "=", "u''", ",", "layout", "=", "u''", ",", "**", "kwargs", ")", ":", "if", "(", "layout", "==", "u'horizontal'", ")", ":", "if", "(", "not", "label_class", ")", ":", "label_class", "=", "get_bootstrap_setting", "(", "u'horizontal_label_class'", ")", "if", "(", "not", "field_class", ")", ":", "field_class", "=", "get_bootstrap_setting", "(", "u'horizontal_field_class'", ")", "if", "(", "not", "label", ")", ":", "label", "=", "mark_safe", "(", "u'&#160;'", ")", "label_class", "=", "add_css_class", "(", "label_class", ",", "u'control-label'", ")", "html", "=", "field", "if", "field_class", ":", "html", "=", "u'<div class=\"{klass}\">{html}</div>'", ".", "format", "(", "klass", "=", "field_class", ",", "html", "=", "html", ")", "if", "label", ":", "html", "=", "(", "render_label", "(", "label", ",", "label_for", "=", "label_for", ",", "label_class", "=", "label_class", ")", "+", "html", ")", "return", "html" ]
render a field with its label .
train
false
37,706
def create_identity_pool(IdentityPoolName, AllowUnauthenticatedIdentities=False, SupportedLoginProviders=None, DeveloperProviderName=None, OpenIdConnectProviderARNs=None, region=None, key=None, keyid=None, profile=None): SupportedLoginProviders = (dict() if (SupportedLoginProviders is None) else SupportedLoginProviders) OpenIdConnectProviderARNs = (list() if (OpenIdConnectProviderARNs is None) else OpenIdConnectProviderARNs) conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: request_params = dict(IdentityPoolName=IdentityPoolName, AllowUnauthenticatedIdentities=AllowUnauthenticatedIdentities, SupportedLoginProviders=SupportedLoginProviders, OpenIdConnectProviderARNs=OpenIdConnectProviderARNs) if DeveloperProviderName: request_params['DeveloperProviderName'] = DeveloperProviderName response = conn.create_identity_pool(**request_params) response.pop('ResponseMetadata', None) return {'created': True, 'identity_pool': response} except ClientError as e: return {'created': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "create_identity_pool", "(", "IdentityPoolName", ",", "AllowUnauthenticatedIdentities", "=", "False", ",", "SupportedLoginProviders", "=", "None", ",", "DeveloperProviderName", "=", "None", ",", "OpenIdConnectProviderARNs", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "SupportedLoginProviders", "=", "(", "dict", "(", ")", "if", "(", "SupportedLoginProviders", "is", "None", ")", "else", "SupportedLoginProviders", ")", "OpenIdConnectProviderARNs", "=", "(", "list", "(", ")", "if", "(", "OpenIdConnectProviderARNs", "is", "None", ")", "else", "OpenIdConnectProviderARNs", ")", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "request_params", "=", "dict", "(", "IdentityPoolName", "=", "IdentityPoolName", ",", "AllowUnauthenticatedIdentities", "=", "AllowUnauthenticatedIdentities", ",", "SupportedLoginProviders", "=", "SupportedLoginProviders", ",", "OpenIdConnectProviderARNs", "=", "OpenIdConnectProviderARNs", ")", "if", "DeveloperProviderName", ":", "request_params", "[", "'DeveloperProviderName'", "]", "=", "DeveloperProviderName", "response", "=", "conn", ".", "create_identity_pool", "(", "**", "request_params", ")", "response", ".", "pop", "(", "'ResponseMetadata'", ",", "None", ")", "return", "{", "'created'", ":", "True", ",", "'identity_pool'", ":", "response", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'created'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
creates a new identity pool .
train
true
37,707
def download_private_file(path): try: check_file_permission(path) except frappe.PermissionError: raise Forbidden(_(u"You don't have permission to access this file")) return send_private_file(path.split(u'/private', 1)[1])
[ "def", "download_private_file", "(", "path", ")", ":", "try", ":", "check_file_permission", "(", "path", ")", "except", "frappe", ".", "PermissionError", ":", "raise", "Forbidden", "(", "_", "(", "u\"You don't have permission to access this file\"", ")", ")", "return", "send_private_file", "(", "path", ".", "split", "(", "u'/private'", ",", "1", ")", "[", "1", "]", ")" ]
checks permissions and sends back private file .
train
false
37,708
def all_discrete(comp_dists): if isinstance(comp_dists, Distribution): return isinstance(comp_dists, Discrete) else: return all((isinstance(comp_dist, Discrete) for comp_dist in comp_dists))
[ "def", "all_discrete", "(", "comp_dists", ")", ":", "if", "isinstance", "(", "comp_dists", ",", "Distribution", ")", ":", "return", "isinstance", "(", "comp_dists", ",", "Discrete", ")", "else", ":", "return", "all", "(", "(", "isinstance", "(", "comp_dist", ",", "Discrete", ")", "for", "comp_dist", "in", "comp_dists", ")", ")" ]
determine if all distributions in comp_dists are discrete .
train
false
37,709
def _eq_10_42(lam_1, lam_2, t_12): a = (0.5 * (lam_1 + lam_2)) b = (0.5 * (lam_1 - lam_2)) return ((t_12 * np.exp(a)) * _sinch(b))
[ "def", "_eq_10_42", "(", "lam_1", ",", "lam_2", ",", "t_12", ")", ":", "a", "=", "(", "0.5", "*", "(", "lam_1", "+", "lam_2", ")", ")", "b", "=", "(", "0.5", "*", "(", "lam_1", "-", "lam_2", ")", ")", "return", "(", "(", "t_12", "*", "np", ".", "exp", "(", "a", ")", ")", "*", "_sinch", "(", "b", ")", ")" ]
equation of functions of matrices: theory and computation .
train
false
37,710
@must_have_permission(ADMIN) @must_be_branched_from_node def submit_draft_for_review(auth, node, draft, *args, **kwargs): data = request.get_json() meta = {} registration_choice = data.get('registrationChoice', 'immediate') validate_registration_choice(registration_choice) if (registration_choice == 'embargo'): end_date_string = data['embargoEndDate'] validate_embargo_end_date(end_date_string, node) meta['embargo_end_date'] = end_date_string meta['registration_choice'] = registration_choice draft.submit_for_review(initiated_by=auth.user, meta=meta, save=True) if (prereg_utils.get_prereg_schema() == draft.registration_schema): node.add_log(action=NodeLog.PREREG_REGISTRATION_INITIATED, params={'node': node._primary_key}, auth=auth, save=False) node.save() push_status_message(language.AFTER_SUBMIT_FOR_REVIEW, kind='info', trust=False) return ({'status': 'initiated', 'urls': {'registrations': node.web_url_for('node_registrations')}}, http.ACCEPTED)
[ "@", "must_have_permission", "(", "ADMIN", ")", "@", "must_be_branched_from_node", "def", "submit_draft_for_review", "(", "auth", ",", "node", ",", "draft", ",", "*", "args", ",", "**", "kwargs", ")", ":", "data", "=", "request", ".", "get_json", "(", ")", "meta", "=", "{", "}", "registration_choice", "=", "data", ".", "get", "(", "'registrationChoice'", ",", "'immediate'", ")", "validate_registration_choice", "(", "registration_choice", ")", "if", "(", "registration_choice", "==", "'embargo'", ")", ":", "end_date_string", "=", "data", "[", "'embargoEndDate'", "]", "validate_embargo_end_date", "(", "end_date_string", ",", "node", ")", "meta", "[", "'embargo_end_date'", "]", "=", "end_date_string", "meta", "[", "'registration_choice'", "]", "=", "registration_choice", "draft", ".", "submit_for_review", "(", "initiated_by", "=", "auth", ".", "user", ",", "meta", "=", "meta", ",", "save", "=", "True", ")", "if", "(", "prereg_utils", ".", "get_prereg_schema", "(", ")", "==", "draft", ".", "registration_schema", ")", ":", "node", ".", "add_log", "(", "action", "=", "NodeLog", ".", "PREREG_REGISTRATION_INITIATED", ",", "params", "=", "{", "'node'", ":", "node", ".", "_primary_key", "}", ",", "auth", "=", "auth", ",", "save", "=", "False", ")", "node", ".", "save", "(", ")", "push_status_message", "(", "language", ".", "AFTER_SUBMIT_FOR_REVIEW", ",", "kind", "=", "'info'", ",", "trust", "=", "False", ")", "return", "(", "{", "'status'", ":", "'initiated'", ",", "'urls'", ":", "{", "'registrations'", ":", "node", ".", "web_url_for", "(", "'node_registrations'", ")", "}", "}", ",", "http", ".", "ACCEPTED", ")" ]
submit for approvals and/or notifications :return: serialized registration :rtype: dict :raises: httperror if embargo end date is invalid .
train
false
37,712
def xonfig_main(args=None): if ((not args) or ((args[0] not in _XONFIG_MAIN_ACTIONS) and (args[0] not in {'-h', '--help'}))): args.insert(0, 'info') parser = _xonfig_create_parser() ns = parser.parse_args(args) if (ns.action is None): ns = parser.parse_args((['info'] + args)) return _XONFIG_MAIN_ACTIONS[ns.action](ns)
[ "def", "xonfig_main", "(", "args", "=", "None", ")", ":", "if", "(", "(", "not", "args", ")", "or", "(", "(", "args", "[", "0", "]", "not", "in", "_XONFIG_MAIN_ACTIONS", ")", "and", "(", "args", "[", "0", "]", "not", "in", "{", "'-h'", ",", "'--help'", "}", ")", ")", ")", ":", "args", ".", "insert", "(", "0", ",", "'info'", ")", "parser", "=", "_xonfig_create_parser", "(", ")", "ns", "=", "parser", ".", "parse_args", "(", "args", ")", "if", "(", "ns", ".", "action", "is", "None", ")", ":", "ns", "=", "parser", ".", "parse_args", "(", "(", "[", "'info'", "]", "+", "args", ")", ")", "return", "_XONFIG_MAIN_ACTIONS", "[", "ns", ".", "action", "]", "(", "ns", ")" ]
main xonfig entry point .
train
false
37,714
@loader_option() def joinedload(loadopt, attr, innerjoin=None): loader = loadopt.set_relationship_strategy(attr, {'lazy': 'joined'}) if (innerjoin is not None): loader.local_opts['innerjoin'] = innerjoin return loader
[ "@", "loader_option", "(", ")", "def", "joinedload", "(", "loadopt", ",", "attr", ",", "innerjoin", "=", "None", ")", ":", "loader", "=", "loadopt", ".", "set_relationship_strategy", "(", "attr", ",", "{", "'lazy'", ":", "'joined'", "}", ")", "if", "(", "innerjoin", "is", "not", "None", ")", ":", "loader", ".", "local_opts", "[", "'innerjoin'", "]", "=", "innerjoin", "return", "loader" ]
indicate that the given attribute should be loaded using joined eager loading .
train
false
37,715
def is_weighted(G, edge=None, weight='weight'): if (edge is not None): data = G.get_edge_data(*edge) if (data is None): msg = 'Edge {!r} does not exist.'.format(edge) raise nx.NetworkXError(msg) return (weight in data) if is_empty(G): return False return all(((weight in data) for (u, v, data) in G.edges(data=True)))
[ "def", "is_weighted", "(", "G", ",", "edge", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "if", "(", "edge", "is", "not", "None", ")", ":", "data", "=", "G", ".", "get_edge_data", "(", "*", "edge", ")", "if", "(", "data", "is", "None", ")", ":", "msg", "=", "'Edge {!r} does not exist.'", ".", "format", "(", "edge", ")", "raise", "nx", ".", "NetworkXError", "(", "msg", ")", "return", "(", "weight", "in", "data", ")", "if", "is_empty", "(", "G", ")", ":", "return", "False", "return", "all", "(", "(", "(", "weight", "in", "data", ")", "for", "(", "u", ",", "v", ",", "data", ")", "in", "G", ".", "edges", "(", "data", "=", "True", ")", ")", ")" ]
returns true if g has weighted edges .
train
false
37,716
def _get_filter_query(args): query = Q() for arg in args: if (hasattr(Document, arg) and args[arg]): append = Q(**{str((('document__' + arg) + '__id')): long(args[arg])}) append = (append | Q(**{str((('file__' + arg) + '__id')): long(args[arg])})) append = (append | Q(**{str((('weblink__' + arg) + '__id')): long(args[arg])})) query = (query & append) return query
[ "def", "_get_filter_query", "(", "args", ")", ":", "query", "=", "Q", "(", ")", "for", "arg", "in", "args", ":", "if", "(", "hasattr", "(", "Document", ",", "arg", ")", "and", "args", "[", "arg", "]", ")", ":", "append", "=", "Q", "(", "**", "{", "str", "(", "(", "(", "'document__'", "+", "arg", ")", "+", "'__id'", ")", ")", ":", "long", "(", "args", "[", "arg", "]", ")", "}", ")", "append", "=", "(", "append", "|", "Q", "(", "**", "{", "str", "(", "(", "(", "'file__'", "+", "arg", ")", "+", "'__id'", ")", ")", ":", "long", "(", "args", "[", "arg", "]", ")", "}", ")", ")", "append", "=", "(", "append", "|", "Q", "(", "**", "{", "str", "(", "(", "(", "'weblink__'", "+", "arg", ")", "+", "'__id'", ")", ")", ":", "long", "(", "args", "[", "arg", "]", ")", "}", ")", ")", "query", "=", "(", "query", "&", "append", ")", "return", "query" ]
creates a query to filter identities based on filterform arguments .
train
false
37,717
def _gen_index_name(keys): return _UUNDER.join([('%s_%s' % item) for item in keys])
[ "def", "_gen_index_name", "(", "keys", ")", ":", "return", "_UUNDER", ".", "join", "(", "[", "(", "'%s_%s'", "%", "item", ")", "for", "item", "in", "keys", "]", ")" ]
generate an index name from the set of fields it is over .
train
false
37,718
def dirbrowser_html(path): title = 'Browse directory: {}'.format(path) if is_root(path): parent = None else: parent = parent_dir(path) try: all_files = os.listdir(path) except OSError as e: html = jinja.render('error.html', title='Error while reading directory', url='file:///{}'.format(path), error=str(e), icon='') return html.encode('UTF-8', errors='xmlcharrefreplace') files = get_file_list(path, all_files, os.path.isfile) directories = get_file_list(path, all_files, os.path.isdir) html = jinja.render('dirbrowser.html', title=title, url=path, icon='', parent=parent, files=files, directories=directories) return html.encode('UTF-8', errors='xmlcharrefreplace')
[ "def", "dirbrowser_html", "(", "path", ")", ":", "title", "=", "'Browse directory: {}'", ".", "format", "(", "path", ")", "if", "is_root", "(", "path", ")", ":", "parent", "=", "None", "else", ":", "parent", "=", "parent_dir", "(", "path", ")", "try", ":", "all_files", "=", "os", ".", "listdir", "(", "path", ")", "except", "OSError", "as", "e", ":", "html", "=", "jinja", ".", "render", "(", "'error.html'", ",", "title", "=", "'Error while reading directory'", ",", "url", "=", "'file:///{}'", ".", "format", "(", "path", ")", ",", "error", "=", "str", "(", "e", ")", ",", "icon", "=", "''", ")", "return", "html", ".", "encode", "(", "'UTF-8'", ",", "errors", "=", "'xmlcharrefreplace'", ")", "files", "=", "get_file_list", "(", "path", ",", "all_files", ",", "os", ".", "path", ".", "isfile", ")", "directories", "=", "get_file_list", "(", "path", ",", "all_files", ",", "os", ".", "path", ".", "isdir", ")", "html", "=", "jinja", ".", "render", "(", "'dirbrowser.html'", ",", "title", "=", "title", ",", "url", "=", "path", ",", "icon", "=", "''", ",", "parent", "=", "parent", ",", "files", "=", "files", ",", "directories", "=", "directories", ")", "return", "html", ".", "encode", "(", "'UTF-8'", ",", "errors", "=", "'xmlcharrefreplace'", ")" ]
get the directory browser web page .
train
false
37,720
def dmp_fateman_poly_F_2(n, K): u = [K(1), K(0)] for i in range(0, (n - 1)): u = [dmp_one(i, K), u] m = (n - 1) v = dmp_add_term(u, dmp_ground(K(2), (m - 1)), 0, n, K) f = dmp_sqr([dmp_one(m, K), dmp_neg(v, m, K)], n, K) g = dmp_sqr([dmp_one(m, K), v], n, K) v = dmp_add_term(u, dmp_one((m - 1), K), 0, n, K) h = dmp_sqr([dmp_one(m, K), v], n, K) return (dmp_mul(f, h, n, K), dmp_mul(g, h, n, K), h)
[ "def", "dmp_fateman_poly_F_2", "(", "n", ",", "K", ")", ":", "u", "=", "[", "K", "(", "1", ")", ",", "K", "(", "0", ")", "]", "for", "i", "in", "range", "(", "0", ",", "(", "n", "-", "1", ")", ")", ":", "u", "=", "[", "dmp_one", "(", "i", ",", "K", ")", ",", "u", "]", "m", "=", "(", "n", "-", "1", ")", "v", "=", "dmp_add_term", "(", "u", ",", "dmp_ground", "(", "K", "(", "2", ")", ",", "(", "m", "-", "1", ")", ")", ",", "0", ",", "n", ",", "K", ")", "f", "=", "dmp_sqr", "(", "[", "dmp_one", "(", "m", ",", "K", ")", ",", "dmp_neg", "(", "v", ",", "m", ",", "K", ")", "]", ",", "n", ",", "K", ")", "g", "=", "dmp_sqr", "(", "[", "dmp_one", "(", "m", ",", "K", ")", ",", "v", "]", ",", "n", ",", "K", ")", "v", "=", "dmp_add_term", "(", "u", ",", "dmp_one", "(", "(", "m", "-", "1", ")", ",", "K", ")", ",", "0", ",", "n", ",", "K", ")", "h", "=", "dmp_sqr", "(", "[", "dmp_one", "(", "m", ",", "K", ")", ",", "v", "]", ",", "n", ",", "K", ")", "return", "(", "dmp_mul", "(", "f", ",", "h", ",", "n", ",", "K", ")", ",", "dmp_mul", "(", "g", ",", "h", ",", "n", ",", "K", ")", ",", "h", ")" ]
fatemans gcd benchmark: linearly dense quartic inputs .
train
false
37,721
def GenerateIndexFromHistory(query_history, all_indexes=None, manual_indexes=None): all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes) manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes) indexes = dict(((key, 0) for key in (all_keys - manual_keys))) for (query, count) in query_history.iteritems(): (required, kind, ancestor, props) = datastore_index.CompositeIndexForQuery(query) if required: props = datastore_index.GetRecommendedIndexProperties(props) key = (kind, ancestor, props) if (key not in manual_keys): if (key in indexes): indexes[key] += count else: indexes[key] = count if (not indexes): return '' res = [] for ((kind, ancestor, props), count) in sorted(indexes.iteritems()): res.append('') res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props)) res.append('') return '\n'.join(res)
[ "def", "GenerateIndexFromHistory", "(", "query_history", ",", "all_indexes", "=", "None", ",", "manual_indexes", "=", "None", ")", ":", "all_keys", "=", "datastore_index", ".", "IndexDefinitionsToKeys", "(", "all_indexes", ")", "manual_keys", "=", "datastore_index", ".", "IndexDefinitionsToKeys", "(", "manual_indexes", ")", "indexes", "=", "dict", "(", "(", "(", "key", ",", "0", ")", "for", "key", "in", "(", "all_keys", "-", "manual_keys", ")", ")", ")", "for", "(", "query", ",", "count", ")", "in", "query_history", ".", "iteritems", "(", ")", ":", "(", "required", ",", "kind", ",", "ancestor", ",", "props", ")", "=", "datastore_index", ".", "CompositeIndexForQuery", "(", "query", ")", "if", "required", ":", "props", "=", "datastore_index", ".", "GetRecommendedIndexProperties", "(", "props", ")", "key", "=", "(", "kind", ",", "ancestor", ",", "props", ")", "if", "(", "key", "not", "in", "manual_keys", ")", ":", "if", "(", "key", "in", "indexes", ")", ":", "indexes", "[", "key", "]", "+=", "count", "else", ":", "indexes", "[", "key", "]", "=", "count", "if", "(", "not", "indexes", ")", ":", "return", "''", "res", "=", "[", "]", "for", "(", "(", "kind", ",", "ancestor", ",", "props", ")", ",", "count", ")", "in", "sorted", "(", "indexes", ".", "iteritems", "(", ")", ")", ":", "res", ".", "append", "(", "''", ")", "res", ".", "append", "(", "datastore_index", ".", "IndexYamlForQuery", "(", "kind", ",", "ancestor", ",", "props", ")", ")", "res", ".", "append", "(", "''", ")", "return", "'\\n'", ".", "join", "(", "res", ")" ]
generate most of the text for index .
train
false
37,722
@pytest.fixture def x509_data(): pkey = PKey() pkey.generate_key(TYPE_RSA, 384) req = X509Req() req.set_pubkey(pkey) req.get_subject().commonName = 'Yoda root CA' x509 = X509() subject = x509.get_subject() subject.commonName = req.get_subject().commonName x509.set_issuer(subject) x509.set_pubkey(pkey) now = datetime.now() expire = (datetime.now() + timedelta(days=100)) x509.set_notBefore(now.strftime('%Y%m%d%H%M%SZ').encode()) x509.set_notAfter(expire.strftime('%Y%m%d%H%M%SZ').encode()) (yield (pkey, x509))
[ "@", "pytest", ".", "fixture", "def", "x509_data", "(", ")", ":", "pkey", "=", "PKey", "(", ")", "pkey", ".", "generate_key", "(", "TYPE_RSA", ",", "384", ")", "req", "=", "X509Req", "(", ")", "req", ".", "set_pubkey", "(", "pkey", ")", "req", ".", "get_subject", "(", ")", ".", "commonName", "=", "'Yoda root CA'", "x509", "=", "X509", "(", ")", "subject", "=", "x509", ".", "get_subject", "(", ")", "subject", ".", "commonName", "=", "req", ".", "get_subject", "(", ")", ".", "commonName", "x509", ".", "set_issuer", "(", "subject", ")", "x509", ".", "set_pubkey", "(", "pkey", ")", "now", "=", "datetime", ".", "now", "(", ")", "expire", "=", "(", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "days", "=", "100", ")", ")", "x509", ".", "set_notBefore", "(", "now", ".", "strftime", "(", "'%Y%m%d%H%M%SZ'", ")", ".", "encode", "(", ")", ")", "x509", ".", "set_notAfter", "(", "expire", ".", "strftime", "(", "'%Y%m%d%H%M%SZ'", ")", ".", "encode", "(", ")", ")", "(", "yield", "(", "pkey", ",", "x509", ")", ")" ]
create a new private key and start a certificate request .
train
false
37,723
def hexdecode(value): value = value.lower() return (value[2:] if value.startswith('0x') else value).decode('hex')
[ "def", "hexdecode", "(", "value", ")", ":", "value", "=", "value", ".", "lower", "(", ")", "return", "(", "value", "[", "2", ":", "]", "if", "value", ".", "startswith", "(", "'0x'", ")", "else", "value", ")", ".", "decode", "(", "'hex'", ")" ]
decodes string value from hex to plain format .
train
false
37,729
def splitwords(s): if (not s): return [] return filter(bool, map(str.strip, re.split('[ ,\n]+', s)))
[ "def", "splitwords", "(", "s", ")", ":", "if", "(", "not", "s", ")", ":", "return", "[", "]", "return", "filter", "(", "bool", ",", "map", "(", "str", ".", "strip", ",", "re", ".", "split", "(", "'[ ,\\n]+'", ",", "s", ")", ")", ")" ]
helper to split words on any comma .
train
false
37,730
def removeBackupFilesByTypes(fileTypes): for fileType in fileTypes: removeBackupFilesByType(fileType)
[ "def", "removeBackupFilesByTypes", "(", "fileTypes", ")", ":", "for", "fileType", "in", "fileTypes", ":", "removeBackupFilesByType", "(", "fileType", ")" ]
remove backup files by types .
train
false
37,732
def _check_location_uri(context, store_api, store_utils, uri): try: is_ok = (store_utils.validate_external_location(uri) and (store_api.get_size_from_backend(uri, context=context) > 0)) except (store.UnknownScheme, store.NotFound, store.BadStoreUri): is_ok = False if (not is_ok): reason = _('Invalid location') raise exception.BadStoreUri(message=reason)
[ "def", "_check_location_uri", "(", "context", ",", "store_api", ",", "store_utils", ",", "uri", ")", ":", "try", ":", "is_ok", "=", "(", "store_utils", ".", "validate_external_location", "(", "uri", ")", "and", "(", "store_api", ".", "get_size_from_backend", "(", "uri", ",", "context", "=", "context", ")", ">", "0", ")", ")", "except", "(", "store", ".", "UnknownScheme", ",", "store", ".", "NotFound", ",", "store", ".", "BadStoreUri", ")", ":", "is_ok", "=", "False", "if", "(", "not", "is_ok", ")", ":", "reason", "=", "_", "(", "'Invalid location'", ")", "raise", "exception", ".", "BadStoreUri", "(", "message", "=", "reason", ")" ]
check if an image location is valid .
train
false
37,739
def validate_string_or_none(option, value):
    """Validate that *value* is either None or a string.

    None passes through untouched; anything else is handed to
    validate_string for the actual type check.
    """
    return value if value is None else validate_string(option, value)
[ "def", "validate_string_or_none", "(", "option", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "value", "return", "validate_string", "(", "option", ",", "value", ")" ]
validates that value is an instance of basestring or none .
train
false
37,740
def sensitivity(classify=(lambda document: False), documents=()):
    """Return the percentage of positive cases correctly classified as
    positive (the recall, a.k.a. the true positive rate).

    Args:
        classify: callable mapping a document to a predicted boolean label.
        documents: iterable of test cases to score.

    Returns:
        The recall computed over *documents*.
    """
    # Bug fix: the original passed the undefined name `document` instead of
    # the `documents` parameter, raising NameError on every call.  The
    # mutable default `documents=[]` was also replaced with an immutable
    # empty tuple (backward compatible: both are empty iterables).
    return recall(classify, documents, average=None)
[ "def", "sensitivity", "(", "classify", "=", "(", "lambda", "document", ":", "False", ")", ",", "documents", "=", "[", "]", ")", ":", "return", "recall", "(", "classify", ",", "document", ",", "average", "=", "None", ")" ]
returns the percentage of positive cases correctly classified as positive .
train
false
37,741
def hypermedia_in():
    """Unserialize POST/PUT data of a supported Content-Type.

    Installs per-content-type body processors on the current CherryPy
    request and rejects unsupported types with HTTP 406.  Empty POST
    bodies (Content-Length: 0) skip body processing entirely.
    """
    processors = {
        'application/x-www-form-urlencoded': urlencoded_processor,
        'application/json': json_processor,
        'application/x-yaml': yaml_processor,
        'text/yaml': yaml_processor,
        'text/plain': text_processor,
    }
    request = cherrypy.request
    is_post = request.method.upper() == 'POST'
    if is_post and request.headers.get('Content-Length', '0') == '0':
        # Nothing to parse; skip body processing for empty POSTs.
        request.process_request_body = False
    request.unserialized_data = None
    request.body.processors.clear()
    request.body.default_proc = cherrypy.HTTPError(406, 'Content type not supported')
    request.body.processors = processors
[ "def", "hypermedia_in", "(", ")", ":", "ct_in_map", "=", "{", "'application/x-www-form-urlencoded'", ":", "urlencoded_processor", ",", "'application/json'", ":", "json_processor", ",", "'application/x-yaml'", ":", "yaml_processor", ",", "'text/yaml'", ":", "yaml_processor", ",", "'text/plain'", ":", "text_processor", "}", "if", "(", "(", "cherrypy", ".", "request", ".", "method", ".", "upper", "(", ")", "==", "'POST'", ")", "and", "(", "cherrypy", ".", "request", ".", "headers", ".", "get", "(", "'Content-Length'", ",", "'0'", ")", "==", "'0'", ")", ")", ":", "cherrypy", ".", "request", ".", "process_request_body", "=", "False", "cherrypy", ".", "request", ".", "unserialized_data", "=", "None", "cherrypy", ".", "request", ".", "body", ".", "processors", ".", "clear", "(", ")", "cherrypy", ".", "request", ".", "body", ".", "default_proc", "=", "cherrypy", ".", "HTTPError", "(", "406", ",", "'Content type not supported'", ")", "cherrypy", ".", "request", ".", "body", ".", "processors", "=", "ct_in_map" ]
unserialize post/put data of a specified content-type .
train
true
37,742
def _make_requires(flag, error): def _requires_decorator(func): if (not flag): @wraps(func) def explode(*args, **kwargs): raise NotImplementedError(error) return explode else: return func return _requires_decorator
[ "def", "_make_requires", "(", "flag", ",", "error", ")", ":", "def", "_requires_decorator", "(", "func", ")", ":", "if", "(", "not", "flag", ")", ":", "@", "wraps", "(", "func", ")", "def", "explode", "(", "*", "args", ",", "**", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "error", ")", "return", "explode", "else", ":", "return", "func", "return", "_requires_decorator" ]
builds a decorator that ensures that functions that rely on openssl functions that are not present in this build raise notimplementederror .
train
true
37,744
@task(base=BaseInstructorTask)
def proctored_exam_results_csv(entry_id, xmodule_instance_args):
    """Compute the proctored exam results report for a course and upload
    the CSV for download.
    """
    report_fn = partial(upload_proctored_exam_results_report, xmodule_instance_args)
    return run_main_task(entry_id, report_fn, 'generating_proctored_exam_results_report')
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "proctored_exam_results_csv", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "'generating_proctored_exam_results_report'", "task_fn", "=", "partial", "(", "upload_proctored_exam_results_report", ",", "xmodule_instance_args", ")", "return", "run_main_task", "(", "entry_id", ",", "task_fn", ",", "action_name", ")" ]
compute proctored exam results report for a course and upload the csv for download .
train
false
37,746
def _deterministic_vector_sign_flip(u): max_abs_rows = np.argmax(np.abs(u), axis=1) signs = np.sign(u[(range(u.shape[0]), max_abs_rows)]) u *= signs[:, np.newaxis] return u
[ "def", "_deterministic_vector_sign_flip", "(", "u", ")", ":", "max_abs_rows", "=", "np", ".", "argmax", "(", "np", ".", "abs", "(", "u", ")", ",", "axis", "=", "1", ")", "signs", "=", "np", ".", "sign", "(", "u", "[", "(", "range", "(", "u", ".", "shape", "[", "0", "]", ")", ",", "max_abs_rows", ")", "]", ")", "u", "*=", "signs", "[", ":", ",", "np", ".", "newaxis", "]", "return", "u" ]
modify the sign of vectors for reproducibility flips the sign of elements of all the vectors such that the absolute maximum element of each vector is positive .
train
false
37,747
def remove_challenge_for_url(url):
    """Remove the cached auth challenge for the given URL's host.

    Raises:
        ValueError: if *url* is empty.
        KeyError: if no challenge is cached for the host.
    """
    if not url:
        raise ValueError('URL cannot be empty')
    netloc = parse.urlparse(url).netloc
    del _cache[netloc]
[ "def", "remove_challenge_for_url", "(", "url", ")", ":", "if", "(", "not", "url", ")", ":", "raise", "ValueError", "(", "'URL cannot be empty'", ")", "url", "=", "parse", ".", "urlparse", "(", "url", ")", "del", "_cache", "[", "url", ".", "netloc", "]" ]
removes the cached challenge for the specified url .
train
false
37,748
def total_size(o, handlers=None, verbose=False):
    """Return the approximate memory footprint of *o* and its contents.

    Recursively sums sys.getsizeof over the built-in container types
    (tuple, list, deque, dict, set, frozenset), counting each object only
    once even when it is referenced multiple times (also protects
    against cycles).

    Args:
        o: the object to measure.
        handlers: optional mapping {type: callable} where the callable
            yields an object's contained children; extends/overrides the
            built-in container handlers.  Fix: was a mutable default
            ``{}``; ``None`` behaves identically and avoids the
            shared-mutable-default pitfall.
        verbose: when True, print each visited object's size to stderr.
            Fix: the parameter was previously accepted but ignored.

    Returns:
        int: total size in bytes.
    """
    import sys

    def dict_handler(d):
        # A dict's children are both its keys and its values.
        return chain.from_iterable(d.items())

    all_handlers = {
        tuple: iter,
        list: iter,
        deque: iter,
        dict: dict_handler,
        set: iter,
        frozenset: iter,
    }
    if handlers:
        all_handlers.update(handlers)  # user handlers take precedence
    seen = set()  # ids of objects already counted (shared refs / cycles)
    default_size = getsizeof(0)  # fallback for objects without __sizeof__

    def sizeof(o):
        if id(o) in seen:
            return 0
        seen.add(id(o))
        s = getsizeof(o, default_size)
        if verbose:
            print(s, type(o), repr(o), file=sys.stderr)
        for typ, handler in all_handlers.items():
            if isinstance(o, typ):
                s += sum(map(sizeof, handler(o)))
                break
        return s

    return sizeof(o)
[ "def", "total_size", "(", "o", ",", "handlers", "=", "{", "}", ",", "verbose", "=", "False", ")", ":", "def", "dict_handler", "(", "d", ")", ":", "return", "chain", ".", "from_iterable", "(", "d", ".", "items", "(", ")", ")", "all_handlers", "=", "{", "tuple", ":", "iter", ",", "list", ":", "iter", ",", "deque", ":", "iter", ",", "dict", ":", "dict_handler", ",", "set", ":", "iter", ",", "frozenset", ":", "iter", "}", "all_handlers", ".", "update", "(", "handlers", ")", "seen", "=", "set", "(", ")", "default_size", "=", "getsizeof", "(", "0", ")", "def", "sizeof", "(", "o", ")", ":", "if", "(", "id", "(", "o", ")", "in", "seen", ")", ":", "return", "0", "seen", ".", "add", "(", "id", "(", "o", ")", ")", "s", "=", "getsizeof", "(", "o", ",", "default_size", ")", "for", "(", "typ", ",", "handler", ")", "in", "all_handlers", ".", "items", "(", ")", ":", "if", "isinstance", "(", "o", ",", "typ", ")", ":", "s", "+=", "sum", "(", "map", "(", "sizeof", ",", "handler", "(", "o", ")", ")", ")", "break", "return", "s", "return", "sizeof", "(", "o", ")" ]
returns the approximate memory footprint an object and all of its contents .
train
true
37,749
def generate_addon_user_and_category(addon, user, category):
    """Create the AddonUser row linking *user* to *addon*, and a featured
    AddonCategory row linking *addon* to *category*.
    """
    AddonUser.objects.create(user=user, addon=addon)
    AddonCategory.objects.create(addon=addon, feature=True, category=category)
[ "def", "generate_addon_user_and_category", "(", "addon", ",", "user", ",", "category", ")", ":", "AddonUser", ".", "objects", ".", "create", "(", "addon", "=", "addon", ",", "user", "=", "user", ")", "AddonCategory", ".", "objects", ".", "create", "(", "addon", "=", "addon", ",", "category", "=", "category", ",", "feature", "=", "True", ")" ]
generate the dedicated addonuser and addoncategory for the given addon and user .
train
false
37,750
def pause_all():
    """Pause all activities that cause disk access.

    Sets the module-level PAUSED_ALL flag and pauses the active
    downloader, then logs the state change.
    """
    global PAUSED_ALL
    PAUSED_ALL = True
    # Pause the downloader after the flag is set so other code observing
    # PAUSED_ALL sees a consistent state.
    Downloader.do.pause()
    logging.debug('PAUSED_ALL active')
[ "def", "pause_all", "(", ")", ":", "global", "PAUSED_ALL", "PAUSED_ALL", "=", "True", "Downloader", ".", "do", ".", "pause", "(", ")", "logging", ".", "debug", "(", "'PAUSED_ALL active'", ")" ]
pause all activities that cause disk access .
train
false
37,751
@contextmanager
def log_timing(logger, task, level=logging.INFO, final_msg=None, callbacks=None):
    """Context manager that logs the start/end of an operation.

    Args:
        logger: logger to emit messages on.
        task: description of the operation; when None no start message is
            logged (it is still interpolated into the default end message).
        level: logging level used for both messages.
        final_msg: optional prefix for the end message; when None the
            default "<task> done. Time elapsed: ..." form is used.
        callbacks: optional iterable of callables, each invoked with the
            elapsed time in seconds (float).
    """
    start = datetime.datetime.now()
    if task is not None:
        logger.log(level, str(task) + '...')
    # NOTE(review): an exception inside the managed block skips the end
    # log and the callbacks (no try/finally here) — confirm intended.
    yield
    delta = datetime.datetime.now() - start
    # Fix: previously delegated to a local total_seconds() compatibility
    # helper; timedelta.total_seconds() is stdlib since Python 2.7.
    total = delta.total_seconds()
    # Short durations read better as plain seconds; longer ones use the
    # H:MM:SS timedelta form.
    delta_str = ('%f seconds' % total) if total < 60 else str(delta)
    if final_msg is None:
        logger.log(level, str(task) + (' done. Time elapsed: %s' % delta_str))
    else:
        logger.log(level, ' '.join((final_msg, delta_str)))
    if callbacks is not None:
        for callback in callbacks:
            callback(total)
[ "@", "contextmanager", "def", "log_timing", "(", "logger", ",", "task", ",", "level", "=", "logging", ".", "INFO", ",", "final_msg", "=", "None", ",", "callbacks", "=", "None", ")", ":", "start", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "if", "(", "task", "is", "not", "None", ")", ":", "logger", ".", "log", "(", "level", ",", "(", "str", "(", "task", ")", "+", "'...'", ")", ")", "(", "yield", ")", "end", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "delta", "=", "(", "end", "-", "start", ")", "total", "=", "total_seconds", "(", "delta", ")", "if", "(", "total", "<", "60", ")", ":", "delta_str", "=", "(", "'%f seconds'", "%", "total", ")", "else", ":", "delta_str", "=", "str", "(", "delta", ")", "if", "(", "final_msg", "is", "None", ")", ":", "logger", ".", "log", "(", "level", ",", "(", "str", "(", "task", ")", "+", "(", "' done. Time elapsed: %s'", "%", "delta_str", ")", ")", ")", "else", ":", "logger", ".", "log", "(", "level", ",", "' '", ".", "join", "(", "(", "final_msg", ",", "delta_str", ")", ")", ")", "if", "(", "callbacks", "is", "not", "None", ")", ":", "for", "callback", "in", "callbacks", ":", "callback", "(", "total", ")" ]
context manager that logs the start/end of an operation .
train
false
37,753
def requires_special_home_display(name):
    """Return whether *name* is the home shorthand ('~') and the chosen
    theme opts into a special home-directory display."""
    if name != '~':
        return False
    return Color.HOME_SPECIAL_DISPLAY
[ "def", "requires_special_home_display", "(", "name", ")", ":", "return", "(", "(", "name", "==", "'~'", ")", "and", "Color", ".", "HOME_SPECIAL_DISPLAY", ")" ]
returns true if the given directory name matches the home indicator and the chosen theme should use a special home indicator display .
train
false