id_within_dataset       int64           1 – 55.5k
snippet                 stringlengths   19 – 14.2k
tokens                  listlengths     6 – 1.63k
nl                      stringlengths   6 – 352
split_within_dataset    stringclasses   1 value
is_duplicated           bool            2 classes
11,800
def create_dir_if_needed(dest_directory):
    if (not tf.gfile.IsDirectory(dest_directory)):
        tf.gfile.MakeDirs(dest_directory)
    return True
[ "def", "create_dir_if_needed", "(", "dest_directory", ")", ":", "if", "(", "not", "tf", ".", "gfile", ".", "IsDirectory", "(", "dest_directory", ")", ")", ":", "tf", ".", "gfile", ".", "MakeDirs", "(", "dest_directory", ")", "return", "True" ]
create directory if doesnt exist .
train
false
11,801
def create_cover(title, authors, series=None, series_index=1, prefs=None, as_qimage=False):
    u' templates are ignored, to ensure that the specified metadata is used. '
    mi = Metadata(title, authors)
    if series:
        (mi.series, mi.series_index) = (series, series_index)
    d = cprefs.defaults
    prefs = override_prefs((prefs or cprefs), title_template=d[u'title_template'], subtitle_template=d[u'subtitle_template'], footer_template=d[u'footer_template'])
    return generate_cover(mi, prefs=prefs, as_qimage=as_qimage)
[ "def", "create_cover", "(", "title", ",", "authors", ",", "series", "=", "None", ",", "series_index", "=", "1", ",", "prefs", "=", "None", ",", "as_qimage", "=", "False", ")", ":", "mi", "=", "Metadata", "(", "title", ",", "authors", ")", "if", "series", ":", "(", "mi", ".", "series", ",", "mi", ".", "series_index", ")", "=", "(", "series", ",", "series_index", ")", "d", "=", "cprefs", ".", "defaults", "prefs", "=", "override_prefs", "(", "(", "prefs", "or", "cprefs", ")", ",", "title_template", "=", "d", "[", "u'title_template'", "]", ",", "subtitle_template", "=", "d", "[", "u'subtitle_template'", "]", ",", "footer_template", "=", "d", "[", "u'footer_template'", "]", ")", "return", "generate_cover", "(", "mi", ",", "prefs", "=", "prefs", ",", "as_qimage", "=", "as_qimage", ")" ]
create a cover from the specified title .
train
false
11,803
@with_setup(setup, teardown)
def test_top_filters():
    model = serial.load('dbm.pkl')
    (layer_1, layer_2) = model.hidden_layers[0:2]
    W1 = layer_1.get_weights()
    W2 = layer_2.get_weights()
    top_filters.get_mat_product_viewer(W1, W2)
    dataset_yaml_src = model.dataset_yaml_src
    dataset = yaml_parse.load(dataset_yaml_src)
    imgs = dataset.get_weights_view(W1.T)
    top_filters.get_connections_viewer(imgs, W1, W2)
[ "@", "with_setup", "(", "setup", ",", "teardown", ")", "def", "test_top_filters", "(", ")", ":", "model", "=", "serial", ".", "load", "(", "'dbm.pkl'", ")", "(", "layer_1", ",", "layer_2", ")", "=", "model", ".", "hidden_layers", "[", "0", ":", "2", "]", "W1", "=", "layer_1", ".", "get_weights", "(", ")", "W2", "=", "layer_2", ".", "get_weights", "(", ")", "top_filters", ".", "get_mat_product_viewer", "(", "W1", ",", "W2", ")", "dataset_yaml_src", "=", "model", ".", "dataset_yaml_src", "dataset", "=", "yaml_parse", ".", "load", "(", "dataset_yaml_src", ")", "imgs", "=", "dataset", ".", "get_weights_view", "(", "W1", ".", "T", ")", "top_filters", ".", "get_connections_viewer", "(", "imgs", ",", "W1", ",", "W2", ")" ]
test the top_filters viewer functions .
train
false
11,804
def get_scene_exception_by_name_multiple(show_name):
    out = []
    dbData = sorted([x[u'doc'] for x in sickrage.srCore.cacheDB.db.all(u'scene_exceptions', with_doc=True)], key=(lambda d: d[u'season']))
    exception_result = [x for x in dbData if (x[u'show_name'].lower() == show_name.lower())]
    if exception_result:
        return [(int(x[u'indexer_id']), int(x[u'season'])) for x in exception_result]
    for cur_exception in dbData:
        cur_exception_name = cur_exception[u'show_name']
        cur_indexer_id = int(cur_exception[u'indexer_id'])
        cur_season = int(cur_exception[u'season'])
        if (show_name.lower() in (cur_exception_name.lower(), sanitizeSceneName(cur_exception_name).lower().replace(u'.', u' '))):
            sickrage.srCore.srLogger.debug(((u'Scene exception lookup got indexer id ' + str(cur_indexer_id)) + u', using that'))
            out.append((cur_indexer_id, cur_season))
    if out:
        return out
    return [(None, None)]
[ "def", "get_scene_exception_by_name_multiple", "(", "show_name", ")", ":", "out", "=", "[", "]", "dbData", "=", "sorted", "(", "[", "x", "[", "u'doc'", "]", "for", "x", "in", "sickrage", ".", "srCore", ".", "cacheDB", ".", "db", ".", "all", "(", "u'scene_exceptions'", ",", "with_doc", "=", "True", ")", "]", ",", "key", "=", "(", "lambda", "d", ":", "d", "[", "u'season'", "]", ")", ")", "exception_result", "=", "[", "x", "for", "x", "in", "dbData", "if", "(", "x", "[", "u'show_name'", "]", ".", "lower", "(", ")", "==", "show_name", ".", "lower", "(", ")", ")", "]", "if", "exception_result", ":", "return", "[", "(", "int", "(", "x", "[", "u'indexer_id'", "]", ")", ",", "int", "(", "x", "[", "u'season'", "]", ")", ")", "for", "x", "in", "exception_result", "]", "for", "cur_exception", "in", "dbData", ":", "cur_exception_name", "=", "cur_exception", "[", "u'show_name'", "]", "cur_indexer_id", "=", "int", "(", "cur_exception", "[", "u'indexer_id'", "]", ")", "cur_season", "=", "int", "(", "cur_exception", "[", "u'season'", "]", ")", "if", "(", "show_name", ".", "lower", "(", ")", "in", "(", "cur_exception_name", ".", "lower", "(", ")", ",", "sanitizeSceneName", "(", "cur_exception_name", ")", ".", "lower", "(", ")", ".", "replace", "(", "u'.'", ",", "u' '", ")", ")", ")", ":", "sickrage", ".", "srCore", ".", "srLogger", ".", "debug", "(", "(", "(", "u'Scene exception lookup got indexer id '", "+", "str", "(", "cur_indexer_id", ")", ")", "+", "u', using that'", ")", ")", "out", ".", "append", "(", "(", "cur_indexer_id", ",", "cur_season", ")", ")", "if", "out", ":", "return", "out", "return", "[", "(", "None", ",", "None", ")", "]" ]
given a show name .
train
false
11,805
def _default_loader(name):
    code = ('\n from package_control import sys_path\n sys_path.add_dependency(%s)\n ' % repr(name))
    return dedent(code).lstrip()
[ "def", "_default_loader", "(", "name", ")", ":", "code", "=", "(", "'\\n from package_control import sys_path\\n sys_path.add_dependency(%s)\\n '", "%", "repr", "(", "name", ")", ")", "return", "dedent", "(", "code", ")", ".", "lstrip", "(", ")" ]
generate the default loader code for a dependency .
train
false
11,808
def const_crowding_distance(individuals, fitnesses):
    distances = collections.defaultdict((lambda: 0))
    individuals = list(individuals)
    n_obj = len(fitnesses[individuals[0]][0])
    for i in range(n_obj):
        individuals.sort(key=(lambda x: fitnesses[x][0][i]))
        normalization = float((fitnesses[individuals[0]][0][i] - fitnesses[individuals[(-1)]][0][i]))
        distances[individuals[0]] = 1e+100
        distances[individuals[(-1)]] = 1e+100
        tripled = list(zip(individuals, individuals[1:(-1)], individuals[2:]))
        for (pre, ind, post) in tripled:
            distances[ind] += ((fitnesses[pre][0][i] - fitnesses[post][0][i]) / normalization)
    return distances
[ "def", "const_crowding_distance", "(", "individuals", ",", "fitnesses", ")", ":", "distances", "=", "collections", ".", "defaultdict", "(", "(", "lambda", ":", "0", ")", ")", "individuals", "=", "list", "(", "individuals", ")", "n_obj", "=", "len", "(", "fitnesses", "[", "individuals", "[", "0", "]", "]", "[", "0", "]", ")", "for", "i", "in", "range", "(", "n_obj", ")", ":", "individuals", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "fitnesses", "[", "x", "]", "[", "0", "]", "[", "i", "]", ")", ")", "normalization", "=", "float", "(", "(", "fitnesses", "[", "individuals", "[", "0", "]", "]", "[", "0", "]", "[", "i", "]", "-", "fitnesses", "[", "individuals", "[", "(", "-", "1", ")", "]", "]", "[", "0", "]", "[", "i", "]", ")", ")", "distances", "[", "individuals", "[", "0", "]", "]", "=", "1e+100", "distances", "[", "individuals", "[", "(", "-", "1", ")", "]", "]", "=", "1e+100", "tripled", "=", "list", "(", "zip", "(", "individuals", ",", "individuals", "[", "1", ":", "(", "-", "1", ")", "]", ",", "individuals", "[", "2", ":", "]", ")", ")", "for", "(", "pre", ",", "ind", ",", "post", ")", "in", "tripled", ":", "distances", "[", "ind", "]", "+=", "(", "(", "fitnesses", "[", "pre", "]", "[", "0", "]", "[", "i", "]", "-", "fitnesses", "[", "post", "]", "[", "0", "]", "[", "i", "]", ")", "/", "normalization", ")", "return", "distances" ]
crowding distance-measure for multiple objectives .
train
false
11,810
def hrm_training_year(row):
    if hasattr(row, 'hrm_training'):
        row = row.hrm_training
    try:
        date = row.date
    except AttributeError:
        date = None
    if date:
        return date.year
    else:
        return current.messages['NONE']
[ "def", "hrm_training_year", "(", "row", ")", ":", "if", "hasattr", "(", "row", ",", "'hrm_training'", ")", ":", "row", "=", "row", ".", "hrm_training", "try", ":", "date", "=", "row", ".", "date", "except", "AttributeError", ":", "date", "=", "None", "if", "date", ":", "return", "date", ".", "year", "else", ":", "return", "current", ".", "messages", "[", "'NONE'", "]" ]
the year of the training event .
train
false
11,811
def local_repo_clean():
    with lcd(LOGDIR):
        local('rm le.tar.gz')
[ "def", "local_repo_clean", "(", ")", ":", "with", "lcd", "(", "LOGDIR", ")", ":", "local", "(", "'rm le.tar.gz'", ")" ]
delete tarball .
train
false
11,812
def get_indexCounters_btree_miss_ratio(name):
    try:
        result = ((get_rate((NAME_PREFIX + 'indexCounters_btree_misses')) / get_rate((NAME_PREFIX + 'indexCounters_btree_accesses'))) * 100)
    except ZeroDivisionError:
        result = 0
    return result
[ "def", "get_indexCounters_btree_miss_ratio", "(", "name", ")", ":", "try", ":", "result", "=", "(", "(", "get_rate", "(", "(", "NAME_PREFIX", "+", "'indexCounters_btree_misses'", ")", ")", "/", "get_rate", "(", "(", "NAME_PREFIX", "+", "'indexCounters_btree_accesses'", ")", ")", ")", "*", "100", ")", "except", "ZeroDivisionError", ":", "result", "=", "0", "return", "result" ]
return the btree miss ratio .
train
false
11,813
def array_repr_oneline(array):
    r = np.array2string(array, separator=u',', suppress_small=True)
    return u' '.join((l.strip() for l in r.splitlines()))
[ "def", "array_repr_oneline", "(", "array", ")", ":", "r", "=", "np", ".", "array2string", "(", "array", ",", "separator", "=", "u','", ",", "suppress_small", "=", "True", ")", "return", "u' '", ".", "join", "(", "(", "l", ".", "strip", "(", ")", "for", "l", "in", "r", ".", "splitlines", "(", ")", ")", ")" ]
represents a multi-dimensional numpy array flattened onto a single line .
train
false
11,814
def NamesOfDefinedFlags():
    return ['tmod_foo_bool', 'tmod_foo_str', 'tmod_foo_int']
[ "def", "NamesOfDefinedFlags", "(", ")", ":", "return", "[", "'tmod_foo_bool'", ",", "'tmod_foo_str'", ",", "'tmod_foo_int'", "]" ]
returns: list of names of the flags declared in this module .
train
false
11,815
def generate_uid():
    return uuid4().int
[ "def", "generate_uid", "(", ")", ":", "return", "uuid4", "(", ")", ".", "int" ]
generates unique id .
train
false
11,816
def _fitstart(self, x):
    loc = np.min([x.min(), 0])
    a = (4 / (stats.skew(x) ** 2))
    scale = (np.std(x) / np.sqrt(a))
    return (a, loc, scale)
[ "def", "_fitstart", "(", "self", ",", "x", ")", ":", "loc", "=", "np", ".", "min", "(", "[", "x", ".", "min", "(", ")", ",", "0", "]", ")", "a", "=", "(", "4", "/", "(", "stats", ".", "skew", "(", "x", ")", "**", "2", ")", ")", "scale", "=", "(", "np", ".", "std", "(", "x", ")", "/", "np", ".", "sqrt", "(", "a", ")", ")", "return", "(", "a", ",", "loc", ",", "scale", ")" ]
example method .
train
false
11,817
def _extra_config(user_defined_config, base_dir):
    for (root_dir, _, files) in os.walk(base_dir):
        for name in files:
            if name.endswith(('.yml', '.yaml')):
                with open(os.path.join(root_dir, name), 'rb') as f:
                    du.merge_dict(user_defined_config, (yaml.safe_load(f.read()) or {}))
                logger.debug('Merged overrides from file {}'.format(name))
[ "def", "_extra_config", "(", "user_defined_config", ",", "base_dir", ")", ":", "for", "(", "root_dir", ",", "_", ",", "files", ")", "in", "os", ".", "walk", "(", "base_dir", ")", ":", "for", "name", "in", "files", ":", "if", "name", ".", "endswith", "(", "(", "'.yml'", ",", "'.yaml'", ")", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "name", ")", ",", "'rb'", ")", "as", "f", ":", "du", ".", "merge_dict", "(", "user_defined_config", ",", "(", "yaml", ".", "safe_load", "(", "f", ".", "read", "(", ")", ")", "or", "{", "}", ")", ")", "logger", ".", "debug", "(", "'Merged overrides from file {}'", ".", "format", "(", "name", ")", ")" ]
discover new items in any extra directories and add the new values .
train
false
11,818
def displayText(gcodeText):
    skein = displaySkein()
    skein.parseText(gcodeText)
    return skein.output
[ "def", "displayText", "(", "gcodeText", ")", ":", "skein", "=", "displaySkein", "(", ")", "skein", ".", "parseText", "(", "gcodeText", ")", "return", "skein", ".", "output" ]
parse a gcode text and display the commands .
train
false
11,819
def sub_irc(ircmatch):
    return IRC_COLOR_MAP.get(ircmatch.group(), '')
[ "def", "sub_irc", "(", "ircmatch", ")", ":", "return", "IRC_COLOR_MAP", ".", "get", "(", "ircmatch", ".", "group", "(", ")", ",", "''", ")" ]
substitute irc color info .
train
false
11,820
def UpdateVFSFileAndWait(client_id, vfs_file_urn, token=None, timeout=DEFAULT_TIMEOUT):
    update_flow_urn = StartFlowAndWait(client_id, token=token, timeout=timeout, flow_name='UpdateVFSFile', vfs_file_urn=vfs_file_urn)
    update_flow_obj = aff4.FACTORY.Open(update_flow_urn, token=token, aff4_type=flow.GRRFlow)
    sub_flow_urn = update_flow_obj.state.get_file_flow_urn
    if (not sub_flow_urn):
        return
    WaitForFlow(sub_flow_urn, token=token, timeout=timeout)
[ "def", "UpdateVFSFileAndWait", "(", "client_id", ",", "vfs_file_urn", ",", "token", "=", "None", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "update_flow_urn", "=", "StartFlowAndWait", "(", "client_id", ",", "token", "=", "token", ",", "timeout", "=", "timeout", ",", "flow_name", "=", "'UpdateVFSFile'", ",", "vfs_file_urn", "=", "vfs_file_urn", ")", "update_flow_obj", "=", "aff4", ".", "FACTORY", ".", "Open", "(", "update_flow_urn", ",", "token", "=", "token", ",", "aff4_type", "=", "flow", ".", "GRRFlow", ")", "sub_flow_urn", "=", "update_flow_obj", ".", "state", ".", "get_file_flow_urn", "if", "(", "not", "sub_flow_urn", ")", ":", "return", "WaitForFlow", "(", "sub_flow_urn", ",", "token", "=", "token", ",", "timeout", "=", "timeout", ")" ]
waits for a file to be updated on the client .
train
true
11,821
def keep_lazy_text(func):
    return keep_lazy(str)(func)
[ "def", "keep_lazy_text", "(", "func", ")", ":", "return", "keep_lazy", "(", "str", ")", "(", "func", ")" ]
a decorator for functions that accept lazy arguments and return text .
train
false
11,823
@_docstring('recording')
def get_recordings_by_echoprint(echoprint, includes=[], release_status=[], release_type=[]):
    warn('Echoprints were never introduced\nand will not be found (404)', Warning, stacklevel=2)
    raise ResponseError(cause=compat.HTTPError(None, 404, 'Not Found', None, None))
[ "@", "_docstring", "(", "'recording'", ")", "def", "get_recordings_by_echoprint", "(", "echoprint", ",", "includes", "=", "[", "]", ",", "release_status", "=", "[", "]", ",", "release_type", "=", "[", "]", ")", ":", "warn", "(", "'Echoprints were never introduced\\nand will not be found (404)'", ",", "Warning", ",", "stacklevel", "=", "2", ")", "raise", "ResponseError", "(", "cause", "=", "compat", ".", "HTTPError", "(", "None", ",", "404", ",", "'Not Found'", ",", "None", ",", "None", ")", ")" ]
search for recordings with an echoprint <URL .
train
false
11,824
@_ConfigurableFilter(executable='HTML_TIDY_EXECUTABLE')
def html_tidy_mini(infile, executable='tidy5'):
    return _html_tidy_runner(infile, '-quiet --show-info no --show-warnings no -utf8 --indent-attributes no --sort-attributes alpha --wrap 0 --wrap-sections no --tidy-mark no --drop-empty-elements no -modify %1', executable=executable)
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'HTML_TIDY_EXECUTABLE'", ")", "def", "html_tidy_mini", "(", "infile", ",", "executable", "=", "'tidy5'", ")", ":", "return", "_html_tidy_runner", "(", "infile", ",", "'-quiet --show-info no --show-warnings no -utf8 --indent-attributes no --sort-attributes alpha --wrap 0 --wrap-sections no --tidy-mark no --drop-empty-elements no -modify %1'", ",", "executable", "=", "executable", ")" ]
run html tidy with minimal settings .
train
false
11,825
def _pkg(jail=None, chroot=None, root=None):
    ret = ['pkg']
    if jail:
        ret.extend(['-j', jail])
    elif chroot:
        ret.extend(['-c', chroot])
    elif root:
        ret.extend(['-r', root])
    return ret
[ "def", "_pkg", "(", "jail", "=", "None", ",", "chroot", "=", "None", ",", "root", "=", "None", ")", ":", "ret", "=", "[", "'pkg'", "]", "if", "jail", ":", "ret", ".", "extend", "(", "[", "'-j'", ",", "jail", "]", ")", "elif", "chroot", ":", "ret", ".", "extend", "(", "[", "'-c'", ",", "chroot", "]", ")", "elif", "root", ":", "ret", ".", "extend", "(", "[", "'-r'", ",", "root", "]", ")", "return", "ret" ]
returns the prefix for a pkg command .
train
true
11,827
def symmetric(n):
    for perm in variations(list(range(n)), n):
        (yield Permutation(perm))
[ "def", "symmetric", "(", "n", ")", ":", "for", "perm", "in", "variations", "(", "list", "(", "range", "(", "n", ")", ")", ",", "n", ")", ":", "(", "yield", "Permutation", "(", "perm", ")", ")" ]
generates the symmetric group of order n .
train
false
11,828
def search(opts, returners, whitelist=None):
    return LazyLoader(_module_dirs(opts, 'search', 'search'), opts, tag='search', whitelist=whitelist, pack={'__ret__': returners})
[ "def", "search", "(", "opts", ",", "returners", ",", "whitelist", "=", "None", ")", ":", "return", "LazyLoader", "(", "_module_dirs", "(", "opts", ",", "'search'", ",", "'search'", ")", ",", "opts", ",", "tag", "=", "'search'", ",", "whitelist", "=", "whitelist", ",", "pack", "=", "{", "'__ret__'", ":", "returners", "}", ")" ]
search for blog posts .
train
false
11,829
def concat(dfs, axis=0, join='outer', interleave_partitions=False):
    if (not isinstance(dfs, list)):
        raise TypeError('dfs must be a list of DataFrames/Series objects')
    if (len(dfs) == 0):
        raise ValueError('No objects to concatenate')
    if (len(dfs) == 1):
        return dfs[0]
    if (join not in ('inner', 'outer')):
        raise ValueError("'join' must be 'inner' or 'outer'")
    axis = DataFrame._validate_axis(axis)
    dasks = [df for df in dfs if isinstance(df, _Frame)]
    dfs = _maybe_from_pandas(dfs)
    if (axis == 1):
        if all((df.known_divisions for df in dasks)):
            return concat_indexed_dataframes(dfs, axis=axis, join=join)
        elif ((len(dasks) == len(dfs)) and all(((not df.known_divisions) for df in dfs)) and (len({df.npartitions for df in dasks}) == 1)):
            warn("Concatenating dataframes with unknown divisions.\nWe're assuming that the indexes of each dataframes are \naligned. This assumption is not generally safe.")
            return concat_unindexed_dataframes(dfs)
        else:
            raise ValueError('Unable to concatenate DataFrame with unknown division specifying axis=1')
    elif all((df.known_divisions for df in dasks)):
        if all(((dfs[i].divisions[(-1)] < dfs[(i + 1)].divisions[0]) for i in range((len(dfs) - 1)))):
            divisions = []
            for df in dfs[:(-1)]:
                divisions += df.divisions[:(-1)]
            divisions += dfs[(-1)].divisions
            return stack_partitions(dfs, divisions, join=join)
        elif interleave_partitions:
            return concat_indexed_dataframes(dfs, join=join)
        else:
            raise ValueError('All inputs have known divisions which cannot be concatenated in order. Specify interleave_partitions=True to ignore order')
    else:
        divisions = ([None] * (sum([df.npartitions for df in dfs]) + 1))
        return stack_partitions(dfs, divisions, join=join)
[ "def", "concat", "(", "dfs", ",", "axis", "=", "0", ",", "join", "=", "'outer'", ",", "interleave_partitions", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "dfs", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'dfs must be a list of DataFrames/Series objects'", ")", "if", "(", "len", "(", "dfs", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'No objects to concatenate'", ")", "if", "(", "len", "(", "dfs", ")", "==", "1", ")", ":", "return", "dfs", "[", "0", "]", "if", "(", "join", "not", "in", "(", "'inner'", ",", "'outer'", ")", ")", ":", "raise", "ValueError", "(", "\"'join' must be 'inner' or 'outer'\"", ")", "axis", "=", "DataFrame", ".", "_validate_axis", "(", "axis", ")", "dasks", "=", "[", "df", "for", "df", "in", "dfs", "if", "isinstance", "(", "df", ",", "_Frame", ")", "]", "dfs", "=", "_maybe_from_pandas", "(", "dfs", ")", "if", "(", "axis", "==", "1", ")", ":", "if", "all", "(", "(", "df", ".", "known_divisions", "for", "df", "in", "dasks", ")", ")", ":", "return", "concat_indexed_dataframes", "(", "dfs", ",", "axis", "=", "axis", ",", "join", "=", "join", ")", "elif", "(", "(", "len", "(", "dasks", ")", "==", "len", "(", "dfs", ")", ")", "and", "all", "(", "(", "(", "not", "df", ".", "known_divisions", ")", "for", "df", "in", "dfs", ")", ")", "and", "(", "len", "(", "{", "df", ".", "npartitions", "for", "df", "in", "dasks", "}", ")", "==", "1", ")", ")", ":", "warn", "(", "\"Concatenating dataframes with unknown divisions.\\nWe're assuming that the indexes of each dataframes are \\naligned. This assumption is not generally safe.\"", ")", "return", "concat_unindexed_dataframes", "(", "dfs", ")", "else", ":", "raise", "ValueError", "(", "'Unable to concatenate DataFrame with unknown division specifying axis=1'", ")", "elif", "all", "(", "(", "df", ".", "known_divisions", "for", "df", "in", "dasks", ")", ")", ":", "if", "all", "(", "(", "(", "dfs", "[", "i", "]", ".", "divisions", "[", "(", "-", "1", ")", "]", "<", "dfs", "[", "(", "i", "+", "1", ")", "]", ".", "divisions", "[", "0", "]", ")", "for", "i", "in", "range", "(", "(", "len", "(", "dfs", ")", "-", "1", ")", ")", ")", ")", ":", "divisions", "=", "[", "]", "for", "df", "in", "dfs", "[", ":", "(", "-", "1", ")", "]", ":", "divisions", "+=", "df", ".", "divisions", "[", ":", "(", "-", "1", ")", "]", "divisions", "+=", "dfs", "[", "(", "-", "1", ")", "]", ".", "divisions", "return", "stack_partitions", "(", "dfs", ",", "divisions", ",", "join", "=", "join", ")", "elif", "interleave_partitions", ":", "return", "concat_indexed_dataframes", "(", "dfs", ",", "join", "=", "join", ")", "else", ":", "raise", "ValueError", "(", "'All inputs have known divisions which cannot be concatenated in order. Specify interleave_partitions=True to ignore order'", ")", "else", ":", "divisions", "=", "(", "[", "None", "]", "*", "(", "sum", "(", "[", "df", ".", "npartitions", "for", "df", "in", "dfs", "]", ")", "+", "1", ")", ")", "return", "stack_partitions", "(", "dfs", ",", "divisions", ",", "join", "=", "join", ")" ]
concatenate many bags together .
train
false
11,830
def is_bipartite_node_set(G, nodes):
    S = set(nodes)
    for CC in nx.connected_component_subgraphs(G):
        (X, Y) = sets(CC)
        if (not ((X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S)))):
            return False
    return True
[ "def", "is_bipartite_node_set", "(", "G", ",", "nodes", ")", ":", "S", "=", "set", "(", "nodes", ")", "for", "CC", "in", "nx", ".", "connected_component_subgraphs", "(", "G", ")", ":", "(", "X", ",", "Y", ")", "=", "sets", "(", "CC", ")", "if", "(", "not", "(", "(", "X", ".", "issubset", "(", "S", ")", "and", "Y", ".", "isdisjoint", "(", "S", ")", ")", "or", "(", "Y", ".", "issubset", "(", "S", ")", "and", "X", ".", "isdisjoint", "(", "S", ")", ")", ")", ")", ":", "return", "False", "return", "True" ]
returns true if nodes and g/nodes are a bipartition of g .
train
false
11,831
def parse_command_line(args=None, final=True):
    return options.parse_command_line(args, final=final)
[ "def", "parse_command_line", "(", "args", "=", "None", ",", "final", "=", "True", ")", ":", "return", "options", ".", "parse_command_line", "(", "args", ",", "final", "=", "final", ")" ]
parse command line and return a socket address .
train
false
11,832
def _indent_xml(xml, prefix=u''):
    if (len(xml) > 0):
        xml.text = ((((xml.text or u'').strip() + u'\n') + prefix) + u' ')
        for child in xml:
            _indent_xml(child, (prefix + u' '))
        for child in xml[:(-1)]:
            child.tail = ((((child.tail or u'').strip() + u'\n') + prefix) + u' ')
        xml[(-1)].tail = (((xml[(-1)].tail or u'').strip() + u'\n') + prefix)
[ "def", "_indent_xml", "(", "xml", ",", "prefix", "=", "u''", ")", ":", "if", "(", "len", "(", "xml", ")", ">", "0", ")", ":", "xml", ".", "text", "=", "(", "(", "(", "(", "xml", ".", "text", "or", "u''", ")", ".", "strip", "(", ")", "+", "u'\\n'", ")", "+", "prefix", ")", "+", "u' '", ")", "for", "child", "in", "xml", ":", "_indent_xml", "(", "child", ",", "(", "prefix", "+", "u' '", ")", ")", "for", "child", "in", "xml", "[", ":", "(", "-", "1", ")", "]", ":", "child", ".", "tail", "=", "(", "(", "(", "(", "child", ".", "tail", "or", "u''", ")", ".", "strip", "(", ")", "+", "u'\\n'", ")", "+", "prefix", ")", "+", "u' '", ")", "xml", "[", "(", "-", "1", ")", "]", ".", "tail", "=", "(", "(", "(", "xml", "[", "(", "-", "1", ")", "]", ".", "tail", "or", "u''", ")", ".", "strip", "(", ")", "+", "u'\\n'", ")", "+", "prefix", ")" ]
helper for build_index(): given an xml elementtree .
train
false
11,833
def error_query(msg, q=None):
    if (q is None):
        q = _NullQuery()
    q.error = msg
    return q
[ "def", "error_query", "(", "msg", ",", "q", "=", "None", ")", ":", "if", "(", "q", "is", "None", ")", ":", "q", "=", "_NullQuery", "(", ")", "q", ".", "error", "=", "msg", "return", "q" ]
returns the query in the second argument with its error attribute set to msg .
train
false
11,834
def policyIteration(Ts, R, discountFactor, VEvaluator=None, initpolicy=None, maxIters=20):
    if (initpolicy is None):
        (policy, T) = randomPolicy(Ts)
    else:
        policy = initpolicy
        T = collapsedTransitions(Ts, policy)
    if (VEvaluator is None):
        VEvaluator = (lambda T: trueValues(T, R, discountFactor))
    while (maxIters > 0):
        V = VEvaluator(T)
        (newpolicy, T) = greedyPolicy(Ts, R, discountFactor, V)
        if (sum(ravel(abs((newpolicy - policy)))) < 0.001):
            return (policy, T)
        policy = newpolicy
        maxIters -= 1
    return (policy, T)
[ "def", "policyIteration", "(", "Ts", ",", "R", ",", "discountFactor", ",", "VEvaluator", "=", "None", ",", "initpolicy", "=", "None", ",", "maxIters", "=", "20", ")", ":", "if", "(", "initpolicy", "is", "None", ")", ":", "(", "policy", ",", "T", ")", "=", "randomPolicy", "(", "Ts", ")", "else", ":", "policy", "=", "initpolicy", "T", "=", "collapsedTransitions", "(", "Ts", ",", "policy", ")", "if", "(", "VEvaluator", "is", "None", ")", ":", "VEvaluator", "=", "(", "lambda", "T", ":", "trueValues", "(", "T", ",", "R", ",", "discountFactor", ")", ")", "while", "(", "maxIters", ">", "0", ")", ":", "V", "=", "VEvaluator", "(", "T", ")", "(", "newpolicy", ",", "T", ")", "=", "greedyPolicy", "(", "Ts", ",", "R", ",", "discountFactor", ",", "V", ")", "if", "(", "sum", "(", "ravel", "(", "abs", "(", "(", "newpolicy", "-", "policy", ")", ")", ")", ")", "<", "0.001", ")", ":", "return", "(", "policy", ",", "T", ")", "policy", "=", "newpolicy", "maxIters", "-=", "1", "return", "(", "policy", ",", "T", ")" ]
given transition matrices .
train
false
11,836
def YamlLoader(string):
    representation = yaml.load(string)
    result_cls = aff4.FACTORY.AFF4Object(representation['aff4_class'])
    aff4_attributes = {}
    for (predicate, values) in representation['attributes'].items():
        attribute = aff4.Attribute.PREDICATES[predicate]
        tmp = aff4_attributes[attribute] = []
        for (rdfvalue_cls_name, value, age) in values:
            rdfvalue_cls = aff4.FACTORY.RDFValue(rdfvalue_cls_name)
            value = rdfvalue_cls(value, age=rdfvalue.RDFDatetime(age))
            tmp.append(value)
    result = result_cls(urn=representation['_urn'], clone=aff4_attributes, mode='rw', age=representation['age_policy'])
    (result.new_attributes, result.synced_attributes) = (result.synced_attributes, {})
    result._dirty = True
    return result
[ "def", "YamlLoader", "(", "string", ")", ":", "representation", "=", "yaml", ".", "load", "(", "string", ")", "result_cls", "=", "aff4", ".", "FACTORY", ".", "AFF4Object", "(", "representation", "[", "'aff4_class'", "]", ")", "aff4_attributes", "=", "{", "}", "for", "(", "predicate", ",", "values", ")", "in", "representation", "[", "'attributes'", "]", ".", "items", "(", ")", ":", "attribute", "=", "aff4", ".", "Attribute", ".", "PREDICATES", "[", "predicate", "]", "tmp", "=", "aff4_attributes", "[", "attribute", "]", "=", "[", "]", "for", "(", "rdfvalue_cls_name", ",", "value", ",", "age", ")", "in", "values", ":", "rdfvalue_cls", "=", "aff4", ".", "FACTORY", ".", "RDFValue", "(", "rdfvalue_cls_name", ")", "value", "=", "rdfvalue_cls", "(", "value", ",", "age", "=", "rdfvalue", ".", "RDFDatetime", "(", "age", ")", ")", "tmp", ".", "append", "(", "value", ")", "result", "=", "result_cls", "(", "urn", "=", "representation", "[", "'_urn'", "]", ",", "clone", "=", "aff4_attributes", ",", "mode", "=", "'rw'", ",", "age", "=", "representation", "[", "'age_policy'", "]", ")", "(", "result", ".", "new_attributes", ",", "result", ".", "synced_attributes", ")", "=", "(", "result", ".", "synced_attributes", ",", "{", "}", ")", "result", ".", "_dirty", "=", "True", "return", "result" ]
load an aff4 object from a serialized yaml representation .
train
true
11,838
def reload_(name):
    term(name)
[ "def", "reload_", "(", "name", ")", ":", "term", "(", "name", ")" ]
reload running jboss instance jboss_config configuration dictionary with properties specified above .
train
false
11,839
def effective_get(get=None, collection=None):
    collection_get = (collection._default_get if collection else None)
    return (get or _globals.get('get') or collection_get)
[ "def", "effective_get", "(", "get", "=", "None", ",", "collection", "=", "None", ")", ":", "collection_get", "=", "(", "collection", ".", "_default_get", "if", "collection", "else", "None", ")", "return", "(", "get", "or", "_globals", ".", "get", "(", "'get'", ")", "or", "collection_get", ")" ]
get the effective get method used in a given situation .
train
false
11,840
@frappe.whitelist()
def get_app_list():
    out = {}
    installed = frappe.get_installed_apps()
    for app in frappe.get_all_apps(True):
        app_hooks = frappe.get_hooks(app_name=app)
        if ((app not in installed) and app_hooks.get(u'hide_in_installer')):
            continue
        out[app] = {}
        for key in (u'app_name', u'app_title', u'app_description', u'app_icon', u'app_publisher', u'app_version', u'app_url', u'app_color'):
            val = (app_hooks.get(key) or [])
            out[app][key] = (val[0] if len(val) else u'')
        if (app in installed):
            out[app][u'installed'] = 1
    for app_from_list in get_app_listing().values():
        if (app_from_list.app_name in out):
            out[app_from_list.app_name].update(app_from_list)
        elif (not frappe.conf.disallow_app_listing):
            out[app_from_list.app_name] = app_from_list
    return out
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_app_list", "(", ")", ":", "out", "=", "{", "}", "installed", "=", "frappe", ".", "get_installed_apps", "(", ")", "for", "app", "in", "frappe", ".", "get_all_apps", "(", "True", ")", ":", "app_hooks", "=", "frappe", ".", "get_hooks", "(", "app_name", "=", "app", ")", "if", "(", "(", "app", "not", "in", "installed", ")", "and", "app_hooks", ".", "get", "(", "u'hide_in_installer'", ")", ")", ":", "continue", "out", "[", "app", "]", "=", "{", "}", "for", "key", "in", "(", "u'app_name'", ",", "u'app_title'", ",", "u'app_description'", ",", "u'app_icon'", ",", "u'app_publisher'", ",", "u'app_version'", ",", "u'app_url'", ",", "u'app_color'", ")", ":", "val", "=", "(", "app_hooks", ".", "get", "(", "key", ")", "or", "[", "]", ")", "out", "[", "app", "]", "[", "key", "]", "=", "(", "val", "[", "0", "]", "if", "len", "(", "val", ")", "else", "u''", ")", "if", "(", "app", "in", "installed", ")", ":", "out", "[", "app", "]", "[", "u'installed'", "]", "=", "1", "for", "app_from_list", "in", "get_app_listing", "(", ")", ".", "values", "(", ")", ":", "if", "(", "app_from_list", ".", "app_name", "in", "out", ")", ":", "out", "[", "app_from_list", ".", "app_name", "]", ".", "update", "(", "app_from_list", ")", "elif", "(", "not", "frappe", ".", "conf", ".", "disallow_app_listing", ")", ":", "out", "[", "app_from_list", ".", "app_name", "]", "=", "app_from_list", "return", "out" ]
get list of all apps with properties .
train
false
11,843
def dmp_half_gcdex(f, g, u, K):
    if (not u):
        return dup_half_gcdex(f, g, K)
    else:
        raise MultivariatePolynomialError(f, g)
[ "def", "dmp_half_gcdex", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_half_gcdex", "(", "f", ",", "g", ",", "K", ")", "else", ":", "raise", "MultivariatePolynomialError", "(", "f", ",", "g", ")" ]
half extended euclidean algorithm in f[x] .
train
false
11,844
def strip_math(s):
    remove = (u'\\mathdefault', u'\\rm', u'\\cal', u'\\tt', u'\\it', u'\\', u'{', u'}')
    s = s[1:(-1)]
    for r in remove:
        s = s.replace(r, u'')
    return s
[ "def", "strip_math", "(", "s", ")", ":", "remove", "=", "(", "u'\\\\mathdefault'", ",", "u'\\\\rm'", ",", "u'\\\\cal'", ",", "u'\\\\tt'", ",", "u'\\\\it'", ",", "u'\\\\'", ",", "u'{'", ",", "u'}'", ")", "s", "=", "s", "[", "1", ":", "(", "-", "1", ")", "]", "for", "r", "in", "remove", ":", "s", "=", "s", ".", "replace", "(", "r", ",", "u''", ")", "return", "s" ]
remove latex formatting from mathtext .
train
false
11,845
def get_installed_apps(sort=False, frappe_last=False):
    if getattr(flags, u'in_install_db', True):
        return []
    if (not db):
        connect()
    installed = json.loads((db.get_global(u'installed_apps') or u'[]'))
    if sort:
        installed = [app for app in get_all_apps(True) if (app in installed)]
    if frappe_last:
        if (u'frappe' in installed):
            installed.remove(u'frappe')
        installed.append(u'frappe')
    return installed
[ "def", "get_installed_apps", "(", "sort", "=", "False", ",", "frappe_last", "=", "False", ")", ":", "if", "getattr", "(", "flags", ",", "u'in_install_db'", ",", "True", ")", ":", "return", "[", "]", "if", "(", "not", "db", ")", ":", "connect", "(", ")", "installed", "=", "json", ".", "loads", "(", "(", "db", ".", "get_global", "(", "u'installed_apps'", ")", "or", "u'[]'", ")", ")", "if", "sort", ":", "installed", "=", "[", "app", "for", "app", "in", "get_all_apps", "(", "True", ")", "if", "(", "app", "in", "installed", ")", "]", "if", "frappe_last", ":", "if", "(", "u'frappe'", "in", "installed", ")", ":", "installed", ".", "remove", "(", "u'frappe'", ")", "installed", ".", "append", "(", "u'frappe'", ")", "return", "installed" ]
modules in settings .
train
false
11,846
def RegisterUtility(utility_name, version_mapping=None):

    def MethodDecorator(utility_method, version):
        'Decorates a method in the utility class.'
        registry_name = (('%s/%s' % (utility_name, version)) if version else utility_name)

        @wraps(utility_method)
        def Wrapper(*args, **kwargs):
            with _UTILITY_LOCK:
                _utility_registry.Add(registry_name)
            return utility_method(*args, **kwargs)
        return Wrapper

    def ClassDecorator(cls):
        'Decorates a utility class.'
        for (name, method) in inspect.getmembers(cls, inspect.ismethod):
            if (not name.startswith('_')):
                if (not getattr(method, '__self__', None)):
                    setattr(cls, name, MethodDecorator(method, (version_mapping.get(name) if version_mapping else None)))
        return cls
    return ClassDecorator
[ "def", "RegisterUtility", "(", "utility_name", ",", "version_mapping", "=", "None", ")", ":", "def", "MethodDecorator", "(", "utility_method", ",", "version", ")", ":", "registry_name", "=", "(", "(", "'%s/%s'", "%", "(", "utility_name", ",", "version", ")", ")", "if", "version", "else", "utility_name", ")", "@", "wraps", "(", "utility_method", ")", "def", "Wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "with", "_UTILITY_LOCK", ":", "_utility_registry", ".", "Add", "(", "registry_name", ")", "return", "utility_method", "(", "*", "args", ",", "**", "kwargs", ")", "return", "Wrapper", "def", "ClassDecorator", "(", "cls", ")", ":", "for", "(", "name", ",", "method", ")", "in", "inspect", ".", "getmembers", "(", "cls", ",", "inspect", ".", "ismethod", ")", ":", "if", "(", "not", "name", ".", "startswith", "(", "'_'", ")", ")", ":", "if", "(", "not", "getattr", "(", "method", ",", "'__self__'", ",", "None", ")", ")", ":", "setattr", "(", "cls", ",", "name", ",", "MethodDecorator", "(", "method", ",", "(", "version_mapping", ".", "get", "(", "name", ")", "if", "version_mapping", "else", "None", ")", ")", ")", "return", "cls", "return", "ClassDecorator" ]
decorator that registers a class with the given utility name .
train
false
11,847
def parse_patterns(patterns_spec, ignore_patterns_spec, separator=';'):
    patterns = patterns_spec.split(separator)
    ignore_patterns = ignore_patterns_spec.split(separator)
    if (ignore_patterns == ['']):
        ignore_patterns = []
    return (patterns, ignore_patterns)
[ "def", "parse_patterns", "(", "patterns_spec", ",", "ignore_patterns_spec", ",", "separator", "=", "';'", ")", ":", "patterns", "=", "patterns_spec", ".", "split", "(", "separator", ")", "ignore_patterns", "=", "ignore_patterns_spec", ".", "split", "(", "separator", ")", "if", "(", "ignore_patterns", "==", "[", "''", "]", ")", ":", "ignore_patterns", "=", "[", "]", "return", "(", "patterns", ",", "ignore_patterns", ")" ]
parses pattern argument specs and returns a two-tuple of .
train
false
11,848
def _convert_agg_to_wx_image(agg, bbox):
    if (bbox is None):
        image = wxc.EmptyImage(int(agg.width), int(agg.height))
        image.SetData(agg.tostring_rgb())
        return image
    else:
        return wx.ImageFromBitmap(_WX28_clipped_agg_as_bitmap(agg, bbox))
[ "def", "_convert_agg_to_wx_image", "(", "agg", ",", "bbox", ")", ":", "if", "(", "bbox", "is", "None", ")", ":", "image", "=", "wxc", ".", "EmptyImage", "(", "int", "(", "agg", ".", "width", ")", ",", "int", "(", "agg", ".", "height", ")", ")", "image", ".", "SetData", "(", "agg", ".", "tostring_rgb", "(", ")", ")", "return", "image", "else", ":", "return", "wx", ".", "ImageFromBitmap", "(", "_WX28_clipped_agg_as_bitmap", "(", "agg", ",", "bbox", ")", ")" ]
convert the region of the agg buffer bounded by bbox to a wx .
train
true
11,849
def test_transform_path_pri():
    frame_transform_graph.invalidate_cache()
    (tpath, td) = frame_transform_graph.find_shortest_path(ICRS, Galactic)
    assert (tpath == [ICRS, FK5, Galactic])
    assert (td == 2)
    (tpath, td) = frame_transform_graph.find_shortest_path(FK4, Galactic)
    assert (tpath == [FK4, FK4NoETerms, Galactic])
    assert (td == 2)
[ "def", "test_transform_path_pri", "(", ")", ":", "frame_transform_graph", ".", "invalidate_cache", "(", ")", "(", "tpath", ",", "td", ")", "=", "frame_transform_graph", ".", "find_shortest_path", "(", "ICRS", ",", "Galactic", ")", "assert", "(", "tpath", "==", "[", "ICRS", ",", "FK5", ",", "Galactic", "]", ")", "assert", "(", "td", "==", "2", ")", "(", "tpath", ",", "td", ")", "=", "frame_transform_graph", ".", "find_shortest_path", "(", "FK4", ",", "Galactic", ")", "assert", "(", "tpath", "==", "[", "FK4", ",", "FK4NoETerms", ",", "Galactic", "]", ")", "assert", "(", "td", "==", "2", ")" ]
this checks that the transformation path prioritization works by making sure the icrs -> gal transformation always goes through fk5 and not fk4 .
train
false
11,851
def find_location(location):
    params = {'address': location, 'key': dev_key}
    if bias:
        params['region'] = bias
    json = requests.get(geocode_api, params=params).json()
    error = check_status(json['status'])
    if error:
        raise APIError(error)
    return json['results'][0]['geometry']['location']
[ "def", "find_location", "(", "location", ")", ":", "params", "=", "{", "'address'", ":", "location", ",", "'key'", ":", "dev_key", "}", "if", "bias", ":", "params", "[", "'region'", "]", "=", "bias", "json", "=", "requests", ".", "get", "(", "geocode_api", ",", "params", "=", "params", ")", ".", "json", "(", ")", "error", "=", "check_status", "(", "json", "[", "'status'", "]", ")", "if", "error", ":", "raise", "APIError", "(", "error", ")", "return", "json", "[", "'results'", "]", "[", "0", "]", "[", "'geometry'", "]", "[", "'location'", "]" ]
takes a location as a string .
train
false
11,852
def _score_cpu_topology(topology, wanttopology):
    score = 0
    if ((wanttopology.sockets != (-1)) and (topology.sockets == wanttopology.sockets)):
        score = (score + 1)
    if ((wanttopology.cores != (-1)) and (topology.cores == wanttopology.cores)):
        score = (score + 1)
    if ((wanttopology.threads != (-1)) and (topology.threads == wanttopology.threads)):
        score = (score + 1)
    return score
[ "def", "_score_cpu_topology", "(", "topology", ",", "wanttopology", ")", ":", "score", "=", "0", "if", "(", "(", "wanttopology", ".", "sockets", "!=", "(", "-", "1", ")", ")", "and", "(", "topology", ".", "sockets", "==", "wanttopology", ".", "sockets", ")", ")", ":", "score", "=", "(", "score", "+", "1", ")", "if", "(", "(", "wanttopology", ".", "cores", "!=", "(", "-", "1", ")", ")", "and", "(", "topology", ".", "cores", "==", "wanttopology", ".", "cores", ")", ")", ":", "score", "=", "(", "score", "+", "1", ")", "if", "(", "(", "wanttopology", ".", "threads", "!=", "(", "-", "1", ")", ")", "and", "(", "topology", ".", "threads", "==", "wanttopology", ".", "threads", ")", ")", ":", "score", "=", "(", "score", "+", "1", ")", "return", "score" ]
compare a topology against a desired configuration .
train
false
11,853
def _dataset_version(path, name):
    ver_fname = op.join(path, 'version.txt')
    if op.exists(ver_fname):
        with open(ver_fname, 'r') as fid:
            version = fid.readline().strip()
    else:
        version = ('0.3' if (name == 'sample') else '0.7')
    return version
[ "def", "_dataset_version", "(", "path", ",", "name", ")", ":", "ver_fname", "=", "op", ".", "join", "(", "path", ",", "'version.txt'", ")", "if", "op", ".", "exists", "(", "ver_fname", ")", ":", "with", "open", "(", "ver_fname", ",", "'r'", ")", "as", "fid", ":", "version", "=", "fid", ".", "readline", "(", ")", ".", "strip", "(", ")", "else", ":", "version", "=", "(", "'0.3'", "if", "(", "name", "==", "'sample'", ")", "else", "'0.7'", ")", "return", "version" ]
get the version of the dataset .
train
false
11,854
def write_descriptor_js(output_root):
    return _write_js(output_root, _list_descriptors())
[ "def", "write_descriptor_js", "(", "output_root", ")", ":", "return", "_write_js", "(", "output_root", ",", "_list_descriptors", "(", ")", ")" ]
write all registered xmoduledescriptor js and coffee files to output root .
train
false
11,856
def batch_norm_dnn(layer, **kwargs):
    nonlinearity = getattr(layer, 'nonlinearity', None)
    if (nonlinearity is not None):
        layer.nonlinearity = nonlinearities.identity
    if (hasattr(layer, 'b') and (layer.b is not None)):
        del layer.params[layer.b]
        layer.b = None
    bn_name = (kwargs.pop('name', None) or (getattr(layer, 'name', None) and (layer.name + '_bn')))
    layer = BatchNormDNNLayer(layer, name=bn_name, **kwargs)
    if (nonlinearity is not None):
        from .special import NonlinearityLayer
        nonlin_name = (bn_name and (bn_name + '_nonlin'))
        layer = NonlinearityLayer(layer, nonlinearity, name=nonlin_name)
    return layer
[ "def", "batch_norm_dnn", "(", "layer", ",", "**", "kwargs", ")", ":", "nonlinearity", "=", "getattr", "(", "layer", ",", "'nonlinearity'", ",", "None", ")", "if", "(", "nonlinearity", "is", "not", "None", ")", ":", "layer", ".", "nonlinearity", "=", "nonlinearities", ".", "identity", "if", "(", "hasattr", "(", "layer", ",", "'b'", ")", "and", "(", "layer", ".", "b", "is", "not", "None", ")", ")", ":", "del", "layer", ".", "params", "[", "layer", ".", "b", "]", "layer", ".", "b", "=", "None", "bn_name", "=", "(", "kwargs", ".", "pop", "(", "'name'", ",", "None", ")", "or", "(", "getattr", "(", "layer", ",", "'name'", ",", "None", ")", "and", "(", "layer", ".", "name", "+", "'_bn'", ")", ")", ")", "layer", "=", "BatchNormDNNLayer", "(", "layer", ",", "name", "=", "bn_name", ",", "**", "kwargs", ")", "if", "(", "nonlinearity", "is", "not", "None", ")", ":", "from", ".", "special", "import", "NonlinearityLayer", "nonlin_name", "=", "(", "bn_name", "and", "(", "bn_name", "+", "'_nonlin'", ")", ")", "layer", "=", "NonlinearityLayer", "(", "layer", ",", "nonlinearity", ",", "name", "=", "nonlin_name", ")", "return", "layer" ]
apply cudnn batch normalization to an existing layer .
train
false
11,857
def hello2():
    return T('Hello World')
[ "def", "hello2", "(", ")", ":", "return", "T", "(", "'Hello World'", ")" ]
simple page without template but with internationalization .
train
false
11,858
def attribute_value(state, text, i, formats, user_data):
    ch = text[i]
    if (ch in space_chars):
        return [(1, None)]
    if (ch in {u'"', u"'"}):
        state.parse = (SQ_VAL if (ch == u"'") else DQ_VAL)
        return [(1, formats[u'string'])]
    state.parse = IN_OPENING_TAG
    state.attribute_name = None
    m = unquoted_val_pat.match(text, i)
    if (m is None):
        return [(1, formats[u'no-attr-value'])]
    return [(len(m.group()), formats[u'string'])]
[ "def", "attribute_value", "(", "state", ",", "text", ",", "i", ",", "formats", ",", "user_data", ")", ":", "ch", "=", "text", "[", "i", "]", "if", "(", "ch", "in", "space_chars", ")", ":", "return", "[", "(", "1", ",", "None", ")", "]", "if", "(", "ch", "in", "{", "u'\"'", ",", "u\"'\"", "}", ")", ":", "state", ".", "parse", "=", "(", "SQ_VAL", "if", "(", "ch", "==", "u\"'\"", ")", "else", "DQ_VAL", ")", "return", "[", "(", "1", ",", "formats", "[", "u'string'", "]", ")", "]", "state", ".", "parse", "=", "IN_OPENING_TAG", "state", ".", "attribute_name", "=", "None", "m", "=", "unquoted_val_pat", ".", "match", "(", "text", ",", "i", ")", "if", "(", "m", "is", "None", ")", ":", "return", "[", "(", "1", ",", "formats", "[", "u'no-attr-value'", "]", ")", "]", "return", "[", "(", "len", "(", "m", ".", "group", "(", ")", ")", ",", "formats", "[", "u'string'", "]", ")", "]" ]
after attribute = .
train
false
11,859
def randSplitFeatures(features, partTrain):
    featuresTrain = []
    featuresTest = []
    for (i, f) in enumerate(features):
        [numOfSamples, numOfDims] = f.shape
        randperm = numpy.random.permutation(range(numOfSamples))
        nTrainSamples = int(round((partTrain * numOfSamples)))
        featuresTrain.append(f[randperm[0:nTrainSamples]])
        featuresTest.append(f[randperm[nTrainSamples::None]])
    return (featuresTrain, featuresTest)
[ "def", "randSplitFeatures", "(", "features", ",", "partTrain", ")", ":", "featuresTrain", "=", "[", "]", "featuresTest", "=", "[", "]", "for", "(", "i", ",", "f", ")", "in", "enumerate", "(", "features", ")", ":", "[", "numOfSamples", ",", "numOfDims", "]", "=", "f", ".", "shape", "randperm", "=", "numpy", ".", "random", ".", "permutation", "(", "range", "(", "numOfSamples", ")", ")", "nTrainSamples", "=", "int", "(", "round", "(", "(", "partTrain", "*", "numOfSamples", ")", ")", ")", "featuresTrain", ".", "append", "(", "f", "[", "randperm", "[", "0", ":", "nTrainSamples", "]", "]", ")", "featuresTest", ".", "append", "(", "f", "[", "randperm", "[", "nTrainSamples", ":", ":", "None", "]", "]", ")", "return", "(", "featuresTrain", ",", "featuresTest", ")" ]
def randsplitfeatures: this function splits a feature set for training and testing .
train
false
11,860
def call_use_cached_files(tup):
    try:
        (cache, key, results_dir) = tup
        res = cache.use_cached_files(key, results_dir)
        if res:
            sys.stderr.write(u'.')
        else:
            sys.stderr.write(u' ')
        return res
    except NonfatalArtifactCacheError as e:
        logger.warn(u'Error calling use_cached_files in artifact cache: {0}'.format(e))
        return False
[ "def", "call_use_cached_files", "(", "tup", ")", ":", "try", ":", "(", "cache", ",", "key", ",", "results_dir", ")", "=", "tup", "res", "=", "cache", ".", "use_cached_files", "(", "key", ",", "results_dir", ")", "if", "res", ":", "sys", ".", "stderr", ".", "write", "(", "u'.'", ")", "else", ":", "sys", ".", "stderr", ".", "write", "(", "u' '", ")", "return", "res", "except", "NonfatalArtifactCacheError", "as", "e", ":", "logger", ".", "warn", "(", "u'Error calling use_cached_files in artifact cache: {0}'", ".", "format", "(", "e", ")", ")", "return", "False" ]
importable helper for multi-proc calling of artifactcache .
train
true
11,861
def disassociate_eip_address(public_ip=None, association_id=None, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        return conn.disassociate_address(public_ip, association_id)
    except boto.exception.BotoServerError as e:
        log.error(e)
        return False
[ "def", "disassociate_eip_address", "(", "public_ip", "=", "None", ",", "association_id", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "return", "conn", ".", "disassociate_address", "(", "public_ip", ",", "association_id", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "False" ]
disassociate an elastic ip address from a currently running instance .
train
true
11,862
@pytest.mark.parametrize('attr', ['stderr', '__stderr__'])
def test_init_faulthandler_stderr_none(monkeypatch, attr):
    monkeypatch.setattr(sys, attr, None)
    earlyinit.init_faulthandler()
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'attr'", ",", "[", "'stderr'", ",", "'__stderr__'", "]", ")", "def", "test_init_faulthandler_stderr_none", "(", "monkeypatch", ",", "attr", ")", ":", "monkeypatch", ".", "setattr", "(", "sys", ",", "attr", ",", "None", ")", "earlyinit", ".", "init_faulthandler", "(", ")" ]
make sure init_faulthandler works when sys .
train
false
11,864
def test_format_config():
    f = HTMLFormatter()
    cfg = Config()
    with capture_output() as captured:
        result = f(cfg)
    nt.assert_is(result, None)
    nt.assert_equal(captured.stderr, '')
    with capture_output() as captured:
        result = f(Config)
    nt.assert_is(result, None)
    nt.assert_equal(captured.stderr, '')
[ "def", "test_format_config", "(", ")", ":", "f", "=", "HTMLFormatter", "(", ")", "cfg", "=", "Config", "(", ")", "with", "capture_output", "(", ")", "as", "captured", ":", "result", "=", "f", "(", "cfg", ")", "nt", ".", "assert_is", "(", "result", ",", "None", ")", "nt", ".", "assert_equal", "(", "captured", ".", "stderr", ",", "''", ")", "with", "capture_output", "(", ")", "as", "captured", ":", "result", "=", "f", "(", "Config", ")", "nt", ".", "assert_is", "(", "result", ",", "None", ")", "nt", ".", "assert_equal", "(", "captured", ".", "stderr", ",", "''", ")" ]
config objects dont pretend to support fancy reprs with lazy attrs .
train
false
11,868
def _kwswitch(kw, **kwargs):
    (keys, values) = list(zip(*sorted(kwargs.items())))
    match_idx = scope.call_method_pure(keys, 'index', kw)
    return scope.switch(match_idx, *values)
[ "def", "_kwswitch", "(", "kw", ",", "**", "kwargs", ")", ":", "(", "keys", ",", "values", ")", "=", "list", "(", "zip", "(", "*", "sorted", "(", "kwargs", ".", "items", "(", ")", ")", ")", ")", "match_idx", "=", "scope", ".", "call_method_pure", "(", "keys", ",", "'index'", ",", "kw", ")", "return", "scope", ".", "switch", "(", "match_idx", ",", "*", "values", ")" ]
conditional evaluation according to string value .
train
false
11,869
def odd_ext(x, n, axis=(-1)):
    if (n < 1):
        return x
    if (n > (x.shape[axis] - 1)):
        raise ValueError((('The extension length n (%d) is too big. ' + 'It must not exceed x.shape[axis]-1, which is %d.') % (n, (x.shape[axis] - 1))))
    left_end = axis_slice(x, start=0, stop=1, axis=axis)
    left_ext = axis_slice(x, start=n, stop=0, step=(-1), axis=axis)
    right_end = axis_slice(x, start=(-1), axis=axis)
    right_ext = axis_slice(x, start=(-2), stop=(- (n + 2)), step=(-1), axis=axis)
    ext = np.concatenate((((2 * left_end) - left_ext), x, ((2 * right_end) - right_ext)), axis=axis)
    return ext
[ "def", "odd_ext", "(", "x", ",", "n", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "if", "(", "n", "<", "1", ")", ":", "return", "x", "if", "(", "n", ">", "(", "x", ".", "shape", "[", "axis", "]", "-", "1", ")", ")", ":", "raise", "ValueError", "(", "(", "(", "'The extension length n (%d) is too big. '", "+", "'It must not exceed x.shape[axis]-1, which is %d.'", ")", "%", "(", "n", ",", "(", "x", ".", "shape", "[", "axis", "]", "-", "1", ")", ")", ")", ")", "left_end", "=", "axis_slice", "(", "x", ",", "start", "=", "0", ",", "stop", "=", "1", ",", "axis", "=", "axis", ")", "left_ext", "=", "axis_slice", "(", "x", ",", "start", "=", "n", ",", "stop", "=", "0", ",", "step", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", "right_end", "=", "axis_slice", "(", "x", ",", "start", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", "right_ext", "=", "axis_slice", "(", "x", ",", "start", "=", "(", "-", "2", ")", ",", "stop", "=", "(", "-", "(", "n", "+", "2", ")", ")", ",", "step", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", "ext", "=", "np", ".", "concatenate", "(", "(", "(", "(", "2", "*", "left_end", ")", "-", "left_ext", ")", ",", "x", ",", "(", "(", "2", "*", "right_end", ")", "-", "right_ext", ")", ")", ",", "axis", "=", "axis", ")", "return", "ext" ]
odd extension at the boundaries of an array generate a new ndarray by making an odd extension of x along an axis .
train
false
11,870
def get_possible_name_fields_for_model(model):
    if hasattr(model, u'name_field'):
        (yield model.name_field)
    for field in model._meta.local_fields:
        if (field.name in [u'name', u'title']):
            (yield field.name)
    if hasattr(model, u'_parler_meta'):
        for field in model._parler_meta.root_model._meta.get_fields():
            if (field.name not in (u'master', u'id', u'language_code', u'description')):
                (yield field.name)
[ "def", "get_possible_name_fields_for_model", "(", "model", ")", ":", "if", "hasattr", "(", "model", ",", "u'name_field'", ")", ":", "(", "yield", "model", ".", "name_field", ")", "for", "field", "in", "model", ".", "_meta", ".", "local_fields", ":", "if", "(", "field", ".", "name", "in", "[", "u'name'", ",", "u'title'", "]", ")", ":", "(", "yield", "field", ".", "name", ")", "if", "hasattr", "(", "model", ",", "u'_parler_meta'", ")", ":", "for", "field", "in", "model", ".", "_parler_meta", ".", "root_model", ".", "_meta", ".", "get_fields", "(", ")", ":", "if", "(", "field", ".", "name", "not", "in", "(", "u'master'", ",", "u'id'", ",", "u'language_code'", ",", "u'description'", ")", ")", ":", "(", "yield", "field", ".", "name", ")" ]
get possible name fields for given model this function yields strings of field names that could possible be identified as name fields for model .
train
false
11,871
def test_one_qubit_commutators():
    for g1 in (IdentityGate, X, Y, Z, H, T, S):
        for g2 in (IdentityGate, X, Y, Z, H, T, S):
            e = Commutator(g1(0), g2(0))
            a = matrix_to_zero(represent(e, nqubits=1, format='sympy'))
            b = matrix_to_zero(represent(e.doit(), nqubits=1, format='sympy'))
            assert (a == b)
            e = Commutator(g1(0), g2(1))
            assert (e.doit() == 0)
[ "def", "test_one_qubit_commutators", "(", ")", ":", "for", "g1", "in", "(", "IdentityGate", ",", "X", ",", "Y", ",", "Z", ",", "H", ",", "T", ",", "S", ")", ":", "for", "g2", "in", "(", "IdentityGate", ",", "X", ",", "Y", ",", "Z", ",", "H", ",", "T", ",", "S", ")", ":", "e", "=", "Commutator", "(", "g1", "(", "0", ")", ",", "g2", "(", "0", ")", ")", "a", "=", "matrix_to_zero", "(", "represent", "(", "e", ",", "nqubits", "=", "1", ",", "format", "=", "'sympy'", ")", ")", "b", "=", "matrix_to_zero", "(", "represent", "(", "e", ".", "doit", "(", ")", ",", "nqubits", "=", "1", ",", "format", "=", "'sympy'", ")", ")", "assert", "(", "a", "==", "b", ")", "e", "=", "Commutator", "(", "g1", "(", "0", ")", ",", "g2", "(", "1", ")", ")", "assert", "(", "e", ".", "doit", "(", ")", "==", "0", ")" ]
test single qubit gate commutation relations .
train
false
11,872
def setup_generic_relations(model_class):
    Action = get_model('actstream', 'action')
    if (Action is None):
        raise RegistrationError('Unable get actstream.Action. Potential circular imports in initialisation. Try moving actstream app to come after the apps which have models to register in the INSTALLED_APPS setting.')
    related_attr_name = 'related_name'
    related_attr_value = ('actions_with_%s' % label(model_class))
    if (django.VERSION[:2] >= (1, 7)):
        related_attr_name = 'related_query_name'
    relations = {}
    for field in ('actor', 'target', 'action_object'):
        attr = ('%s_actions' % field)
        attr_value = ('%s_as_%s' % (related_attr_value, field))
        kwargs = {'content_type_field': ('%s_content_type' % field), 'object_id_field': ('%s_object_id' % field), related_attr_name: attr_value}
        rel = generic.GenericRelation('actstream.Action', **kwargs)
        rel.contribute_to_class(model_class, attr)
        relations[field] = rel
        setattr(Action, attr_value, None)
    return relations
[ "def", "setup_generic_relations", "(", "model_class", ")", ":", "Action", "=", "get_model", "(", "'actstream'", ",", "'action'", ")", "if", "(", "Action", "is", "None", ")", ":", "raise", "RegistrationError", "(", "'Unable get actstream.Action. Potential circular imports in initialisation. Try moving actstream app to come after the apps which have models to register in the INSTALLED_APPS setting.'", ")", "related_attr_name", "=", "'related_name'", "related_attr_value", "=", "(", "'actions_with_%s'", "%", "label", "(", "model_class", ")", ")", "if", "(", "django", ".", "VERSION", "[", ":", "2", "]", ">=", "(", "1", ",", "7", ")", ")", ":", "related_attr_name", "=", "'related_query_name'", "relations", "=", "{", "}", "for", "field", "in", "(", "'actor'", ",", "'target'", ",", "'action_object'", ")", ":", "attr", "=", "(", "'%s_actions'", "%", "field", ")", "attr_value", "=", "(", "'%s_as_%s'", "%", "(", "related_attr_value", ",", "field", ")", ")", "kwargs", "=", "{", "'content_type_field'", ":", "(", "'%s_content_type'", "%", "field", ")", ",", "'object_id_field'", ":", "(", "'%s_object_id'", "%", "field", ")", ",", "related_attr_name", ":", "attr_value", "}", "rel", "=", "generic", ".", "GenericRelation", "(", "'actstream.Action'", ",", "**", "kwargs", ")", "rel", ".", "contribute_to_class", "(", "model_class", ",", "attr", ")", "relations", "[", "field", "]", "=", "rel", "setattr", "(", "Action", ",", "attr_value", ",", "None", ")", "return", "relations" ]
set up GenericRelations for actionable models .
train
true
11,874
def get_container_openshift_version(facts): for filename in ['/etc/sysconfig/%s-master', '/etc/sysconfig/%s-node']: env_path = (filename % facts['common']['service_type']) if (not os.path.exists(env_path)): continue with open(env_path) as env_file: for line in env_file: if line.startswith('IMAGE_VERSION='): tag = line[len('IMAGE_VERSION='):].strip() version = tag[1:].split('-')[0] return version return None
[ "def", "get_container_openshift_version", "(", "facts", ")", ":", "for", "filename", "in", "[", "'/etc/sysconfig/%s-master'", ",", "'/etc/sysconfig/%s-node'", "]", ":", "env_path", "=", "(", "filename", "%", "facts", "[", "'common'", "]", "[", "'service_type'", "]", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "env_path", ")", ")", ":", "continue", "with", "open", "(", "env_path", ")", "as", "env_file", ":", "for", "line", "in", "env_file", ":", "if", "line", ".", "startswith", "(", "'IMAGE_VERSION='", ")", ":", "tag", "=", "line", "[", "len", "(", "'IMAGE_VERSION='", ")", ":", "]", ".", "strip", "(", ")", "version", "=", "tag", "[", "1", ":", "]", ".", "split", "(", "'-'", ")", "[", "0", "]", "return", "version", "return", "None" ]
if containerized, determine the installed openshift version from the IMAGE_VERSION tag in the service environment files .
train
false
11,875
def get_votes(obj): obj_type = apps.get_model('contenttypes', 'ContentType').objects.get_for_model(obj) try: return Votes.objects.get(content_type=obj_type, object_id=obj.id).count except Votes.DoesNotExist: return 0
[ "def", "get_votes", "(", "obj", ")", ":", "obj_type", "=", "apps", ".", "get_model", "(", "'contenttypes'", ",", "'ContentType'", ")", ".", "objects", ".", "get_for_model", "(", "obj", ")", "try", ":", "return", "Votes", ".", "objects", ".", "get", "(", "content_type", "=", "obj_type", ",", "object_id", "=", "obj", ".", "id", ")", ".", "count", "except", "Votes", ".", "DoesNotExist", ":", "return", "0" ]
get the number of votes an object has .
train
false
11,876
def _sanitize_html(source): return TAG_PATTERN.sub(_sanitize_tag, source)
[ "def", "_sanitize_html", "(", "source", ")", ":", "return", "TAG_PATTERN", ".", "sub", "(", "_sanitize_tag", ",", "source", ")" ]
return source with all non-allowed tags removed .
train
false
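The allow-list helpers it relies on live elsewhere; a self-contained sketch of the same approach, with a hypothetical TAG_PATTERN and _sanitize_tag standing in for the originals:
import re

ALLOWED_TAGS = {'b', 'i', 'em', 'strong'}  # hypothetical allow-list
TAG_PATTERN = re.compile(r'</?([a-zA-Z0-9]+)[^>]*>')

def _sanitize_tag(match):
    # keep the tag if allowed, drop it otherwise (its text content stays)
    return match.group(0) if match.group(1).lower() in ALLOWED_TAGS else ''

print(TAG_PATTERN.sub(_sanitize_tag, '<b>ok</b><script>x()</script>'))  # <b>ok</b>x()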
11,877
@then(u'we see completions refresh started') def step_see_refresh_started(context): _expect_exact(context, u'refresh started in the background', timeout=2)
[ "@", "then", "(", "u'we see completions refresh started'", ")", "def", "step_see_refresh_started", "(", "context", ")", ":", "_expect_exact", "(", "context", ",", "u'refresh started in the background'", ",", "timeout", "=", "2", ")" ]
wait to see refresh output .
train
false
11,879
def encode_base64(msg): orig = msg.get_payload() encdata = _bencode(orig) msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'base64'
[ "def", "encode_base64", "(", "msg", ")", ":", "orig", "=", "msg", ".", "get_payload", "(", ")", "encdata", "=", "_bencode", "(", "orig", ")", "msg", ".", "set_payload", "(", "encdata", ")", "msg", "[", "'Content-Transfer-Encoding'", "]", "=", "'base64'" ]
encode the message's payload in base64 .
train
true
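Usage sketch with the stdlib email package (this snippet is the stdlib's email.encoders.encode_base64):
from email.message import Message
from email.encoders import encode_base64

msg = Message()
msg.set_payload(b'\x00\x01 binary payload')
encode_base64(msg)
print(msg['Content-Transfer-Encoding'])  # base64
print(msg.get_payload())                 # the base64-encoded text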
11,880
def _write_cron_lines(user, lines): path = salt.utils.files.mkstemp() if (_check_instance_uid_match(user) or (__grains__.get('os_family') in ('Solaris', 'AIX'))): with salt.utils.fpopen(path, 'w+', uid=__salt__['file.user_to_uid'](user), mode=384) as fp_: fp_.writelines(lines) ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path), runas=user, python_shell=False) else: with salt.utils.fpopen(path, 'w+', mode=384) as fp_: fp_.writelines(lines) ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path, user), python_shell=False) os.remove(path) return ret
[ "def", "_write_cron_lines", "(", "user", ",", "lines", ")", ":", "path", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "if", "(", "_check_instance_uid_match", "(", "user", ")", "or", "(", "__grains__", ".", "get", "(", "'os_family'", ")", "in", "(", "'Solaris'", ",", "'AIX'", ")", ")", ")", ":", "with", "salt", ".", "utils", ".", "fpopen", "(", "path", ",", "'w+'", ",", "uid", "=", "__salt__", "[", "'file.user_to_uid'", "]", "(", "user", ")", ",", "mode", "=", "384", ")", "as", "fp_", ":", "fp_", ".", "writelines", "(", "lines", ")", "ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "_get_cron_cmdstr", "(", "path", ")", ",", "runas", "=", "user", ",", "python_shell", "=", "False", ")", "else", ":", "with", "salt", ".", "utils", ".", "fpopen", "(", "path", ",", "'w+'", ",", "mode", "=", "384", ")", "as", "fp_", ":", "fp_", ".", "writelines", "(", "lines", ")", "ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "_get_cron_cmdstr", "(", "path", ",", "user", ")", ",", "python_shell", "=", "False", ")", "os", ".", "remove", "(", "path", ")", "return", "ret" ]
takes a list of lines to be committed to a user's crontab and writes it .
train
true
11,882
def test_mixed_newstyle_oldstyle_init(): class foo: def __init__(self): self.x = 3 class bar(foo, ): def __init__(self): self.x = 4 class baz(foo, ): pass class ns(object, ): pass class full(bar, baz, ns, ): pass a = full() AreEqual(a.x, 4) class full(bar, baz, ns, ): def __init__(self): self.x = 5 a = full() AreEqual(a.x, 5) class ns(object, ): def __init__(self): self.x = 6 class full(bar, baz, ns, ): pass a = full() AreEqual(a.x, 4)
[ "def", "test_mixed_newstyle_oldstyle_init", "(", ")", ":", "class", "foo", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "x", "=", "3", "class", "bar", "(", "foo", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "x", "=", "4", "class", "baz", "(", "foo", ",", ")", ":", "pass", "class", "ns", "(", "object", ",", ")", ":", "pass", "class", "full", "(", "bar", ",", "baz", ",", "ns", ",", ")", ":", "pass", "a", "=", "full", "(", ")", "AreEqual", "(", "a", ".", "x", ",", "4", ")", "class", "full", "(", "bar", ",", "baz", ",", "ns", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "x", "=", "5", "a", "=", "full", "(", ")", "AreEqual", "(", "a", ".", "x", ",", "5", ")", "class", "ns", "(", "object", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "x", "=", "6", "class", "full", "(", "bar", ",", "baz", ",", "ns", ",", ")", ":", "pass", "a", "=", "full", "(", ")", "AreEqual", "(", "a", ".", "x", ",", "4", ")" ]
mixed new-style & old-style class should run __init__ if it's defined in the old-style class .
train
false
11,884
@blueprint.route('/users/<user>/meters') def list_meters_by_user(user): rq = flask.request meters = rq.storage_conn.get_meters(user=user, project=acl.get_limited_to_project(rq.headers), metaquery=_get_metaquery(rq.args)) return flask.jsonify(meters=[m.as_dict() for m in meters])
[ "@", "blueprint", ".", "route", "(", "'/users/<user>/meters'", ")", "def", "list_meters_by_user", "(", "user", ")", ":", "rq", "=", "flask", ".", "request", "meters", "=", "rq", ".", "storage_conn", ".", "get_meters", "(", "user", "=", "user", ",", "project", "=", "acl", ".", "get_limited_to_project", "(", "rq", ".", "headers", ")", ",", "metaquery", "=", "_get_metaquery", "(", "rq", ".", "args", ")", ")", "return", "flask", ".", "jsonify", "(", "meters", "=", "[", "m", ".", "as_dict", "(", ")", "for", "m", "in", "meters", "]", ")" ]
return a list of meters by user .
train
false
11,885
def _tmp_access_rule(method, ip=None, ttl=None, port=None, direction='in', port_origin='d', ip_origin='d', comment=''): if _status_csf(): if (ip is None): return {'error': 'You must supply an ip address or CIDR.'} if (ttl is None): return {'error': 'You must supply a ttl.'} args = _build_tmp_access_args(method, ip, ttl, port, direction, comment) return __csf_cmd(args)
[ "def", "_tmp_access_rule", "(", "method", ",", "ip", "=", "None", ",", "ttl", "=", "None", ",", "port", "=", "None", ",", "direction", "=", "'in'", ",", "port_origin", "=", "'d'", ",", "ip_origin", "=", "'d'", ",", "comment", "=", "''", ")", ":", "if", "_status_csf", "(", ")", ":", "if", "(", "ip", "is", "None", ")", ":", "return", "{", "'error'", ":", "'You must supply an ip address or CIDR.'", "}", "if", "(", "ttl", "is", "None", ")", ":", "return", "{", "'error'", ":", "'You must supply a ttl.'", "}", "args", "=", "_build_tmp_access_args", "(", "method", ",", "ip", ",", "ttl", ",", "port", ",", "direction", ",", "comment", ")", "return", "__csf_cmd", "(", "args", ")" ]
handles the cmd execution for tempdeny and tempallow commands .
train
true
11,886
def get_sans_from_cert(cert, typ=OpenSSL.crypto.FILETYPE_PEM): return _get_sans_from_cert_or_req(cert, OpenSSL.crypto.load_certificate, typ)
[ "def", "get_sans_from_cert", "(", "cert", ",", "typ", "=", "OpenSSL", ".", "crypto", ".", "FILETYPE_PEM", ")", ":", "return", "_get_sans_from_cert_or_req", "(", "cert", ",", "OpenSSL", ".", "crypto", ".", "load_certificate", ",", "typ", ")" ]
get a list of subject alternative names from a certificate .
train
false
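A hedged usage sketch that builds a throwaway self-signed certificate with one SAN for demonstration (standard pyOpenSSL calls only):
import OpenSSL.crypto as crypto

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
cert = crypto.X509()
cert.get_subject().CN = 'example.com'
cert.set_serial_number(1)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(3600)
cert.add_extensions([crypto.X509Extension(b'subjectAltName', False, b'DNS:example.com')])
cert.set_pubkey(key)
cert.sign(key, 'sha256')
pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
print(get_sans_from_cert(pem))  # expected: ['example.com']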
11,887
def pollProcess(process, suppress_errors=False): while True: dataToStdout('.') time.sleep(1) returncode = process.poll() if (returncode is not None): if (not suppress_errors): if (returncode == 0): dataToStdout(' done\n') elif (returncode < 0): dataToStdout((' process terminated by signal %d\n' % returncode)) elif (returncode > 0): dataToStdout((' quit unexpectedly with return code %d\n' % returncode)) break
[ "def", "pollProcess", "(", "process", ",", "suppress_errors", "=", "False", ")", ":", "while", "True", ":", "dataToStdout", "(", "'.'", ")", "time", ".", "sleep", "(", "1", ")", "returncode", "=", "process", ".", "poll", "(", ")", "if", "(", "returncode", "is", "not", "None", ")", ":", "if", "(", "not", "suppress_errors", ")", ":", "if", "(", "returncode", "==", "0", ")", ":", "dataToStdout", "(", "' done\\n'", ")", "elif", "(", "returncode", "<", "0", ")", ":", "dataToStdout", "(", "(", "' process terminated by signal %d\\n'", "%", "returncode", ")", ")", "elif", "(", "returncode", ">", "0", ")", ":", "dataToStdout", "(", "(", "' quit unexpectedly with return code %d\\n'", "%", "returncode", ")", ")", "break" ]
checks for process status .
train
false
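Usage sketch with a subprocess handle; any object whose poll() returns None while running works:
import subprocess
import sys

proc = subprocess.Popen([sys.executable, '-c', 'import time; time.sleep(2)'])
pollProcess(proc)  # prints a dot per second, then ' done' on clean exit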
11,888
def convertXMLElementRenameByPaths(geometryOutput, xmlElement): xmlElement.className = 'path' for geometryOutputChild in geometryOutput: pathElement = xml_simple_reader.XMLElement() pathElement.setParentAddToChildren(xmlElement) convertXMLElementRename(geometryOutputChild, pathElement)
[ "def", "convertXMLElementRenameByPaths", "(", "geometryOutput", ",", "xmlElement", ")", ":", "xmlElement", ".", "className", "=", "'path'", "for", "geometryOutputChild", "in", "geometryOutput", ":", "pathElement", "=", "xml_simple_reader", ".", "XMLElement", "(", ")", "pathElement", ".", "setParentAddToChildren", "(", "xmlElement", ")", "convertXMLElementRename", "(", "geometryOutputChild", ",", "pathElement", ")" ]
convert the xml element to a path xml element and add paths .
train
false
11,889
def check_share_permission(doctype, name): if (not frappe.has_permission(doctype, ptype=u'share', doc=name)): frappe.throw(_(u'No permission to {0} {1} {2}'.format(u'share', doctype, name)), frappe.PermissionError)
[ "def", "check_share_permission", "(", "doctype", ",", "name", ")", ":", "if", "(", "not", "frappe", ".", "has_permission", "(", "doctype", ",", "ptype", "=", "u'share'", ",", "doc", "=", "name", ")", ")", ":", "frappe", ".", "throw", "(", "_", "(", "u'No permission to {0} {1} {2}'", ".", "format", "(", "u'share'", ",", "doctype", ",", "name", ")", ")", ",", "frappe", ".", "PermissionError", ")" ]
check if the user can share with other users .
train
false
11,890
def write_head_pos(fname, pos): _check_fname(fname, overwrite=True) pos = np.array(pos, np.float64) if ((pos.ndim != 2) or (pos.shape[1] != 10)): raise ValueError('pos must be a 2D array of shape (N, 10)') with open(fname, 'wb') as fid: fid.write(' Time q1 q2 q3 q4 q5 q6 g-value error velocity\n'.encode('ASCII')) for p in pos: fmts = (['% 9.3f'] + (['% 8.5f'] * 9)) fid.write((((' ' + ' '.join(fmts)) + '\n') % tuple(p)).encode('ASCII'))
[ "def", "write_head_pos", "(", "fname", ",", "pos", ")", ":", "_check_fname", "(", "fname", ",", "overwrite", "=", "True", ")", "pos", "=", "np", ".", "array", "(", "pos", ",", "np", ".", "float64", ")", "if", "(", "(", "pos", ".", "ndim", "!=", "2", ")", "or", "(", "pos", ".", "shape", "[", "1", "]", "!=", "10", ")", ")", ":", "raise", "ValueError", "(", "'pos must be a 2D array of shape (N, 10)'", ")", "with", "open", "(", "fname", ",", "'wb'", ")", "as", "fid", ":", "fid", ".", "write", "(", "' Time q1 q2 q3 q4 q5 q6 g-value error velocity\\n'", ".", "encode", "(", "'ASCII'", ")", ")", "for", "p", "in", "pos", ":", "fmts", "=", "(", "[", "'% 9.3f'", "]", "+", "(", "[", "'% 8.5f'", "]", "*", "9", ")", ")", "fid", ".", "write", "(", "(", "(", "(", "' '", "+", "' '", ".", "join", "(", "fmts", ")", ")", "+", "'\\n'", ")", "%", "tuple", "(", "p", ")", ")", ".", "encode", "(", "'ASCII'", ")", ")" ]
write maxfilter-formatted head position parameters .
train
false
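A minimal sketch of the expected input shape, with dummy values (the output filename is hypothetical):
import numpy as np

pos = np.zeros((5, 10))     # rows: [t, q1..q6, g-value, error, velocity]
pos[:, 0] = np.arange(5.0)  # increasing timestamps
write_head_pos('dummy_hp.pos', pos)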
11,891
def mailing_list(): tablename = 'pr_group' table = s3db[tablename] s3.filter = (table.group_type == 5) table.group_type.writable = False table.group_type.readable = False table.name.label = T('Mailing List Name') s3.crud_strings[tablename] = s3.pr_mailing_list_crud_strings list_fields = s3db.configure(tablename, list_fields=['id', 'name', 'description']) _rheader = s3db.pr_rheader _tabs = [(T('Organization'), 'organisation/'), (T('Mailing List Details'), None)] if (len(request.args) > 0): _tabs.append((T('Members'), 'group_membership')) if ('viewing' in request.vars): (tablename, record_id) = request.vars.viewing.rsplit('.', 1) if (tablename == 'org_organisation'): table = s3db[tablename] _rheader = s3db.org_rheader _tabs = [] s3db.add_components('pr_group', pr_group_membership='group_id') rheader = (lambda r: _rheader(r, tabs=_tabs)) return s3_rest_controller('pr', 'group', rheader=rheader)
[ "def", "mailing_list", "(", ")", ":", "tablename", "=", "'pr_group'", "table", "=", "s3db", "[", "tablename", "]", "s3", ".", "filter", "=", "(", "table", ".", "group_type", "==", "5", ")", "table", ".", "group_type", ".", "writable", "=", "False", "table", ".", "group_type", ".", "readable", "=", "False", "table", ".", "name", ".", "label", "=", "T", "(", "'Mailing List Name'", ")", "s3", ".", "crud_strings", "[", "tablename", "]", "=", "s3", ".", "pr_mailing_list_crud_strings", "list_fields", "=", "s3db", ".", "configure", "(", "tablename", ",", "list_fields", "=", "[", "'id'", ",", "'name'", ",", "'description'", "]", ")", "_rheader", "=", "s3db", ".", "pr_rheader", "_tabs", "=", "[", "(", "T", "(", "'Organization'", ")", ",", "'organisation/'", ")", ",", "(", "T", "(", "'Mailing List Details'", ")", ",", "None", ")", "]", "if", "(", "len", "(", "request", ".", "args", ")", ">", "0", ")", ":", "_tabs", ".", "append", "(", "(", "T", "(", "'Members'", ")", ",", "'group_membership'", ")", ")", "if", "(", "'viewing'", "in", "request", ".", "vars", ")", ":", "(", "tablename", ",", "record_id", ")", "=", "request", ".", "vars", ".", "viewing", ".", "rsplit", "(", "'.'", ",", "1", ")", "if", "(", "tablename", "==", "'org_organisation'", ")", ":", "table", "=", "s3db", "[", "tablename", "]", "_rheader", "=", "s3db", ".", "org_rheader", "_tabs", "=", "[", "]", "s3db", ".", "add_components", "(", "'pr_group'", ",", "pr_group_membership", "=", "'group_id'", ")", "rheader", "=", "(", "lambda", "r", ":", "_rheader", "(", "r", ",", "tabs", "=", "_tabs", ")", ")", "return", "s3_rest_controller", "(", "'pr'", ",", "'group'", ",", "rheader", "=", "rheader", ")" ]
restful crud controller .
train
false
11,893
def _to_stublist(degree_sequence): return list(chaini((([n] * d) for (n, d) in enumerate(degree_sequence))))
[ "def", "_to_stublist", "(", "degree_sequence", ")", ":", "return", "list", "(", "chaini", "(", "(", "(", "[", "n", "]", "*", "d", ")", "for", "(", "n", ",", "d", ")", "in", "enumerate", "(", "degree_sequence", ")", ")", ")", ")" ]
returns a list of degree-repeated node numbers .
train
false
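For example, each node index is repeated once per unit of degree:
print(_to_stublist([2, 1, 3]))  # [0, 0, 1, 2, 2, 2]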
11,894
def equalize(node1, node2): keys = [key for key in os.listdir(KEY_DIRECTORY) if key.endswith('.key')] keyname = keys[0].split('.')[0] to_move = (abs((node1['load'] - node2['load'])) / 2) mb_to_move = round((to_move / (1024 ** 2)), 2) if (node1['load'] > node2['load']): logging.info('Moving {} MiB from {} to {}'.format(mb_to_move, node1['ip'], node2['ip'])) percentile = (100 - int(((to_move / node1['load']) * 100))) new_token = ssh(node1['ip'], keyname, 'appscale-get-token {}'.format(percentile), method=check_output).strip() repair = [new_token, node1['token']] cleanup_ip = node1['ip'] else: logging.info('Moving {} MiB from {} to {}'.format(mb_to_move, node2['ip'], node1['ip'])) percentile = int(((to_move / node2['load']) * 100)) new_token = ssh(node2['ip'], keyname, 'appscale-get-token {}'.format(percentile), method=check_output).strip() repair = [node1['token'], new_token] cleanup_ip = node2['ip'] logging.info('Moving {} to {}'.format(node1['ip'], (new_token[:60] + '...'))) ssh(node1['ip'], keyname, '{} move {}'.format(NODE_TOOL, new_token)) start = (repair[0][:60] + '...') end = (repair[1][:60] + '...') logging.info('Repairing {} to {}'.format(start, end)) check_output([NODE_TOOL, 'repair', '-st', repair[0], '-et', repair[1]]) logging.info('Cleaning up {}'.format(cleanup_ip)) ssh(cleanup_ip, keyname, '{} cleanup'.format(NODE_TOOL))
[ "def", "equalize", "(", "node1", ",", "node2", ")", ":", "keys", "=", "[", "key", "for", "key", "in", "os", ".", "listdir", "(", "KEY_DIRECTORY", ")", "if", "key", ".", "endswith", "(", "'.key'", ")", "]", "keyname", "=", "keys", "[", "0", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", "to_move", "=", "(", "abs", "(", "(", "node1", "[", "'load'", "]", "-", "node2", "[", "'load'", "]", ")", ")", "/", "2", ")", "mb_to_move", "=", "round", "(", "(", "to_move", "/", "(", "1024", "**", "2", ")", ")", ",", "2", ")", "if", "(", "node1", "[", "'load'", "]", ">", "node2", "[", "'load'", "]", ")", ":", "logging", ".", "info", "(", "'Moving {} MiB from {} to {}'", ".", "format", "(", "mb_to_move", ",", "node1", "[", "'ip'", "]", ",", "node2", "[", "'ip'", "]", ")", ")", "percentile", "=", "(", "100", "-", "int", "(", "(", "(", "to_move", "/", "node1", "[", "'load'", "]", ")", "*", "100", ")", ")", ")", "new_token", "=", "ssh", "(", "node1", "[", "'ip'", "]", ",", "keyname", ",", "'appscale-get-token {}'", ".", "format", "(", "percentile", ")", ",", "method", "=", "check_output", ")", ".", "strip", "(", ")", "repair", "=", "[", "new_token", ",", "node1", "[", "'token'", "]", "]", "cleanup_ip", "=", "node1", "[", "'ip'", "]", "else", ":", "logging", ".", "info", "(", "'Moving {} MiB from {} to {}'", ".", "format", "(", "mb_to_move", ",", "node2", "[", "'ip'", "]", ",", "node1", "[", "'ip'", "]", ")", ")", "percentile", "=", "int", "(", "(", "(", "to_move", "/", "node2", "[", "'load'", "]", ")", "*", "100", ")", ")", "new_token", "=", "ssh", "(", "node2", "[", "'ip'", "]", ",", "keyname", ",", "'appscale-get-token {}'", ".", "format", "(", "percentile", ")", ",", "method", "=", "check_output", ")", ".", "strip", "(", ")", "repair", "=", "[", "node1", "[", "'token'", "]", ",", "new_token", "]", "cleanup_ip", "=", "node2", "[", "'ip'", "]", "logging", ".", "info", "(", "'Moving {} to {}'", ".", "format", "(", "node1", "[", "'ip'", "]", ",", "(", "new_token", "[", ":", "60", "]", "+", "'...'", ")", ")", ")", "ssh", "(", "node1", "[", "'ip'", "]", ",", "keyname", ",", "'{} move {}'", ".", "format", "(", "NODE_TOOL", ",", "new_token", ")", ")", "start", "=", "(", "repair", "[", "0", "]", "[", ":", "60", "]", "+", "'...'", ")", "end", "=", "(", "repair", "[", "1", "]", "[", ":", "60", "]", "+", "'...'", ")", "logging", ".", "info", "(", "'Repairing {} to {}'", ".", "format", "(", "start", ",", "end", ")", ")", "check_output", "(", "[", "NODE_TOOL", ",", "'repair'", ",", "'-st'", ",", "repair", "[", "0", "]", ",", "'-et'", ",", "repair", "[", "1", "]", "]", ")", "logging", ".", "info", "(", "'Cleaning up {}'", ".", "format", "(", "cleanup_ip", ")", ")", "ssh", "(", "cleanup_ip", ",", "keyname", ",", "'{} cleanup'", ".", "format", "(", "NODE_TOOL", ")", ")" ]
equalize the data load between two cassandra nodes by moving a token, then repairing and cleaning up .
train
false
11,895
def get_tag(name, session): try: return session.query(ArchiveTag).filter((ArchiveTag.name == name)).one() except NoResultFound: source = ArchiveTag(name) return source
[ "def", "get_tag", "(", "name", ",", "session", ")", ":", "try", ":", "return", "session", ".", "query", "(", "ArchiveTag", ")", ".", "filter", "(", "(", "ArchiveTag", ".", "name", "==", "name", ")", ")", ".", "one", "(", ")", "except", "NoResultFound", ":", "source", "=", "ArchiveTag", "(", "name", ")", "return", "source" ]
return the archive tag with the given name, creating a new one if it does not exist .
train
false
11,896
def shutdown_program(): logging.info('Performing sabnzbd shutdown') sabnzbd.halt() cherrypy.engine.exit() sabnzbd.SABSTOP = True
[ "def", "shutdown_program", "(", ")", ":", "logging", ".", "info", "(", "'Performing sabnzbd shutdown'", ")", "sabnzbd", ".", "halt", "(", ")", "cherrypy", ".", "engine", ".", "exit", "(", ")", "sabnzbd", ".", "SABSTOP", "=", "True" ]
stop program after halting and saving .
train
false
11,897
def has_meta_cmd(query): try: first_token = query.split()[0] if (first_token.lower() in (u'alter', u'create', u'drop')): return True except Exception: return False return False
[ "def", "has_meta_cmd", "(", "query", ")", ":", "try", ":", "first_token", "=", "query", ".", "split", "(", ")", "[", "0", "]", "if", "(", "first_token", ".", "lower", "(", ")", "in", "(", "u'alter'", ",", "u'create'", ",", "u'drop'", ")", ")", ":", "return", "True", "except", "Exception", ":", "return", "False", "return", "False" ]
determines if the completion needs a refresh by checking if the sql statement is an alter, create, or drop .
train
false
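For example:
print(has_meta_cmd('ALTER TABLE users ADD COLUMN age int'))  # True
print(has_meta_cmd('select * from users'))                   # False
print(has_meta_cmd(''))                                      # False (IndexError is swallowed)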
11,898
def telnet_logins(src_ip_port, dst_ip_port, load, ack, seq): global telnet_stream msg = None if (src_ip_port in telnet_stream): try: telnet_stream[src_ip_port] += load.decode('utf8') except UnicodeDecodeError: pass if (('\r' in telnet_stream[src_ip_port]) or ('\n' in telnet_stream[src_ip_port])): telnet_split = telnet_stream[src_ip_port].split(' ', 1) cred_type = telnet_split[0] value = telnet_split[1].replace('\r\n', '').replace('\r', '').replace('\n', '') msg = ('Telnet %s: %s' % (cred_type, value)) printer(src_ip_port, dst_ip_port, msg) del telnet_stream[src_ip_port] if (len(telnet_stream) > 100): telnet_stream.popitem(last=False) mod_load = load.lower().strip() if (mod_load.endswith('username:') or mod_load.endswith('login:')): telnet_stream[dst_ip_port] = 'username ' elif mod_load.endswith('password:'): telnet_stream[dst_ip_port] = 'password '
[ "def", "telnet_logins", "(", "src_ip_port", ",", "dst_ip_port", ",", "load", ",", "ack", ",", "seq", ")", ":", "global", "telnet_stream", "msg", "=", "None", "if", "(", "src_ip_port", "in", "telnet_stream", ")", ":", "try", ":", "telnet_stream", "[", "src_ip_port", "]", "+=", "load", ".", "decode", "(", "'utf8'", ")", "except", "UnicodeDecodeError", ":", "pass", "if", "(", "(", "'\\r'", "in", "telnet_stream", "[", "src_ip_port", "]", ")", "or", "(", "'\\n'", "in", "telnet_stream", "[", "src_ip_port", "]", ")", ")", ":", "telnet_split", "=", "telnet_stream", "[", "src_ip_port", "]", ".", "split", "(", "' '", ",", "1", ")", "cred_type", "=", "telnet_split", "[", "0", "]", "value", "=", "telnet_split", "[", "1", "]", ".", "replace", "(", "'\\r\\n'", ",", "''", ")", ".", "replace", "(", "'\\r'", ",", "''", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", "msg", "=", "(", "'Telnet %s: %s'", "%", "(", "cred_type", ",", "value", ")", ")", "printer", "(", "src_ip_port", ",", "dst_ip_port", ",", "msg", ")", "del", "telnet_stream", "[", "src_ip_port", "]", "if", "(", "len", "(", "telnet_stream", ")", ">", "100", ")", ":", "telnet_stream", ".", "popitem", "(", "last", "=", "False", ")", "mod_load", "=", "load", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "(", "mod_load", ".", "endswith", "(", "'username:'", ")", "or", "mod_load", ".", "endswith", "(", "'login:'", ")", ")", ":", "telnet_stream", "[", "dst_ip_port", "]", "=", "'username '", "elif", "mod_load", ".", "endswith", "(", "'password:'", ")", ":", "telnet_stream", "[", "dst_ip_port", "]", "=", "'password '" ]
catch telnet logins and passwords .
train
false
11,899
def Draw(im, mode=None): try: return im.getdraw(mode) except AttributeError: return ImageDraw(im, mode)
[ "def", "Draw", "(", "im", ",", "mode", "=", "None", ")", ":", "try", ":", "return", "im", ".", "getdraw", "(", "mode", ")", "except", "AttributeError", ":", "return", "ImageDraw", "(", "im", ",", "mode", ")" ]
a simple 2d drawing interface for pil images .
train
false
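Typical Pillow usage of this factory (ImageDraw.Draw is the public entry point):
from PIL import Image, ImageDraw

im = Image.new('RGB', (100, 100), 'white')
d = ImageDraw.Draw(im)              # falls back to the ImageDraw class here
d.line((0, 0, 99, 99), fill='red')  # draw a red diagonal on the canvas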
11,900
def _import_by_name(name): try: name_parts = name.split('.') modname = '.'.join(name_parts[:(-1)]) if modname: try: __import__(modname) mod = sys.modules[modname] return (getattr(mod, name_parts[(-1)]), mod) except (ImportError, IndexError, AttributeError): pass last_j = 0 modname = None for j in reversed(range(1, (len(name_parts) + 1))): last_j = j modname = '.'.join(name_parts[:j]) try: __import__(modname) except ImportError: continue if (modname in sys.modules): break if (last_j < len(name_parts)): parent = None obj = sys.modules[modname] for obj_name in name_parts[last_j:]: parent = obj obj = getattr(obj, obj_name) return (obj, parent) else: return (sys.modules[modname], None) except (ValueError, ImportError, AttributeError, KeyError) as e: raise ImportError(*e.args)
[ "def", "_import_by_name", "(", "name", ")", ":", "try", ":", "name_parts", "=", "name", ".", "split", "(", "'.'", ")", "modname", "=", "'.'", ".", "join", "(", "name_parts", "[", ":", "(", "-", "1", ")", "]", ")", "if", "modname", ":", "try", ":", "__import__", "(", "modname", ")", "mod", "=", "sys", ".", "modules", "[", "modname", "]", "return", "(", "getattr", "(", "mod", ",", "name_parts", "[", "(", "-", "1", ")", "]", ")", ",", "mod", ")", "except", "(", "ImportError", ",", "IndexError", ",", "AttributeError", ")", ":", "pass", "last_j", "=", "0", "modname", "=", "None", "for", "j", "in", "reversed", "(", "range", "(", "1", ",", "(", "len", "(", "name_parts", ")", "+", "1", ")", ")", ")", ":", "last_j", "=", "j", "modname", "=", "'.'", ".", "join", "(", "name_parts", "[", ":", "j", "]", ")", "try", ":", "__import__", "(", "modname", ")", "except", "ImportError", ":", "continue", "if", "(", "modname", "in", "sys", ".", "modules", ")", ":", "break", "if", "(", "last_j", "<", "len", "(", "name_parts", ")", ")", ":", "parent", "=", "None", "obj", "=", "sys", ".", "modules", "[", "modname", "]", "for", "obj_name", "in", "name_parts", "[", "last_j", ":", "]", ":", "parent", "=", "obj", "obj", "=", "getattr", "(", "obj", ",", "obj_name", ")", "return", "(", "obj", ",", "parent", ")", "else", ":", "return", "(", "sys", ".", "modules", "[", "modname", "]", ",", "None", ")", "except", "(", "ValueError", ",", "ImportError", ",", "AttributeError", ",", "KeyError", ")", "as", "e", ":", "raise", "ImportError", "(", "*", "e", ".", "args", ")" ]
import a python object given its full name .
train
true
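For example, resolving a dotted name to an (object, parent) pair:
obj, parent = _import_by_name('os.path.join')
print(obj.__name__)     # join
print(parent.__name__)  # posixpath (or ntpath on Windows)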
11,902
def make_dictValidator(keyvalidator, valuevalidator): def v(d): newd = util.OrderedDict() for (key, value) in d.iteritems(): newd[keyvalidator(key)] = valuevalidator(value) return newd v.keyvalidator = keyvalidator v.valuevalidator = valuevalidator return v
[ "def", "make_dictValidator", "(", "keyvalidator", ",", "valuevalidator", ")", ":", "def", "v", "(", "d", ")", ":", "newd", "=", "util", ".", "OrderedDict", "(", ")", "for", "(", "key", ",", "value", ")", "in", "d", ".", "iteritems", "(", ")", ":", "newd", "[", "keyvalidator", "(", "key", ")", "]", "=", "valuevalidator", "(", "value", ")", "return", "newd", "v", ".", "keyvalidator", "=", "keyvalidator", "v", ".", "valuevalidator", "=", "valuevalidator", "return", "v" ]
compose and return a dict validator -- a validator that validates each key and value in a dictionary .
train
false
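A hedged sketch (the function uses dict.iteritems, so a Python 2 runtime is assumed; the validators here are illustrative):
def strip_key(k):
    return k.strip()  # hypothetical key validator

v = make_dictValidator(strip_key, int)  # int coerces each value
print(v({' a ': '1', 'b': 2}))  # OrderedDict equivalent of {'a': 1, 'b': 2}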
11,904
def crosstalk(width, height, connections): d = {('x', 0, i): i for i in range(width)} for j in range(1, height): d.update({('x', j, i): (noop, [('x', (j - 1), randint(0, width)) for _ in range(connections)]) for i in range(width)}) return (d, [('x', (height - 1), i) for i in range(width)])
[ "def", "crosstalk", "(", "width", ",", "height", ",", "connections", ")", ":", "d", "=", "{", "(", "'x'", ",", "0", ",", "i", ")", ":", "i", "for", "i", "in", "range", "(", "width", ")", "}", "for", "j", "in", "range", "(", "1", ",", "height", ")", ":", "d", ".", "update", "(", "{", "(", "'x'", ",", "j", ",", "i", ")", ":", "(", "noop", ",", "[", "(", "'x'", ",", "(", "j", "-", "1", ")", ",", "randint", "(", "0", ",", "width", ")", ")", "for", "_", "in", "range", "(", "connections", ")", "]", ")", "for", "i", "in", "range", "(", "width", ")", "}", ")", "return", "(", "d", ",", "[", "(", "'x'", ",", "(", "height", "-", "1", ")", ",", "i", ")", "for", "i", "in", "range", "(", "width", ")", "]", ")" ]
natural-looking dask graph with some inter-connections .
train
false
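A hedged sketch of executing the graph, assuming crosstalk is defined in the same module so the stand-ins below are visible, and that randint is numpy-style (exclusive upper bound, so every generated key exists):
import random
import dask

def noop(*args):
    return None  # stand-in for the module's no-op task body

def randint(low, high):
    return random.randrange(low, high)  # exclusive upper bound, numpy-style

dsk, keys = crosstalk(width=4, height=3, connections=2)
print(dask.get(dsk, keys))  # synchronous scheduler; returns the top-layer values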
11,905
def delete_vpnservice(vpnservice, profile=None): conn = _auth(profile) return conn.delete_vpnservice(vpnservice)
[ "def", "delete_vpnservice", "(", "vpnservice", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "delete_vpnservice", "(", "vpnservice", ")" ]
deletes the specified vpn service .
train
false
11,907
def get_state_ptr(context, builder, name): assert (name in ('py', 'np')) func_name = ('numba_get_%s_random_state' % name) fnty = ir.FunctionType(rnd_state_ptr_t, ()) fn = builder.module.get_or_insert_function(fnty, func_name) fn.attributes.add('readnone') fn.attributes.add('nounwind') return builder.call(fn, ())
[ "def", "get_state_ptr", "(", "context", ",", "builder", ",", "name", ")", ":", "assert", "(", "name", "in", "(", "'py'", ",", "'np'", ")", ")", "func_name", "=", "(", "'numba_get_%s_random_state'", "%", "name", ")", "fnty", "=", "ir", ".", "FunctionType", "(", "rnd_state_ptr_t", ",", "(", ")", ")", "fn", "=", "builder", ".", "module", ".", "get_or_insert_function", "(", "fnty", ",", "func_name", ")", "fn", ".", "attributes", ".", "add", "(", "'readnone'", ")", "fn", ".", "attributes", ".", "add", "(", "'nounwind'", ")", "return", "builder", ".", "call", "(", "fn", ",", "(", ")", ")" ]
get a pointer to the given thread-local random state .
train
false
11,909
def walk_metadata(metadata_url, headers=None, expect_json=False): metadata = dict() for line in query_metadata(metadata_url, headers, expect_json): if (line.endswith('/') and (not (line == 'public-keys/'))): key = line[:(-1)] metadata[key] = walk_metadata((metadata_url + line), headers, expect_json) else: results = query_metadata((metadata_url + line), headers, expect_json) if (len(results) == 1): metadata[line] = results.pop() else: metadata[line] = results return metadata
[ "def", "walk_metadata", "(", "metadata_url", ",", "headers", "=", "None", ",", "expect_json", "=", "False", ")", ":", "metadata", "=", "dict", "(", ")", "for", "line", "in", "query_metadata", "(", "metadata_url", ",", "headers", ",", "expect_json", ")", ":", "if", "(", "line", ".", "endswith", "(", "'/'", ")", "and", "(", "not", "(", "line", "==", "'public-keys/'", ")", ")", ")", ":", "key", "=", "line", "[", ":", "(", "-", "1", ")", "]", "metadata", "[", "key", "]", "=", "walk_metadata", "(", "(", "metadata_url", "+", "line", ")", ",", "headers", ",", "expect_json", ")", "else", ":", "results", "=", "query_metadata", "(", "(", "metadata_url", "+", "line", ")", ",", "headers", ",", "expect_json", ")", "if", "(", "len", "(", "results", ")", "==", "1", ")", ":", "metadata", "[", "line", "]", "=", "results", ".", "pop", "(", ")", "else", ":", "metadata", "[", "line", "]", "=", "results", "return", "metadata" ]
walk the metadata tree and return a dictionary of the entire tree. args: metadata_url (the metadata url), headers (headers to set for the metadata request), expect_json (whether the metadata_url returns json). returns: dict, the result of walking the metadata tree .
train
false
11,910
def trustPage(request): return direct_to_template(request, 'server/trust.html', {'trust_handler_url': getViewURL(request, processTrustResult)})
[ "def", "trustPage", "(", "request", ")", ":", "return", "direct_to_template", "(", "request", ",", "'server/trust.html'", ",", "{", "'trust_handler_url'", ":", "getViewURL", "(", "request", ",", "processTrustResult", ")", "}", ")" ]
display the trust page template .
train
false
11,911
def _standardize_iterables(node): if (not node.iterables): return iterables = node.iterables fields = set(node.inputs.copyable_trait_names()) synchronize = False if node.synchronize: if (len(iterables) == 2): (first, last) = iterables if all(((isinstance(item, (str, bytes)) and (item in fields)) for item in first)): iterables = _transpose_iterables(first, last) if isinstance(iterables, tuple): iterables = [iterables] _validate_iterables(node, iterables, fields) if isinstance(iterables, list): if (not node.itersource): def make_field_func(*pair): return (pair[0], (lambda : pair[1])) iter_items = [make_field_func(*field_value1) for field_value1 in iterables] iterables = dict(iter_items) node.iterables = iterables
[ "def", "_standardize_iterables", "(", "node", ")", ":", "if", "(", "not", "node", ".", "iterables", ")", ":", "return", "iterables", "=", "node", ".", "iterables", "fields", "=", "set", "(", "node", ".", "inputs", ".", "copyable_trait_names", "(", ")", ")", "synchronize", "=", "False", "if", "node", ".", "synchronize", ":", "if", "(", "len", "(", "iterables", ")", "==", "2", ")", ":", "(", "first", ",", "last", ")", "=", "iterables", "if", "all", "(", "(", "(", "isinstance", "(", "item", ",", "(", "str", ",", "bytes", ")", ")", "and", "(", "item", "in", "fields", ")", ")", "for", "item", "in", "first", ")", ")", ":", "iterables", "=", "_transpose_iterables", "(", "first", ",", "last", ")", "if", "isinstance", "(", "iterables", ",", "tuple", ")", ":", "iterables", "=", "[", "iterables", "]", "_validate_iterables", "(", "node", ",", "iterables", ",", "fields", ")", "if", "isinstance", "(", "iterables", ",", "list", ")", ":", "if", "(", "not", "node", ".", "itersource", ")", ":", "def", "make_field_func", "(", "*", "pair", ")", ":", "return", "(", "pair", "[", "0", "]", ",", "(", "lambda", ":", "pair", "[", "1", "]", ")", ")", "iter_items", "=", "[", "make_field_func", "(", "*", "field_value1", ")", "for", "field_value1", "in", "iterables", "]", "iterables", "=", "dict", "(", "iter_items", ")", "node", ".", "iterables", "=", "iterables" ]
converts the given iterables to a {field: function} dictionary .
train
false
11,912
def test_stringer(): _ast = import_buffer_to_ast('(defn square [x] (* x x))', '') assert (type(_ast.body[0]) == ast.FunctionDef)
[ "def", "test_stringer", "(", ")", ":", "_ast", "=", "import_buffer_to_ast", "(", "'(defn square [x] (* x x))'", ",", "''", ")", "assert", "(", "type", "(", "_ast", ".", "body", "[", "0", "]", ")", "==", "ast", ".", "FunctionDef", ")" ]
make sure the basics of the importer work .
train
false
11,913
def shellQuote(value): return ("'%s'" % value.replace("'", '\'"\'"\''))
[ "def", "shellQuote", "(", "value", ")", ":", "return", "(", "\"'%s'\"", "%", "value", ".", "replace", "(", "\"'\"", ",", "'\\'\"\\'\"\\''", ")", ")" ]
return the string value in a form that can safely be inserted into a shell command .
train
false
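For example:
print(shellQuote("don't panic"))  # 'don'"'"'t panic'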
11,914
def redis_info(request): redis_info = {} for key in django_settings.REDIS_BACKENDS.keys(): redis_info[key] = {} client = redis_client(key) redis_info[key]['connection'] = django_settings.REDIS_BACKENDS[key] try: cfg = client.config_get() redis_info[key]['config'] = [{'key': k, 'value': cfg[k]} for k in sorted(cfg)] info = client.info() redis_info[key]['info'] = [{'key': k, 'value': info[k]} for k in sorted(info)] except ConnectionError: redis_info[key]['down'] = True return render_to_response('kadmin/redis.html', {'redis_info': redis_info, 'title': 'Redis Information'}, RequestContext(request, {}))
[ "def", "redis_info", "(", "request", ")", ":", "redis_info", "=", "{", "}", "for", "key", "in", "django_settings", ".", "REDIS_BACKENDS", ".", "keys", "(", ")", ":", "redis_info", "[", "key", "]", "=", "{", "}", "client", "=", "redis_client", "(", "key", ")", "redis_info", "[", "key", "]", "[", "'connection'", "]", "=", "django_settings", ".", "REDIS_BACKENDS", "[", "key", "]", "try", ":", "cfg", "=", "client", ".", "config_get", "(", ")", "redis_info", "[", "key", "]", "[", "'config'", "]", "=", "[", "{", "'key'", ":", "k", ",", "'value'", ":", "cfg", "[", "k", "]", "}", "for", "k", "in", "sorted", "(", "cfg", ")", "]", "info", "=", "client", ".", "info", "(", ")", "redis_info", "[", "key", "]", "[", "'info'", "]", "=", "[", "{", "'key'", ":", "k", ",", "'value'", ":", "info", "[", "k", "]", "}", "for", "k", "in", "sorted", "(", "info", ")", "]", "except", "ConnectionError", ":", "redis_info", "[", "key", "]", "[", "'down'", "]", "=", "True", "return", "render_to_response", "(", "'kadmin/redis.html'", ",", "{", "'redis_info'", ":", "redis_info", ",", "'title'", ":", "'Redis Information'", "}", ",", "RequestContext", "(", "request", ",", "{", "}", ")", ")" ]
admin view that displays redis info+config output for all backends .
train
false
11,915
def _root_broyden1_doc(): pass
[ "def", "_root_broyden1_doc", "(", ")", ":", "pass" ]
options for the broyden1 solver: nit (int), the number of iterations to make .
train
false
11,916
def _serialize_item(i, item, stream): stream.writeQString(_encode_url(item.url)) stream.writeQString(item.title) stream.writeQString(_encode_url(item.original_url)) stream.writeUInt32(BACK_FORWARD_TREE_VERSION) stream.writeUInt64(0) stream.writeInt64((i + 1)) stream.writeUInt64(0) stream.writeQString(None) stream.writeBool(False) stream.writeInt64((i + 1)) stream.writeQString(None) try: stream.writeInt32(item.user_data['scroll-pos'].x()) except (KeyError, TypeError): stream.writeInt32(0) try: stream.writeInt32(item.user_data['scroll-pos'].y()) except (KeyError, TypeError): stream.writeInt32(0) stream.writeFloat(1) stream.writeBool(False) stream.writeQString(None) stream.writeBool(False)
[ "def", "_serialize_item", "(", "i", ",", "item", ",", "stream", ")", ":", "stream", ".", "writeQString", "(", "_encode_url", "(", "item", ".", "url", ")", ")", "stream", ".", "writeQString", "(", "item", ".", "title", ")", "stream", ".", "writeQString", "(", "_encode_url", "(", "item", ".", "original_url", ")", ")", "stream", ".", "writeUInt32", "(", "BACK_FORWARD_TREE_VERSION", ")", "stream", ".", "writeUInt64", "(", "0", ")", "stream", ".", "writeInt64", "(", "(", "i", "+", "1", ")", ")", "stream", ".", "writeUInt64", "(", "0", ")", "stream", ".", "writeQString", "(", "None", ")", "stream", ".", "writeBool", "(", "False", ")", "stream", ".", "writeInt64", "(", "(", "i", "+", "1", ")", ")", "stream", ".", "writeQString", "(", "None", ")", "try", ":", "stream", ".", "writeInt32", "(", "item", ".", "user_data", "[", "'scroll-pos'", "]", ".", "x", "(", ")", ")", "except", "(", "KeyError", ",", "TypeError", ")", ":", "stream", ".", "writeInt32", "(", "0", ")", "try", ":", "stream", ".", "writeInt32", "(", "item", ".", "user_data", "[", "'scroll-pos'", "]", ".", "y", "(", ")", ")", "except", "(", "KeyError", ",", "TypeError", ")", ":", "stream", ".", "writeInt32", "(", "0", ")", "stream", ".", "writeFloat", "(", "1", ")", "stream", ".", "writeBool", "(", "False", ")", "stream", ".", "writeQString", "(", "None", ")", "stream", ".", "writeBool", "(", "False", ")" ]
internal function: serialize a single history item into the stream .
train
false
11,917
def get_loader_cls(loader): return symbol_by_name(loader, LOADER_ALIASES, imp=import_from_cwd)
[ "def", "get_loader_cls", "(", "loader", ")", ":", "return", "symbol_by_name", "(", "loader", ",", "LOADER_ALIASES", ",", "imp", "=", "import_from_cwd", ")" ]
get loader class by name/alias .
train
false
11,918
@FileSystem.in_directory(current_directory, 'django', 'couves') def test_django_agains_couves_nohooks(): (status, out) = run_scenario(**{'--tags': 'nothingwillbefound'}) expect('Couves before all').to.not_be.within(out) expect('Couves after all').to.not_be.within(out)
[ "@", "FileSystem", ".", "in_directory", "(", "current_directory", ",", "'django'", ",", "'couves'", ")", "def", "test_django_agains_couves_nohooks", "(", ")", ":", "(", "status", ",", "out", ")", "=", "run_scenario", "(", "**", "{", "'--tags'", ":", "'nothingwillbefound'", "}", ")", "expect", "(", "'Couves before all'", ")", ".", "to", ".", "not_be", ".", "within", "(", "out", ")", "expect", "(", "'Couves after all'", ")", ".", "to", ".", "not_be", ".", "within", "(", "out", ")" ]
it only calls @before.all / @after.all hooks when scenarios match the given tags .
train
false
11,919
def upgradeWithIQResponseTracker(xs): def callback(iq): '\n Handle iq response by firing associated deferred.\n ' if getattr(iq, 'handled', False): return try: d = xs.iqDeferreds[iq['id']] except KeyError: pass else: del xs.iqDeferreds[iq['id']] iq.handled = True if (iq['type'] == 'error'): d.errback(error.exceptionFromStanza(iq)) else: d.callback(iq) def disconnected(_): "\n Make sure deferreds do not linger on after disconnect.\n\n This errbacks all deferreds of iq's for which no response has been\n received with a L{ConnectionLost} failure. Otherwise, the deferreds\n will never be fired.\n " iqDeferreds = xs.iqDeferreds xs.iqDeferreds = {} for d in iqDeferreds.itervalues(): d.errback(ConnectionLost()) xs.iqDeferreds = {} xs.iqDefaultTimeout = getattr(xs, 'iqDefaultTimeout', None) xs.addObserver(xmlstream.STREAM_END_EVENT, disconnected) xs.addObserver('/iq[@type="result"]', callback) xs.addObserver('/iq[@type="error"]', callback) directlyProvides(xs, ijabber.IIQResponseTracker)
[ "def", "upgradeWithIQResponseTracker", "(", "xs", ")", ":", "def", "callback", "(", "iq", ")", ":", "if", "getattr", "(", "iq", ",", "'handled'", ",", "False", ")", ":", "return", "try", ":", "d", "=", "xs", ".", "iqDeferreds", "[", "iq", "[", "'id'", "]", "]", "except", "KeyError", ":", "pass", "else", ":", "del", "xs", ".", "iqDeferreds", "[", "iq", "[", "'id'", "]", "]", "iq", ".", "handled", "=", "True", "if", "(", "iq", "[", "'type'", "]", "==", "'error'", ")", ":", "d", ".", "errback", "(", "error", ".", "exceptionFromStanza", "(", "iq", ")", ")", "else", ":", "d", ".", "callback", "(", "iq", ")", "def", "disconnected", "(", "_", ")", ":", "iqDeferreds", "=", "xs", ".", "iqDeferreds", "xs", ".", "iqDeferreds", "=", "{", "}", "for", "d", "in", "iqDeferreds", ".", "itervalues", "(", ")", ":", "d", ".", "errback", "(", "ConnectionLost", "(", ")", ")", "xs", ".", "iqDeferreds", "=", "{", "}", "xs", ".", "iqDefaultTimeout", "=", "getattr", "(", "xs", ",", "'iqDefaultTimeout'", ",", "None", ")", "xs", ".", "addObserver", "(", "xmlstream", ".", "STREAM_END_EVENT", ",", "disconnected", ")", "xs", ".", "addObserver", "(", "'/iq[@type=\"result\"]'", ",", "callback", ")", "xs", ".", "addObserver", "(", "'/iq[@type=\"error\"]'", ",", "callback", ")", "directlyProvides", "(", "xs", ",", "ijabber", ".", "IIQResponseTracker", ")" ]
enhances an xmlstream for iq response tracking .
train
false
11,921
def gc_collect(): gc.collect() if is_jython: time.sleep(0.1) gc.collect() gc.collect()
[ "def", "gc_collect", "(", ")", ":", "gc", ".", "collect", "(", ")", "if", "is_jython", ":", "time", ".", "sleep", "(", "0.1", ")", "gc", ".", "collect", "(", ")", "gc", ".", "collect", "(", ")" ]
force as many objects as possible to be collected .
train
false
11,922
def simulate_delete(app, path, **kwargs): return simulate_request(app, 'DELETE', path, **kwargs)
[ "def", "simulate_delete", "(", "app", ",", "path", ",", "**", "kwargs", ")", ":", "return", "simulate_request", "(", "app", ",", "'DELETE'", ",", "path", ",", "**", "kwargs", ")" ]
simulates a delete request to a wsgi application .
train
false
11,923
def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): seq = random_powerlaw_tree_sequence(n, gamma=gamma, seed=seed, tries=tries) G = degree_sequence_tree(seq) G.name = ('random_powerlaw_tree(%s,%s)' % (n, gamma)) return G
[ "def", "random_powerlaw_tree", "(", "n", ",", "gamma", "=", "3", ",", "seed", "=", "None", ",", "tries", "=", "100", ")", ":", "seq", "=", "random_powerlaw_tree_sequence", "(", "n", ",", "gamma", "=", "gamma", ",", "seed", "=", "seed", ",", "tries", "=", "tries", ")", "G", "=", "degree_sequence_tree", "(", "seq", ")", "G", ".", "name", "=", "(", "'random_powerlaw_tree(%s,%s)'", "%", "(", "n", ",", "gamma", ")", ")", "return", "G" ]
returns a tree with a power law degree distribution .
train
false
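The same generator is exposed through networkx's public API; a hedged example (tries may need to be large before a valid degree sequence is found):
import networkx as nx

G = nx.random_powerlaw_tree(10, gamma=3, seed=42, tries=1000)
print(G.number_of_nodes(), G.number_of_edges())  # 10 9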
11,924
def user(pid): if ((not isinstance(pid, int)) or (pid < 0)): return None if stem.util.proc.is_available(): try: import pwd uid = stem.util.proc.uid(pid) if (uid and uid.isdigit()): return pwd.getpwuid(int(uid)).pw_name except: pass if is_available('ps'): results = call(('ps -o user %s' % pid), []) if (len(results) >= 2): return results[1].strip() return None
[ "def", "user", "(", "pid", ")", ":", "if", "(", "(", "not", "isinstance", "(", "pid", ",", "int", ")", ")", "or", "(", "pid", "<", "0", ")", ")", ":", "return", "None", "if", "stem", ".", "util", ".", "proc", ".", "is_available", "(", ")", ":", "try", ":", "import", "pwd", "uid", "=", "stem", ".", "util", ".", "proc", ".", "uid", "(", "pid", ")", "if", "(", "uid", "and", "uid", ".", "isdigit", "(", ")", ")", ":", "return", "pwd", ".", "getpwuid", "(", "int", "(", "uid", ")", ")", ".", "pw_name", "except", ":", "pass", "if", "is_available", "(", "'ps'", ")", ":", "results", "=", "call", "(", "(", "'ps -o user %s'", "%", "pid", ")", ",", "[", "]", ")", "if", "(", "len", "(", "results", ")", ">=", "2", ")", ":", "return", "results", "[", "1", "]", ".", "strip", "(", ")", "return", "None" ]
provides the user a given process is running under .
train
false
11,925
def hash_question(question, timestamp): timestamp = format_timestamp(timestamp) hexsha = checksum_question(question, timestamp) return u''.join((hexsha, timestamp, b64encode(question.encode(u'utf-8')).decode(u'ascii')))
[ "def", "hash_question", "(", "question", ",", "timestamp", ")", ":", "timestamp", "=", "format_timestamp", "(", "timestamp", ")", "hexsha", "=", "checksum_question", "(", "question", ",", "timestamp", ")", "return", "u''", ".", "join", "(", "(", "hexsha", ",", "timestamp", ",", "b64encode", "(", "question", ".", "encode", "(", "u'utf-8'", ")", ")", ".", "decode", "(", "u'ascii'", ")", ")", ")" ]
hashes the question so that it can later be verified .
train
false