id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
18,074
def CDLSHORTLINE(barDs, count):
    """Compute the Short Line candle indicator by delegating to TA-Lib."""
    indicator = talib.CDLSHORTLINE
    return call_talib_with_ohlc(barDs, count, indicator)
[ "def", "CDLSHORTLINE", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLSHORTLINE", ")" ]
short line candle .
train
false
18,075
def _is_suggestion_valid(thread_id, exploration_id):
    """Return True iff the suggestion's target state still exists in the exploration."""
    exploration_states = get_exploration_by_id(exploration_id).states
    model = feedback_models.SuggestionModel.get_by_exploration_and_thread_id(
        exploration_id, thread_id)
    return model.state_name in exploration_states
[ "def", "_is_suggestion_valid", "(", "thread_id", ",", "exploration_id", ")", ":", "states", "=", "get_exploration_by_id", "(", "exploration_id", ")", ".", "states", "suggestion", "=", "feedback_models", ".", "SuggestionModel", ".", "get_by_exploration_and_thread_id", "(", "exploration_id", ",", "thread_id", ")", "return", "(", "suggestion", ".", "state_name", "in", "states", ")" ]
check if the suggestion is still valid .
train
false
18,076
def _discard_if_outside_interval(f, M, inf, sup, K, negative, fast, mobius):
    """Discard an isolating interval if it falls outside ``(inf, sup)``.

    Repeatedly refines the Mobius-encoded interval ``M`` for a real root
    of ``f`` until it either fits entirely inside the requested bounds
    (returned) or provably lies entirely outside them (``None`` returned).
    When ``mobius`` is true the raw ``(f, M)`` pair is returned instead of
    the rational endpoints.
    """
    F = K.get_field()
    while True:
        (u, v) = _mobius_to_interval(M, F)
        if negative:
            # Negate and swap the endpoints (interval mirrored about 0).
            (u, v) = ((- v), (- u))
        if (((inf is None) or (u >= inf)) and ((sup is None) or (v <= sup))):
            # Interval fully inside the bounds: keep it.
            if (not mobius):
                return (u, v)
            else:
                return (f, M)
        elif (((sup is not None) and (u > sup)) or ((inf is not None) and (v < inf))):
            # Interval fully outside the bounds: discard.
            return None
        else:
            # Interval straddles a bound; refine one step and re-test.
            (f, M) = dup_step_refine_real_root(f, M, K, fast=fast)
[ "def", "_discard_if_outside_interval", "(", "f", ",", "M", ",", "inf", ",", "sup", ",", "K", ",", "negative", ",", "fast", ",", "mobius", ")", ":", "F", "=", "K", ".", "get_field", "(", ")", "while", "True", ":", "(", "u", ",", "v", ")", "=", "_mobius_to_interval", "(", "M", ",", "F", ")", "if", "negative", ":", "(", "u", ",", "v", ")", "=", "(", "(", "-", "v", ")", ",", "(", "-", "u", ")", ")", "if", "(", "(", "(", "inf", "is", "None", ")", "or", "(", "u", ">=", "inf", ")", ")", "and", "(", "(", "sup", "is", "None", ")", "or", "(", "v", "<=", "sup", ")", ")", ")", ":", "if", "(", "not", "mobius", ")", ":", "return", "(", "u", ",", "v", ")", "else", ":", "return", "(", "f", ",", "M", ")", "elif", "(", "(", "(", "sup", "is", "not", "None", ")", "and", "(", "u", ">", "sup", ")", ")", "or", "(", "(", "inf", "is", "not", "None", ")", "and", "(", "v", "<", "inf", ")", ")", ")", ":", "return", "None", "else", ":", "(", "f", ",", "M", ")", "=", "dup_step_refine_real_root", "(", "f", ",", "M", ",", "K", ",", "fast", "=", "fast", ")" ]
discard an isolating interval if outside .
train
false
18,077
def test_exif(h, f): if (h[6:10] == 'Exif'): return 'jpeg'
[ "def", "test_exif", "(", "h", ",", "f", ")", ":", "if", "(", "h", "[", "6", ":", "10", "]", "==", "'Exif'", ")", ":", "return", "'jpeg'" ]
jpeg data in exif format .
train
false
18,080
def run_post_push_script(component):
    """Run the component's post-push hook."""
    hook = component.post_push_script
    run_hook(component, None, hook)
[ "def", "run_post_push_script", "(", "component", ")", ":", "run_hook", "(", "component", ",", "None", ",", "component", ".", "post_push_script", ")" ]
run post push hook .
train
false
18,083
def _maybe_remove(store, key): try: store.remove(key) except: pass
[ "def", "_maybe_remove", "(", "store", ",", "key", ")", ":", "try", ":", "store", ".", "remove", "(", "key", ")", "except", ":", "pass" ]
for tests using tables .
train
false
18,084
@register.tag(u'get_available_languages')
def do_get_available_languages(parser, token):
    """Store the list of available languages in a context variable.

    Usage: ``{% get_available_languages as variable %}``.
    """
    bits = token.contents.split()
    well_formed = (len(bits) == 3) and (bits[1] == u'as')
    if not well_formed:
        raise TemplateSyntaxError(
            u"'get_available_languages' requires 'as variable' (got %r)" % bits)
    return GetAvailableLanguagesNode(bits[2])
[ "@", "register", ".", "tag", "(", "u'get_available_languages'", ")", "def", "do_get_available_languages", "(", "parser", ",", "token", ")", ":", "args", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "(", "(", "len", "(", "args", ")", "!=", "3", ")", "or", "(", "args", "[", "1", "]", "!=", "u'as'", ")", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "u\"'get_available_languages' requires 'as variable' (got %r)\"", "%", "args", ")", ")", "return", "GetAvailableLanguagesNode", "(", "args", "[", "2", "]", ")" ]
this will store a list of available languages in the context .
train
false
18,085
def get_dd_regions():
    """Return the Dimension Data regions: endpoint keys prefixed 'dd-', prefix stripped."""
    check_libcloud_or_fail()
    prefix = 'dd-'
    return [name[3:] for name in API_ENDPOINTS.keys() if name.startswith(prefix)]
[ "def", "get_dd_regions", "(", ")", ":", "check_libcloud_or_fail", "(", ")", "all_regions", "=", "API_ENDPOINTS", ".", "keys", "(", ")", "regions", "=", "[", "region", "[", "3", ":", "]", "for", "region", "in", "all_regions", "if", "region", ".", "startswith", "(", "'dd-'", ")", "]", "return", "regions" ]
get the list of available regions whose vendor is dimension data .
train
false
18,086
@pytest.fixture(params=list(range(0, (pickle.HIGHEST_PROTOCOL + 1))))
def protocol(request):
    """Parametrized fixture yielding every supported pickle protocol number."""
    return request.param
[ "@", "pytest", ".", "fixture", "(", "params", "=", "list", "(", "range", "(", "0", ",", "(", "pickle", ".", "HIGHEST_PROTOCOL", "+", "1", ")", ")", ")", ")", "def", "protocol", "(", "request", ")", ":", "return", "request", ".", "param" ]
fixture to run all the tests for all available pickle protocols .
train
false
18,089
@login_required
@csrf_exempt
def follow_unfollow(request, content_type_id, object_id, do_follow=True, actor_only=True):
    """Create (201) or delete (204) the follow relationship between request.user and the target."""
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    target = get_object_or_404(ctype.model_class(), pk=object_id)
    if not do_follow:
        actions.unfollow(request.user, target)
        return respond(request, 204)
    actions.follow(request.user, target, actor_only=actor_only)
    return respond(request, 201)
[ "@", "login_required", "@", "csrf_exempt", "def", "follow_unfollow", "(", "request", ",", "content_type_id", ",", "object_id", ",", "do_follow", "=", "True", ",", "actor_only", "=", "True", ")", ":", "ctype", "=", "get_object_or_404", "(", "ContentType", ",", "pk", "=", "content_type_id", ")", "instance", "=", "get_object_or_404", "(", "ctype", ".", "model_class", "(", ")", ",", "pk", "=", "object_id", ")", "if", "do_follow", ":", "actions", ".", "follow", "(", "request", ".", "user", ",", "instance", ",", "actor_only", "=", "actor_only", ")", "return", "respond", "(", "request", ",", "201", ")", "actions", ".", "unfollow", "(", "request", ".", "user", ",", "instance", ")", "return", "respond", "(", "request", ",", "204", ")" ]
creates or deletes the follow relationship between request .
train
true
18,090
def corr2cov(corr, std):
    """Convert a correlation matrix to a covariance matrix.

    *std* is either a scalar (applied to every variable) or a length-n
    vector of standard deviations.
    """
    if np.size(std) == 1:
        # Broadcast a scalar std to one value per variable.
        std = std * np.ones(corr.shape[0])
    outer = std[:, None] * std[None, :]
    return corr * outer
[ "def", "corr2cov", "(", "corr", ",", "std", ")", ":", "if", "(", "np", ".", "size", "(", "std", ")", "==", "1", ")", ":", "std", "=", "(", "std", "*", "np", ".", "ones", "(", "corr", ".", "shape", "[", "0", "]", ")", ")", "cov", "=", "(", "(", "corr", "*", "std", "[", ":", ",", "None", "]", ")", "*", "std", "[", "None", ",", ":", "]", ")", "return", "cov" ]
convert correlation matrix to covariance matrix parameters corr : ndarray .
train
false
18,092
def test_score_3():
    """Assert TPOTRegressor.score returns a known value for a fixed pipeline."""
    tpot_obj = TPOTRegressor(scoring='neg_mean_squared_error')
    tpot_obj._pbar = tqdm(total=1, disable=True)
    # Known-good score for this exact pipeline on the fixed train/test split.
    known_score = 8.967374340787371
    tpot_obj._optimized_pipeline = creator.Individual.from_string('ExtraTreesRegressor(GradientBoostingRegressor(input_matrix, 100.0, 0.11), 0.17999999999999999)', tpot_obj._pset)
    tpot_obj._fitted_pipeline = tpot_obj._toolbox.compile(expr=tpot_obj._optimized_pipeline)
    tpot_obj._fitted_pipeline.fit(training_features_r, training_classes_r)
    score = tpot_obj.score(testing_features_r, testing_classes_r)
    def isclose(a, b, rel_tol=1e-09, abs_tol=0.0):
        # Local float comparison (PEP 485-style math.isclose equivalent).
        return (abs((a - b)) <= max((rel_tol * max(abs(a), abs(b))), abs_tol))
    assert isclose(known_score, score)
[ "def", "test_score_3", "(", ")", ":", "tpot_obj", "=", "TPOTRegressor", "(", "scoring", "=", "'neg_mean_squared_error'", ")", "tpot_obj", ".", "_pbar", "=", "tqdm", "(", "total", "=", "1", ",", "disable", "=", "True", ")", "known_score", "=", "8.967374340787371", "tpot_obj", ".", "_optimized_pipeline", "=", "creator", ".", "Individual", ".", "from_string", "(", "'ExtraTreesRegressor(GradientBoostingRegressor(input_matrix, 100.0, 0.11), 0.17999999999999999)'", ",", "tpot_obj", ".", "_pset", ")", "tpot_obj", ".", "_fitted_pipeline", "=", "tpot_obj", ".", "_toolbox", ".", "compile", "(", "expr", "=", "tpot_obj", ".", "_optimized_pipeline", ")", "tpot_obj", ".", "_fitted_pipeline", ".", "fit", "(", "training_features_r", ",", "training_classes_r", ")", "score", "=", "tpot_obj", ".", "score", "(", "testing_features_r", ",", "testing_classes_r", ")", "def", "isclose", "(", "a", ",", "b", ",", "rel_tol", "=", "1e-09", ",", "abs_tol", "=", "0.0", ")", ":", "return", "(", "abs", "(", "(", "a", "-", "b", ")", ")", "<=", "max", "(", "(", "rel_tol", "*", "max", "(", "abs", "(", "a", ")", ",", "abs", "(", "b", ")", ")", ")", ",", "abs_tol", ")", ")", "assert", "isclose", "(", "known_score", ",", "score", ")" ]
assert that the tpotregressor score function outputs a known score for a fixed pipeline .
train
false
18,093
def detect_filename(filename):
    """Filename-based storage-format autodetection.

    FILE_DETECT pairs each pattern with a store class; a pattern is either
    a suffix string or a (prefix, suffix) tuple. Returns the first matching
    store class, or None.

    Fix: the original's ``elif`` lacked an ``isinstance(autoload, tuple)``
    guard, so a non-matching *string* pattern fell through and was indexed
    character-wise (``autoload[0]``/``autoload[1]``), which could match
    spuriously (e.g. pattern '.po' matching name '.xo').
    """
    name = os.path.basename(filename)
    for (autoload, storeclass) in FILE_DETECT:
        if isinstance(autoload, tuple):
            # (prefix, suffix) pattern.
            if name.startswith(autoload[0]) and name.endswith(autoload[1]):
                return storeclass
        elif name.endswith(autoload):
            # Plain suffix pattern.
            return storeclass
    return None
[ "def", "detect_filename", "(", "filename", ")", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "for", "(", "autoload", ",", "storeclass", ")", "in", "FILE_DETECT", ":", "if", "(", "(", "not", "isinstance", "(", "autoload", ",", "tuple", ")", ")", "and", "name", ".", "endswith", "(", "autoload", ")", ")", ":", "return", "storeclass", "elif", "(", "name", ".", "startswith", "(", "autoload", "[", "0", "]", ")", "and", "name", ".", "endswith", "(", "autoload", "[", "1", "]", ")", ")", ":", "return", "storeclass", "return", "None" ]
filename based format autodetection .
train
false
18,095
def should_check_directory(directory_path, exclude_patterns, include_patterns):
    """Decide whether *directory_path* must be descended into.

    True when the directory itself is included, or when at least one
    include pattern could match something beneath it.
    """
    def normalize_path(path):
        return path.replace(os.path.sep, '/')

    dir_prefix = normalize_path(directory_path) + '/'
    has_child_pattern = any(
        (normalize_path(pattern) + '/').startswith(dir_prefix)
        for pattern in include_patterns
    )
    included = should_include(directory_path, exclude_patterns, include_patterns)
    return included or has_child_pattern
[ "def", "should_check_directory", "(", "directory_path", ",", "exclude_patterns", ",", "include_patterns", ")", ":", "def", "normalize_path", "(", "path", ")", ":", "return", "path", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "'/'", ")", "path_with_slash", "=", "(", "normalize_path", "(", "directory_path", ")", "+", "'/'", ")", "possible_child_patterns", "=", "[", "pattern", "for", "pattern", "in", "map", "(", "normalize_path", ",", "include_patterns", ")", "if", "(", "pattern", "+", "'/'", ")", ".", "startswith", "(", "path_with_slash", ")", "]", "directory_included", "=", "should_include", "(", "directory_path", ",", "exclude_patterns", ",", "include_patterns", ")", "return", "(", "directory_included", "or", "(", "len", "(", "possible_child_patterns", ")", ">", "0", ")", ")" ]
given a directory path .
train
false
18,097
def _role_remove(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None):
    """Remove a role from the Postgres server via ``DROP ROLE``.

    Returns True when the role is gone after the drop, False when it did
    not exist in the first place or the drop did not take effect.
    """
    if (not user_exists(name, user, host, port, maintenance_db, password=password, runas=runas)):
        log.info("User '{0}' does not exist".format(name))
        return False
    sub_cmd = 'DROP ROLE "{0}"'.format(name)
    _psql_prepare_and_run(['-c', sub_cmd], runas=runas, host=host, user=user, port=port, maintenance_db=maintenance_db, password=password)
    # Success is verified by re-checking existence, not by the command's
    # own return value.
    if (not user_exists(name, user, host, port, maintenance_db, password=password, runas=runas)):
        return True
    else:
        log.info("Failed to delete user '{0}'.".format(name))
        return False
[ "def", "_role_remove", "(", "name", ",", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "runas", "=", "None", ")", ":", "if", "(", "not", "user_exists", "(", "name", ",", "user", ",", "host", ",", "port", ",", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", ")", ":", "log", ".", "info", "(", "\"User '{0}' does not exist\"", ".", "format", "(", "name", ")", ")", "return", "False", "sub_cmd", "=", "'DROP ROLE \"{0}\"'", ".", "format", "(", "name", ")", "_psql_prepare_and_run", "(", "[", "'-c'", ",", "sub_cmd", "]", ",", "runas", "=", "runas", ",", "host", "=", "host", ",", "user", "=", "user", ",", "port", "=", "port", ",", "maintenance_db", "=", "maintenance_db", ",", "password", "=", "password", ")", "if", "(", "not", "user_exists", "(", "name", ",", "user", ",", "host", ",", "port", ",", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", ")", ":", "return", "True", "else", ":", "log", ".", "info", "(", "\"Failed to delete user '{0}'.\"", ".", "format", "(", "name", ")", ")", "return", "False" ]
removes a role from the postgres server .
train
true
18,100
def pool_align(vm, object_name, align):
    """Return the size of *object_name*, rounded up to the pool alignment *align*."""
    size = vm.profile.get_obj_size(object_name)
    remainder = size % align
    if remainder:
        size += align - remainder
    return size
[ "def", "pool_align", "(", "vm", ",", "object_name", ",", "align", ")", ":", "size_of_obj", "=", "vm", ".", "profile", ".", "get_obj_size", "(", "object_name", ")", "extra", "=", "(", "size_of_obj", "%", "align", ")", "if", "extra", ":", "size_of_obj", "+=", "(", "align", "-", "extra", ")", "return", "size_of_obj" ]
returns the size of the object accounting for pool alignment .
train
false
18,101
def _get_function_globals_for_reduction(func):
    """Analyse *func* and return a dict of its used globals, suitable for reduction.

    Module-valued globals are replaced by lightweight ``_ModuleRef``
    wrappers so the live module object is not serialized; ``__name__`` is
    forced to the function's defining module.
    """
    func_id = bytecode.FunctionIdentity.from_function(func)
    bc = bytecode.ByteCode(func_id)
    globs = bc.get_used_globals()
    for (k, v) in globs.items():
        if isinstance(v, ModuleType):
            # Keep only the module's name, not the module itself.
            globs[k] = _ModuleRef(v.__name__)
    globs['__name__'] = func.__module__
    return globs
[ "def", "_get_function_globals_for_reduction", "(", "func", ")", ":", "func_id", "=", "bytecode", ".", "FunctionIdentity", ".", "from_function", "(", "func", ")", "bc", "=", "bytecode", ".", "ByteCode", "(", "func_id", ")", "globs", "=", "bc", ".", "get_used_globals", "(", ")", "for", "(", "k", ",", "v", ")", "in", "globs", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "ModuleType", ")", ":", "globs", "[", "k", "]", "=", "_ModuleRef", "(", "v", ".", "__name__", ")", "globs", "[", "'__name__'", "]", "=", "func", ".", "__module__", "return", "globs" ]
analyse *func* and return a dictionary of global values suitable for reduction .
train
false
18,102
def _list_gids():
    """Return a sorted list of GIDs in use (queried via ``dscacheutil``, i.e. Darwin)."""
    output = __salt__['cmd.run'](['dscacheutil', '-q', 'group'], output_loglevel='quiet', python_shell=False)
    ret = set()
    for line in salt.utils.itertools.split(output, '\n'):
        if line.startswith('gid:'):
            # 'gid: 20' -> collect the value field(s); the set dedupes.
            ret.update(line.split()[1:])
    return sorted(ret)
[ "def", "_list_gids", "(", ")", ":", "output", "=", "__salt__", "[", "'cmd.run'", "]", "(", "[", "'dscacheutil'", ",", "'-q'", ",", "'group'", "]", ",", "output_loglevel", "=", "'quiet'", ",", "python_shell", "=", "False", ")", "ret", "=", "set", "(", ")", "for", "line", "in", "salt", ".", "utils", ".", "itertools", ".", "split", "(", "output", ",", "'\\n'", ")", ":", "if", "line", ".", "startswith", "(", "'gid:'", ")", ":", "ret", ".", "update", "(", "line", ".", "split", "(", ")", "[", "1", ":", "]", ")", "return", "sorted", "(", "ret", ")" ]
return a list of gids in use .
train
true
18,103
def traverse(resource, path):
    """Traverse the resource tree starting at *resource* along *path*.

    *path* may be a string or an iterable of segments (joined first). A
    leading '/' restarts traversal from the resource tree root. A blank
    request is built so a registered ITraverser adapter can run; falls
    back to ResourceTreeTraverser when no adapter is registered.
    """
    if is_nonstr_iter(path):
        # Accept a tuple/list of path segments.
        if path:
            path = _join_path_tuple(tuple(path))
        else:
            path = ''
    path = ascii_native_(path)
    if (path and (path[0] == '/')):
        # Absolute path: start from the root of the resource tree.
        resource = find_root(resource)
    reg = get_current_registry()
    request_factory = reg.queryUtility(IRequestFactory)
    if (request_factory is None):
        # Imported lazily, presumably to avoid a circular import.
        from pyramid.request import Request
        request_factory = Request
    request = request_factory.blank(path)
    request.registry = reg
    traverser = reg.queryAdapter(resource, ITraverser)
    if (traverser is None):
        traverser = ResourceTreeTraverser(resource)
    return traverser(request)
[ "def", "traverse", "(", "resource", ",", "path", ")", ":", "if", "is_nonstr_iter", "(", "path", ")", ":", "if", "path", ":", "path", "=", "_join_path_tuple", "(", "tuple", "(", "path", ")", ")", "else", ":", "path", "=", "''", "path", "=", "ascii_native_", "(", "path", ")", "if", "(", "path", "and", "(", "path", "[", "0", "]", "==", "'/'", ")", ")", ":", "resource", "=", "find_root", "(", "resource", ")", "reg", "=", "get_current_registry", "(", ")", "request_factory", "=", "reg", ".", "queryUtility", "(", "IRequestFactory", ")", "if", "(", "request_factory", "is", "None", ")", ":", "from", "pyramid", ".", "request", "import", "Request", "request_factory", "=", "Request", "request", "=", "request_factory", ".", "blank", "(", "path", ")", "request", ".", "registry", "=", "reg", "traverser", "=", "reg", ".", "queryAdapter", "(", "resource", ",", "ITraverser", ")", "if", "(", "traverser", "is", "None", ")", ":", "traverser", "=", "ResourceTreeTraverser", "(", "resource", ")", "return", "traverser", "(", "request", ")" ]
traverse and visit the given expression structure using the default iterator .
train
false
18,104
def ccEstablishment():
    """Build a CC-Establishment message (section 9 of the CC spec)."""
    header = TpPd(pd=3)
    msg_type = MessageType(mesType=4)
    payload = SetupContainer()
    return (header / msg_type) / payload
[ "def", "ccEstablishment", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "4", ")", "c", "=", "SetupContainer", "(", ")", "packet", "=", "(", "(", "a", "/", "b", ")", "/", "c", ")", "return", "packet" ]
cc-establishment section 9 .
train
true
18,105
def make_url(namespace, max_age):
    """Build a signed ``wss://`` URL for *namespace*, valid for *max_age* seconds."""
    signer = MessageSigner(g.secrets['websocket'])
    ttl = datetime.timedelta(seconds=max_age)
    mac = signer.make_signature(namespace, max_age=ttl)
    query_string = urllib.urlencode({'m': mac})
    parts = ('wss', g.websocket_host, namespace, None, query_string, None)
    return urlparse.urlunparse(parts)
[ "def", "make_url", "(", "namespace", ",", "max_age", ")", ":", "signer", "=", "MessageSigner", "(", "g", ".", "secrets", "[", "'websocket'", "]", ")", "signature", "=", "signer", ".", "make_signature", "(", "namespace", ",", "max_age", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "max_age", ")", ")", "query_string", "=", "urllib", ".", "urlencode", "(", "{", "'m'", ":", "signature", "}", ")", "return", "urlparse", ".", "urlunparse", "(", "(", "'wss'", ",", "g", ".", "websocket_host", ",", "namespace", ",", "None", ",", "query_string", ",", "None", ")", ")" ]
build an appveyor api url .
train
false
18,106
def unit_propagate_int_repr(clauses, s):
    """Unit-propagate literal *s* over integer-encoded clauses.

    Drops every clause already satisfied by ``s`` and strips the negated
    literal ``-s`` from the remaining ones.
    """
    removed = {-s}
    return [clause - removed for clause in clauses if s not in clause]
[ "def", "unit_propagate_int_repr", "(", "clauses", ",", "s", ")", ":", "negated", "=", "{", "(", "-", "s", ")", "}", "return", "[", "(", "clause", "-", "negated", ")", "for", "clause", "in", "clauses", "if", "(", "s", "not", "in", "clause", ")", "]" ]
same as unit_propagate .
train
false
18,107
def get_seed_ratio(provider):
    """Return the configured seed ratio for *provider* as a float.

    Unknown providers yield None. If the configured value cannot be parsed
    as a float, a warning is logged and the raw configured value is
    returned unchanged.
    """
    # Each branch reads its CONFIG attribute lazily, so only the matched
    # provider's setting is ever accessed.
    if (provider == 'rutracker.org'):
        seed_ratio = headphones.CONFIG.RUTRACKER_RATIO
    elif (provider == 'Kick Ass Torrents'):
        seed_ratio = headphones.CONFIG.KAT_RATIO
    elif (provider == 'What.cd'):
        seed_ratio = headphones.CONFIG.WHATCD_RATIO
    elif (provider == 'PassTheHeadphones.Me'):
        seed_ratio = headphones.CONFIG.PTH_RATIO
    elif (provider == 'The Pirate Bay'):
        seed_ratio = headphones.CONFIG.PIRATEBAY_RATIO
    elif (provider == 'Old Pirate Bay'):
        seed_ratio = headphones.CONFIG.OLDPIRATEBAY_RATIO
    elif (provider == 'Waffles.ch'):
        seed_ratio = headphones.CONFIG.WAFFLES_RATIO
    elif (provider == 'Mininova'):
        seed_ratio = headphones.CONFIG.MININOVA_RATIO
    elif (provider == 'Strike'):
        seed_ratio = headphones.CONFIG.STRIKE_RATIO
    else:
        seed_ratio = None
    if (seed_ratio is not None):
        try:
            seed_ratio = float(seed_ratio)
        except ValueError:
            logger.warn(('Could not get seed ratio for %s' % provider))
    return seed_ratio
[ "def", "get_seed_ratio", "(", "provider", ")", ":", "if", "(", "provider", "==", "'rutracker.org'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "RUTRACKER_RATIO", "elif", "(", "provider", "==", "'Kick Ass Torrents'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "KAT_RATIO", "elif", "(", "provider", "==", "'What.cd'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "WHATCD_RATIO", "elif", "(", "provider", "==", "'PassTheHeadphones.Me'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "PTH_RATIO", "elif", "(", "provider", "==", "'The Pirate Bay'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "PIRATEBAY_RATIO", "elif", "(", "provider", "==", "'Old Pirate Bay'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "OLDPIRATEBAY_RATIO", "elif", "(", "provider", "==", "'Waffles.ch'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "WAFFLES_RATIO", "elif", "(", "provider", "==", "'Mininova'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "MININOVA_RATIO", "elif", "(", "provider", "==", "'Strike'", ")", ":", "seed_ratio", "=", "headphones", ".", "CONFIG", ".", "STRIKE_RATIO", "else", ":", "seed_ratio", "=", "None", "if", "(", "seed_ratio", "is", "not", "None", ")", ":", "try", ":", "seed_ratio", "=", "float", "(", "seed_ratio", ")", "except", "ValueError", ":", "logger", ".", "warn", "(", "(", "'Could not get seed ratio for %s'", "%", "provider", ")", ")", "return", "seed_ratio" ]
return the seed ratio for the specified provider .
train
false
18,110
def _get_codeobj(pyfile):
    """Return the code object for *pyfile*, whether source or compiled.

    NOTE(review): relies on the deprecated ``imp`` module (removed in
    Python 3.12) and on an 8-byte .pyc header, which is interpreter-
    version dependent — verify against the targeted runtime.
    """
    from imp import PY_COMPILED, PY_SOURCE
    (result, fileobj, fullpath) = _check_if_pyc(pyfile)
    try:
        data = fileobj.read()
    finally:
        fileobj.close()
    if (result is PY_COMPILED):
        import marshal
        # Skip the .pyc header before unmarshalling the code object.
        code_obj = marshal.loads(data[8:])
    elif (result is PY_SOURCE):
        code_obj = compile(data, fullpath, 'exec')
    else:
        raise Exception('Input file is unknown format: {0}'.format(fullpath))
    return code_obj
[ "def", "_get_codeobj", "(", "pyfile", ")", ":", "from", "imp", "import", "PY_COMPILED", ",", "PY_SOURCE", "(", "result", ",", "fileobj", ",", "fullpath", ")", "=", "_check_if_pyc", "(", "pyfile", ")", "try", ":", "data", "=", "fileobj", ".", "read", "(", ")", "finally", ":", "fileobj", ".", "close", "(", ")", "if", "(", "result", "is", "PY_COMPILED", ")", ":", "import", "marshal", "code_obj", "=", "marshal", ".", "loads", "(", "data", "[", "8", ":", "]", ")", "elif", "(", "result", "is", "PY_SOURCE", ")", ":", "code_obj", "=", "compile", "(", "data", ",", "fullpath", ",", "'exec'", ")", "else", ":", "raise", "Exception", "(", "'Input file is unknown format: {0}'", ".", "format", "(", "fullpath", ")", ")", "return", "code_obj" ]
returns the code object .
train
false
18,111
def generate_managed_section(cluster):
    """Generate the 'managed' configuration section for *cluster*.

    Each agent node contributes ``[private_address, address]`` when a
    private address is set, otherwise just its public address.
    """
    entries = [
        node.address if node.private_address is None
        else [node.private_address, node.address]
        for node in cluster.agent_nodes
    ]
    return {'managed': {'addresses': entries, 'upgrade': True}}
[ "def", "generate_managed_section", "(", "cluster", ")", ":", "addresses", "=", "list", "(", ")", "for", "node", "in", "cluster", ".", "agent_nodes", ":", "if", "(", "node", ".", "private_address", "is", "not", "None", ")", ":", "addresses", ".", "append", "(", "[", "node", ".", "private_address", ",", "node", ".", "address", "]", ")", "else", ":", "addresses", ".", "append", "(", "node", ".", "address", ")", "return", "{", "'managed'", ":", "{", "'addresses'", ":", "addresses", ",", "'upgrade'", ":", "True", "}", "}" ]
generate a managed configuration section for the given cluster .
train
false
18,112
@register.simple_tag
def bootstrap_formset_errors(*args, **kwargs):
    """Template tag: render a formset's errors (thin delegate to render_formset_errors)."""
    return render_formset_errors(*args, **kwargs)
[ "@", "register", ".", "simple_tag", "def", "bootstrap_formset_errors", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "render_formset_errors", "(", "*", "args", ",", "**", "kwargs", ")" ]
render formset errors **tag name**:: bootstrap_formset_errors **parameters**: formset the formset that is being rendered layout context value that is available in the template bootstrap3/form_errors .
train
false
18,113
def getVector3ListsRecursively(floatLists):
    """Recursively convert nested float lists into Vector3 structures."""
    if len(floatLists) < 1:
        return Vector3()
    first = floatLists[0]
    if first.__class__ == Vector3:
        # Already converted.
        return floatLists
    if first.__class__ != list:
        # Flat list of floats -> single Vector3.
        return getVector3ByFloatList(floatLists, Vector3())
    return [getVector3ListsRecursively(inner) for inner in floatLists]
[ "def", "getVector3ListsRecursively", "(", "floatLists", ")", ":", "if", "(", "len", "(", "floatLists", ")", "<", "1", ")", ":", "return", "Vector3", "(", ")", "firstElement", "=", "floatLists", "[", "0", "]", "if", "(", "firstElement", ".", "__class__", "==", "Vector3", ")", ":", "return", "floatLists", "if", "(", "firstElement", ".", "__class__", "!=", "list", ")", ":", "return", "getVector3ByFloatList", "(", "floatLists", ",", "Vector3", "(", ")", ")", "vector3ListsRecursively", "=", "[", "]", "for", "floatList", "in", "floatLists", ":", "vector3ListsRecursively", ".", "append", "(", "getVector3ListsRecursively", "(", "floatList", ")", ")", "return", "vector3ListsRecursively" ]
get vector3 lists recursively .
train
false
18,114
def get_gui_setting(gui_settings, value, default, is_bool=False):
    """Fetch *value* from *gui_settings*, falling back to *default*.

    With ``is_bool`` the stored value is coerced: both ``True`` and the
    string ``'true'`` (as Qt-style settings may store) count as True.
    """
    result = gui_settings.value(value, default)
    if is_bool:
        result = result in (True, 'true')
    return result
[ "def", "get_gui_setting", "(", "gui_settings", ",", "value", ",", "default", ",", "is_bool", "=", "False", ")", ":", "val", "=", "gui_settings", ".", "value", "(", "value", ",", "default", ")", "if", "is_bool", ":", "val", "=", "(", "(", "val", "==", "True", ")", "or", "(", "val", "==", "'true'", ")", ")", "return", "val" ]
utility method to get a specific gui setting .
train
false
18,115
def is_success(status):
    """Return True when *status* is an HTTP 2xx success code."""
    return 200 <= status <= 299
[ "def", "is_success", "(", "status", ")", ":", "return", "(", "200", "<=", "status", "<=", "299", ")" ]
check if http status code is successful .
train
false
18,116
def save_local_facts(filename, facts):
    """Persist *facts* as JSON to *filename*, creating parent directories.

    Raises OpenShiftFactsFileWriteError on any IO/OS failure.
    """
    try:
        fact_dir = os.path.dirname(filename)
        try:
            os.makedirs(fact_dir)
        except OSError as exception:
            # An already-existing directory is fine; anything else bubbles up.
            if (exception.errno != errno.EEXIST):
                raise
        with open(filename, 'w') as fact_file:
            fact_file.write(module.jsonify(facts))
        # 384 == 0o600: owner read/write only.
        os.chmod(filename, 384)
    except (IOError, OSError) as ex:
        raise OpenShiftFactsFileWriteError(('Could not create fact file: %s, error: %s' % (filename, ex)))
[ "def", "save_local_facts", "(", "filename", ",", "facts", ")", ":", "try", ":", "fact_dir", "=", "os", ".", "path", ".", "dirname", "(", "filename", ")", "try", ":", "os", ".", "makedirs", "(", "fact_dir", ")", "except", "OSError", "as", "exception", ":", "if", "(", "exception", ".", "errno", "!=", "errno", ".", "EEXIST", ")", ":", "raise", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fact_file", ":", "fact_file", ".", "write", "(", "module", ".", "jsonify", "(", "facts", ")", ")", "os", ".", "chmod", "(", "filename", ",", "384", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "ex", ":", "raise", "OpenShiftFactsFileWriteError", "(", "(", "'Could not create fact file: %s, error: %s'", "%", "(", "filename", ",", "ex", ")", ")", ")" ]
save local facts args: filename : local facts file facts : facts to set .
train
false
18,117
def create_range(range_class):
    """Create the database record for a custom range class.

    Raises ValidationError for classes without a plain-text ``name`` and
    ValueError when a record for the range already exists.
    """
    if not hasattr(range_class, 'name'):
        raise exceptions.ValidationError('A custom range must have a name attribute')
    if range_class.name.__class__.__name__ == '__proxy__':
        # Lazy ugettext proxies cannot be stored as plain names.
        raise exceptions.ValidationError('Custom ranges must have text names (not ugettext proxies)')
    try:
        return Range.objects.create(name=range_class.name, proxy_class=_class_path(range_class))
    except IntegrityError:
        raise ValueError('The passed range already exists in the database.')
[ "def", "create_range", "(", "range_class", ")", ":", "if", "(", "not", "hasattr", "(", "range_class", ",", "'name'", ")", ")", ":", "raise", "exceptions", ".", "ValidationError", "(", "'A custom range must have a name attribute'", ")", "if", "(", "range_class", ".", "name", ".", "__class__", ".", "__name__", "==", "'__proxy__'", ")", ":", "raise", "exceptions", ".", "ValidationError", "(", "'Custom ranges must have text names (not ugettext proxies)'", ")", "try", ":", "return", "Range", ".", "objects", ".", "create", "(", "name", "=", "range_class", ".", "name", ",", "proxy_class", "=", "_class_path", "(", "range_class", ")", ")", "except", "IntegrityError", ":", "raise", "ValueError", "(", "'The passed range already exists in the database.'", ")" ]
create a custom range instance from the passed range class this function creates the appropriate database record for this custom range .
train
false
18,118
def ReadTag(buffer, pos):
    """Read a varint-encoded tag from *buffer* starting at *pos*.

    Returns ``(raw_tag_bytes, new_pos)``: the tag ends at the first byte
    whose high bit is clear.
    """
    start = pos
    # Continuation bytes carry the MSB (0x80).
    while ord(buffer[pos]) & 128:
        pos += 1
    pos += 1
    return (buffer[start:pos], pos)
[ "def", "ReadTag", "(", "buffer", ",", "pos", ")", ":", "start", "=", "pos", "while", "(", "ord", "(", "buffer", "[", "pos", "]", ")", "&", "128", ")", ":", "pos", "+=", "1", "pos", "+=", "1", "return", "(", "buffer", "[", "start", ":", "pos", "]", ",", "pos", ")" ]
read a tag from the buffer .
train
false
18,120
@hug.default_output_format()
def output_formatter(data):
    """Default output format used for testing: plain JSON."""
    return hug.output_format.json(data)
[ "@", "hug", ".", "default_output_format", "(", ")", "def", "output_formatter", "(", "data", ")", ":", "return", "hug", ".", "output_format", ".", "json", "(", "data", ")" ]
for testing .
train
false
18,122
def ksstat(x, cdf, alternative='two_sided', args=()):
    """Kolmogorov-Smirnov test statistic of sample *x* against *cdf*.

    *cdf* may be a distribution name (looked up in stats.distributions),
    an object exposing ``.cdf``, or a callable. *alternative* selects the
    one-sided D+ ('greater'), D- ('less'), or the two-sided max of both.
    """
    nobs = float(len(x))
    if isinstance(cdf, string_types):
        cdf = getattr(stats.distributions, cdf).cdf
    elif hasattr(cdf, 'cdf'):
        cdf = getattr(cdf, 'cdf')
    x = np.sort(x)
    cdfvals = cdf(x, *args)
    if (alternative in ['two_sided', 'greater']):
        # D+: maximal excess of the empirical CDF over the theoretical one.
        Dplus = ((np.arange(1.0, (nobs + 1)) / nobs) - cdfvals).max()
        if (alternative == 'greater'):
            return Dplus
    if (alternative in ['two_sided', 'less']):
        # D-: maximal excess of the theoretical CDF over the empirical one.
        Dmin = (cdfvals - (np.arange(0.0, nobs) / nobs)).max()
        if (alternative == 'less'):
            return Dmin
    D = np.max([Dplus, Dmin])
    return D
[ "def", "ksstat", "(", "x", ",", "cdf", ",", "alternative", "=", "'two_sided'", ",", "args", "=", "(", ")", ")", ":", "nobs", "=", "float", "(", "len", "(", "x", ")", ")", "if", "isinstance", "(", "cdf", ",", "string_types", ")", ":", "cdf", "=", "getattr", "(", "stats", ".", "distributions", ",", "cdf", ")", ".", "cdf", "elif", "hasattr", "(", "cdf", ",", "'cdf'", ")", ":", "cdf", "=", "getattr", "(", "cdf", ",", "'cdf'", ")", "x", "=", "np", ".", "sort", "(", "x", ")", "cdfvals", "=", "cdf", "(", "x", ",", "*", "args", ")", "if", "(", "alternative", "in", "[", "'two_sided'", ",", "'greater'", "]", ")", ":", "Dplus", "=", "(", "(", "np", ".", "arange", "(", "1.0", ",", "(", "nobs", "+", "1", ")", ")", "/", "nobs", ")", "-", "cdfvals", ")", ".", "max", "(", ")", "if", "(", "alternative", "==", "'greater'", ")", ":", "return", "Dplus", "if", "(", "alternative", "in", "[", "'two_sided'", ",", "'less'", "]", ")", ":", "Dmin", "=", "(", "cdfvals", "-", "(", "np", ".", "arange", "(", "0.0", ",", "nobs", ")", "/", "nobs", ")", ")", ".", "max", "(", ")", "if", "(", "alternative", "==", "'less'", ")", ":", "return", "Dmin", "D", "=", "np", ".", "max", "(", "[", "Dplus", ",", "Dmin", "]", ")", "return", "D" ]
calculate statistic for the kolmogorov-smirnov test for goodness of fit this calculates the test statistic for a test of the distribution g(x) of an observed variable against a given distribution f(x) .
train
false
18,123
def int2mask(mask): return ((18446744069414584320L >> mask) & 4294967295L)
[ "def", "int2mask", "(", "mask", ")", ":", "return", "(", "(", "18446744069414584320", "L", ">>", "mask", ")", "&", "4294967295", "L", ")" ]
converts the number of bits set to 1 in a mask to the integer corresponding to the ip address of the mask (ip2int for 24) from scapy:utils .
train
false
18,124
def ids(probabilities): return [str(c) for c in np.argmax(probabilities, 1)]
[ "def", "ids", "(", "probabilities", ")", ":", "return", "[", "str", "(", "c", ")", "for", "c", "in", "np", ".", "argmax", "(", "probabilities", ",", "1", ")", "]" ]
returns a tuple of root_id .
train
false
18,125
def _walk_through(job_dir): serial = salt.payload.Serial(__opts__) for top in os.listdir(job_dir): t_path = os.path.join(job_dir, top) for final in os.listdir(t_path): load_path = os.path.join(t_path, final, LOAD_P) if (not os.path.isfile(load_path)): continue job = serial.load(salt.utils.fopen(load_path, 'rb')) jid = job['jid'] (yield (jid, job, t_path, final))
[ "def", "_walk_through", "(", "job_dir", ")", ":", "serial", "=", "salt", ".", "payload", ".", "Serial", "(", "__opts__", ")", "for", "top", "in", "os", ".", "listdir", "(", "job_dir", ")", ":", "t_path", "=", "os", ".", "path", ".", "join", "(", "job_dir", ",", "top", ")", "for", "final", "in", "os", ".", "listdir", "(", "t_path", ")", ":", "load_path", "=", "os", ".", "path", ".", "join", "(", "t_path", ",", "final", ",", "LOAD_P", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "load_path", ")", ")", ":", "continue", "job", "=", "serial", ".", "load", "(", "salt", ".", "utils", ".", "fopen", "(", "load_path", ",", "'rb'", ")", ")", "jid", "=", "job", "[", "'jid'", "]", "(", "yield", "(", "jid", ",", "job", ",", "t_path", ",", "final", ")", ")" ]
walk through the job dir and return jobs .
train
false
18,126
@register.tag def javascript(parser, token): try: (_, name) = token.split_contents() except ValueError: raise template.TemplateSyntaxError(('%r requires exactly one argument: the name of a group in the PIPELINE_JS setting' % token.split_contents()[0])) return ThemeJavascriptNode(name)
[ "@", "register", ".", "tag", "def", "javascript", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "_", ",", "name", ")", "=", "token", ".", "split_contents", "(", ")", "except", "ValueError", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'%r requires exactly one argument: the name of a group in the PIPELINE_JS setting'", "%", "token", ".", "split_contents", "(", ")", "[", "0", "]", ")", ")", "return", "ThemeJavascriptNode", "(", "name", ")" ]
template tag to serve javascript from django-pipeline .
train
false
18,127
def owner_of_lock(id): import pwd try: statinfo = os.lstat(_lock_file(id)) return pwd.getpwuid(statinfo.st_uid).pw_name except: return ''
[ "def", "owner_of_lock", "(", "id", ")", ":", "import", "pwd", "try", ":", "statinfo", "=", "os", ".", "lstat", "(", "_lock_file", "(", "id", ")", ")", "return", "pwd", ".", "getpwuid", "(", "statinfo", ".", "st_uid", ")", ".", "pw_name", "except", ":", "return", "''" ]
username that has locked the device id .
train
false
18,130
def get_order_from_categories(otu_table, category_labels): category_labels = np.array(category_labels) sample_order = [] for label in np.unique(category_labels): label_ix = (category_labels == label) selected = [s for (i, s) in zip(label_ix, otu_table.ids()) if i] sub_otu_table = filter_samples_from_otu_table(otu_table, selected, (- np.inf), np.inf) data = np.asarray(list(sub_otu_table.iter_data(axis='observation'))) label_ix_ix = get_clusters(data, axis='column') sample_order += list(np.nonzero(label_ix)[0][np.array(label_ix_ix)]) return np.array(sample_order)
[ "def", "get_order_from_categories", "(", "otu_table", ",", "category_labels", ")", ":", "category_labels", "=", "np", ".", "array", "(", "category_labels", ")", "sample_order", "=", "[", "]", "for", "label", "in", "np", ".", "unique", "(", "category_labels", ")", ":", "label_ix", "=", "(", "category_labels", "==", "label", ")", "selected", "=", "[", "s", "for", "(", "i", ",", "s", ")", "in", "zip", "(", "label_ix", ",", "otu_table", ".", "ids", "(", ")", ")", "if", "i", "]", "sub_otu_table", "=", "filter_samples_from_otu_table", "(", "otu_table", ",", "selected", ",", "(", "-", "np", ".", "inf", ")", ",", "np", ".", "inf", ")", "data", "=", "np", ".", "asarray", "(", "list", "(", "sub_otu_table", ".", "iter_data", "(", "axis", "=", "'observation'", ")", ")", ")", "label_ix_ix", "=", "get_clusters", "(", "data", ",", "axis", "=", "'column'", ")", "sample_order", "+=", "list", "(", "np", ".", "nonzero", "(", "label_ix", ")", "[", "0", "]", "[", "np", ".", "array", "(", "label_ix_ix", ")", "]", ")", "return", "np", ".", "array", "(", "sample_order", ")" ]
groups samples by category values; clusters within each group .
train
false
18,131
def volume_glance_metadata_delete_by_snapshot(context, snapshot_id): return IMPL.volume_glance_metadata_delete_by_snapshot(context, snapshot_id)
[ "def", "volume_glance_metadata_delete_by_snapshot", "(", "context", ",", "snapshot_id", ")", ":", "return", "IMPL", ".", "volume_glance_metadata_delete_by_snapshot", "(", "context", ",", "snapshot_id", ")" ]
delete the glance metadata for a snapshot .
train
false
18,132
def make_hashkey(seed): h = hashlib.md5() h.update(str(seed)) return h.hexdigest()
[ "def", "make_hashkey", "(", "seed", ")", ":", "h", "=", "hashlib", ".", "md5", "(", ")", "h", ".", "update", "(", "str", "(", "seed", ")", ")", "return", "h", ".", "hexdigest", "(", ")" ]
generate a string key by hashing .
train
false
18,133
def _serialize(value): if isinstance(value, float): return unicode(value).upper() elif isinstance(value, bool): return unicode(value).lower() return unicode(value)
[ "def", "_serialize", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "float", ")", ":", "return", "unicode", "(", "value", ")", ".", "upper", "(", ")", "elif", "isinstance", "(", "value", ",", "bool", ")", ":", "return", "unicode", "(", "value", ")", ".", "lower", "(", ")", "return", "unicode", "(", "value", ")" ]
serialization wrapper .
train
false
18,134
def filterBadReleases(name): try: fp = NameParser() parse_result = fp.parse(name) except InvalidNameException: logger.log(((u'Unable to parse the filename ' + name) + ' into a valid episode'), logger.WARNING) return False check_string = '' if parse_result.extra_info: check_string = parse_result.extra_info if parse_result.release_group: if check_string: check_string = ((check_string + '-') + parse_result.release_group) else: check_string = parse_result.release_group if (not check_string): return True for ignore_word in (resultFilters + sickbeard.IGNORE_WORDS.split(',')): ignore_word = ignore_word.strip() if ignore_word: if re.search((('(^|[\\W_])' + ignore_word) + '($|[\\W_])'), check_string, re.I): logger.log(((((u'Invalid scene release: ' + name) + ' contains ') + ignore_word) + ', ignoring it'), logger.DEBUG) return False return True
[ "def", "filterBadReleases", "(", "name", ")", ":", "try", ":", "fp", "=", "NameParser", "(", ")", "parse_result", "=", "fp", ".", "parse", "(", "name", ")", "except", "InvalidNameException", ":", "logger", ".", "log", "(", "(", "(", "u'Unable to parse the filename '", "+", "name", ")", "+", "' into a valid episode'", ")", ",", "logger", ".", "WARNING", ")", "return", "False", "check_string", "=", "''", "if", "parse_result", ".", "extra_info", ":", "check_string", "=", "parse_result", ".", "extra_info", "if", "parse_result", ".", "release_group", ":", "if", "check_string", ":", "check_string", "=", "(", "(", "check_string", "+", "'-'", ")", "+", "parse_result", ".", "release_group", ")", "else", ":", "check_string", "=", "parse_result", ".", "release_group", "if", "(", "not", "check_string", ")", ":", "return", "True", "for", "ignore_word", "in", "(", "resultFilters", "+", "sickbeard", ".", "IGNORE_WORDS", ".", "split", "(", "','", ")", ")", ":", "ignore_word", "=", "ignore_word", ".", "strip", "(", ")", "if", "ignore_word", ":", "if", "re", ".", "search", "(", "(", "(", "'(^|[\\\\W_])'", "+", "ignore_word", ")", "+", "'($|[\\\\W_])'", ")", ",", "check_string", ",", "re", ".", "I", ")", ":", "logger", ".", "log", "(", "(", "(", "(", "(", "u'Invalid scene release: '", "+", "name", ")", "+", "' contains '", ")", "+", "ignore_word", ")", "+", "', ignoring it'", ")", ",", "logger", ".", "DEBUG", ")", "return", "False", "return", "True" ]
filters out non-english and just all-around stupid releases by comparing them to the resultfilters contents .
train
false
18,135
def test_shipping_method_without_address(request_cart_with_item, client): response = client.get(reverse('checkout:shipping-method')) assert (response.status_code == 302) assert (get_redirect_location(response) == reverse('checkout:shipping-address'))
[ "def", "test_shipping_method_without_address", "(", "request_cart_with_item", ",", "client", ")", ":", "response", "=", "client", ".", "get", "(", "reverse", "(", "'checkout:shipping-method'", ")", ")", "assert", "(", "response", ".", "status_code", "==", "302", ")", "assert", "(", "get_redirect_location", "(", "response", ")", "==", "reverse", "(", "'checkout:shipping-address'", ")", ")" ]
user tries to get shipping method step without saved shipping address - if is redirected to shipping address step .
train
false
18,137
def test_nm3_sample_wrong_X(): nm3 = NearMiss(random_state=RND_SEED, version=VERSION_NEARMISS) nm3.fit(X, Y) assert_raises(RuntimeError, nm3.sample, np.random.random((100, 40)), np.array((([0] * 50) + ([1] * 50))))
[ "def", "test_nm3_sample_wrong_X", "(", ")", ":", "nm3", "=", "NearMiss", "(", "random_state", "=", "RND_SEED", ",", "version", "=", "VERSION_NEARMISS", ")", "nm3", ".", "fit", "(", "X", ",", "Y", ")", "assert_raises", "(", "RuntimeError", ",", "nm3", ".", "sample", ",", "np", ".", "random", ".", "random", "(", "(", "100", ",", "40", ")", ")", ",", "np", ".", "array", "(", "(", "(", "[", "0", "]", "*", "50", ")", "+", "(", "[", "1", "]", "*", "50", ")", ")", ")", ")" ]
test either if an error is raised when x is different at fitting and sampling .
train
false
18,138
@conf.commands.register def defragment(plist): frags = defaultdict((lambda : [])) final = [] pos = 0 for p in plist: p._defrag_pos = pos pos += 1 if (IP in p): ip = p[IP] if ((ip.frag != 0) or (ip.flags & 1)): ip = p[IP] uniq = (ip.id, ip.src, ip.dst, ip.proto) frags[uniq].append(p) continue final.append(p) defrag = [] missfrag = [] for lst in frags.itervalues(): lst.sort(key=(lambda x: x.frag)) p = lst[0] lastp = lst[(-1)] if ((p.frag > 0) or ((lastp.flags & 1) != 0)): missfrag += lst continue p = p.copy() if (conf.padding_layer in p): del p[conf.padding_layer].underlayer.payload ip = p[IP] if ((ip.len is None) or (ip.ihl is None)): clen = len(ip.payload) else: clen = (ip.len - (ip.ihl << 2)) txt = conf.raw_layer() for q in lst[1:]: if (clen != (q.frag << 3)): if (clen > (q.frag << 3)): warning(('Fragment overlap (%i > %i) %r || %r || %r' % (clen, (q.frag << 3), p, txt, q))) missfrag += lst break if ((q[IP].len is None) or (q[IP].ihl is None)): clen += len(q[IP].payload) else: clen += (q[IP].len - (q[IP].ihl << 2)) if (conf.padding_layer in q): del q[conf.padding_layer].underlayer.payload txt.add_payload(q[IP].payload.copy()) else: ip.flags &= (~ 1) del ip.chksum del ip.len p = (p / txt) p._defrag_pos = max((x._defrag_pos for x in lst)) defrag.append(p) defrag2 = [] for p in defrag: q = p.__class__(str(p)) q._defrag_pos = p._defrag_pos defrag2.append(q) final += defrag2 final += missfrag final.sort(key=(lambda x: x._defrag_pos)) for p in final: del p._defrag_pos if hasattr(plist, 'listname'): name = ('Defragmented %s' % plist.listname) else: name = 'Defragmented' return PacketList(final, name=name)
[ "@", "conf", ".", "commands", ".", "register", "def", "defragment", "(", "plist", ")", ":", "frags", "=", "defaultdict", "(", "(", "lambda", ":", "[", "]", ")", ")", "final", "=", "[", "]", "pos", "=", "0", "for", "p", "in", "plist", ":", "p", ".", "_defrag_pos", "=", "pos", "pos", "+=", "1", "if", "(", "IP", "in", "p", ")", ":", "ip", "=", "p", "[", "IP", "]", "if", "(", "(", "ip", ".", "frag", "!=", "0", ")", "or", "(", "ip", ".", "flags", "&", "1", ")", ")", ":", "ip", "=", "p", "[", "IP", "]", "uniq", "=", "(", "ip", ".", "id", ",", "ip", ".", "src", ",", "ip", ".", "dst", ",", "ip", ".", "proto", ")", "frags", "[", "uniq", "]", ".", "append", "(", "p", ")", "continue", "final", ".", "append", "(", "p", ")", "defrag", "=", "[", "]", "missfrag", "=", "[", "]", "for", "lst", "in", "frags", ".", "itervalues", "(", ")", ":", "lst", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", ".", "frag", ")", ")", "p", "=", "lst", "[", "0", "]", "lastp", "=", "lst", "[", "(", "-", "1", ")", "]", "if", "(", "(", "p", ".", "frag", ">", "0", ")", "or", "(", "(", "lastp", ".", "flags", "&", "1", ")", "!=", "0", ")", ")", ":", "missfrag", "+=", "lst", "continue", "p", "=", "p", ".", "copy", "(", ")", "if", "(", "conf", ".", "padding_layer", "in", "p", ")", ":", "del", "p", "[", "conf", ".", "padding_layer", "]", ".", "underlayer", ".", "payload", "ip", "=", "p", "[", "IP", "]", "if", "(", "(", "ip", ".", "len", "is", "None", ")", "or", "(", "ip", ".", "ihl", "is", "None", ")", ")", ":", "clen", "=", "len", "(", "ip", ".", "payload", ")", "else", ":", "clen", "=", "(", "ip", ".", "len", "-", "(", "ip", ".", "ihl", "<<", "2", ")", ")", "txt", "=", "conf", ".", "raw_layer", "(", ")", "for", "q", "in", "lst", "[", "1", ":", "]", ":", "if", "(", "clen", "!=", "(", "q", ".", "frag", "<<", "3", ")", ")", ":", "if", "(", "clen", ">", "(", "q", ".", "frag", "<<", "3", ")", ")", ":", "warning", "(", "(", "'Fragment overlap (%i > %i) %r || %r || %r'", "%", "(", "clen", ",", "(", 
"q", ".", "frag", "<<", "3", ")", ",", "p", ",", "txt", ",", "q", ")", ")", ")", "missfrag", "+=", "lst", "break", "if", "(", "(", "q", "[", "IP", "]", ".", "len", "is", "None", ")", "or", "(", "q", "[", "IP", "]", ".", "ihl", "is", "None", ")", ")", ":", "clen", "+=", "len", "(", "q", "[", "IP", "]", ".", "payload", ")", "else", ":", "clen", "+=", "(", "q", "[", "IP", "]", ".", "len", "-", "(", "q", "[", "IP", "]", ".", "ihl", "<<", "2", ")", ")", "if", "(", "conf", ".", "padding_layer", "in", "q", ")", ":", "del", "q", "[", "conf", ".", "padding_layer", "]", ".", "underlayer", ".", "payload", "txt", ".", "add_payload", "(", "q", "[", "IP", "]", ".", "payload", ".", "copy", "(", ")", ")", "else", ":", "ip", ".", "flags", "&=", "(", "~", "1", ")", "del", "ip", ".", "chksum", "del", "ip", ".", "len", "p", "=", "(", "p", "/", "txt", ")", "p", ".", "_defrag_pos", "=", "max", "(", "(", "x", ".", "_defrag_pos", "for", "x", "in", "lst", ")", ")", "defrag", ".", "append", "(", "p", ")", "defrag2", "=", "[", "]", "for", "p", "in", "defrag", ":", "q", "=", "p", ".", "__class__", "(", "str", "(", "p", ")", ")", "q", ".", "_defrag_pos", "=", "p", ".", "_defrag_pos", "defrag2", ".", "append", "(", "q", ")", "final", "+=", "defrag2", "final", "+=", "missfrag", "final", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", ".", "_defrag_pos", ")", ")", "for", "p", "in", "final", ":", "del", "p", ".", "_defrag_pos", "if", "hasattr", "(", "plist", ",", "'listname'", ")", ":", "name", "=", "(", "'Defragmented %s'", "%", "plist", ".", "listname", ")", "else", ":", "name", "=", "'Defragmented'", "return", "PacketList", "(", "final", ",", "name", "=", "name", ")" ]
defragment mounted btrfs filesystem .
train
false
18,139
def save_draft(crispin_client, account_id, message_id, args): with session_scope(account_id) as db_session: message = db_session.query(Message).get(message_id) version = args.get('version') if (message is None): log.info('tried to save nonexistent message as draft', message_id=message_id, account_id=account_id) return if (not message.is_draft): log.warning('tried to save non-draft message as draft', message_id=message_id, account_id=account_id) return if (version != message.version): log.warning('tried to save outdated version of draft') return remote_save_draft(crispin_client, account_id, message_id)
[ "def", "save_draft", "(", "crispin_client", ",", "account_id", ",", "message_id", ",", "args", ")", ":", "with", "session_scope", "(", "account_id", ")", "as", "db_session", ":", "message", "=", "db_session", ".", "query", "(", "Message", ")", ".", "get", "(", "message_id", ")", "version", "=", "args", ".", "get", "(", "'version'", ")", "if", "(", "message", "is", "None", ")", ":", "log", ".", "info", "(", "'tried to save nonexistent message as draft'", ",", "message_id", "=", "message_id", ",", "account_id", "=", "account_id", ")", "return", "if", "(", "not", "message", ".", "is_draft", ")", ":", "log", ".", "warning", "(", "'tried to save non-draft message as draft'", ",", "message_id", "=", "message_id", ",", "account_id", "=", "account_id", ")", "return", "if", "(", "version", "!=", "message", ".", "version", ")", ":", "log", ".", "warning", "(", "'tried to save outdated version of draft'", ")", "return", "remote_save_draft", "(", "crispin_client", ",", "account_id", ",", "message_id", ")" ]
sync a new draft back to the remote backend .
train
false
18,142
def check_next_url(next): if ((not next) or ('://' in next)): return None return next
[ "def", "check_next_url", "(", "next", ")", ":", "if", "(", "(", "not", "next", ")", "or", "(", "'://'", "in", "next", ")", ")", ":", "return", "None", "return", "next" ]
checks to make sure the next url is not redirecting to another page .
train
false
18,143
def remove_source(zone, source, permanent=True): if (source not in get_sources(zone, permanent)): log.info('Source is not bound to zone.') cmd = '--zone={0} --remove-source={1}'.format(zone, source) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd)
[ "def", "remove_source", "(", "zone", ",", "source", ",", "permanent", "=", "True", ")", ":", "if", "(", "source", "not", "in", "get_sources", "(", "zone", ",", "permanent", ")", ")", ":", "log", ".", "info", "(", "'Source is not bound to zone.'", ")", "cmd", "=", "'--zone={0} --remove-source={1}'", ".", "format", "(", "zone", ",", "source", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", "__firewall_cmd", "(", "cmd", ")" ]
remove a source bound to a zone .
train
true
18,145
def _get_prototype(dic, prot, protparents): if ('prototype' in dic): for prototype in make_iter(dic['prototype']): new_prot = _get_prototype(protparents.get(prototype, {}), prot, protparents) prot.update(new_prot) prot.update(dic) prot.pop('prototype', None) return prot
[ "def", "_get_prototype", "(", "dic", ",", "prot", ",", "protparents", ")", ":", "if", "(", "'prototype'", "in", "dic", ")", ":", "for", "prototype", "in", "make_iter", "(", "dic", "[", "'prototype'", "]", ")", ":", "new_prot", "=", "_get_prototype", "(", "protparents", ".", "get", "(", "prototype", ",", "{", "}", ")", ",", "prot", ",", "protparents", ")", "prot", ".", "update", "(", "new_prot", ")", "prot", ".", "update", "(", "dic", ")", "prot", ".", "pop", "(", "'prototype'", ",", "None", ")", "return", "prot" ]
recursively traverse a prototype dictionary .
train
false
18,147
def three_way_cmp(x, y): return ((x > y) - (x < y))
[ "def", "three_way_cmp", "(", "x", ",", "y", ")", ":", "return", "(", "(", "x", ">", "y", ")", "-", "(", "x", "<", "y", ")", ")" ]
return -1 if x < y .
train
false
18,148
def create_substitution_leaf(leaf, new_elements, new_model=None, internal=False): if (new_model is None): new_model = leaf.model new_join_context = [tuple(context) for context in leaf.join_context] new_leaf = ExtendedLeaf(new_elements, new_model, join_context=new_join_context, internal=internal) return new_leaf
[ "def", "create_substitution_leaf", "(", "leaf", ",", "new_elements", ",", "new_model", "=", "None", ",", "internal", "=", "False", ")", ":", "if", "(", "new_model", "is", "None", ")", ":", "new_model", "=", "leaf", ".", "model", "new_join_context", "=", "[", "tuple", "(", "context", ")", "for", "context", "in", "leaf", ".", "join_context", "]", "new_leaf", "=", "ExtendedLeaf", "(", "new_elements", ",", "new_model", ",", "join_context", "=", "new_join_context", ",", "internal", "=", "internal", ")", "return", "new_leaf" ]
from a leaf .
train
false
18,149
def _save_state(state): try: with open(config['statefile'].as_filename(), 'w') as f: pickle.dump(state, f) except IOError as exc: log.error(u'state file could not be written: {0}'.format(exc))
[ "def", "_save_state", "(", "state", ")", ":", "try", ":", "with", "open", "(", "config", "[", "'statefile'", "]", ".", "as_filename", "(", ")", ",", "'w'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "state", ",", "f", ")", "except", "IOError", "as", "exc", ":", "log", ".", "error", "(", "u'state file could not be written: {0}'", ".", "format", "(", "exc", ")", ")" ]
writes the state dictionary out to disk .
train
false
18,150
def test_detect_screen_size(): try: page._detect_screen_size(True, 25) except (TypeError, io.UnsupportedOperation): pass
[ "def", "test_detect_screen_size", "(", ")", ":", "try", ":", "page", ".", "_detect_screen_size", "(", "True", ",", "25", ")", "except", "(", "TypeError", ",", "io", ".", "UnsupportedOperation", ")", ":", "pass" ]
simple smoketest for page .
train
false
18,152
def _ExtractCLPath(output_of_where): for line in output_of_where.strip().splitlines(): if line.startswith('LOC:'): return line[len('LOC:'):].strip()
[ "def", "_ExtractCLPath", "(", "output_of_where", ")", ":", "for", "line", "in", "output_of_where", ".", "strip", "(", ")", ".", "splitlines", "(", ")", ":", "if", "line", ".", "startswith", "(", "'LOC:'", ")", ":", "return", "line", "[", "len", "(", "'LOC:'", ")", ":", "]", ".", "strip", "(", ")" ]
gets the path to cl .
train
false
18,153
def change_queue_complete_action(action, new=True): global QUEUECOMPLETE, QUEUECOMPLETEACTION, QUEUECOMPLETEARG _action = None _argument = None if ('script_' in action): _action = run_script _argument = action.replace('script_', '') elif (new or cfg.queue_complete_pers.get()): if (action == 'shutdown_pc'): _action = system_shutdown elif (action == 'hibernate_pc'): _action = system_hibernate elif (action == 'standby_pc'): _action = system_standby elif (action == 'shutdown_program'): _action = shutdown_program else: action = None else: action = None if new: cfg.queue_complete.set((action or '')) config.save_config() QUEUECOMPLETE = action QUEUECOMPLETEACTION = _action QUEUECOMPLETEARG = _argument
[ "def", "change_queue_complete_action", "(", "action", ",", "new", "=", "True", ")", ":", "global", "QUEUECOMPLETE", ",", "QUEUECOMPLETEACTION", ",", "QUEUECOMPLETEARG", "_action", "=", "None", "_argument", "=", "None", "if", "(", "'script_'", "in", "action", ")", ":", "_action", "=", "run_script", "_argument", "=", "action", ".", "replace", "(", "'script_'", ",", "''", ")", "elif", "(", "new", "or", "cfg", ".", "queue_complete_pers", ".", "get", "(", ")", ")", ":", "if", "(", "action", "==", "'shutdown_pc'", ")", ":", "_action", "=", "system_shutdown", "elif", "(", "action", "==", "'hibernate_pc'", ")", ":", "_action", "=", "system_hibernate", "elif", "(", "action", "==", "'standby_pc'", ")", ":", "_action", "=", "system_standby", "elif", "(", "action", "==", "'shutdown_program'", ")", ":", "_action", "=", "shutdown_program", "else", ":", "action", "=", "None", "else", ":", "action", "=", "None", "if", "new", ":", "cfg", ".", "queue_complete", ".", "set", "(", "(", "action", "or", "''", ")", ")", "config", ".", "save_config", "(", ")", "QUEUECOMPLETE", "=", "action", "QUEUECOMPLETEACTION", "=", "_action", "QUEUECOMPLETEARG", "=", "_argument" ]
action or script to be performed once the queue has been completed scripts are prefixed with script_ when "new" is false .
train
false
18,154
def _atomic(e): from sympy import Derivative, Function, Symbol pot = preorder_traversal(e) seen = set() try: free = e.free_symbols except AttributeError: return {e} atoms = set() for p in pot: if (p in seen): pot.skip() continue seen.add(p) if (isinstance(p, Symbol) and (p in free)): atoms.add(p) elif isinstance(p, (Derivative, Function)): pot.skip() atoms.add(p) return atoms
[ "def", "_atomic", "(", "e", ")", ":", "from", "sympy", "import", "Derivative", ",", "Function", ",", "Symbol", "pot", "=", "preorder_traversal", "(", "e", ")", "seen", "=", "set", "(", ")", "try", ":", "free", "=", "e", ".", "free_symbols", "except", "AttributeError", ":", "return", "{", "e", "}", "atoms", "=", "set", "(", ")", "for", "p", "in", "pot", ":", "if", "(", "p", "in", "seen", ")", ":", "pot", ".", "skip", "(", ")", "continue", "seen", ".", "add", "(", "p", ")", "if", "(", "isinstance", "(", "p", ",", "Symbol", ")", "and", "(", "p", "in", "free", ")", ")", ":", "atoms", ".", "add", "(", "p", ")", "elif", "isinstance", "(", "p", ",", "(", "Derivative", ",", "Function", ")", ")", ":", "pot", ".", "skip", "(", ")", "atoms", ".", "add", "(", "p", ")", "return", "atoms" ]
return atom-like quantities as far as substitution is concerned: derivatives .
train
false
18,156
def unique_id_for_user(user, save=True): return anonymous_id_for_user(user, None, save=save)
[ "def", "unique_id_for_user", "(", "user", ",", "save", "=", "True", ")", ":", "return", "anonymous_id_for_user", "(", "user", ",", "None", ",", "save", "=", "save", ")" ]
return a unique id for a user .
train
false
18,157
def get_instances_with_cached_ips(orig_func, *args, **kwargs): instances = orig_func(*args, **kwargs) if isinstance(instances, list): for instance in instances: instance['info_cache'] = {'network_info': get_fake_cache()} else: instances['info_cache'] = {'network_info': get_fake_cache()} return instances
[ "def", "get_instances_with_cached_ips", "(", "orig_func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "instances", "=", "orig_func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "instances", ",", "list", ")", ":", "for", "instance", "in", "instances", ":", "instance", "[", "'info_cache'", "]", "=", "{", "'network_info'", ":", "get_fake_cache", "(", ")", "}", "else", ":", "instances", "[", "'info_cache'", "]", "=", "{", "'network_info'", ":", "get_fake_cache", "(", ")", "}", "return", "instances" ]
kludge the cache into instance(s) without having to create db entries .
train
false
18,158
def _get_marker_param(params): return params.pop('marker', None)
[ "def", "_get_marker_param", "(", "params", ")", ":", "return", "params", ".", "pop", "(", "'marker'", ",", "None", ")" ]
extract marker id from request or fail .
train
false
18,159
def log_coach_report_view(request): if ('facility_user' in request.session): try: user = request.session['facility_user'] UserLog.begin_user_activity(user, activity_type='coachreport') UserLog.update_user_activity(user, activity_type='login') UserLog.end_user_activity(user, activity_type='coachreport') except ValidationError as e: logging.error(('Failed to update Teacher userlog activity login: %s' % e))
[ "def", "log_coach_report_view", "(", "request", ")", ":", "if", "(", "'facility_user'", "in", "request", ".", "session", ")", ":", "try", ":", "user", "=", "request", ".", "session", "[", "'facility_user'", "]", "UserLog", ".", "begin_user_activity", "(", "user", ",", "activity_type", "=", "'coachreport'", ")", "UserLog", ".", "update_user_activity", "(", "user", ",", "activity_type", "=", "'login'", ")", "UserLog", ".", "end_user_activity", "(", "user", ",", "activity_type", "=", "'coachreport'", ")", "except", "ValidationError", "as", "e", ":", "logging", ".", "error", "(", "(", "'Failed to update Teacher userlog activity login: %s'", "%", "e", ")", ")" ]
record coach report view by teacher .
train
false
18,160
def intersects(iterable1, iterable2): return (find((lambda x: (x in iterable1)), iterable2) is not None)
[ "def", "intersects", "(", "iterable1", ",", "iterable2", ")", ":", "return", "(", "find", "(", "(", "lambda", "x", ":", "(", "x", "in", "iterable1", ")", ")", ",", "iterable2", ")", "is", "not", "None", ")" ]
returns true if the given lists have at least one item in common .
train
false
18,161
def registered_extensions(): if (not bool(EXTENSION)): init() return EXTENSION
[ "def", "registered_extensions", "(", ")", ":", "if", "(", "not", "bool", "(", "EXTENSION", ")", ")", ":", "init", "(", ")", "return", "EXTENSION" ]
returns a dictionary containing all file extensions belonging to registered plugins .
train
false
18,162
def GetProxyConfig(http_host=None, http_port=None, https_host=None, https_port=None, cafile=None, disable_certificate_validation=None): http_proxy = None https_proxy = None if http_host: http_proxy = googleads.common.ProxyConfig.Proxy(http_host, http_port) if https_host: https_proxy = googleads.common.ProxyConfig.Proxy(https_host, https_port) return googleads.common.ProxyConfig(http_proxy, https_proxy, cafile=cafile, disable_certificate_validation=disable_certificate_validation)
[ "def", "GetProxyConfig", "(", "http_host", "=", "None", ",", "http_port", "=", "None", ",", "https_host", "=", "None", ",", "https_port", "=", "None", ",", "cafile", "=", "None", ",", "disable_certificate_validation", "=", "None", ")", ":", "http_proxy", "=", "None", "https_proxy", "=", "None", "if", "http_host", ":", "http_proxy", "=", "googleads", ".", "common", ".", "ProxyConfig", ".", "Proxy", "(", "http_host", ",", "http_port", ")", "if", "https_host", ":", "https_proxy", "=", "googleads", ".", "common", ".", "ProxyConfig", ".", "Proxy", "(", "https_host", ",", "https_port", ")", "return", "googleads", ".", "common", ".", "ProxyConfig", "(", "http_proxy", ",", "https_proxy", ",", "cafile", "=", "cafile", ",", "disable_certificate_validation", "=", "disable_certificate_validation", ")" ]
returns an initialized proxyconfig for use in testing .
train
false
18,165
def serializeSdr(sdr): return ''.join((str(bit) for bit in sdr))
[ "def", "serializeSdr", "(", "sdr", ")", ":", "return", "''", ".", "join", "(", "(", "str", "(", "bit", ")", "for", "bit", "in", "sdr", ")", ")" ]
serialize python list object containing only 0s and 1s to string .
train
false
18,166
def test_pad_input(): template = (0.5 * diamond(2)) image = (0.5 * np.ones((9, 19))) mid = slice(2, 7) image[mid, :3] -= template[:, (-3):] image[mid, 4:9] += template image[mid, (-9):(-4)] -= template image[mid, (-3):] += template[:, :3] result = match_template(image, template, pad_input=True, constant_values=image.mean()) sorted_result = np.argsort(result.flat) (i, j) = np.unravel_index(sorted_result[:2], result.shape) assert_equal(j, (12, 0)) (i, j) = np.unravel_index(sorted_result[(-2):], result.shape) assert_equal(j, (18, 6))
[ "def", "test_pad_input", "(", ")", ":", "template", "=", "(", "0.5", "*", "diamond", "(", "2", ")", ")", "image", "=", "(", "0.5", "*", "np", ".", "ones", "(", "(", "9", ",", "19", ")", ")", ")", "mid", "=", "slice", "(", "2", ",", "7", ")", "image", "[", "mid", ",", ":", "3", "]", "-=", "template", "[", ":", ",", "(", "-", "3", ")", ":", "]", "image", "[", "mid", ",", "4", ":", "9", "]", "+=", "template", "image", "[", "mid", ",", "(", "-", "9", ")", ":", "(", "-", "4", ")", "]", "-=", "template", "image", "[", "mid", ",", "(", "-", "3", ")", ":", "]", "+=", "template", "[", ":", ",", ":", "3", "]", "result", "=", "match_template", "(", "image", ",", "template", ",", "pad_input", "=", "True", ",", "constant_values", "=", "image", ".", "mean", "(", ")", ")", "sorted_result", "=", "np", ".", "argsort", "(", "result", ".", "flat", ")", "(", "i", ",", "j", ")", "=", "np", ".", "unravel_index", "(", "sorted_result", "[", ":", "2", "]", ",", "result", ".", "shape", ")", "assert_equal", "(", "j", ",", "(", "12", ",", "0", ")", ")", "(", "i", ",", "j", ")", "=", "np", ".", "unravel_index", "(", "sorted_result", "[", "(", "-", "2", ")", ":", "]", ",", "result", ".", "shape", ")", "assert_equal", "(", "j", ",", "(", "18", ",", "6", ")", ")" ]
test match_template when pad_input=true .
train
false
18,167
def set_sync_mode(noinput=False): from .core.utils.redis_rq import rq_workers_are_running if rq_workers_are_running(): redis_warning = '\nYou currently have RQ workers running.\n\nRunning in synchronous mode may conflict with jobs that are dispatched to your workers.\n\nIt is safer to stop any workers before using synchronous commands.\n\n' if noinput: print(('Warning: %s' % redis_warning)) else: resp = input(('%sDo you wish to proceed? [Ny] ' % redis_warning)) if (resp not in ('y', 'yes')): print('RQ workers running, not proceeding.') exit(2) for q in settings.RQ_QUEUES.itervalues(): q['ASYNC'] = False
[ "def", "set_sync_mode", "(", "noinput", "=", "False", ")", ":", "from", ".", "core", ".", "utils", ".", "redis_rq", "import", "rq_workers_are_running", "if", "rq_workers_are_running", "(", ")", ":", "redis_warning", "=", "'\\nYou currently have RQ workers running.\\n\\nRunning in synchronous mode may conflict with jobs that are dispatched to your workers.\\n\\nIt is safer to stop any workers before using synchronous commands.\\n\\n'", "if", "noinput", ":", "print", "(", "(", "'Warning: %s'", "%", "redis_warning", ")", ")", "else", ":", "resp", "=", "input", "(", "(", "'%sDo you wish to proceed? [Ny] '", "%", "redis_warning", ")", ")", "if", "(", "resp", "not", "in", "(", "'y'", ",", "'yes'", ")", ")", ":", "print", "(", "'RQ workers running, not proceeding.'", ")", "exit", "(", "2", ")", "for", "q", "in", "settings", ".", "RQ_QUEUES", ".", "itervalues", "(", ")", ":", "q", "[", "'ASYNC'", "]", "=", "False" ]
sets async = false on all redis worker queues .
train
false
18,168
def test_git_require_branch(): from fabtools.require.git import working_copy try: working_copy(REMOTE_URL, path='wc', branch='test_git') assert is_dir('wc') assert is_dir('wc/.git') with cd('wc'): remotes = run('git remote -v') assert (remotes == 'origin DCTB https://github.com/disko/fabtools.git (fetch)\r\norigin DCTB https://github.com/disko/fabtools.git (push)') assert (_current_branch() == 'test_git') finally: run('rm -rf wc')
[ "def", "test_git_require_branch", "(", ")", ":", "from", "fabtools", ".", "require", ".", "git", "import", "working_copy", "try", ":", "working_copy", "(", "REMOTE_URL", ",", "path", "=", "'wc'", ",", "branch", "=", "'test_git'", ")", "assert", "is_dir", "(", "'wc'", ")", "assert", "is_dir", "(", "'wc/.git'", ")", "with", "cd", "(", "'wc'", ")", ":", "remotes", "=", "run", "(", "'git remote -v'", ")", "assert", "(", "remotes", "==", "'origin DCTB https://github.com/disko/fabtools.git (fetch)\\r\\norigin DCTB https://github.com/disko/fabtools.git (push)'", ")", "assert", "(", "_current_branch", "(", ")", "==", "'test_git'", ")", "finally", ":", "run", "(", "'rm -rf wc'", ")" ]
test checkout of a branch .
train
false
18,170
def _populators(mapper, context, path, row, adapter, new_populators, existing_populators, eager_populators): delayed_populators = [] pops = (new_populators, existing_populators, delayed_populators, eager_populators) for prop in mapper._props.values(): for (i, pop) in enumerate(prop.create_row_processor(context, path, mapper, row, adapter)): if (pop is not None): pops[i].append((prop.key, pop)) if delayed_populators: new_populators.extend(delayed_populators)
[ "def", "_populators", "(", "mapper", ",", "context", ",", "path", ",", "row", ",", "adapter", ",", "new_populators", ",", "existing_populators", ",", "eager_populators", ")", ":", "delayed_populators", "=", "[", "]", "pops", "=", "(", "new_populators", ",", "existing_populators", ",", "delayed_populators", ",", "eager_populators", ")", "for", "prop", "in", "mapper", ".", "_props", ".", "values", "(", ")", ":", "for", "(", "i", ",", "pop", ")", "in", "enumerate", "(", "prop", ".", "create_row_processor", "(", "context", ",", "path", ",", "mapper", ",", "row", ",", "adapter", ")", ")", ":", "if", "(", "pop", "is", "not", "None", ")", ":", "pops", "[", "i", "]", ".", "append", "(", "(", "prop", ".", "key", ",", "pop", ")", ")", "if", "delayed_populators", ":", "new_populators", ".", "extend", "(", "delayed_populators", ")" ]
produce a collection of attribute level row processor callables .
train
false
18,171
def wsgi_soap11_application(services, tns='spyne.simple.soap', validator=None, name=None): from spyne.protocol.soap import Soap11 from spyne.server.wsgi import WsgiApplication application = Application(services, tns, name=name, in_protocol=Soap11(validator=validator), out_protocol=Soap11()) return WsgiApplication(application)
[ "def", "wsgi_soap11_application", "(", "services", ",", "tns", "=", "'spyne.simple.soap'", ",", "validator", "=", "None", ",", "name", "=", "None", ")", ":", "from", "spyne", ".", "protocol", ".", "soap", "import", "Soap11", "from", "spyne", ".", "server", ".", "wsgi", "import", "WsgiApplication", "application", "=", "Application", "(", "services", ",", "tns", ",", "name", "=", "name", ",", "in_protocol", "=", "Soap11", "(", "validator", "=", "validator", ")", ",", "out_protocol", "=", "Soap11", "(", ")", ")", "return", "WsgiApplication", "(", "application", ")" ]
wraps services argument inside a wsgiapplication that uses soap 1 .
train
false
18,173
def GetMountpoints(data=None): expiry = 60 insert_time = MOUNTPOINT_CACHE[0] if ((insert_time + expiry) > time.time()): return MOUNTPOINT_CACHE[1] devices = {} if (data is None): data = '\n'.join([open(x, 'rb').read() for x in ['/proc/mounts', '/etc/mtab']]) for line in data.splitlines(): try: (device, mnt_point, fs_type, _) = line.split(' ', 3) mnt_point = os.path.normpath(mnt_point) devices[mnt_point] = (device, fs_type) except ValueError: pass MOUNTPOINT_CACHE[0] = time.time() MOUNTPOINT_CACHE[1] = devices return devices
[ "def", "GetMountpoints", "(", "data", "=", "None", ")", ":", "expiry", "=", "60", "insert_time", "=", "MOUNTPOINT_CACHE", "[", "0", "]", "if", "(", "(", "insert_time", "+", "expiry", ")", ">", "time", ".", "time", "(", ")", ")", ":", "return", "MOUNTPOINT_CACHE", "[", "1", "]", "devices", "=", "{", "}", "if", "(", "data", "is", "None", ")", ":", "data", "=", "'\\n'", ".", "join", "(", "[", "open", "(", "x", ",", "'rb'", ")", ".", "read", "(", ")", "for", "x", "in", "[", "'/proc/mounts'", ",", "'/etc/mtab'", "]", "]", ")", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "try", ":", "(", "device", ",", "mnt_point", ",", "fs_type", ",", "_", ")", "=", "line", ".", "split", "(", "' '", ",", "3", ")", "mnt_point", "=", "os", ".", "path", ".", "normpath", "(", "mnt_point", ")", "devices", "[", "mnt_point", "]", "=", "(", "device", ",", "fs_type", ")", "except", "ValueError", ":", "pass", "MOUNTPOINT_CACHE", "[", "0", "]", "=", "time", ".", "time", "(", ")", "MOUNTPOINT_CACHE", "[", "1", "]", "=", "devices", "return", "devices" ]
list all the filesystems mounted on the system .
train
true
18,174
def get_file_chunks_in_range(context, filediff, interfilediff, first_line, num_lines): f = get_file_from_filediff(context, filediff, interfilediff) if f: return get_chunks_in_range(f[u'chunks'], first_line, num_lines) else: return []
[ "def", "get_file_chunks_in_range", "(", "context", ",", "filediff", ",", "interfilediff", ",", "first_line", ",", "num_lines", ")", ":", "f", "=", "get_file_from_filediff", "(", "context", ",", "filediff", ",", "interfilediff", ")", "if", "f", ":", "return", "get_chunks_in_range", "(", "f", "[", "u'chunks'", "]", ",", "first_line", ",", "num_lines", ")", "else", ":", "return", "[", "]" ]
generate the chunks within a range of lines in the specified filediff .
train
false
18,175
def flavor_destroy(context, flavor_id): return IMPL.flavor_destroy(context, flavor_id)
[ "def", "flavor_destroy", "(", "context", ",", "flavor_id", ")", ":", "return", "IMPL", ".", "flavor_destroy", "(", "context", ",", "flavor_id", ")" ]
marks specific flavor as deleted .
train
false
18,176
def setIDFunction(idFunction): global _idFunction oldIDFunction = _idFunction _idFunction = idFunction return oldIDFunction
[ "def", "setIDFunction", "(", "idFunction", ")", ":", "global", "_idFunction", "oldIDFunction", "=", "_idFunction", "_idFunction", "=", "idFunction", "return", "oldIDFunction" ]
change the function used by l{unsignedid} to determine the integer id value of an object .
train
false
18,177
def clean_sort_param_es(request, date_sort='created'): sort_map = {'name': 'name_sort', 'nomination': 'latest_version.nomination_date'} sort = request.GET.get('sort', date_sort) order = request.GET.get('order', 'asc') if (sort not in ('name', 'created', 'nomination')): sort = date_sort sort = sort_map.get(sort, date_sort) if (order not in ('desc', 'asc')): order = 'asc' return (sort, order)
[ "def", "clean_sort_param_es", "(", "request", ",", "date_sort", "=", "'created'", ")", ":", "sort_map", "=", "{", "'name'", ":", "'name_sort'", ",", "'nomination'", ":", "'latest_version.nomination_date'", "}", "sort", "=", "request", ".", "GET", ".", "get", "(", "'sort'", ",", "date_sort", ")", "order", "=", "request", ".", "GET", ".", "get", "(", "'order'", ",", "'asc'", ")", "if", "(", "sort", "not", "in", "(", "'name'", ",", "'created'", ",", "'nomination'", ")", ")", ":", "sort", "=", "date_sort", "sort", "=", "sort_map", ".", "get", "(", "sort", ",", "date_sort", ")", "if", "(", "order", "not", "in", "(", "'desc'", ",", "'asc'", ")", ")", ":", "order", "=", "'asc'", "return", "(", "sort", ",", "order", ")" ]
handles empty and invalid values for sort and sort order .
train
false
18,178
def _dirtyPatches(): httplib._MAXLINE = ((1 * 1024) * 1024)
[ "def", "_dirtyPatches", "(", ")", ":", "httplib", ".", "_MAXLINE", "=", "(", "(", "1", "*", "1024", ")", "*", "1024", ")" ]
place for "dirty" python related patches .
train
false
18,180
@_sa_util.deprecated('0.7', message=':func:`.compile_mappers` is renamed to :func:`.configure_mappers`') def compile_mappers(): configure_mappers()
[ "@", "_sa_util", ".", "deprecated", "(", "'0.7'", ",", "message", "=", "':func:`.compile_mappers` is renamed to :func:`.configure_mappers`'", ")", "def", "compile_mappers", "(", ")", ":", "configure_mappers", "(", ")" ]
initialize the inter-mapper relationships of all mappers that have been defined .
train
false
18,181
def delete_conference(id, user): if is_owner_user(id, user): conferences = get_memcached(get_key('conferences')) del conferences[id] set_memcached(get_key('conferences'), conferences) return get_new_message_for_user(user)
[ "def", "delete_conference", "(", "id", ",", "user", ")", ":", "if", "is_owner_user", "(", "id", ",", "user", ")", ":", "conferences", "=", "get_memcached", "(", "get_key", "(", "'conferences'", ")", ")", "del", "conferences", "[", "id", "]", "set_memcached", "(", "get_key", "(", "'conferences'", ")", ",", "conferences", ")", "return", "get_new_message_for_user", "(", "user", ")" ]
delete conference .
train
false
18,182
def rsa_crt_dmp1(private_exponent, p): return (private_exponent % (p - 1))
[ "def", "rsa_crt_dmp1", "(", "private_exponent", ",", "p", ")", ":", "return", "(", "private_exponent", "%", "(", "p", "-", "1", ")", ")" ]
compute the crt private_exponent % value from the rsa private_exponent (d) and p .
train
false
18,183
def _retryable_sailthru_error(error): code = error.get_error_code() return ((code == 9) or (code == 43))
[ "def", "_retryable_sailthru_error", "(", "error", ")", ":", "code", "=", "error", ".", "get_error_code", "(", ")", "return", "(", "(", "code", "==", "9", ")", "or", "(", "code", "==", "43", ")", ")" ]
return true if error should be retried .
train
false
18,185
def init_python_session(): from code import InteractiveConsole class SymPyConsole(InteractiveConsole, ): 'An interactive console with readline support. ' def __init__(self): InteractiveConsole.__init__(self) try: import readline except ImportError: pass else: import os import atexit readline.parse_and_bind('tab: complete') if hasattr(readline, 'read_history_file'): history = os.path.expanduser('~/.sympy-history') try: readline.read_history_file(history) except IOError: pass atexit.register(readline.write_history_file, history) return SymPyConsole()
[ "def", "init_python_session", "(", ")", ":", "from", "code", "import", "InteractiveConsole", "class", "SymPyConsole", "(", "InteractiveConsole", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "InteractiveConsole", ".", "__init__", "(", "self", ")", "try", ":", "import", "readline", "except", "ImportError", ":", "pass", "else", ":", "import", "os", "import", "atexit", "readline", ".", "parse_and_bind", "(", "'tab: complete'", ")", "if", "hasattr", "(", "readline", ",", "'read_history_file'", ")", ":", "history", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.sympy-history'", ")", "try", ":", "readline", ".", "read_history_file", "(", "history", ")", "except", "IOError", ":", "pass", "atexit", ".", "register", "(", "readline", ".", "write_history_file", ",", "history", ")", "return", "SymPyConsole", "(", ")" ]
construct new python session .
train
false
18,186
def campaign_response_summary(): return s3_rest_controller()
[ "def", "campaign_response_summary", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
18,187
def create_error_model(model_name, context): return error_models[model_name](context.call_conv)
[ "def", "create_error_model", "(", "model_name", ",", "context", ")", ":", "return", "error_models", "[", "model_name", "]", "(", "context", ".", "call_conv", ")" ]
create an error model instance for the given target context .
train
false
18,188
def cat_sff_files(list_of_file_handles): if (list_of_file_handles == []): return ([], None) try: flowgrams_and_headers = map(lazy_parse_sff_handle, list_of_file_handles) except ValueError: raise FileFormatError(('Wrong flogram file format. Make sure you pass the sff.txt format ' + 'produced by sffinfo. The binary .sff will not work here.')) flowgram_iterators = [a for (a, b) in flowgrams_and_headers] return (chain(*flowgram_iterators), flowgrams_and_headers[0][1])
[ "def", "cat_sff_files", "(", "list_of_file_handles", ")", ":", "if", "(", "list_of_file_handles", "==", "[", "]", ")", ":", "return", "(", "[", "]", ",", "None", ")", "try", ":", "flowgrams_and_headers", "=", "map", "(", "lazy_parse_sff_handle", ",", "list_of_file_handles", ")", "except", "ValueError", ":", "raise", "FileFormatError", "(", "(", "'Wrong flogram file format. Make sure you pass the sff.txt format '", "+", "'produced by sffinfo. The binary .sff will not work here.'", ")", ")", "flowgram_iterators", "=", "[", "a", "for", "(", "a", ",", "b", ")", "in", "flowgrams_and_headers", "]", "return", "(", "chain", "(", "*", "flowgram_iterators", ")", ",", "flowgrams_and_headers", "[", "0", "]", "[", "1", "]", ")" ]
virtually concat several sff files list_of_file_handles: list of open filehandles to .
train
false
18,189
def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes, instance_id, last_refreshed=None, update_totals=False): return IMPL.vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes, instance_id, last_refreshed=last_refreshed, update_totals=update_totals)
[ "def", "vol_usage_update", "(", "context", ",", "id", ",", "rd_req", ",", "rd_bytes", ",", "wr_req", ",", "wr_bytes", ",", "instance_id", ",", "last_refreshed", "=", "None", ",", "update_totals", "=", "False", ")", ":", "return", "IMPL", ".", "vol_usage_update", "(", "context", ",", "id", ",", "rd_req", ",", "rd_bytes", ",", "wr_req", ",", "wr_bytes", ",", "instance_id", ",", "last_refreshed", "=", "last_refreshed", ",", "update_totals", "=", "update_totals", ")" ]
update cached volume usage for a volume creates new record if needed .
train
false
18,192
def get_visibility_partition_info(xblock): user_partitions = get_user_partition_info(xblock, schemes=['verification', 'cohort']) cohort_partitions = [] verification_partitions = [] has_selected_groups = False selected_verified_partition_id = None for p in user_partitions: has_selected = any((g['selected'] for g in p['groups'])) has_selected_groups = (has_selected_groups or has_selected) if (p['scheme'] == 'cohort'): cohort_partitions.append(p) elif (p['scheme'] == 'verification'): verification_partitions.append(p) if has_selected: selected_verified_partition_id = p['id'] return {'user_partitions': user_partitions, 'cohort_partitions': cohort_partitions, 'verification_partitions': verification_partitions, 'has_selected_groups': has_selected_groups, 'selected_verified_partition_id': selected_verified_partition_id}
[ "def", "get_visibility_partition_info", "(", "xblock", ")", ":", "user_partitions", "=", "get_user_partition_info", "(", "xblock", ",", "schemes", "=", "[", "'verification'", ",", "'cohort'", "]", ")", "cohort_partitions", "=", "[", "]", "verification_partitions", "=", "[", "]", "has_selected_groups", "=", "False", "selected_verified_partition_id", "=", "None", "for", "p", "in", "user_partitions", ":", "has_selected", "=", "any", "(", "(", "g", "[", "'selected'", "]", "for", "g", "in", "p", "[", "'groups'", "]", ")", ")", "has_selected_groups", "=", "(", "has_selected_groups", "or", "has_selected", ")", "if", "(", "p", "[", "'scheme'", "]", "==", "'cohort'", ")", ":", "cohort_partitions", ".", "append", "(", "p", ")", "elif", "(", "p", "[", "'scheme'", "]", "==", "'verification'", ")", ":", "verification_partitions", ".", "append", "(", "p", ")", "if", "has_selected", ":", "selected_verified_partition_id", "=", "p", "[", "'id'", "]", "return", "{", "'user_partitions'", ":", "user_partitions", ",", "'cohort_partitions'", ":", "cohort_partitions", ",", "'verification_partitions'", ":", "verification_partitions", ",", "'has_selected_groups'", ":", "has_selected_groups", ",", "'selected_verified_partition_id'", ":", "selected_verified_partition_id", "}" ]
retrieve user partition information for the component visibility editor .
train
false
18,193
def write_stored_info(target, field=None, value=None): if (not field): return info_file = get_info_filename(target) LOG.info(_('Writing stored info to %s'), info_file) fileutils.ensure_tree(os.path.dirname(info_file)) lock_name = ('info-%s' % os.path.split(target)[(-1)]) lock_path = os.path.join(CONF.instances_path, 'locks') @lockutils.synchronized(lock_name, 'nova-', external=True, lock_path=lock_path) def write_file(info_file, field, value): d = {} if os.path.exists(info_file): with open(info_file, 'r') as f: d = _read_possible_json(f.read(), info_file) d[field] = value d[('%s-timestamp' % field)] = time.time() with open(info_file, 'w') as f: f.write(json.dumps(d)) write_file(info_file, field, value)
[ "def", "write_stored_info", "(", "target", ",", "field", "=", "None", ",", "value", "=", "None", ")", ":", "if", "(", "not", "field", ")", ":", "return", "info_file", "=", "get_info_filename", "(", "target", ")", "LOG", ".", "info", "(", "_", "(", "'Writing stored info to %s'", ")", ",", "info_file", ")", "fileutils", ".", "ensure_tree", "(", "os", ".", "path", ".", "dirname", "(", "info_file", ")", ")", "lock_name", "=", "(", "'info-%s'", "%", "os", ".", "path", ".", "split", "(", "target", ")", "[", "(", "-", "1", ")", "]", ")", "lock_path", "=", "os", ".", "path", ".", "join", "(", "CONF", ".", "instances_path", ",", "'locks'", ")", "@", "lockutils", ".", "synchronized", "(", "lock_name", ",", "'nova-'", ",", "external", "=", "True", ",", "lock_path", "=", "lock_path", ")", "def", "write_file", "(", "info_file", ",", "field", ",", "value", ")", ":", "d", "=", "{", "}", "if", "os", ".", "path", ".", "exists", "(", "info_file", ")", ":", "with", "open", "(", "info_file", ",", "'r'", ")", "as", "f", ":", "d", "=", "_read_possible_json", "(", "f", ".", "read", "(", ")", ",", "info_file", ")", "d", "[", "field", "]", "=", "value", "d", "[", "(", "'%s-timestamp'", "%", "field", ")", "]", "=", "time", ".", "time", "(", ")", "with", "open", "(", "info_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "json", ".", "dumps", "(", "d", ")", ")", "write_file", "(", "info_file", ",", "field", ",", "value", ")" ]
write information about an image .
train
false
18,195
def NewPathCheck(path): PathCheck(os.path.dirname(path)) return os.path.abspath(path)
[ "def", "NewPathCheck", "(", "path", ")", ":", "PathCheck", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ")", "return", "os", ".", "path", ".", "abspath", "(", "path", ")" ]
verify that a string is a valid path to a directory .
train
false
18,196
def _rot(axis, angle): if (axis == 1): return Matrix(rot_axis1(angle).T) elif (axis == 2): return Matrix(rot_axis2(angle).T) elif (axis == 3): return Matrix(rot_axis3(angle).T)
[ "def", "_rot", "(", "axis", ",", "angle", ")", ":", "if", "(", "axis", "==", "1", ")", ":", "return", "Matrix", "(", "rot_axis1", "(", "angle", ")", ".", "T", ")", "elif", "(", "axis", "==", "2", ")", ":", "return", "Matrix", "(", "rot_axis2", "(", "angle", ")", ".", "T", ")", "elif", "(", "axis", "==", "3", ")", ":", "return", "Matrix", "(", "rot_axis3", "(", "angle", ")", ".", "T", ")" ]
dcm for simple axis 1 .
train
false
18,197
def iter_modules(modules_directory=MODULES_DIR): modules = index_modules(modules_directory) modules = map((lambda x: ''.join(['routersploit.modules.', x])), modules) for path in modules: try: (yield import_exploit(path)) except RoutersploitException: pass
[ "def", "iter_modules", "(", "modules_directory", "=", "MODULES_DIR", ")", ":", "modules", "=", "index_modules", "(", "modules_directory", ")", "modules", "=", "map", "(", "(", "lambda", "x", ":", "''", ".", "join", "(", "[", "'routersploit.modules.'", ",", "x", "]", ")", ")", ",", "modules", ")", "for", "path", "in", "modules", ":", "try", ":", "(", "yield", "import_exploit", "(", "path", ")", ")", "except", "RoutersploitException", ":", "pass" ]
yields for all submodules on path .
train
false
18,198
def build_opener(*handlers, **kw): def isclass(obj): return (isinstance(obj, class_type_) or hasattr(obj, '__bases__')) opener = OpenerDirector() default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, HTTPDefaultErrorHandler, HTTPRedirectHandler, FTPHandler, FileHandler, HTTPErrorProcessor] check_classes = list(default_classes) check_classes.append(HTTPSContextHandler) skip = [] for klass in check_classes: for check in handlers: if isclass(check): if issubclass(check, klass): skip.append(klass) elif isinstance(check, klass): skip.append(klass) for klass in default_classes: if (klass not in skip): opener.add_handler(klass()) ssl_context = kw.get('ssl_context') if (HTTPSContextHandler not in skip): opener.add_handler(HTTPSContextHandler(ssl_context)) for h in handlers: if isclass(h): h = h() opener.add_handler(h) return opener
[ "def", "build_opener", "(", "*", "handlers", ",", "**", "kw", ")", ":", "def", "isclass", "(", "obj", ")", ":", "return", "(", "isinstance", "(", "obj", ",", "class_type_", ")", "or", "hasattr", "(", "obj", ",", "'__bases__'", ")", ")", "opener", "=", "OpenerDirector", "(", ")", "default_classes", "=", "[", "ProxyHandler", ",", "UnknownHandler", ",", "HTTPHandler", ",", "HTTPDefaultErrorHandler", ",", "HTTPRedirectHandler", ",", "FTPHandler", ",", "FileHandler", ",", "HTTPErrorProcessor", "]", "check_classes", "=", "list", "(", "default_classes", ")", "check_classes", ".", "append", "(", "HTTPSContextHandler", ")", "skip", "=", "[", "]", "for", "klass", "in", "check_classes", ":", "for", "check", "in", "handlers", ":", "if", "isclass", "(", "check", ")", ":", "if", "issubclass", "(", "check", ",", "klass", ")", ":", "skip", ".", "append", "(", "klass", ")", "elif", "isinstance", "(", "check", ",", "klass", ")", ":", "skip", ".", "append", "(", "klass", ")", "for", "klass", "in", "default_classes", ":", "if", "(", "klass", "not", "in", "skip", ")", ":", "opener", ".", "add_handler", "(", "klass", "(", ")", ")", "ssl_context", "=", "kw", ".", "get", "(", "'ssl_context'", ")", "if", "(", "HTTPSContextHandler", "not", "in", "skip", ")", ":", "opener", ".", "add_handler", "(", "HTTPSContextHandler", "(", "ssl_context", ")", ")", "for", "h", "in", "handlers", ":", "if", "isclass", "(", "h", ")", ":", "h", "=", "h", "(", ")", "opener", ".", "add_handler", "(", "h", ")", "return", "opener" ]
create an opener object from a list of handlers .
train
false
18,200
@core_helper def facets(): return config.get(u'search.facets', DEFAULT_FACET_NAMES).split()
[ "@", "core_helper", "def", "facets", "(", ")", ":", "return", "config", ".", "get", "(", "u'search.facets'", ",", "DEFAULT_FACET_NAMES", ")", ".", "split", "(", ")" ]
returns a list of the current facet names .
train
false
18,201
def _mul2012(num1, num2): return ((num1 * num2) >> 20)
[ "def", "_mul2012", "(", "num1", ",", "num2", ")", ":", "return", "(", "(", "num1", "*", "num2", ")", ">>", "20", ")" ]
multiply two numbers in 20 .
train
false
18,203
def truncate_hour(dt, measure): return ashour(truncate_second(dt, (measure * 3600)))
[ "def", "truncate_hour", "(", "dt", ",", "measure", ")", ":", "return", "ashour", "(", "truncate_second", "(", "dt", ",", "(", "measure", "*", "3600", ")", ")", ")" ]
truncate by hour .
train
false
18,205
def normalize_float(f): if (abs((f - round(f))) < 1e-13): return round(f) return f
[ "def", "normalize_float", "(", "f", ")", ":", "if", "(", "abs", "(", "(", "f", "-", "round", "(", "f", ")", ")", ")", "<", "1e-13", ")", ":", "return", "round", "(", "f", ")", "return", "f" ]
round float errors .
train
false
18,206
def _filter_host(field, value, match_level=None): if (match_level is None): if ('#' in value): match_level = 'pool' elif ('@' in value): match_level = 'backend' else: match_level = 'host' conn_str = CONF.database.connection if (conn_str.startswith('mysql') and (conn_str[5] in ['+', ':'])): cmp_value = func.binary(value) like_op = 'LIKE BINARY' else: cmp_value = value like_op = 'LIKE' conditions = [(field == cmp_value)] if (match_level != 'pool'): conditions.append(field.op(like_op)((value + '#%'))) if (match_level == 'host'): conditions.append(field.op(like_op)((value + '@%'))) return or_(*conditions)
[ "def", "_filter_host", "(", "field", ",", "value", ",", "match_level", "=", "None", ")", ":", "if", "(", "match_level", "is", "None", ")", ":", "if", "(", "'#'", "in", "value", ")", ":", "match_level", "=", "'pool'", "elif", "(", "'@'", "in", "value", ")", ":", "match_level", "=", "'backend'", "else", ":", "match_level", "=", "'host'", "conn_str", "=", "CONF", ".", "database", ".", "connection", "if", "(", "conn_str", ".", "startswith", "(", "'mysql'", ")", "and", "(", "conn_str", "[", "5", "]", "in", "[", "'+'", ",", "':'", "]", ")", ")", ":", "cmp_value", "=", "func", ".", "binary", "(", "value", ")", "like_op", "=", "'LIKE BINARY'", "else", ":", "cmp_value", "=", "value", "like_op", "=", "'LIKE'", "conditions", "=", "[", "(", "field", "==", "cmp_value", ")", "]", "if", "(", "match_level", "!=", "'pool'", ")", ":", "conditions", ".", "append", "(", "field", ".", "op", "(", "like_op", ")", "(", "(", "value", "+", "'#%'", ")", ")", ")", "if", "(", "match_level", "==", "'host'", ")", ":", "conditions", ".", "append", "(", "field", ".", "op", "(", "like_op", ")", "(", "(", "value", "+", "'@%'", ")", ")", ")", "return", "or_", "(", "*", "conditions", ")" ]
generate a filter condition for host and cluster fields .
train
false
18,207
def _add_tag(request, question_id): tag_name = request.POST.get('tag-name', '').strip() if tag_name: question = get_object_or_404(Question, pk=question_id) try: canonical_name = add_existing_tag(tag_name, question.tags) except Tag.DoesNotExist: if request.user.has_perm('taggit.add_tag'): question.tags.add(tag_name) canonical_name = tag_name else: raise tag_added.send(sender=Question, question_id=question.id, tag_name=canonical_name) return (question, canonical_name) return (None, None)
[ "def", "_add_tag", "(", "request", ",", "question_id", ")", ":", "tag_name", "=", "request", ".", "POST", ".", "get", "(", "'tag-name'", ",", "''", ")", ".", "strip", "(", ")", "if", "tag_name", ":", "question", "=", "get_object_or_404", "(", "Question", ",", "pk", "=", "question_id", ")", "try", ":", "canonical_name", "=", "add_existing_tag", "(", "tag_name", ",", "question", ".", "tags", ")", "except", "Tag", ".", "DoesNotExist", ":", "if", "request", ".", "user", ".", "has_perm", "(", "'taggit.add_tag'", ")", ":", "question", ".", "tags", ".", "add", "(", "tag_name", ")", "canonical_name", "=", "tag_name", "else", ":", "raise", "tag_added", ".", "send", "(", "sender", "=", "Question", ",", "question_id", "=", "question", ".", "id", ",", "tag_name", "=", "canonical_name", ")", "return", "(", "question", ",", "canonical_name", ")", "return", "(", "None", ",", "None", ")" ]
add a named tag to a question .
train
false
18,208
def _regex_iptables_save(cmd_output, filters=None): if ('iptables.save_filters' not in __context__): __context__['iptables.save_filters'] = [] for pattern in (filters or _conf_save_filters()): try: __context__['iptables.save_filters'].append(re.compile(pattern)) except re.error as e: log.warning("Skipping regex rule: '{0}': {1}".format(pattern, e)) continue if (len(__context__['iptables.save_filters']) > 0): _filtered_cmd_output = [line for line in cmd_output.splitlines(True) if (not any((reg.search(line) for reg in __context__['iptables.save_filters'])))] return ''.join(_filtered_cmd_output) return cmd_output
[ "def", "_regex_iptables_save", "(", "cmd_output", ",", "filters", "=", "None", ")", ":", "if", "(", "'iptables.save_filters'", "not", "in", "__context__", ")", ":", "__context__", "[", "'iptables.save_filters'", "]", "=", "[", "]", "for", "pattern", "in", "(", "filters", "or", "_conf_save_filters", "(", ")", ")", ":", "try", ":", "__context__", "[", "'iptables.save_filters'", "]", ".", "append", "(", "re", ".", "compile", "(", "pattern", ")", ")", "except", "re", ".", "error", "as", "e", ":", "log", ".", "warning", "(", "\"Skipping regex rule: '{0}': {1}\"", ".", "format", "(", "pattern", ",", "e", ")", ")", "continue", "if", "(", "len", "(", "__context__", "[", "'iptables.save_filters'", "]", ")", ">", "0", ")", ":", "_filtered_cmd_output", "=", "[", "line", "for", "line", "in", "cmd_output", ".", "splitlines", "(", "True", ")", "if", "(", "not", "any", "(", "(", "reg", ".", "search", "(", "line", ")", "for", "reg", "in", "__context__", "[", "'iptables.save_filters'", "]", ")", ")", ")", "]", "return", "''", ".", "join", "(", "_filtered_cmd_output", ")", "return", "cmd_output" ]
return string with save_filter regex entries removed .
train
true
18,209
def norm_text_angle(a): a = ((a + 180) % 180) if (a > 90): a = (a - 180) return a
[ "def", "norm_text_angle", "(", "a", ")", ":", "a", "=", "(", "(", "a", "+", "180", ")", "%", "180", ")", "if", "(", "a", ">", "90", ")", ":", "a", "=", "(", "a", "-", "180", ")", "return", "a" ]
return angle between -90 and +90 .
train
false