id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
18,740
def supports_proxies(*proxy_types): def _supports_proxies(fn): def __supports_proxies(*args, **kwargs): proxy_type = get_proxy_type() if (proxy_type not in proxy_types): raise CommandExecutionError("'{0}' proxy is not supported by function {1}".format(proxy_type, fn.__name__)) return fn(*args, **clean_kwargs(**kwargs)) return __supports_proxies return _supports_proxies
[ "def", "supports_proxies", "(", "*", "proxy_types", ")", ":", "def", "_supports_proxies", "(", "fn", ")", ":", "def", "__supports_proxies", "(", "*", "args", ",", "**", "kwargs", ")", ":", "proxy_type", "=", "get_proxy_type", "(", ")", "if", "(", "proxy_type", "not", "in", "proxy_types", ")", ":", "raise", "CommandExecutionError", "(", "\"'{0}' proxy is not supported by function {1}\"", ".", "format", "(", "proxy_type", ",", "fn", ".", "__name__", ")", ")", "return", "fn", "(", "*", "args", ",", "**", "clean_kwargs", "(", "**", "kwargs", ")", ")", "return", "__supports_proxies", "return", "_supports_proxies" ]
decorator to specify which proxy types are supported by a function proxy_types: arbitrary list of strings with the supported types of proxies .
train
true
18,742
def success(request, message): add_message(get_request(request), constants.SUCCESS, message)
[ "def", "success", "(", "request", ",", "message", ")", ":", "add_message", "(", "get_request", "(", "request", ")", ",", "constants", ".", "SUCCESS", ",", "message", ")" ]
test if task result yields success .
train
false
18,744
def test_renn_iter_wrong(): max_iter = (-1) renn = RepeatedEditedNearestNeighbours(max_iter=max_iter, random_state=RND_SEED) assert_raises(ValueError, renn.fit_sample, X, Y)
[ "def", "test_renn_iter_wrong", "(", ")", ":", "max_iter", "=", "(", "-", "1", ")", "renn", "=", "RepeatedEditedNearestNeighbours", "(", "max_iter", "=", "max_iter", ",", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "ValueError", ",", "renn", ".", "fit_sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised when the numbr of iteration is wrong .
train
false
18,746
@world.absorb def wait_for_invisible(css_selector, timeout=GLOBAL_WAIT_FOR_TIMEOUT): wait_for(func=(lambda _: EC.invisibility_of_element_located((By.CSS_SELECTOR, css_selector))), timeout=timeout, timeout_msg='Timed out waiting for {} to be invisible.'.format(css_selector))
[ "@", "world", ".", "absorb", "def", "wait_for_invisible", "(", "css_selector", ",", "timeout", "=", "GLOBAL_WAIT_FOR_TIMEOUT", ")", ":", "wait_for", "(", "func", "=", "(", "lambda", "_", ":", "EC", ".", "invisibility_of_element_located", "(", "(", "By", ".", "CSS_SELECTOR", ",", "css_selector", ")", ")", ")", ",", "timeout", "=", "timeout", ",", "timeout_msg", "=", "'Timed out waiting for {} to be invisible.'", ".", "format", "(", "css_selector", ")", ")" ]
wait for the element to be either invisible or not present on the dom .
train
false
18,748
def _maybe_strip_extension(number): match = _EXTN_PATTERN.search(number) if (match and _is_viable_phone_number(number[:match.start()])): for group in match.groups(): if (group is not None): return (group, number[:match.start()]) return ('', number)
[ "def", "_maybe_strip_extension", "(", "number", ")", ":", "match", "=", "_EXTN_PATTERN", ".", "search", "(", "number", ")", "if", "(", "match", "and", "_is_viable_phone_number", "(", "number", "[", ":", "match", ".", "start", "(", ")", "]", ")", ")", ":", "for", "group", "in", "match", ".", "groups", "(", ")", ":", "if", "(", "group", "is", "not", "None", ")", ":", "return", "(", "group", ",", "number", "[", ":", "match", ".", "start", "(", ")", "]", ")", "return", "(", "''", ",", "number", ")" ]
strip extension from the end of a number string .
train
true
18,749
def strings_differ(string1, string2, compare_digest=compare_digest): len_eq = (len(string1) == len(string2)) if len_eq: invalid_bits = 0 left = string1 else: invalid_bits = 1 left = string2 right = string2 if (compare_digest is not None): invalid_bits += (not compare_digest(left, right)) else: for (a, b) in zip(left, right): invalid_bits += (a != b) return (invalid_bits != 0)
[ "def", "strings_differ", "(", "string1", ",", "string2", ",", "compare_digest", "=", "compare_digest", ")", ":", "len_eq", "=", "(", "len", "(", "string1", ")", "==", "len", "(", "string2", ")", ")", "if", "len_eq", ":", "invalid_bits", "=", "0", "left", "=", "string1", "else", ":", "invalid_bits", "=", "1", "left", "=", "string2", "right", "=", "string2", "if", "(", "compare_digest", "is", "not", "None", ")", ":", "invalid_bits", "+=", "(", "not", "compare_digest", "(", "left", ",", "right", ")", ")", "else", ":", "for", "(", "a", ",", "b", ")", "in", "zip", "(", "left", ",", "right", ")", ":", "invalid_bits", "+=", "(", "a", "!=", "b", ")", "return", "(", "invalid_bits", "!=", "0", ")" ]
check whether two strings differ while avoiding timing attacks .
train
false
18,750
def sokalsneath(u, v): u = _validate_vector(u) v = _validate_vector(v) if (u.dtype == bool): ntt = (u & v).sum() else: ntt = (u * v).sum() (nft, ntf) = _nbool_correspond_ft_tf(u, v) denom = (ntt + (2.0 * (ntf + nft))) if (denom == 0): raise ValueError('Sokal-Sneath dissimilarity is not defined for vectors that are entirely false.') return (float((2.0 * (ntf + nft))) / denom)
[ "def", "sokalsneath", "(", "u", ",", "v", ")", ":", "u", "=", "_validate_vector", "(", "u", ")", "v", "=", "_validate_vector", "(", "v", ")", "if", "(", "u", ".", "dtype", "==", "bool", ")", ":", "ntt", "=", "(", "u", "&", "v", ")", ".", "sum", "(", ")", "else", ":", "ntt", "=", "(", "u", "*", "v", ")", ".", "sum", "(", ")", "(", "nft", ",", "ntf", ")", "=", "_nbool_correspond_ft_tf", "(", "u", ",", "v", ")", "denom", "=", "(", "ntt", "+", "(", "2.0", "*", "(", "ntf", "+", "nft", ")", ")", ")", "if", "(", "denom", "==", "0", ")", ":", "raise", "ValueError", "(", "'Sokal-Sneath dissimilarity is not defined for vectors that are entirely false.'", ")", "return", "(", "float", "(", "(", "2.0", "*", "(", "ntf", "+", "nft", ")", ")", ")", "/", "denom", ")" ]
computes the sokal-sneath dissimilarity between two boolean 1-d arrays .
train
false
18,751
def unique_variable(pattern=None, ignore=None): if (pattern is not None): if is_indvar(pattern.name): prefix = u'z' elif is_funcvar(pattern.name): prefix = u'F' elif is_eventvar(pattern.name): prefix = u'e0' else: assert False, u'Cannot generate a unique constant' else: prefix = u'z' v = Variable((u'%s%s' % (prefix, _counter.get()))) while ((ignore is not None) and (v in ignore)): v = Variable((u'%s%s' % (prefix, _counter.get()))) return v
[ "def", "unique_variable", "(", "pattern", "=", "None", ",", "ignore", "=", "None", ")", ":", "if", "(", "pattern", "is", "not", "None", ")", ":", "if", "is_indvar", "(", "pattern", ".", "name", ")", ":", "prefix", "=", "u'z'", "elif", "is_funcvar", "(", "pattern", ".", "name", ")", ":", "prefix", "=", "u'F'", "elif", "is_eventvar", "(", "pattern", ".", "name", ")", ":", "prefix", "=", "u'e0'", "else", ":", "assert", "False", ",", "u'Cannot generate a unique constant'", "else", ":", "prefix", "=", "u'z'", "v", "=", "Variable", "(", "(", "u'%s%s'", "%", "(", "prefix", ",", "_counter", ".", "get", "(", ")", ")", ")", ")", "while", "(", "(", "ignore", "is", "not", "None", ")", "and", "(", "v", "in", "ignore", ")", ")", ":", "v", "=", "Variable", "(", "(", "u'%s%s'", "%", "(", "prefix", ",", "_counter", ".", "get", "(", ")", ")", ")", ")", "return", "v" ]
return a new .
train
false
18,752
def requestLayer(config, path_info): if (type(config) in (str, unicode)): key = (hasattr(config, '__hash__') and (config, getcwd())) if (key in _previous_configs): config = _previous_configs[key] else: config = parseConfig(config) if key: _previous_configs[key] = config else: assert hasattr(config, 'cache'), 'Configuration object must have a cache.' assert hasattr(config, 'layers'), 'Configuration object must have layers.' assert hasattr(config, 'dirpath'), 'Configuration object must have a dirpath.' path_info = ('/' + (path_info or '').lstrip('/')) if (path_info == '/'): return Core.Layer(config, None, None) layername = splitPathInfo(path_info)[0] if (layername not in config.layers): raise Core.KnownUnknown(('"%s" is not a layer I know about. Here are some that I do know about: %s.' % (layername, ', '.join(sorted(config.layers.keys()))))) return config.layers[layername]
[ "def", "requestLayer", "(", "config", ",", "path_info", ")", ":", "if", "(", "type", "(", "config", ")", "in", "(", "str", ",", "unicode", ")", ")", ":", "key", "=", "(", "hasattr", "(", "config", ",", "'__hash__'", ")", "and", "(", "config", ",", "getcwd", "(", ")", ")", ")", "if", "(", "key", "in", "_previous_configs", ")", ":", "config", "=", "_previous_configs", "[", "key", "]", "else", ":", "config", "=", "parseConfig", "(", "config", ")", "if", "key", ":", "_previous_configs", "[", "key", "]", "=", "config", "else", ":", "assert", "hasattr", "(", "config", ",", "'cache'", ")", ",", "'Configuration object must have a cache.'", "assert", "hasattr", "(", "config", ",", "'layers'", ")", ",", "'Configuration object must have layers.'", "assert", "hasattr", "(", "config", ",", "'dirpath'", ")", ",", "'Configuration object must have a dirpath.'", "path_info", "=", "(", "'/'", "+", "(", "path_info", "or", "''", ")", ".", "lstrip", "(", "'/'", ")", ")", "if", "(", "path_info", "==", "'/'", ")", ":", "return", "Core", ".", "Layer", "(", "config", ",", "None", ",", "None", ")", "layername", "=", "splitPathInfo", "(", "path_info", ")", "[", "0", "]", "if", "(", "layername", "not", "in", "config", ".", "layers", ")", ":", "raise", "Core", ".", "KnownUnknown", "(", "(", "'\"%s\" is not a layer I know about. Here are some that I do know about: %s.'", "%", "(", "layername", ",", "', '", ".", "join", "(", "sorted", "(", "config", ".", "layers", ".", "keys", "(", ")", ")", ")", ")", ")", ")", "return", "config", ".", "layers", "[", "layername", "]" ]
return a layer .
train
false
18,753
def _sanitize_values(arr): if hasattr(arr, 'values'): arr = arr.values else: if is_scalar(arr): arr = [arr] if isinstance(arr, np.ndarray): pass elif (is_list_like(arr) and (len(arr) > 0)): arr = _possibly_convert_platform(arr) else: arr = np.asarray(arr) return arr
[ "def", "_sanitize_values", "(", "arr", ")", ":", "if", "hasattr", "(", "arr", ",", "'values'", ")", ":", "arr", "=", "arr", ".", "values", "else", ":", "if", "is_scalar", "(", "arr", ")", ":", "arr", "=", "[", "arr", "]", "if", "isinstance", "(", "arr", ",", "np", ".", "ndarray", ")", ":", "pass", "elif", "(", "is_list_like", "(", "arr", ")", "and", "(", "len", "(", "arr", ")", ">", "0", ")", ")", ":", "arr", "=", "_possibly_convert_platform", "(", "arr", ")", "else", ":", "arr", "=", "np", ".", "asarray", "(", "arr", ")", "return", "arr" ]
return an ndarray for our input .
train
true
18,754
def ftest_power(effect_size, df_num, df_denom, alpha, ncc=1): nc = ((effect_size ** 2) * ((df_denom + df_num) + ncc)) crit = stats.f.isf(alpha, df_denom, df_num) pow_ = stats.ncf.sf(crit, df_denom, df_num, nc) return pow_
[ "def", "ftest_power", "(", "effect_size", ",", "df_num", ",", "df_denom", ",", "alpha", ",", "ncc", "=", "1", ")", ":", "nc", "=", "(", "(", "effect_size", "**", "2", ")", "*", "(", "(", "df_denom", "+", "df_num", ")", "+", "ncc", ")", ")", "crit", "=", "stats", ".", "f", ".", "isf", "(", "alpha", ",", "df_denom", ",", "df_num", ")", "pow_", "=", "stats", ".", "ncf", ".", "sf", "(", "crit", ",", "df_denom", ",", "df_num", ",", "nc", ")", "return", "pow_" ]
calculate the power of a f-test .
train
false
18,755
def update_global_secondary_index(table_name, global_indexes, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) table = Table(table_name, connection=conn) return table.update_global_secondary_index(global_indexes)
[ "def", "update_global_secondary_index", "(", "table_name", ",", "global_indexes", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "table", "=", "Table", "(", "table_name", ",", "connection", "=", "conn", ")", "return", "table", ".", "update_global_secondary_index", "(", "global_indexes", ")" ]
updates the throughput of the given global secondary indexes .
train
true
18,756
@write @csrf_exempt @post_required def paypal(request): try: return _paypal(request) except Exception as e: paypal_log.error(('%s\n%s' % (e, request)), exc_info=True) if settings.IN_TEST_SUITE: raise return http.HttpResponseServerError('Unknown error.')
[ "@", "write", "@", "csrf_exempt", "@", "post_required", "def", "paypal", "(", "request", ")", ":", "try", ":", "return", "_paypal", "(", "request", ")", "except", "Exception", "as", "e", ":", "paypal_log", ".", "error", "(", "(", "'%s\\n%s'", "%", "(", "e", ",", "request", ")", ")", ",", "exc_info", "=", "True", ")", "if", "settings", ".", "IN_TEST_SUITE", ":", "raise", "return", "http", ".", "HttpResponseServerError", "(", "'Unknown error.'", ")" ]
handle paypal ipn post-back for contribution transactions .
train
false
18,757
def test_param_splitting(): def check(src, result): grammar = load_grammar(('%s.%s' % sys.version_info[:2])) m = ParserWithRecovery(grammar, u(src)).module if is_py3: assert (not m.subscopes) else: assert ([str(param.name) for param in m.subscopes[0].params] == result) check('def x(a, (b, c)):\n pass', ['a']) check('def x((b, c)):\n pass', [])
[ "def", "test_param_splitting", "(", ")", ":", "def", "check", "(", "src", ",", "result", ")", ":", "grammar", "=", "load_grammar", "(", "(", "'%s.%s'", "%", "sys", ".", "version_info", "[", ":", "2", "]", ")", ")", "m", "=", "ParserWithRecovery", "(", "grammar", ",", "u", "(", "src", ")", ")", ".", "module", "if", "is_py3", ":", "assert", "(", "not", "m", ".", "subscopes", ")", "else", ":", "assert", "(", "[", "str", "(", "param", ".", "name", ")", "for", "param", "in", "m", ".", "subscopes", "[", "0", "]", ".", "params", "]", "==", "result", ")", "check", "(", "'def x(a, (b, c)):\\n pass'", ",", "[", "'a'", "]", ")", "check", "(", "'def x((b, c)):\\n pass'", ",", "[", "]", ")" ]
jedi splits parameters into params .
train
false
18,758
def _load_plugins(plugins, debug=True): plugs = [] for plugin in plugins: setup_class = plugin.get('setup_class') plugin_name = plugin.get('__name__').split()[(-1)] mod_name = '.'.join(setup_class.split('.')[:(-1)]) class_name = setup_class.split('.')[(-1)] try: mod = __import__(mod_name, globals(), locals(), [class_name]) except SyntaxError as e: raise exception.PluginSyntaxError(('Plugin %s (%s) contains a syntax error at line %s' % (plugin_name, e.filename, e.lineno))) except ImportError as e: raise exception.PluginLoadError(('Failed to import plugin %s: %s' % (plugin_name, e[0]))) klass = getattr(mod, class_name, None) if (not klass): raise exception.PluginError(('Plugin class %s does not exist' % setup_class)) if (not issubclass(klass, clustersetup.ClusterSetup)): raise exception.PluginError(('Plugin %s must be a subclass of starcluster.clustersetup.ClusterSetup' % setup_class)) (args, kwargs) = utils.get_arg_spec(klass.__init__, debug=debug) config_args = [] missing_args = [] for arg in args: if (arg in plugin): config_args.append(plugin.get(arg)) else: missing_args.append(arg) if debug: log.debug(('config_args = %s' % config_args)) if missing_args: raise exception.PluginError(('Not enough settings provided for plugin %s (missing: %s)' % (plugin_name, ', '.join(missing_args)))) config_kwargs = {} for arg in kwargs: if (arg in plugin): config_kwargs[arg] = plugin.get(arg) if debug: log.debug(('config_kwargs = %s' % config_kwargs)) try: plug_obj = klass(*config_args, **config_kwargs) except Exception as exc: log.error('Error occured:', exc_info=True) raise exception.PluginLoadError(('Failed to load plugin %s with the following error: %s - %s' % (setup_class, exc.__class__.__name__, exc.message))) if (not hasattr(plug_obj, '__name__')): setattr(plug_obj, '__name__', plugin_name) plugs.append(plug_obj) return plugs
[ "def", "_load_plugins", "(", "plugins", ",", "debug", "=", "True", ")", ":", "plugs", "=", "[", "]", "for", "plugin", "in", "plugins", ":", "setup_class", "=", "plugin", ".", "get", "(", "'setup_class'", ")", "plugin_name", "=", "plugin", ".", "get", "(", "'__name__'", ")", ".", "split", "(", ")", "[", "(", "-", "1", ")", "]", "mod_name", "=", "'.'", ".", "join", "(", "setup_class", ".", "split", "(", "'.'", ")", "[", ":", "(", "-", "1", ")", "]", ")", "class_name", "=", "setup_class", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", "try", ":", "mod", "=", "__import__", "(", "mod_name", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "[", "class_name", "]", ")", "except", "SyntaxError", "as", "e", ":", "raise", "exception", ".", "PluginSyntaxError", "(", "(", "'Plugin %s (%s) contains a syntax error at line %s'", "%", "(", "plugin_name", ",", "e", ".", "filename", ",", "e", ".", "lineno", ")", ")", ")", "except", "ImportError", "as", "e", ":", "raise", "exception", ".", "PluginLoadError", "(", "(", "'Failed to import plugin %s: %s'", "%", "(", "plugin_name", ",", "e", "[", "0", "]", ")", ")", ")", "klass", "=", "getattr", "(", "mod", ",", "class_name", ",", "None", ")", "if", "(", "not", "klass", ")", ":", "raise", "exception", ".", "PluginError", "(", "(", "'Plugin class %s does not exist'", "%", "setup_class", ")", ")", "if", "(", "not", "issubclass", "(", "klass", ",", "clustersetup", ".", "ClusterSetup", ")", ")", ":", "raise", "exception", ".", "PluginError", "(", "(", "'Plugin %s must be a subclass of starcluster.clustersetup.ClusterSetup'", "%", "setup_class", ")", ")", "(", "args", ",", "kwargs", ")", "=", "utils", ".", "get_arg_spec", "(", "klass", ".", "__init__", ",", "debug", "=", "debug", ")", "config_args", "=", "[", "]", "missing_args", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "(", "arg", "in", "plugin", ")", ":", "config_args", ".", "append", "(", "plugin", ".", "get", "(", "arg", ")", ")", "else", ":", 
"missing_args", ".", "append", "(", "arg", ")", "if", "debug", ":", "log", ".", "debug", "(", "(", "'config_args = %s'", "%", "config_args", ")", ")", "if", "missing_args", ":", "raise", "exception", ".", "PluginError", "(", "(", "'Not enough settings provided for plugin %s (missing: %s)'", "%", "(", "plugin_name", ",", "', '", ".", "join", "(", "missing_args", ")", ")", ")", ")", "config_kwargs", "=", "{", "}", "for", "arg", "in", "kwargs", ":", "if", "(", "arg", "in", "plugin", ")", ":", "config_kwargs", "[", "arg", "]", "=", "plugin", ".", "get", "(", "arg", ")", "if", "debug", ":", "log", ".", "debug", "(", "(", "'config_kwargs = %s'", "%", "config_kwargs", ")", ")", "try", ":", "plug_obj", "=", "klass", "(", "*", "config_args", ",", "**", "config_kwargs", ")", "except", "Exception", "as", "exc", ":", "log", ".", "error", "(", "'Error occured:'", ",", "exc_info", "=", "True", ")", "raise", "exception", ".", "PluginLoadError", "(", "(", "'Failed to load plugin %s with the following error: %s - %s'", "%", "(", "setup_class", ",", "exc", ".", "__class__", ".", "__name__", ",", "exc", ".", "message", ")", ")", ")", "if", "(", "not", "hasattr", "(", "plug_obj", ",", "'__name__'", ")", ")", ":", "setattr", "(", "plug_obj", ",", "'__name__'", ",", "plugin_name", ")", "plugs", ".", "append", "(", "plug_obj", ")", "return", "plugs" ]
load the plugins specified in the configuration .
train
false
18,759
def _parse_interval(value): try: return sorted(aniso8601.parse_interval(value)) except ValueError: try: return (aniso8601.parse_datetime(value), None) except ValueError: return (aniso8601.parse_date(value), None)
[ "def", "_parse_interval", "(", "value", ")", ":", "try", ":", "return", "sorted", "(", "aniso8601", ".", "parse_interval", "(", "value", ")", ")", "except", "ValueError", ":", "try", ":", "return", "(", "aniso8601", ".", "parse_datetime", "(", "value", ")", ",", "None", ")", "except", "ValueError", ":", "return", "(", "aniso8601", ".", "parse_date", "(", "value", ")", ",", "None", ")" ]
convert an interval string like 1w3d6h into the number of seconds .
train
true
18,761
@pytest.fixture def os_mock(mocker): m = mocker.patch('qutebrowser.config.configtypes.os', autospec=True) m.path.expandvars.side_effect = (lambda x: x.replace('$HOME', '/home/foo')) m.path.expanduser.side_effect = (lambda x: x.replace('~', '/home/foo')) m.path.join.side_effect = (lambda *parts: '/'.join(parts)) return m
[ "@", "pytest", ".", "fixture", "def", "os_mock", "(", "mocker", ")", ":", "m", "=", "mocker", ".", "patch", "(", "'qutebrowser.config.configtypes.os'", ",", "autospec", "=", "True", ")", "m", ".", "path", ".", "expandvars", ".", "side_effect", "=", "(", "lambda", "x", ":", "x", ".", "replace", "(", "'$HOME'", ",", "'/home/foo'", ")", ")", "m", ".", "path", ".", "expanduser", ".", "side_effect", "=", "(", "lambda", "x", ":", "x", ".", "replace", "(", "'~'", ",", "'/home/foo'", ")", ")", "m", ".", "path", ".", "join", ".", "side_effect", "=", "(", "lambda", "*", "parts", ":", "'/'", ".", "join", "(", "parts", ")", ")", "return", "m" ]
fixture that mocks and returns os from the configtypes module .
train
false
18,762
def update_nexusport_binding(port_id, new_vlan_id): LOG.debug(_('update_nexusport_binding called')) session = db.get_session() try: binding = session.query(nexus_models_v2.NexusPortBinding).filter_by(port_id=port_id).one() if new_vlan_id: binding['vlan_id'] = new_vlan_id session.merge(binding) session.flush() return binding except exc.NoResultFound: raise c_exc.NexusPortBindingNotFound()
[ "def", "update_nexusport_binding", "(", "port_id", ",", "new_vlan_id", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'update_nexusport_binding called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "binding", "=", "session", ".", "query", "(", "nexus_models_v2", ".", "NexusPortBinding", ")", ".", "filter_by", "(", "port_id", "=", "port_id", ")", ".", "one", "(", ")", "if", "new_vlan_id", ":", "binding", "[", "'vlan_id'", "]", "=", "new_vlan_id", "session", ".", "merge", "(", "binding", ")", "session", ".", "flush", "(", ")", "return", "binding", "except", "exc", ".", "NoResultFound", ":", "raise", "c_exc", ".", "NexusPortBindingNotFound", "(", ")" ]
updates nexusport binding .
train
false
18,764
def instance_data_get_for_project(context, project_id, session=None): return IMPL.instance_data_get_for_project(context, project_id, session=session)
[ "def", "instance_data_get_for_project", "(", "context", ",", "project_id", ",", "session", "=", "None", ")", ":", "return", "IMPL", ".", "instance_data_get_for_project", "(", "context", ",", "project_id", ",", "session", "=", "session", ")" ]
get for project .
train
false
18,765
@task.task(ignore_result=True) def retry_open_graph_share(share, reset_retries=False): logger.info('retrying open graph share %s', share) share.retry(reset_retries=reset_retries)
[ "@", "task", ".", "task", "(", "ignore_result", "=", "True", ")", "def", "retry_open_graph_share", "(", "share", ",", "reset_retries", "=", "False", ")", ":", "logger", ".", "info", "(", "'retrying open graph share %s'", ",", "share", ")", "share", ".", "retry", "(", "reset_retries", "=", "reset_retries", ")" ]
we will retry open graph shares after 15m to make sure we dont miss out on any shares if facebook is having a minor outage .
train
false
18,767
@task @no_help @timed def compile_coffeescript(*files): if (not files): files = ['`{}`'.format(coffeescript_files())] sh(cmd('node_modules/.bin/coffee', '--compile', *files))
[ "@", "task", "@", "no_help", "@", "timed", "def", "compile_coffeescript", "(", "*", "files", ")", ":", "if", "(", "not", "files", ")", ":", "files", "=", "[", "'`{}`'", ".", "format", "(", "coffeescript_files", "(", ")", ")", "]", "sh", "(", "cmd", "(", "'node_modules/.bin/coffee'", ",", "'--compile'", ",", "*", "files", ")", ")" ]
compile coffeescript to javascript .
train
false
18,769
def virtual_network_to_dict(vnet): results = dict(id=vnet.id, name=vnet.name, location=vnet.location, type=vnet.type, tags=vnet.tags, provisioning_state=vnet.provisioning_state, etag=vnet.etag) if (vnet.dhcp_options and (len(vnet.dhcp_options.dns_servers) > 0)): results['dns_servers'] = [] for server in vnet.dhcp_options.dns_servers: results['dns_servers'].append(server) if (vnet.address_space and (len(vnet.address_space.address_prefixes) > 0)): results['address_prefixes'] = [] for space in vnet.address_space.address_prefixes: results['address_prefixes'].append(space) return results
[ "def", "virtual_network_to_dict", "(", "vnet", ")", ":", "results", "=", "dict", "(", "id", "=", "vnet", ".", "id", ",", "name", "=", "vnet", ".", "name", ",", "location", "=", "vnet", ".", "location", ",", "type", "=", "vnet", ".", "type", ",", "tags", "=", "vnet", ".", "tags", ",", "provisioning_state", "=", "vnet", ".", "provisioning_state", ",", "etag", "=", "vnet", ".", "etag", ")", "if", "(", "vnet", ".", "dhcp_options", "and", "(", "len", "(", "vnet", ".", "dhcp_options", ".", "dns_servers", ")", ">", "0", ")", ")", ":", "results", "[", "'dns_servers'", "]", "=", "[", "]", "for", "server", "in", "vnet", ".", "dhcp_options", ".", "dns_servers", ":", "results", "[", "'dns_servers'", "]", ".", "append", "(", "server", ")", "if", "(", "vnet", ".", "address_space", "and", "(", "len", "(", "vnet", ".", "address_space", ".", "address_prefixes", ")", ">", "0", ")", ")", ":", "results", "[", "'address_prefixes'", "]", "=", "[", "]", "for", "space", "in", "vnet", ".", "address_space", ".", "address_prefixes", ":", "results", "[", "'address_prefixes'", "]", ".", "append", "(", "space", ")", "return", "results" ]
convert a virtual network object to a dict .
train
false
18,770
def IsEnabled(feature_name, default=False): try: return (feature_name in __builtin__._APPENGINE_FEATURE_FLAGS) except AttributeError: return default
[ "def", "IsEnabled", "(", "feature_name", ",", "default", "=", "False", ")", ":", "try", ":", "return", "(", "feature_name", "in", "__builtin__", ".", "_APPENGINE_FEATURE_FLAGS", ")", "except", "AttributeError", ":", "return", "default" ]
indicates if a specific feature flag is enabled .
train
false
18,772
def find_source_folders(self, folder): for resource in folder.get_folders(): if self._is_package(resource): return [folder] for resource in folder.get_files(): if resource.name.endswith('.py'): return [folder] return []
[ "def", "find_source_folders", "(", "self", ",", "folder", ")", ":", "for", "resource", "in", "folder", ".", "get_folders", "(", ")", ":", "if", "self", ".", "_is_package", "(", "resource", ")", ":", "return", "[", "folder", "]", "for", "resource", "in", "folder", ".", "get_files", "(", ")", ":", "if", "resource", ".", "name", ".", "endswith", "(", "'.py'", ")", ":", "return", "[", "folder", "]", "return", "[", "]" ]
look only python files an packages .
train
false
18,773
def get_eligibilities_for_user(username, course_key=None): eligibilities = CreditEligibility.get_user_eligibilities(username) if course_key: course_key = CourseKey.from_string(unicode(course_key)) eligibilities = eligibilities.filter(course__course_key=course_key) return [{'course_key': unicode(eligibility.course.course_key), 'deadline': eligibility.deadline} for eligibility in eligibilities]
[ "def", "get_eligibilities_for_user", "(", "username", ",", "course_key", "=", "None", ")", ":", "eligibilities", "=", "CreditEligibility", ".", "get_user_eligibilities", "(", "username", ")", "if", "course_key", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "unicode", "(", "course_key", ")", ")", "eligibilities", "=", "eligibilities", ".", "filter", "(", "course__course_key", "=", "course_key", ")", "return", "[", "{", "'course_key'", ":", "unicode", "(", "eligibility", ".", "course", ".", "course_key", ")", ",", "'deadline'", ":", "eligibility", ".", "deadline", "}", "for", "eligibility", "in", "eligibilities", "]" ]
retrieve all courses or particular course for which the user is eligible for credit .
train
false
18,775
def method_id(name, encode_types): function_types = [_canonical_type(type_) for type_ in encode_types] function_signature = '{function_name}({canonical_types})'.format(function_name=name, canonical_types=','.join(function_types)) function_keccak = utils.sha3(function_signature) first_bytes = function_keccak[:4] return big_endian_to_int(first_bytes)
[ "def", "method_id", "(", "name", ",", "encode_types", ")", ":", "function_types", "=", "[", "_canonical_type", "(", "type_", ")", "for", "type_", "in", "encode_types", "]", "function_signature", "=", "'{function_name}({canonical_types})'", ".", "format", "(", "function_name", "=", "name", ",", "canonical_types", "=", "','", ".", "join", "(", "function_types", ")", ")", "function_keccak", "=", "utils", ".", "sha3", "(", "function_signature", ")", "first_bytes", "=", "function_keccak", "[", ":", "4", "]", "return", "big_endian_to_int", "(", "first_bytes", ")" ]
return the unique method id .
train
true
18,776
def get_segments_with_phys_nets(context, phys_nets): if (not phys_nets): return [] with context.session.begin(subtransactions=True): segments = context.session.query(segment_model.NetworkSegment).filter(segment_model.NetworkSegment.physical_network.in_(phys_nets)) return segments
[ "def", "get_segments_with_phys_nets", "(", "context", ",", "phys_nets", ")", ":", "if", "(", "not", "phys_nets", ")", ":", "return", "[", "]", "with", "context", ".", "session", ".", "begin", "(", "subtransactions", "=", "True", ")", ":", "segments", "=", "context", ".", "session", ".", "query", "(", "segment_model", ".", "NetworkSegment", ")", ".", "filter", "(", "segment_model", ".", "NetworkSegment", ".", "physical_network", ".", "in_", "(", "phys_nets", ")", ")", "return", "segments" ]
get segments from physical networks .
train
false
18,778
def test_completion_for_un_snippet(script): (res, env) = setup_completion(script, 'pip un', '1') assert (res.stdout.strip().split() == ['uninstall']), res.stdout
[ "def", "test_completion_for_un_snippet", "(", "script", ")", ":", "(", "res", ",", "env", ")", "=", "setup_completion", "(", "script", ",", "'pip un'", ",", "'1'", ")", "assert", "(", "res", ".", "stdout", ".", "strip", "(", ")", ".", "split", "(", ")", "==", "[", "'uninstall'", "]", ")", ",", "res", ".", "stdout" ]
test getting completion for un should return uninstall .
train
false
18,779
def run_post_commit_script(component, translation, filename): run_hook(component, translation, component.post_commit_script, None, filename)
[ "def", "run_post_commit_script", "(", "component", ",", "translation", ",", "filename", ")", ":", "run_hook", "(", "component", ",", "translation", ",", "component", ".", "post_commit_script", ",", "None", ",", "filename", ")" ]
post commit hook .
train
false
18,780
def log_hooks(debug=False): request = cherrypy.serving.request msg = [] from cherrypy import _cprequest points = _cprequest.hookpoints for k in request.hooks.keys(): if (k not in points): points.append(k) for k in points: msg.append((' %s:' % k)) v = request.hooks.get(k, []) v.sort() for h in v: msg.append((' %r' % h)) cherrypy.log(((('\nRequest Hooks for ' + cherrypy.url()) + ':\n') + '\n'.join(msg)), 'HTTP')
[ "def", "log_hooks", "(", "debug", "=", "False", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "msg", "=", "[", "]", "from", "cherrypy", "import", "_cprequest", "points", "=", "_cprequest", ".", "hookpoints", "for", "k", "in", "request", ".", "hooks", ".", "keys", "(", ")", ":", "if", "(", "k", "not", "in", "points", ")", ":", "points", ".", "append", "(", "k", ")", "for", "k", "in", "points", ":", "msg", ".", "append", "(", "(", "' %s:'", "%", "k", ")", ")", "v", "=", "request", ".", "hooks", ".", "get", "(", "k", ",", "[", "]", ")", "v", ".", "sort", "(", ")", "for", "h", "in", "v", ":", "msg", ".", "append", "(", "(", "' %r'", "%", "h", ")", ")", "cherrypy", ".", "log", "(", "(", "(", "(", "'\\nRequest Hooks for '", "+", "cherrypy", ".", "url", "(", ")", ")", "+", "':\\n'", ")", "+", "'\\n'", ".", "join", "(", "msg", ")", ")", ",", "'HTTP'", ")" ]
write the request hooks to the log .
train
false
18,782
def p_multiplicative_expression_2(t): pass
[ "def", "p_multiplicative_expression_2", "(", "t", ")", ":", "pass" ]
multiplicative_expression : multiplicative_expression times cast_expression .
train
false
18,784
def resource_view_update(context, data_dict): model = context['model'] id = _get_or_bust(data_dict, 'id') resource_view = model.ResourceView.get(id) if (not resource_view): raise NotFound view_plugin = ckan.lib.datapreview.get_view_plugin(resource_view.view_type) schema = (context.get('schema') or schema_.default_update_resource_view_schema(view_plugin)) plugin_schema = view_plugin.info().get('schema', {}) schema.update(plugin_schema) (data, errors) = _validate(data_dict, schema, context) if errors: model.Session.rollback() raise ValidationError(errors) context['resource_view'] = resource_view context['resource'] = model.Resource.get(resource_view.resource_id) _check_access('resource_view_update', context, data_dict) if context.get('preview'): return data resource_view = model_save.resource_view_dict_save(data, context) if (not context.get('defer_commit')): model.repo.commit() return model_dictize.resource_view_dictize(resource_view, context)
[ "def", "resource_view_update", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "id", "=", "_get_or_bust", "(", "data_dict", ",", "'id'", ")", "resource_view", "=", "model", ".", "ResourceView", ".", "get", "(", "id", ")", "if", "(", "not", "resource_view", ")", ":", "raise", "NotFound", "view_plugin", "=", "ckan", ".", "lib", ".", "datapreview", ".", "get_view_plugin", "(", "resource_view", ".", "view_type", ")", "schema", "=", "(", "context", ".", "get", "(", "'schema'", ")", "or", "schema_", ".", "default_update_resource_view_schema", "(", "view_plugin", ")", ")", "plugin_schema", "=", "view_plugin", ".", "info", "(", ")", ".", "get", "(", "'schema'", ",", "{", "}", ")", "schema", ".", "update", "(", "plugin_schema", ")", "(", "data", ",", "errors", ")", "=", "_validate", "(", "data_dict", ",", "schema", ",", "context", ")", "if", "errors", ":", "model", ".", "Session", ".", "rollback", "(", ")", "raise", "ValidationError", "(", "errors", ")", "context", "[", "'resource_view'", "]", "=", "resource_view", "context", "[", "'resource'", "]", "=", "model", ".", "Resource", ".", "get", "(", "resource_view", ".", "resource_id", ")", "_check_access", "(", "'resource_view_update'", ",", "context", ",", "data_dict", ")", "if", "context", ".", "get", "(", "'preview'", ")", ":", "return", "data", "resource_view", "=", "model_save", ".", "resource_view_dict_save", "(", "data", ",", "context", ")", "if", "(", "not", "context", ".", "get", "(", "'defer_commit'", ")", ")", ":", "model", ".", "repo", ".", "commit", "(", ")", "return", "model_dictize", ".", "resource_view_dictize", "(", "resource_view", ",", "context", ")" ]
update a resource view .
train
false
18,788
def add_instance_fault_from_exc(context, conductor, instance, fault, exc_info=None): code = 500 message = fault.__class__.__name__ if hasattr(fault, 'kwargs'): code = fault.kwargs.get('code', 500) message = fault.kwargs.get('value', message) details = unicode(fault) if (exc_info and (code == 500)): tb = exc_info[2] details += ('\n' + ''.join(traceback.format_tb(tb))) values = {'instance_uuid': instance['uuid'], 'code': code, 'message': unicode(message), 'details': unicode(details), 'host': CONF.host} conductor.instance_fault_create(context, values)
[ "def", "add_instance_fault_from_exc", "(", "context", ",", "conductor", ",", "instance", ",", "fault", ",", "exc_info", "=", "None", ")", ":", "code", "=", "500", "message", "=", "fault", ".", "__class__", ".", "__name__", "if", "hasattr", "(", "fault", ",", "'kwargs'", ")", ":", "code", "=", "fault", ".", "kwargs", ".", "get", "(", "'code'", ",", "500", ")", "message", "=", "fault", ".", "kwargs", ".", "get", "(", "'value'", ",", "message", ")", "details", "=", "unicode", "(", "fault", ")", "if", "(", "exc_info", "and", "(", "code", "==", "500", ")", ")", ":", "tb", "=", "exc_info", "[", "2", "]", "details", "+=", "(", "'\\n'", "+", "''", ".", "join", "(", "traceback", ".", "format_tb", "(", "tb", ")", ")", ")", "values", "=", "{", "'instance_uuid'", ":", "instance", "[", "'uuid'", "]", ",", "'code'", ":", "code", ",", "'message'", ":", "unicode", "(", "message", ")", ",", "'details'", ":", "unicode", "(", "details", ")", ",", "'host'", ":", "CONF", ".", "host", "}", "conductor", ".", "instance_fault_create", "(", "context", ",", "values", ")" ]
adds the specified fault to the database .
train
false
18,789
def disk_used(path): size = 0 for file in (os.listdir(path) + ['.']): stat = os.stat(os.path.join(path, file)) if hasattr(stat, 'st_blocks'): size += (stat.st_blocks * 512) else: size += (((stat.st_size // 512) + 1) * 512) return int((size / 1024.0))
[ "def", "disk_used", "(", "path", ")", ":", "size", "=", "0", "for", "file", "in", "(", "os", ".", "listdir", "(", "path", ")", "+", "[", "'.'", "]", ")", ":", "stat", "=", "os", ".", "stat", "(", "os", ".", "path", ".", "join", "(", "path", ",", "file", ")", ")", "if", "hasattr", "(", "stat", ",", "'st_blocks'", ")", ":", "size", "+=", "(", "stat", ".", "st_blocks", "*", "512", ")", "else", ":", "size", "+=", "(", "(", "(", "stat", ".", "st_size", "//", "512", ")", "+", "1", ")", "*", "512", ")", "return", "int", "(", "(", "size", "/", "1024.0", ")", ")" ]
return the disk usage in a directory .
train
false
18,790
@login_required @view def edit_photo(request, form=None, non_validation_error=False): if (form is None): form = mysite.account.forms.EditPhotoForm() data = mysite.profile.views.get_personal_data(request.user.get_profile()) data['edit_photo_form'] = form if non_validation_error: data['non_validation_error'] = True return (request, 'account/edit_photo.html', data)
[ "@", "login_required", "@", "view", "def", "edit_photo", "(", "request", ",", "form", "=", "None", ",", "non_validation_error", "=", "False", ")", ":", "if", "(", "form", "is", "None", ")", ":", "form", "=", "mysite", ".", "account", ".", "forms", ".", "EditPhotoForm", "(", ")", "data", "=", "mysite", ".", "profile", ".", "views", ".", "get_personal_data", "(", "request", ".", "user", ".", "get_profile", "(", ")", ")", "data", "[", "'edit_photo_form'", "]", "=", "form", "if", "non_validation_error", ":", "data", "[", "'non_validation_error'", "]", "=", "True", "return", "(", "request", ",", "'account/edit_photo.html'", ",", "data", ")" ]
set or change your profile photo .
train
false
18,795
def signalTest(remote='ubuntu2'): h = RemoteHost('h0', server=remote) h.shell.send_signal(SIGINT) h.shell.poll() if (h.shell.returncode is None): info('signalTest: SUCCESS: ', h, 'has not exited after SIGINT', '\n') else: info('signalTest: FAILURE:', h, 'exited with code', h.shell.returncode, '\n') h.stop()
[ "def", "signalTest", "(", "remote", "=", "'ubuntu2'", ")", ":", "h", "=", "RemoteHost", "(", "'h0'", ",", "server", "=", "remote", ")", "h", ".", "shell", ".", "send_signal", "(", "SIGINT", ")", "h", ".", "shell", ".", "poll", "(", ")", "if", "(", "h", ".", "shell", ".", "returncode", "is", "None", ")", ":", "info", "(", "'signalTest: SUCCESS: '", ",", "h", ",", "'has not exited after SIGINT'", ",", "'\\n'", ")", "else", ":", "info", "(", "'signalTest: FAILURE:'", ",", "h", ",", "'exited with code'", ",", "h", ".", "shell", ".", "returncode", ",", "'\\n'", ")", "h", ".", "stop", "(", ")" ]
make sure hosts are robust to signals .
train
false
18,796
def get_carrier(): return getattr(_local, 'carrier', None)
[ "def", "get_carrier", "(", ")", ":", "return", "getattr", "(", "_local", ",", "'carrier'", ",", "None", ")" ]
returns the name of the current carrier for the request lifecycle .
train
false
18,797
def is_boolean(value): if isinstance(value, string_type): try: return bool_dict[value.lower()] except KeyError: raise VdtTypeError(value) if (value == False): return False elif (value == True): return True else: raise VdtTypeError(value)
[ "def", "is_boolean", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "string_type", ")", ":", "try", ":", "return", "bool_dict", "[", "value", ".", "lower", "(", ")", "]", "except", "KeyError", ":", "raise", "VdtTypeError", "(", "value", ")", "if", "(", "value", "==", "False", ")", ":", "return", "False", "elif", "(", "value", "==", "True", ")", ":", "return", "True", "else", ":", "raise", "VdtTypeError", "(", "value", ")" ]
check if the value represents a boolean .
train
true
18,798
def delete_preview_files(sender, instance, **kw): for filename in [instance.image_path, instance.thumbnail_path]: if storage.exists(filename): log.info(('Removing filename: %s for preview: %s' % (filename, instance.pk))) storage.delete(filename)
[ "def", "delete_preview_files", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "for", "filename", "in", "[", "instance", ".", "image_path", ",", "instance", ".", "thumbnail_path", "]", ":", "if", "storage", ".", "exists", "(", "filename", ")", ":", "log", ".", "info", "(", "(", "'Removing filename: %s for preview: %s'", "%", "(", "filename", ",", "instance", ".", "pk", ")", ")", ")", "storage", ".", "delete", "(", "filename", ")" ]
on delete of the preview object from the database , remove its image and thumbnail files from storage .
train
false
18,800
def forward_drop(): run(settings.iptables, '-P', 'FORWARD', 'DROP')
[ "def", "forward_drop", "(", ")", ":", "run", "(", "settings", ".", "iptables", ",", "'-P'", ",", "'FORWARD'", ",", "'DROP'", ")" ]
disable any and all forwarding unless explicitly allowed .
train
false
18,801
def float32_floatX(f): def new_f(*args, **kwargs): '\n .. todo::\n\n WRITEME\n ' old_floatX = theano.config.floatX theano.config.floatX = 'float32' try: f(*args, **kwargs) finally: theano.config.floatX = old_floatX new_f.func_name = f.func_name return new_f
[ "def", "float32_floatX", "(", "f", ")", ":", "def", "new_f", "(", "*", "args", ",", "**", "kwargs", ")", ":", "old_floatX", "=", "theano", ".", "config", ".", "floatX", "theano", ".", "config", ".", "floatX", "=", "'float32'", "try", ":", "f", "(", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "theano", ".", "config", ".", "floatX", "=", "old_floatX", "new_f", ".", "func_name", "=", "f", ".", "func_name", "return", "new_f" ]
this function changes floatx to float32 for the call to f .
train
false
18,802
def test_nvidia_driver1(): a = numpy.random.rand(10000).astype('float32') A = cuda.shared_constructor(a) f = theano.function(inputs=[], outputs=A.sum(), mode=mode_with_gpu, profile=False) topo = f.maker.fgraph.toposort() assert (len(topo) == 2) if (sum((isinstance(node.op, B.GpuCAReduce) for node in topo)) != 1): msg = '\n DCTB '.join(([('Expected exactly one occurrence of GpuCAReduce ' + 'but got:')] + [str(app) for app in topo])) raise AssertionError(msg) if (not numpy.allclose(f(), a.sum())): raise Exception('The nvidia driver version installed with this OS does not give good results for reduction.Installing the nvidia driver available on the same download page as the cuda package will fix the problem: http://developer.nvidia.com/cuda-downloads')
[ "def", "test_nvidia_driver1", "(", ")", ":", "a", "=", "numpy", ".", "random", ".", "rand", "(", "10000", ")", ".", "astype", "(", "'float32'", ")", "A", "=", "cuda", ".", "shared_constructor", "(", "a", ")", "f", "=", "theano", ".", "function", "(", "inputs", "=", "[", "]", ",", "outputs", "=", "A", ".", "sum", "(", ")", ",", "mode", "=", "mode_with_gpu", ",", "profile", "=", "False", ")", "topo", "=", "f", ".", "maker", ".", "fgraph", ".", "toposort", "(", ")", "assert", "(", "len", "(", "topo", ")", "==", "2", ")", "if", "(", "sum", "(", "(", "isinstance", "(", "node", ".", "op", ",", "B", ".", "GpuCAReduce", ")", "for", "node", "in", "topo", ")", ")", "!=", "1", ")", ":", "msg", "=", "'\\n DCTB '", ".", "join", "(", "(", "[", "(", "'Expected exactly one occurrence of GpuCAReduce '", "+", "'but got:'", ")", "]", "+", "[", "str", "(", "app", ")", "for", "app", "in", "topo", "]", ")", ")", "raise", "AssertionError", "(", "msg", ")", "if", "(", "not", "numpy", ".", "allclose", "(", "f", "(", ")", ",", "a", ".", "sum", "(", ")", ")", ")", ":", "raise", "Exception", "(", "'The nvidia driver version installed with this OS does not give good results for reduction.Installing the nvidia driver available on the same download page as the cuda package will fix the problem: http://developer.nvidia.com/cuda-downloads'", ")" ]
some nvidia drivers give bad results for reduction ; this executes some reduction tests to ensure it runs correctly .
train
false
18,803
def dtype(x): return x.dtype.name
[ "def", "dtype", "(", "x", ")", ":", "return", "x", ".", "dtype", ".", "name" ]
returns the dtype of a keras tensor or variable .
train
false
18,804
def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None): if (cls is None): cls = MultiDict if (isinstance(s, text_type) and (not isinstance(separator, text_type))): separator = separator.decode((charset or 'ascii')) elif (isinstance(s, bytes) and (not isinstance(separator, bytes))): separator = separator.encode((charset or 'ascii')) return cls(_url_decode_impl(s.split(separator), charset, decode_keys, include_empty, errors))
[ "def", "url_decode", "(", "s", ",", "charset", "=", "'utf-8'", ",", "decode_keys", "=", "False", ",", "include_empty", "=", "True", ",", "errors", "=", "'replace'", ",", "separator", "=", "'&'", ",", "cls", "=", "None", ")", ":", "if", "(", "cls", "is", "None", ")", ":", "cls", "=", "MultiDict", "if", "(", "isinstance", "(", "s", ",", "text_type", ")", "and", "(", "not", "isinstance", "(", "separator", ",", "text_type", ")", ")", ")", ":", "separator", "=", "separator", ".", "decode", "(", "(", "charset", "or", "'ascii'", ")", ")", "elif", "(", "isinstance", "(", "s", ",", "bytes", ")", "and", "(", "not", "isinstance", "(", "separator", ",", "bytes", ")", ")", ")", ":", "separator", "=", "separator", ".", "encode", "(", "(", "charset", "or", "'ascii'", ")", ")", "return", "cls", "(", "_url_decode_impl", "(", "s", ".", "split", "(", "separator", ")", ",", "charset", ",", "decode_keys", ",", "include_empty", ",", "errors", ")", ")" ]
parse a querystring and return it as :class:multidict .
train
true
18,805
def compact_whitespace(s): return _MULTI_WHITESPACE.sub(' ', s).strip()
[ "def", "compact_whitespace", "(", "s", ")", ":", "return", "_MULTI_WHITESPACE", ".", "sub", "(", "' '", ",", "s", ")", ".", "strip", "(", ")" ]
replaces redundant whitespace from strings with a single space .
train
false
18,806
def generate_verifier(length=8): return ''.join([str(random.randint(0, 9)) for i in range(length)])
[ "def", "generate_verifier", "(", "length", "=", "8", ")", ":", "return", "''", ".", "join", "(", "[", "str", "(", "random", ".", "randint", "(", "0", ",", "9", ")", ")", "for", "i", "in", "range", "(", "length", ")", "]", ")" ]
generate a pseudorandom number .
train
false
18,808
def _package_path(name): loader = pkgutil.get_loader(name) if ((loader is None) or (name == u'__main__')): return None if hasattr(loader, u'get_filename'): filepath = loader.get_filename(name) else: __import__(name) filepath = sys.modules[name].__file__ return os.path.dirname(os.path.abspath(filepath))
[ "def", "_package_path", "(", "name", ")", ":", "loader", "=", "pkgutil", ".", "get_loader", "(", "name", ")", "if", "(", "(", "loader", "is", "None", ")", "or", "(", "name", "==", "u'__main__'", ")", ")", ":", "return", "None", "if", "hasattr", "(", "loader", ",", "u'get_filename'", ")", ":", "filepath", "=", "loader", ".", "get_filename", "(", "name", ")", "else", ":", "__import__", "(", "name", ")", "filepath", "=", "sys", ".", "modules", "[", "name", "]", ".", "__file__", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "filepath", ")", ")" ]
returns the path to the package containing the named module or none if the path could not be identified .
train
true
18,809
def _solve_discrete_lyapunov_direct(a, q): lhs = kron(a, a.conj()) lhs = (np.eye(lhs.shape[0]) - lhs) x = solve(lhs, q.flatten()) return np.reshape(x, q.shape)
[ "def", "_solve_discrete_lyapunov_direct", "(", "a", ",", "q", ")", ":", "lhs", "=", "kron", "(", "a", ",", "a", ".", "conj", "(", ")", ")", "lhs", "=", "(", "np", ".", "eye", "(", "lhs", ".", "shape", "[", "0", "]", ")", "-", "lhs", ")", "x", "=", "solve", "(", "lhs", ",", "q", ".", "flatten", "(", ")", ")", "return", "np", ".", "reshape", "(", "x", ",", "q", ".", "shape", ")" ]
solves the discrete lyapunov equation directly .
train
false
18,810
def _value_name(value): return '{} {}'.format(_node_name(value.node), value.label)
[ "def", "_value_name", "(", "value", ")", ":", "return", "'{} {}'", ".", "format", "(", "_node_name", "(", "value", ".", "node", ")", ",", "value", ".", "label", ")" ]
return the name of the value .
train
false
18,812
def _clean_url(url): url = url.encode('utf8') url = ''.join(((urllib.quote(c) if (ord(c) >= 127) else c) for c in url)) return url
[ "def", "_clean_url", "(", "url", ")", ":", "url", "=", "url", ".", "encode", "(", "'utf8'", ")", "url", "=", "''", ".", "join", "(", "(", "(", "urllib", ".", "quote", "(", "c", ")", "if", "(", "ord", "(", "c", ")", ">=", "127", ")", "else", "c", ")", "for", "c", "in", "url", ")", ")", "return", "url" ]
percent-encode non-ascii characters in a url .
train
false
18,813
def get_dir(path, dest, saltenv='base'): src = __context__['fileclient'].cache_dir(path, saltenv, cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_'])) src = ' '.join(src) single = salt.client.ssh.Single(__opts__, '', **__salt__.kwargs) ret = single.shell.send(src, dest) return (not ret[2])
[ "def", "get_dir", "(", "path", ",", "dest", ",", "saltenv", "=", "'base'", ")", ":", "src", "=", "__context__", "[", "'fileclient'", "]", ".", "cache_dir", "(", "path", ",", "saltenv", ",", "cachedir", "=", "os", ".", "path", ".", "join", "(", "'salt-ssh'", ",", "__salt__", ".", "kwargs", "[", "'id_'", "]", ")", ")", "src", "=", "' '", ".", "join", "(", "src", ")", "single", "=", "salt", ".", "client", ".", "ssh", ".", "Single", "(", "__opts__", ",", "''", ",", "**", "__salt__", ".", "kwargs", ")", "ret", "=", "single", ".", "shell", ".", "send", "(", "src", ",", "dest", ")", "return", "(", "not", "ret", "[", "2", "]", ")" ]
used to recursively copy a directory from the salt master .
train
true
18,814
def paddingSize(value, align): if ((value % align) != 0): return (align - (value % align)) else: return 0
[ "def", "paddingSize", "(", "value", ",", "align", ")", ":", "if", "(", "(", "value", "%", "align", ")", "!=", "0", ")", ":", "return", "(", "align", "-", "(", "value", "%", "align", ")", ")", "else", ":", "return", "0" ]
compute size of a padding field .
train
false
18,815
def _activities_from_groups_followed_by_user_query(user_id, limit): import ckan.model as model follower_objects = model.UserFollowingGroup.followee_list(user_id) if (not follower_objects): return model.Session.query(model.Activity).filter('0=1') return _activities_union_all(*[_activities_limit(_group_activity_query(follower.object_id), limit) for follower in follower_objects])
[ "def", "_activities_from_groups_followed_by_user_query", "(", "user_id", ",", "limit", ")", ":", "import", "ckan", ".", "model", "as", "model", "follower_objects", "=", "model", ".", "UserFollowingGroup", ".", "followee_list", "(", "user_id", ")", "if", "(", "not", "follower_objects", ")", ":", "return", "model", ".", "Session", ".", "query", "(", "model", ".", "Activity", ")", ".", "filter", "(", "'0=1'", ")", "return", "_activities_union_all", "(", "*", "[", "_activities_limit", "(", "_group_activity_query", "(", "follower", ".", "object_id", ")", ",", "limit", ")", "for", "follower", "in", "follower_objects", "]", ")" ]
return a query for all activities about groups the given user follows .
train
false
18,816
def start_transfer(context, read_file_handle, data_size, write_file_handle=None, image_service=None, image_id=None, image_meta=None): if (not image_meta): image_meta = {} thread_safe_pipe = io_util.ThreadSafePipe(QUEUE_BUFFER_SIZE, data_size) read_thread = io_util.IOThread(read_file_handle, thread_safe_pipe) if write_file_handle: write_thread = io_util.IOThread(thread_safe_pipe, write_file_handle) elif (image_service and image_id): write_thread = io_util.GlanceWriteThread(context, thread_safe_pipe, image_service, image_id, image_meta) read_event = read_thread.start() write_event = write_thread.start() try: read_event.wait() write_event.wait() except Exception as exc: read_thread.stop() write_thread.stop() LOG.exception(exc) raise exception.NovaException(exc) finally: read_file_handle.close() if write_file_handle: write_file_handle.close()
[ "def", "start_transfer", "(", "context", ",", "read_file_handle", ",", "data_size", ",", "write_file_handle", "=", "None", ",", "image_service", "=", "None", ",", "image_id", "=", "None", ",", "image_meta", "=", "None", ")", ":", "if", "(", "not", "image_meta", ")", ":", "image_meta", "=", "{", "}", "thread_safe_pipe", "=", "io_util", ".", "ThreadSafePipe", "(", "QUEUE_BUFFER_SIZE", ",", "data_size", ")", "read_thread", "=", "io_util", ".", "IOThread", "(", "read_file_handle", ",", "thread_safe_pipe", ")", "if", "write_file_handle", ":", "write_thread", "=", "io_util", ".", "IOThread", "(", "thread_safe_pipe", ",", "write_file_handle", ")", "elif", "(", "image_service", "and", "image_id", ")", ":", "write_thread", "=", "io_util", ".", "GlanceWriteThread", "(", "context", ",", "thread_safe_pipe", ",", "image_service", ",", "image_id", ",", "image_meta", ")", "read_event", "=", "read_thread", ".", "start", "(", ")", "write_event", "=", "write_thread", ".", "start", "(", ")", "try", ":", "read_event", ".", "wait", "(", ")", "write_event", ".", "wait", "(", ")", "except", "Exception", "as", "exc", ":", "read_thread", ".", "stop", "(", ")", "write_thread", ".", "stop", "(", ")", "LOG", ".", "exception", "(", "exc", ")", "raise", "exception", ".", "NovaException", "(", "exc", ")", "finally", ":", "read_file_handle", ".", "close", "(", ")", "if", "write_file_handle", ":", "write_file_handle", ".", "close", "(", ")" ]
start the data transfer from the reader to the writer .
train
false
18,819
def get_problem_set_grade_distrib(course_id, problem_set): db_query = models.StudentModule.objects.filter(course_id__exact=course_id, grade__isnull=False, module_type__exact='problem', module_state_key__in=problem_set).values('module_state_key', 'grade', 'max_grade').annotate(count_grade=Count('grade')).order_by('module_state_key', 'grade') prob_grade_distrib = {} for row in db_query: row_loc = course_id.make_usage_key_from_deprecated_string(row['module_state_key']) if (row_loc not in prob_grade_distrib): prob_grade_distrib[row_loc] = {'max_grade': 0, 'grade_distrib': []} curr_grade_distrib = prob_grade_distrib[row_loc] curr_grade_distrib['grade_distrib'].append((row['grade'], row['count_grade'])) if (curr_grade_distrib['max_grade'] < row['max_grade']): curr_grade_distrib['max_grade'] = row['max_grade'] return prob_grade_distrib
[ "def", "get_problem_set_grade_distrib", "(", "course_id", ",", "problem_set", ")", ":", "db_query", "=", "models", ".", "StudentModule", ".", "objects", ".", "filter", "(", "course_id__exact", "=", "course_id", ",", "grade__isnull", "=", "False", ",", "module_type__exact", "=", "'problem'", ",", "module_state_key__in", "=", "problem_set", ")", ".", "values", "(", "'module_state_key'", ",", "'grade'", ",", "'max_grade'", ")", ".", "annotate", "(", "count_grade", "=", "Count", "(", "'grade'", ")", ")", ".", "order_by", "(", "'module_state_key'", ",", "'grade'", ")", "prob_grade_distrib", "=", "{", "}", "for", "row", "in", "db_query", ":", "row_loc", "=", "course_id", ".", "make_usage_key_from_deprecated_string", "(", "row", "[", "'module_state_key'", "]", ")", "if", "(", "row_loc", "not", "in", "prob_grade_distrib", ")", ":", "prob_grade_distrib", "[", "row_loc", "]", "=", "{", "'max_grade'", ":", "0", ",", "'grade_distrib'", ":", "[", "]", "}", "curr_grade_distrib", "=", "prob_grade_distrib", "[", "row_loc", "]", "curr_grade_distrib", "[", "'grade_distrib'", "]", ".", "append", "(", "(", "row", "[", "'grade'", "]", ",", "row", "[", "'count_grade'", "]", ")", ")", "if", "(", "curr_grade_distrib", "[", "'max_grade'", "]", "<", "row", "[", "'max_grade'", "]", ")", ":", "curr_grade_distrib", "[", "'max_grade'", "]", "=", "row", "[", "'max_grade'", "]", "return", "prob_grade_distrib" ]
returns the grade distribution for the problems specified in problem_set .
train
false
18,821
def network_hosts(value, options=None, version=None): ipaddr_filter_out = _filter_ipaddr(value, options=options, version=version) if (not ipaddr_filter_out): return if (not isinstance(value, (list, tuple, types.GeneratorType))): return _network_hosts(ipaddr_filter_out[0]) return [_network_hosts(ip_a) for ip_a in ipaddr_filter_out]
[ "def", "network_hosts", "(", "value", ",", "options", "=", "None", ",", "version", "=", "None", ")", ":", "ipaddr_filter_out", "=", "_filter_ipaddr", "(", "value", ",", "options", "=", "options", ",", "version", "=", "version", ")", "if", "(", "not", "ipaddr_filter_out", ")", ":", "return", "if", "(", "not", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ",", "types", ".", "GeneratorType", ")", ")", ")", ":", "return", "_network_hosts", "(", "ipaddr_filter_out", "[", "0", "]", ")", "return", "[", "_network_hosts", "(", "ip_a", ")", "for", "ip_a", "in", "ipaddr_filter_out", "]" ]
return the list of hosts within a network .
train
true
18,822
def check_include_file(include_dirs, filename, package): if (not has_include_file(include_dirs, filename)): raise CheckFailed(('The C/C++ header for %s (%s) could not be found. You may need to install the development package.' % (package, filename)))
[ "def", "check_include_file", "(", "include_dirs", ",", "filename", ",", "package", ")", ":", "if", "(", "not", "has_include_file", "(", "include_dirs", ",", "filename", ")", ")", ":", "raise", "CheckFailed", "(", "(", "'The C/C++ header for %s (%s) could not be found. You may need to install the development package.'", "%", "(", "package", ",", "filename", ")", ")", ")" ]
raises an exception if the given include file can not be found .
train
false
18,823
def getSidesMinimumThreeBasedOnPrecision(elementNode, radius): return max(getSidesBasedOnPrecision(elementNode, radius), 3)
[ "def", "getSidesMinimumThreeBasedOnPrecision", "(", "elementNode", ",", "radius", ")", ":", "return", "max", "(", "getSidesBasedOnPrecision", "(", "elementNode", ",", "radius", ")", ",", "3", ")" ]
get the number of polygon sides .
train
false
18,824
def host_get(host=None, name=None, hostids=None, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'host.get' params = {'output': 'extend', 'filter': {}} if ((not name) and (not hostids) and (not host)): return False if name: params['filter'].setdefault('name', name) if hostids: params.setdefault('hostids', hostids) if host: params['filter'].setdefault('host', host) params = _params_extend(params, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return (ret['result'] if (len(ret['result']) > 0) else False) else: raise KeyError except KeyError: return False
[ "def", "host_get", "(", "host", "=", "None", ",", "name", "=", "None", ",", "hostids", "=", "None", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'host.get'", "params", "=", "{", "'output'", ":", "'extend'", ",", "'filter'", ":", "{", "}", "}", "if", "(", "(", "not", "name", ")", "and", "(", "not", "hostids", ")", "and", "(", "not", "host", ")", ")", ":", "return", "False", "if", "name", ":", "params", "[", "'filter'", "]", ".", "setdefault", "(", "'name'", ",", "name", ")", "if", "hostids", ":", "params", ".", "setdefault", "(", "'hostids'", ",", "hostids", ")", "if", "host", ":", "params", "[", "'filter'", "]", ".", "setdefault", "(", "'host'", ",", "host", ")", "params", "=", "_params_extend", "(", "params", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "(", "ret", "[", "'result'", "]", "if", "(", "len", "(", "ret", "[", "'result'", "]", ")", ">", "0", ")", "else", "False", ")", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "False" ]
retrieve hosts according to the given parameters .
train
true
18,825
@task @needs('pavelib.i18n.i18n_validate_transifex_config') @timed def i18n_transifex_push(): sh('i18n_tool transifex push')
[ "@", "task", "@", "needs", "(", "'pavelib.i18n.i18n_validate_transifex_config'", ")", "@", "timed", "def", "i18n_transifex_push", "(", ")", ":", "sh", "(", "'i18n_tool transifex push'", ")" ]
push source strings to transifex for translation .
train
false
18,826
def get_lexer(environment): key = (environment.block_start_string, environment.block_end_string, environment.variable_start_string, environment.variable_end_string, environment.comment_start_string, environment.comment_end_string, environment.line_statement_prefix, environment.line_comment_prefix, environment.trim_blocks, environment.newline_sequence) lexer = _lexer_cache.get(key) if (lexer is None): lexer = Lexer(environment) _lexer_cache[key] = lexer return lexer
[ "def", "get_lexer", "(", "environment", ")", ":", "key", "=", "(", "environment", ".", "block_start_string", ",", "environment", ".", "block_end_string", ",", "environment", ".", "variable_start_string", ",", "environment", ".", "variable_end_string", ",", "environment", ".", "comment_start_string", ",", "environment", ".", "comment_end_string", ",", "environment", ".", "line_statement_prefix", ",", "environment", ".", "line_comment_prefix", ",", "environment", ".", "trim_blocks", ",", "environment", ".", "newline_sequence", ")", "lexer", "=", "_lexer_cache", ".", "get", "(", "key", ")", "if", "(", "lexer", "is", "None", ")", ":", "lexer", "=", "Lexer", "(", "environment", ")", "_lexer_cache", "[", "key", "]", "=", "lexer", "return", "lexer" ]
return a lexer which is probably cached .
train
true
18,827
@register.simple_tag def format_date_range(date_from, date_to, separator=' - ', format_str='{dt:%B} {dt.day}, {dt:%Y}', year_f=', {dt:%Y}', month_f='{dt:%B}'): if (isinstance(date_to, datetime.datetime) and isinstance(date_from, datetime.datetime)): date_to = date_to.date() date_from = date_from.date() if (date_to and (date_to != date_from)): from_format = to_format = format_str if (date_from.year == date_to.year): from_format = from_format.replace(year_f, '') if (date_from.month == date_to.month): to_format = to_format.replace(month_f, '') return separator.join((from_format.format(dt=date_from), to_format.format(dt=date_to))) return format_str.format(dt=date_from)
[ "@", "register", ".", "simple_tag", "def", "format_date_range", "(", "date_from", ",", "date_to", ",", "separator", "=", "' - '", ",", "format_str", "=", "'{dt:%B} {dt.day}, {dt:%Y}'", ",", "year_f", "=", "', {dt:%Y}'", ",", "month_f", "=", "'{dt:%B}'", ")", ":", "if", "(", "isinstance", "(", "date_to", ",", "datetime", ".", "datetime", ")", "and", "isinstance", "(", "date_from", ",", "datetime", ".", "datetime", ")", ")", ":", "date_to", "=", "date_to", ".", "date", "(", ")", "date_from", "=", "date_from", ".", "date", "(", ")", "if", "(", "date_to", "and", "(", "date_to", "!=", "date_from", ")", ")", ":", "from_format", "=", "to_format", "=", "format_str", "if", "(", "date_from", ".", "year", "==", "date_to", ".", "year", ")", ":", "from_format", "=", "from_format", ".", "replace", "(", "year_f", ",", "''", ")", "if", "(", "date_from", ".", "month", "==", "date_to", ".", "month", ")", ":", "to_format", "=", "to_format", ".", "replace", "(", "month_f", ",", "''", ")", "return", "separator", ".", "join", "(", "(", "from_format", ".", "format", "(", "dt", "=", "date_from", ")", ",", "to_format", ".", "format", "(", "dt", "=", "date_to", ")", ")", ")", "return", "format_str", ".", "format", "(", "dt", "=", "date_from", ")" ]
takes a start date .
train
false
18,828
def set_subscription_policy(topic_name, subscription_name): pubsub_client = pubsub.Client() topic = pubsub_client.topic(topic_name) subscription = topic.subscription(subscription_name) policy = subscription.get_iam_policy() policy.viewers.add(policy.all_users()) policy.editors.add(policy.group('cloud-logs@google.com')) subscription.set_iam_policy(policy) print 'IAM policy for subscription {} on topic {} set.'.format(topic.name, subscription.name)
[ "def", "set_subscription_policy", "(", "topic_name", ",", "subscription_name", ")", ":", "pubsub_client", "=", "pubsub", ".", "Client", "(", ")", "topic", "=", "pubsub_client", ".", "topic", "(", "topic_name", ")", "subscription", "=", "topic", ".", "subscription", "(", "subscription_name", ")", "policy", "=", "subscription", ".", "get_iam_policy", "(", ")", "policy", ".", "viewers", ".", "add", "(", "policy", ".", "all_users", "(", ")", ")", "policy", ".", "editors", ".", "add", "(", "policy", ".", "group", "(", "'cloud-logs@google.com'", ")", ")", "subscription", ".", "set_iam_policy", "(", "policy", ")", "print", "'IAM policy for subscription {} on topic {} set.'", ".", "format", "(", "topic", ".", "name", ",", "subscription", ".", "name", ")" ]
sets the iam policy for a topic .
train
false
18,830
def _repr_odict(dumper, data): return _repr_pairs(dumper, u'tag:yaml.org,2002:omap', six.iteritems(data))
[ "def", "_repr_odict", "(", "dumper", ",", "data", ")", ":", "return", "_repr_pairs", "(", "dumper", ",", "u'tag:yaml.org,2002:omap'", ",", "six", ".", "iteritems", "(", "data", ")", ")" ]
represent ordereddict in yaml dump .
train
false
18,831
def HashEmails(emails): return [hashlib.sha256(email.strip().lower()).hexdigest() for email in emails]
[ "def", "HashEmails", "(", "emails", ")", ":", "return", "[", "hashlib", ".", "sha256", "(", "email", ".", "strip", "(", ")", ".", "lower", "(", ")", ")", ".", "hexdigest", "(", ")", "for", "email", "in", "emails", "]" ]
hashes the given emails using sha-256 .
train
false
18,832
@open_file(1, mode='w') def write_p2g(G, path, encoding='utf-8'): path.write(('%s\n' % G.name).encode(encoding)) path.write(('%s %s\n' % (G.order(), G.size())).encode(encoding)) nodes = list(G) nodenumber = dict(zip(nodes, range(len(nodes)))) for n in nodes: path.write(('%s\n' % n).encode(encoding)) for nbr in G.neighbors(n): path.write(('%s ' % nodenumber[nbr]).encode(encoding)) path.write('\n'.encode(encoding))
[ "@", "open_file", "(", "1", ",", "mode", "=", "'w'", ")", "def", "write_p2g", "(", "G", ",", "path", ",", "encoding", "=", "'utf-8'", ")", ":", "path", ".", "write", "(", "(", "'%s\\n'", "%", "G", ".", "name", ")", ".", "encode", "(", "encoding", ")", ")", "path", ".", "write", "(", "(", "'%s %s\\n'", "%", "(", "G", ".", "order", "(", ")", ",", "G", ".", "size", "(", ")", ")", ")", ".", "encode", "(", "encoding", ")", ")", "nodes", "=", "list", "(", "G", ")", "nodenumber", "=", "dict", "(", "zip", "(", "nodes", ",", "range", "(", "len", "(", "nodes", ")", ")", ")", ")", "for", "n", "in", "nodes", ":", "path", ".", "write", "(", "(", "'%s\\n'", "%", "n", ")", ".", "encode", "(", "encoding", ")", ")", "for", "nbr", "in", "G", ".", "neighbors", "(", "n", ")", ":", "path", ".", "write", "(", "(", "'%s '", "%", "nodenumber", "[", "nbr", "]", ")", ".", "encode", "(", "encoding", ")", ")", "path", ".", "write", "(", "'\\n'", ".", "encode", "(", "encoding", ")", ")" ]
write networkx graph in p2g format .
train
false
18,834
def _check_is_partition(locs, n): if (len(locs) != n): return False hit = np.zeros(n, bool) hit[locs] = True if (not np.all(hit)): return False return True
[ "def", "_check_is_partition", "(", "locs", ",", "n", ")", ":", "if", "(", "len", "(", "locs", ")", "!=", "n", ")", ":", "return", "False", "hit", "=", "np", ".", "zeros", "(", "n", ",", "bool", ")", "hit", "[", "locs", "]", "=", "True", "if", "(", "not", "np", ".", "all", "(", "hit", ")", ")", ":", "return", "False", "return", "True" ]
check whether locs is a reordering of the array np .
train
false
18,835
def SimpleAnalyzer(expression=default_pattern, gaps=False): return (RegexTokenizer(expression=expression, gaps=gaps) | LowercaseFilter())
[ "def", "SimpleAnalyzer", "(", "expression", "=", "default_pattern", ",", "gaps", "=", "False", ")", ":", "return", "(", "RegexTokenizer", "(", "expression", "=", "expression", ",", "gaps", "=", "gaps", ")", "|", "LowercaseFilter", "(", ")", ")" ]
composes a regextokenizer with a lowercasefilter .
train
false
18,836
def thing_type_exists(thingTypeName, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) res = conn.describe_thing_type(thingTypeName=thingTypeName) if res.get('thingTypeName'): return {'exists': True} else: return {'exists': False} except ClientError as e: err = salt.utils.boto3.get_error(e) if (e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException'): return {'exists': False} return {'error': err}
[ "def", "thing_type_exists", "(", "thingTypeName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "res", "=", "conn", ".", "describe_thing_type", "(", "thingTypeName", "=", "thingTypeName", ")", "if", "res", ".", "get", "(", "'thingTypeName'", ")", ":", "return", "{", "'exists'", ":", "True", "}", "else", ":", "return", "{", "'exists'", ":", "False", "}", "except", "ClientError", "as", "e", ":", "err", "=", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "if", "(", "e", ".", "response", ".", "get", "(", "'Error'", ",", "{", "}", ")", ".", "get", "(", "'Code'", ")", "==", "'ResourceNotFoundException'", ")", ":", "return", "{", "'exists'", ":", "False", "}", "return", "{", "'error'", ":", "err", "}" ]
given a thing type name .
train
false
18,837
def _toggle_options(event, params): import matplotlib.pyplot as plt if (len(params['projs']) > 0): if (params['fig_proj'] is None): _draw_proj_checkbox(event, params, draw_current_state=False) else: plt.close(params['fig_proj']) del params['proj_checks'] params['fig_proj'] = None
[ "def", "_toggle_options", "(", "event", ",", "params", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "if", "(", "len", "(", "params", "[", "'projs'", "]", ")", ">", "0", ")", ":", "if", "(", "params", "[", "'fig_proj'", "]", "is", "None", ")", ":", "_draw_proj_checkbox", "(", "event", ",", "params", ",", "draw_current_state", "=", "False", ")", "else", ":", "plt", ".", "close", "(", "params", "[", "'fig_proj'", "]", ")", "del", "params", "[", "'proj_checks'", "]", "params", "[", "'fig_proj'", "]", "=", "None" ]
toggle options dialog .
train
false
18,838
def MakeModuleForTypelibInterface(typelib_ob, progressInstance=None, bForDemand=bForDemandDefault, bBuildHidden=1): import makepy try: makepy.GenerateFromTypeLibSpec(typelib_ob, progressInstance=progressInstance, bForDemand=bForDemandDefault, bBuildHidden=bBuildHidden) except pywintypes.com_error: return None tla = typelib_ob.GetLibAttr() guid = tla[0] lcid = tla[1] major = tla[3] minor = tla[4] return GetModuleForTypelib(guid, lcid, major, minor)
[ "def", "MakeModuleForTypelibInterface", "(", "typelib_ob", ",", "progressInstance", "=", "None", ",", "bForDemand", "=", "bForDemandDefault", ",", "bBuildHidden", "=", "1", ")", ":", "import", "makepy", "try", ":", "makepy", ".", "GenerateFromTypeLibSpec", "(", "typelib_ob", ",", "progressInstance", "=", "progressInstance", ",", "bForDemand", "=", "bForDemandDefault", ",", "bBuildHidden", "=", "bBuildHidden", ")", "except", "pywintypes", ".", "com_error", ":", "return", "None", "tla", "=", "typelib_ob", ".", "GetLibAttr", "(", ")", "guid", "=", "tla", "[", "0", "]", "lcid", "=", "tla", "[", "1", "]", "major", "=", "tla", "[", "3", "]", "minor", "=", "tla", "[", "4", "]", "return", "GetModuleForTypelib", "(", "guid", ",", "lcid", ",", "major", ",", "minor", ")" ]
generate support for a type library .
train
false
18,839
def GetFileSystems(): version = OSXVersion() (major, minor) = version.VersionAsMajorMinor() libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c')) if ((major <= 10) and (minor <= 5)): use_64 = False fs_struct = StatFSStruct else: use_64 = True fs_struct = StatFS64Struct struct_size = fs_struct.GetSize() buf_size = (struct_size * 20) cbuf = ctypes.create_string_buffer(buf_size) if use_64: ret = libc.getfsstat64(ctypes.byref(cbuf), buf_size, 2) else: ret = libc.getfsstat(ctypes.byref(cbuf), buf_size, 2) if (ret == 0): logging.debug('getfsstat failed err: %s', ret) return [] return ParseFileSystemsStruct(fs_struct, ret, cbuf)
[ "def", "GetFileSystems", "(", ")", ":", "version", "=", "OSXVersion", "(", ")", "(", "major", ",", "minor", ")", "=", "version", ".", "VersionAsMajorMinor", "(", ")", "libc", "=", "ctypes", ".", "cdll", ".", "LoadLibrary", "(", "ctypes", ".", "util", ".", "find_library", "(", "'c'", ")", ")", "if", "(", "(", "major", "<=", "10", ")", "and", "(", "minor", "<=", "5", ")", ")", ":", "use_64", "=", "False", "fs_struct", "=", "StatFSStruct", "else", ":", "use_64", "=", "True", "fs_struct", "=", "StatFS64Struct", "struct_size", "=", "fs_struct", ".", "GetSize", "(", ")", "buf_size", "=", "(", "struct_size", "*", "20", ")", "cbuf", "=", "ctypes", ".", "create_string_buffer", "(", "buf_size", ")", "if", "use_64", ":", "ret", "=", "libc", ".", "getfsstat64", "(", "ctypes", ".", "byref", "(", "cbuf", ")", ",", "buf_size", ",", "2", ")", "else", ":", "ret", "=", "libc", ".", "getfsstat", "(", "ctypes", ".", "byref", "(", "cbuf", ")", ",", "buf_size", ",", "2", ")", "if", "(", "ret", "==", "0", ")", ":", "logging", ".", "debug", "(", "'getfsstat failed err: %s'", ",", "ret", ")", "return", "[", "]", "return", "ParseFileSystemsStruct", "(", "fs_struct", ",", "ret", ",", "cbuf", ")" ]
make syscalls to get the mounted filesystems .
train
true
18,840
def preferred_ip(vm_, ips): proto = config.get_cloud_config_value('protocol', vm_, __opts__, default='ipv4', search_global=False) family = socket.AF_INET if (proto == 'ipv6'): family = socket.AF_INET6 for ip in ips: try: socket.inet_pton(family, ip) return ip except Exception: continue return False
[ "def", "preferred_ip", "(", "vm_", ",", "ips", ")", ":", "proto", "=", "config", ".", "get_cloud_config_value", "(", "'protocol'", ",", "vm_", ",", "__opts__", ",", "default", "=", "'ipv4'", ",", "search_global", "=", "False", ")", "family", "=", "socket", ".", "AF_INET", "if", "(", "proto", "==", "'ipv6'", ")", ":", "family", "=", "socket", ".", "AF_INET6", "for", "ip", "in", "ips", ":", "try", ":", "socket", ".", "inet_pton", "(", "family", ",", "ip", ")", "return", "ip", "except", "Exception", ":", "continue", "return", "False" ]
return the preferred internet protocol .
train
true
18,841
def _match_history_log_path(path, job_id=None): m = _HISTORY_LOG_PATH_RE.match(path) if (not m): return None if (not ((job_id is None) or (m.group('job_id') == job_id))): return None return dict(job_id=m.group('job_id'), yarn=('.jhist' in m.group('suffix')))
[ "def", "_match_history_log_path", "(", "path", ",", "job_id", "=", "None", ")", ":", "m", "=", "_HISTORY_LOG_PATH_RE", ".", "match", "(", "path", ")", "if", "(", "not", "m", ")", ":", "return", "None", "if", "(", "not", "(", "(", "job_id", "is", "None", ")", "or", "(", "m", ".", "group", "(", "'job_id'", ")", "==", "job_id", ")", ")", ")", ":", "return", "None", "return", "dict", "(", "job_id", "=", "m", ".", "group", "(", "'job_id'", ")", ",", "yarn", "=", "(", "'.jhist'", "in", "m", ".", "group", "(", "'suffix'", ")", ")", ")" ]
yield paths/uris of all job history files in the given directories .
train
false
18,847
def sortby(tree, col, descending): data = [(tree.set(child, col), child) for child in tree.get_children('')] data.sort(reverse=descending) for (indx, item) in enumerate(data): tree.move(item[1], '', indx) tree.heading(col, command=(lambda col=col: sortby(tree, col, int((not descending)))))
[ "def", "sortby", "(", "tree", ",", "col", ",", "descending", ")", ":", "data", "=", "[", "(", "tree", ".", "set", "(", "child", ",", "col", ")", ",", "child", ")", "for", "child", "in", "tree", ".", "get_children", "(", "''", ")", "]", "data", ".", "sort", "(", "reverse", "=", "descending", ")", "for", "(", "indx", ",", "item", ")", "in", "enumerate", "(", "data", ")", ":", "tree", ".", "move", "(", "item", "[", "1", "]", ",", "''", ",", "indx", ")", "tree", ".", "heading", "(", "col", ",", "command", "=", "(", "lambda", "col", "=", "col", ":", "sortby", "(", "tree", ",", "col", ",", "int", "(", "(", "not", "descending", ")", ")", ")", ")", ")" ]
sort tree contents when a column is clicked on .
train
false
18,849
def test_mockclient(): raw = mne.io.read_raw_fif(raw_fname, preload=True, verbose=False) picks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True, stim=True, exclude=raw.info['bads']) (event_id, tmin, tmax) = (1, (-0.2), 0.5) epochs = Epochs(raw, events[:7], event_id=event_id, tmin=tmin, tmax=tmax, picks=picks, baseline=(None, 0), preload=True) data = epochs.get_data() rt_client = MockRtClient(raw) rt_epochs = RtEpochs(rt_client, event_id, tmin, tmax, picks=picks, isi_max=0.5) rt_epochs.start() rt_client.send_data(rt_epochs, picks, tmin=0, tmax=10, buffer_size=1000) rt_data = rt_epochs.get_data() assert_true((rt_data.shape == data.shape)) assert_array_equal(rt_data, data)
[ "def", "test_mockclient", "(", ")", ":", "raw", "=", "mne", ".", "io", ".", "read_raw_fif", "(", "raw_fname", ",", "preload", "=", "True", ",", "verbose", "=", "False", ")", "picks", "=", "mne", ".", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "'grad'", ",", "eeg", "=", "False", ",", "eog", "=", "True", ",", "stim", "=", "True", ",", "exclude", "=", "raw", ".", "info", "[", "'bads'", "]", ")", "(", "event_id", ",", "tmin", ",", "tmax", ")", "=", "(", "1", ",", "(", "-", "0.2", ")", ",", "0.5", ")", "epochs", "=", "Epochs", "(", "raw", ",", "events", "[", ":", "7", "]", ",", "event_id", "=", "event_id", ",", "tmin", "=", "tmin", ",", "tmax", "=", "tmax", ",", "picks", "=", "picks", ",", "baseline", "=", "(", "None", ",", "0", ")", ",", "preload", "=", "True", ")", "data", "=", "epochs", ".", "get_data", "(", ")", "rt_client", "=", "MockRtClient", "(", "raw", ")", "rt_epochs", "=", "RtEpochs", "(", "rt_client", ",", "event_id", ",", "tmin", ",", "tmax", ",", "picks", "=", "picks", ",", "isi_max", "=", "0.5", ")", "rt_epochs", ".", "start", "(", ")", "rt_client", ".", "send_data", "(", "rt_epochs", ",", "picks", ",", "tmin", "=", "0", ",", "tmax", "=", "10", ",", "buffer_size", "=", "1000", ")", "rt_data", "=", "rt_epochs", ".", "get_data", "(", ")", "assert_true", "(", "(", "rt_data", ".", "shape", "==", "data", ".", "shape", ")", ")", "assert_array_equal", "(", "rt_data", ",", "data", ")" ]
test the rtmockclient .
train
false
18,850
def validate_axis(ndim, axis): if ((axis > (ndim - 1)) or (axis < (- ndim))): raise ValueError(('Axis must be between -%d and %d, got %d' % (ndim, (ndim - 1), axis))) if (axis < 0): return (axis + ndim) else: return axis
[ "def", "validate_axis", "(", "ndim", ",", "axis", ")", ":", "if", "(", "(", "axis", ">", "(", "ndim", "-", "1", ")", ")", "or", "(", "axis", "<", "(", "-", "ndim", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "'Axis must be between -%d and %d, got %d'", "%", "(", "ndim", ",", "(", "ndim", "-", "1", ")", ",", "axis", ")", ")", ")", "if", "(", "axis", "<", "0", ")", ":", "return", "(", "axis", "+", "ndim", ")", "else", ":", "return", "axis" ]
validate single axis dimension against number of dimensions .
train
false
18,851
def flip_horizontal(request, fileobjects): transpose_image(request, fileobjects, 0)
[ "def", "flip_horizontal", "(", "request", ",", "fileobjects", ")", ":", "transpose_image", "(", "request", ",", "fileobjects", ",", "0", ")" ]
flip image horizontally .
train
false
18,852
def _hash(message, method_name): if (method_name not in HASH_METHODS): raise ValueError(('Invalid hash method: %s' % method_name)) method = HASH_METHODS[method_name] hasher = method() if (hasattr(message, 'read') and hasattr(message.read, '__call__')): for block in varblock.yield_fixedblocks(message, 1024): hasher.update(block) else: hasher.update(message) return hasher.digest()
[ "def", "_hash", "(", "message", ",", "method_name", ")", ":", "if", "(", "method_name", "not", "in", "HASH_METHODS", ")", ":", "raise", "ValueError", "(", "(", "'Invalid hash method: %s'", "%", "method_name", ")", ")", "method", "=", "HASH_METHODS", "[", "method_name", "]", "hasher", "=", "method", "(", ")", "if", "(", "hasattr", "(", "message", ",", "'read'", ")", "and", "hasattr", "(", "message", ".", "read", ",", "'__call__'", ")", ")", ":", "for", "block", "in", "varblock", ".", "yield_fixedblocks", "(", "message", ",", "1024", ")", ":", "hasher", ".", "update", "(", "block", ")", "else", ":", "hasher", ".", "update", "(", "message", ")", "return", "hasher", ".", "digest", "(", ")" ]
encode binary data according to specified algorithm .
train
false
18,853
def _quote(str): if ((str is None) or _is_legal_key(str)): return str else: return (('"' + str.translate(_Translator)) + '"')
[ "def", "_quote", "(", "str", ")", ":", "if", "(", "(", "str", "is", "None", ")", "or", "_is_legal_key", "(", "str", ")", ")", ":", "return", "str", "else", ":", "return", "(", "(", "'\"'", "+", "str", ".", "translate", "(", "_Translator", ")", ")", "+", "'\"'", ")" ]
quote a string for use in a cookie header .
train
false
18,854
def kuiper(data, dtype=np.float64): data = np.sort(data, axis=0).astype(dtype) shape = data.shape n_dim = len(shape) n_trials = shape[0] j1 = ((np.arange(n_trials, dtype=dtype) + 1.0) / float(n_trials)) j2 = (np.arange(n_trials, dtype=dtype) / float(n_trials)) if (n_dim > 1): j1 = j1[:, np.newaxis] j2 = j2[:, np.newaxis] d1 = (j1 - data).max(axis=0) d2 = (data - j2).max(axis=0) n_eff = n_trials d = (d1 + d2) return (d, _prob_kuiper(d, n_eff, dtype=dtype))
[ "def", "kuiper", "(", "data", ",", "dtype", "=", "np", ".", "float64", ")", ":", "data", "=", "np", ".", "sort", "(", "data", ",", "axis", "=", "0", ")", ".", "astype", "(", "dtype", ")", "shape", "=", "data", ".", "shape", "n_dim", "=", "len", "(", "shape", ")", "n_trials", "=", "shape", "[", "0", "]", "j1", "=", "(", "(", "np", ".", "arange", "(", "n_trials", ",", "dtype", "=", "dtype", ")", "+", "1.0", ")", "/", "float", "(", "n_trials", ")", ")", "j2", "=", "(", "np", ".", "arange", "(", "n_trials", ",", "dtype", "=", "dtype", ")", "/", "float", "(", "n_trials", ")", ")", "if", "(", "n_dim", ">", "1", ")", ":", "j1", "=", "j1", "[", ":", ",", "np", ".", "newaxis", "]", "j2", "=", "j2", "[", ":", ",", "np", ".", "newaxis", "]", "d1", "=", "(", "j1", "-", "data", ")", ".", "max", "(", "axis", "=", "0", ")", "d2", "=", "(", "data", "-", "j2", ")", ".", "max", "(", "axis", "=", "0", ")", "n_eff", "=", "n_trials", "d", "=", "(", "d1", "+", "d2", ")", "return", "(", "d", ",", "_prob_kuiper", "(", "d", ",", "n_eff", ",", "dtype", "=", "dtype", ")", ")" ]
kuipers test of uniform distribution .
train
false
18,855
def wrap_module(module): if socksocket.default_proxy: module.socket.socket = socksocket else: raise GeneralProxyError('No default proxy specified')
[ "def", "wrap_module", "(", "module", ")", ":", "if", "socksocket", ".", "default_proxy", ":", "module", ".", "socket", ".", "socket", "=", "socksocket", "else", ":", "raise", "GeneralProxyError", "(", "'No default proxy specified'", ")" ]
attempts to replace a modules socket library with a socks socket .
train
false
18,857
def safe_no_home(home): if home: raise RuntimeError(('The `config.home` option has been removed and should not be used anymore. Please set the `config.base_compiledir` option instead (for instance to: %s)' % os.path.join(home, '.theano'))) return True
[ "def", "safe_no_home", "(", "home", ")", ":", "if", "home", ":", "raise", "RuntimeError", "(", "(", "'The `config.home` option has been removed and should not be used anymore. Please set the `config.base_compiledir` option instead (for instance to: %s)'", "%", "os", ".", "path", ".", "join", "(", "home", ",", "'.theano'", ")", ")", ")", "return", "True" ]
make sure the user is not attempting to use config .
train
false
18,860
def test_whos(): class A(object, ): def __repr__(self): raise Exception() _ip.user_ns['a'] = A() _ip.magic('whos')
[ "def", "test_whos", "(", ")", ":", "class", "A", "(", "object", ",", ")", ":", "def", "__repr__", "(", "self", ")", ":", "raise", "Exception", "(", ")", "_ip", ".", "user_ns", "[", "'a'", "]", "=", "A", "(", ")", "_ip", ".", "magic", "(", "'whos'", ")" ]
check that whos is protected against objects where repr() fails .
train
false
18,861
@contextmanager def filetexts(d, open=open, mode='t', use_tmpdir=True): with (tmp_cwd() if use_tmpdir else noop_context()): for (filename, text) in d.items(): f = open(filename, ('w' + mode)) try: f.write(text) finally: try: f.close() except AttributeError: pass (yield list(d)) for filename in d: if os.path.exists(filename): with ignoring(OSError): os.remove(filename)
[ "@", "contextmanager", "def", "filetexts", "(", "d", ",", "open", "=", "open", ",", "mode", "=", "'t'", ",", "use_tmpdir", "=", "True", ")", ":", "with", "(", "tmp_cwd", "(", ")", "if", "use_tmpdir", "else", "noop_context", "(", ")", ")", ":", "for", "(", "filename", ",", "text", ")", "in", "d", ".", "items", "(", ")", ":", "f", "=", "open", "(", "filename", ",", "(", "'w'", "+", "mode", ")", ")", "try", ":", "f", ".", "write", "(", "text", ")", "finally", ":", "try", ":", "f", ".", "close", "(", ")", "except", "AttributeError", ":", "pass", "(", "yield", "list", "(", "d", ")", ")", "for", "filename", "in", "d", ":", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "with", "ignoring", "(", "OSError", ")", ":", "os", ".", "remove", "(", "filename", ")" ]
dumps a number of textfiles to disk d - dict a mapping from filename to text like {a .
train
false
18,862
def printAttributeDictionaryKey(attributeDictionaryKey, xmlElement): if (attributeDictionaryKey.lower() == '_localdictionary'): localDictionary = getLocalDictionary(attributeDictionaryKey, xmlElement) if (localDictionary != None): localDictionaryKeys = localDictionary.keys() attributeValue = xmlElement.attributeDictionary[attributeDictionaryKey] if (attributeValue != ''): attributeValue = (' - ' + attributeValue) print ('Local Dictionary Variables' + attributeValue) localDictionaryKeys.sort() for localDictionaryKey in localDictionaryKeys: print ('%s: %s' % (localDictionaryKey, localDictionary[localDictionaryKey])) return value = xmlElement.attributeDictionary[attributeDictionaryKey] evaluatedValue = None if (value == ''): evaluatedValue = evaluate.getEvaluatedExpressionValue(attributeDictionaryKey, xmlElement) else: evaluatedValue = evaluate.getEvaluatedExpressionValue(value, xmlElement) print ('%s: %s' % (attributeDictionaryKey, evaluatedValue))
[ "def", "printAttributeDictionaryKey", "(", "attributeDictionaryKey", ",", "xmlElement", ")", ":", "if", "(", "attributeDictionaryKey", ".", "lower", "(", ")", "==", "'_localdictionary'", ")", ":", "localDictionary", "=", "getLocalDictionary", "(", "attributeDictionaryKey", ",", "xmlElement", ")", "if", "(", "localDictionary", "!=", "None", ")", ":", "localDictionaryKeys", "=", "localDictionary", ".", "keys", "(", ")", "attributeValue", "=", "xmlElement", ".", "attributeDictionary", "[", "attributeDictionaryKey", "]", "if", "(", "attributeValue", "!=", "''", ")", ":", "attributeValue", "=", "(", "' - '", "+", "attributeValue", ")", "print", "(", "'Local Dictionary Variables'", "+", "attributeValue", ")", "localDictionaryKeys", ".", "sort", "(", ")", "for", "localDictionaryKey", "in", "localDictionaryKeys", ":", "print", "(", "'%s: %s'", "%", "(", "localDictionaryKey", ",", "localDictionary", "[", "localDictionaryKey", "]", ")", ")", "return", "value", "=", "xmlElement", ".", "attributeDictionary", "[", "attributeDictionaryKey", "]", "evaluatedValue", "=", "None", "if", "(", "value", "==", "''", ")", ":", "evaluatedValue", "=", "evaluate", ".", "getEvaluatedExpressionValue", "(", "attributeDictionaryKey", ",", "xmlElement", ")", "else", ":", "evaluatedValue", "=", "evaluate", ".", "getEvaluatedExpressionValue", "(", "value", ",", "xmlElement", ")", "print", "(", "'%s: %s'", "%", "(", "attributeDictionaryKey", ",", "evaluatedValue", ")", ")" ]
print the attributedictionarykey .
train
false
18,863
def _cast_params(rendered, parameter_schemas): casted_params = {} for (k, v) in six.iteritems(rendered): casted_params[k] = _cast(v, (parameter_schemas[k] or {})) return casted_params
[ "def", "_cast_params", "(", "rendered", ",", "parameter_schemas", ")", ":", "casted_params", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "rendered", ")", ":", "casted_params", "[", "k", "]", "=", "_cast", "(", "v", ",", "(", "parameter_schemas", "[", "k", "]", "or", "{", "}", ")", ")", "return", "casted_params" ]
its just here to make tests happy .
train
false
18,864
@ForwardNodeSettings.subscribe('before_save') def validate_circular_reference(schema, instance): if (instance.url and (instance.owner._id in instance.url)): raise ValidationValueError('Circular URL')
[ "@", "ForwardNodeSettings", ".", "subscribe", "(", "'before_save'", ")", "def", "validate_circular_reference", "(", "schema", ",", "instance", ")", ":", "if", "(", "instance", ".", "url", "and", "(", "instance", ".", "owner", ".", "_id", "in", "instance", ".", "url", ")", ")", ":", "raise", "ValidationValueError", "(", "'Circular URL'", ")" ]
prevent node from forwarding to itself .
train
false
18,865
def ensure_engine(f): def wrapper(self, *args, **kwargs): for env_var_value in (u'false', u'true'): with environment_as(HERMETIC_ENV=u'PANTS_ENABLE_V2_ENGINE', PANTS_ENABLE_V2_ENGINE=env_var_value): f(self, *args, **kwargs) return wrapper
[ "def", "ensure_engine", "(", "f", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "for", "env_var_value", "in", "(", "u'false'", ",", "u'true'", ")", ":", "with", "environment_as", "(", "HERMETIC_ENV", "=", "u'PANTS_ENABLE_V2_ENGINE'", ",", "PANTS_ENABLE_V2_ENGINE", "=", "env_var_value", ")", ":", "f", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
a decorator for running an integration test with and without the v2 engine enabled via temporary environment variables .
train
false
18,866
def create_data_disk(vm_=None, linode_id=None, data_size=None): kwargs = {} kwargs.update({'LinodeID': linode_id, 'Label': (vm_['name'] + '_data'), 'Type': 'ext4', 'Size': data_size}) result = _query('linode', 'disk.create', args=kwargs) return _clean_data(result)
[ "def", "create_data_disk", "(", "vm_", "=", "None", ",", "linode_id", "=", "None", ",", "data_size", "=", "None", ")", ":", "kwargs", "=", "{", "}", "kwargs", ".", "update", "(", "{", "'LinodeID'", ":", "linode_id", ",", "'Label'", ":", "(", "vm_", "[", "'name'", "]", "+", "'_data'", ")", ",", "'Type'", ":", "'ext4'", ",", "'Size'", ":", "data_size", "}", ")", "result", "=", "_query", "(", "'linode'", ",", "'disk.create'", ",", "args", "=", "kwargs", ")", "return", "_clean_data", "(", "result", ")" ]
create a data disk for the linode .
train
true
18,867
def thin(image, max_iter=None): max_iter = (max_iter or sys.maxsize) assert_nD(image, 2) skel = np.asanyarray(image, dtype=bool).astype(np.uint8) mask = np.array([[8, 4, 2], [16, 0, 1], [32, 64, 128]], dtype=np.uint8) for i in range(max_iter): before = np.sum(skel) for lut in [G123_LUT, G123P_LUT]: N = ndi.correlate(skel, mask, mode='constant') D = np.take(lut, N) skel[D] = 0 after = np.sum(skel) if (before == after): break return skel.astype(np.bool)
[ "def", "thin", "(", "image", ",", "max_iter", "=", "None", ")", ":", "max_iter", "=", "(", "max_iter", "or", "sys", ".", "maxsize", ")", "assert_nD", "(", "image", ",", "2", ")", "skel", "=", "np", ".", "asanyarray", "(", "image", ",", "dtype", "=", "bool", ")", ".", "astype", "(", "np", ".", "uint8", ")", "mask", "=", "np", ".", "array", "(", "[", "[", "8", ",", "4", ",", "2", "]", ",", "[", "16", ",", "0", ",", "1", "]", ",", "[", "32", ",", "64", ",", "128", "]", "]", ",", "dtype", "=", "np", ".", "uint8", ")", "for", "i", "in", "range", "(", "max_iter", ")", ":", "before", "=", "np", ".", "sum", "(", "skel", ")", "for", "lut", "in", "[", "G123_LUT", ",", "G123P_LUT", "]", ":", "N", "=", "ndi", ".", "correlate", "(", "skel", ",", "mask", ",", "mode", "=", "'constant'", ")", "D", "=", "np", ".", "take", "(", "lut", ",", "N", ")", "skel", "[", "D", "]", "=", "0", "after", "=", "np", ".", "sum", "(", "skel", ")", "if", "(", "before", "==", "after", ")", ":", "break", "return", "skel", ".", "astype", "(", "np", ".", "bool", ")" ]
perform morphological thinning of a binary image .
train
false
18,868
def get_repo(name, basedir=None, **kwargs): repos = list_repos(basedir) repofile = '' for repo in repos: if (repo == name): repofile = repos[repo]['file'] if repofile: filerepos = _parse_repo_file(repofile)[1] return filerepos[name] return {}
[ "def", "get_repo", "(", "name", ",", "basedir", "=", "None", ",", "**", "kwargs", ")", ":", "repos", "=", "list_repos", "(", "basedir", ")", "repofile", "=", "''", "for", "repo", "in", "repos", ":", "if", "(", "repo", "==", "name", ")", ":", "repofile", "=", "repos", "[", "repo", "]", "[", "'file'", "]", "if", "repofile", ":", "filerepos", "=", "_parse_repo_file", "(", "repofile", ")", "[", "1", "]", "return", "filerepos", "[", "name", "]", "return", "{", "}" ]
display a repo from the /etc/opkg/* .
train
true
18,869
def _register_adapter(value, key):
    """Register a sqlite3 adapter for ``value``'s type when needed.

    Types sqlite3 already understands (None, numbers, strings, bytes)
    and numpy arrays are skipped, as is the special 'resumed_from' key.
    """
    natively_supported = (type(None), int, float, six.string_types,
                          bytes, numpy.ndarray)
    if key != 'resumed_from' and not isinstance(value, natively_supported):
        sqlite3.register_adapter(type(value), adapt_obj)
[ "def", "_register_adapter", "(", "value", ",", "key", ")", ":", "if", "(", "(", "not", "isinstance", "(", "value", ",", "(", "type", "(", "None", ")", ",", "int", ",", "float", ",", "six", ".", "string_types", ",", "bytes", ",", "numpy", ".", "ndarray", ")", ")", ")", "and", "(", "key", "!=", "'resumed_from'", ")", ")", ":", "sqlite3", ".", "register_adapter", "(", "type", "(", "value", ")", ",", "adapt_obj", ")" ]
register an adapter if the type of value is unknown .
train
false
18,871
def select_source_in_label(src, label, random_state=None, location='random', subject=None, subjects_dir=None, surf='sphere'):
    """Select one source-space vertex inside a label.

    Returns a ``(lh_vertno, rh_vertno)`` pair of lists; only the list
    matching the label's hemisphere receives the chosen vertex.
    ``location`` picks either a random vertex of the label restricted
    to the source space, or the label's center of mass.
    """
    lh_vertno = list()
    rh_vertno = list()
    if not isinstance(location, string_types) or location not in ('random', 'center'):
        raise ValueError('location must be "random" or "center", got %s'
                         % (location,))
    rng = check_random_state(random_state)
    # Hemisphere 0 is 'lh', 1 is 'rh'; route the pick accordingly.
    hemi_idx = 0 if label.hemi == 'lh' else 1
    vertno = lh_vertno if hemi_idx == 0 else rh_vertno
    # Only vertices present in both the source space and the label qualify.
    candidates = np.intersect1d(src[hemi_idx]['vertno'], label.vertices)
    if location == 'random':
        pick = candidates[rng.randint(0, len(candidates), 1)[0]]
    else:
        pick = label.center_of_mass(subject, restrict_vertices=candidates,
                                    subjects_dir=subjects_dir, surf=surf)
    vertno.append(pick)
    return (lh_vertno, rh_vertno)
[ "def", "select_source_in_label", "(", "src", ",", "label", ",", "random_state", "=", "None", ",", "location", "=", "'random'", ",", "subject", "=", "None", ",", "subjects_dir", "=", "None", ",", "surf", "=", "'sphere'", ")", ":", "lh_vertno", "=", "list", "(", ")", "rh_vertno", "=", "list", "(", ")", "if", "(", "(", "not", "isinstance", "(", "location", ",", "string_types", ")", ")", "or", "(", "location", "not", "in", "(", "'random'", ",", "'center'", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "'location must be \"random\" or \"center\", got %s'", "%", "(", "location", ",", ")", ")", ")", "rng", "=", "check_random_state", "(", "random_state", ")", "if", "(", "label", ".", "hemi", "==", "'lh'", ")", ":", "vertno", "=", "lh_vertno", "hemi_idx", "=", "0", "else", ":", "vertno", "=", "rh_vertno", "hemi_idx", "=", "1", "src_sel", "=", "np", ".", "intersect1d", "(", "src", "[", "hemi_idx", "]", "[", "'vertno'", "]", ",", "label", ".", "vertices", ")", "if", "(", "location", "==", "'random'", ")", ":", "idx", "=", "src_sel", "[", "rng", ".", "randint", "(", "0", ",", "len", "(", "src_sel", ")", ",", "1", ")", "[", "0", "]", "]", "else", ":", "idx", "=", "label", ".", "center_of_mass", "(", "subject", ",", "restrict_vertices", "=", "src_sel", ",", "subjects_dir", "=", "subjects_dir", ",", "surf", "=", "surf", ")", "vertno", ".", "append", "(", "idx", ")", "return", "(", "lh_vertno", ",", "rh_vertno", ")" ]
select source positions using a label .
train
false
18,873
def test_no_gamma():
    """Check that the native gamma table is used when none is configured."""
    win = visual.Window([600, 600], autoLog=False)
    assert win.useNativeGamma == True
    win.close()
    # Same expectation when a monitor name is given that has no calibration.
    win = visual.Window([600, 600], monitor='blaah', autoLog=False)
    assert win.useNativeGamma == True
    win.close()
[ "def", "test_no_gamma", "(", ")", ":", "win", "=", "visual", ".", "Window", "(", "[", "600", ",", "600", "]", ",", "autoLog", "=", "False", ")", "assert", "(", "win", ".", "useNativeGamma", "==", "True", ")", "win", ".", "close", "(", ")", "win", "=", "visual", ".", "Window", "(", "[", "600", ",", "600", "]", ",", "monitor", "=", "'blaah'", ",", "autoLog", "=", "False", ")", "assert", "(", "win", ".", "useNativeGamma", "==", "True", ")", "win", ".", "close", "(", ")" ]
check that no gamma is used if not passed .
train
false
18,875
def _ProcessRetryParametersNode(node, cron):
    """Copy a <retry-parameters> child of ``node`` onto ``cron``.

    When the element is absent, ``cron.retry_parameters`` is set to
    None; otherwise each known tag found is copied onto a fresh
    _RetryParameters instance (dashes become underscores).
    """
    params_node = xml_parser_utils.GetChild(node, 'retry-parameters')
    if params_node is None:
        cron.retry_parameters = None
        return
    params = _RetryParameters()
    cron.retry_parameters = params
    for tag in _RETRY_PARAMETER_TAGS:
        child = xml_parser_utils.GetChild(params_node, tag)
        if child is not None:
            attr_name = tag.replace('-', '_')
            setattr(params, attr_name,
                    xml_parser_utils.GetChildNodeText(params_node, tag))
[ "def", "_ProcessRetryParametersNode", "(", "node", ",", "cron", ")", ":", "retry_parameters_node", "=", "xml_parser_utils", ".", "GetChild", "(", "node", ",", "'retry-parameters'", ")", "if", "(", "retry_parameters_node", "is", "None", ")", ":", "cron", ".", "retry_parameters", "=", "None", "return", "retry_parameters", "=", "_RetryParameters", "(", ")", "cron", ".", "retry_parameters", "=", "retry_parameters", "for", "tag", "in", "_RETRY_PARAMETER_TAGS", ":", "if", "(", "xml_parser_utils", ".", "GetChild", "(", "retry_parameters_node", ",", "tag", ")", "is", "not", "None", ")", ":", "setattr", "(", "retry_parameters", ",", "tag", ".", "replace", "(", "'-'", ",", "'_'", ")", ",", "xml_parser_utils", ".", "GetChildNodeText", "(", "retry_parameters_node", ",", "tag", ")", ")" ]
converts <retry-parameters> in node to cron .
train
false
18,876
def literal_symbol(literal):
    """Return the symbol contained in a boolean literal.

    ``True``/``False`` pass through unchanged; a symbol is returned
    as-is; a negation is unwrapped recursively.  Anything else raises
    ValueError.
    """
    if literal is True or literal is False:
        return literal
    try:
        if literal.is_Symbol:
            return literal
        if literal.is_Not:
            # Unwrap the negation and recurse on its argument.
            return literal_symbol(literal.args[0])
        raise ValueError
    except (AttributeError, ValueError):
        raise ValueError('Argument must be a boolean literal.')
[ "def", "literal_symbol", "(", "literal", ")", ":", "if", "(", "(", "literal", "is", "True", ")", "or", "(", "literal", "is", "False", ")", ")", ":", "return", "literal", "try", ":", "if", "literal", ".", "is_Symbol", ":", "return", "literal", "if", "literal", ".", "is_Not", ":", "return", "literal_symbol", "(", "literal", ".", "args", "[", "0", "]", ")", "else", ":", "raise", "ValueError", "except", "(", "AttributeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "'Argument must be a boolean literal.'", ")" ]
the symbol in this literal .
train
false
18,877
@pytest.fixture
def cookiejar_and_cache(stubs):
    """Fixture registering fake cookie jars and a fake cache in objreg."""
    jar = QNetworkCookieJar()
    ram_jar = cookies.RAMCookieJar()
    cache = stubs.FakeNetworkCache()
    registered = [('cookie-jar', jar), ('ram-cookie-jar', ram_jar),
                  ('cache', cache)]
    for reg_name, obj in registered:
        objreg.register(reg_name, obj)
    yield
    # Tear down in registration order so the registry is left clean.
    for reg_name, _ in registered:
        objreg.delete(reg_name)
[ "@", "pytest", ".", "fixture", "def", "cookiejar_and_cache", "(", "stubs", ")", ":", "jar", "=", "QNetworkCookieJar", "(", ")", "ram_jar", "=", "cookies", ".", "RAMCookieJar", "(", ")", "cache", "=", "stubs", ".", "FakeNetworkCache", "(", ")", "objreg", ".", "register", "(", "'cookie-jar'", ",", "jar", ")", "objreg", ".", "register", "(", "'ram-cookie-jar'", ",", "ram_jar", ")", "objreg", ".", "register", "(", "'cache'", ",", "cache", ")", "(", "yield", ")", "objreg", ".", "delete", "(", "'cookie-jar'", ")", "objreg", ".", "delete", "(", "'ram-cookie-jar'", ")", "objreg", ".", "delete", "(", "'cache'", ")" ]
fixture providing a fake cookie jar and cache .
train
false
18,879
def _make_grid(dim=(11, 4)): (x, y) = (range(dim[0]), range(dim[1])) p = np.array([[[s, i] for s in x] for i in y], dtype=np.float32) p[:, 1::2, 1] += 0.5 p = np.reshape(p, ((-1), 2), 'F') x_scale = (1.0 / (np.amax(p[:, 0]) - np.amin(p[:, 0]))) y_scale = (1.0 / (np.amax(p[:, 1]) - np.amin(p[:, 1]))) p *= (x_scale, (x_scale / 0.5)) return p
[ "def", "_make_grid", "(", "dim", "=", "(", "11", ",", "4", ")", ")", ":", "(", "x", ",", "y", ")", "=", "(", "range", "(", "dim", "[", "0", "]", ")", ",", "range", "(", "dim", "[", "1", "]", ")", ")", "p", "=", "np", ".", "array", "(", "[", "[", "[", "s", ",", "i", "]", "for", "s", "in", "x", "]", "for", "i", "in", "y", "]", ",", "dtype", "=", "np", ".", "float32", ")", "p", "[", ":", ",", "1", ":", ":", "2", ",", "1", "]", "+=", "0.5", "p", "=", "np", ".", "reshape", "(", "p", ",", "(", "(", "-", "1", ")", ",", "2", ")", ",", "'F'", ")", "x_scale", "=", "(", "1.0", "/", "(", "np", ".", "amax", "(", "p", "[", ":", ",", "0", "]", ")", "-", "np", ".", "amin", "(", "p", "[", ":", ",", "0", "]", ")", ")", ")", "y_scale", "=", "(", "1.0", "/", "(", "np", ".", "amax", "(", "p", "[", ":", ",", "1", "]", ")", "-", "np", ".", "amin", "(", "p", "[", ":", ",", "1", "]", ")", ")", ")", "p", "*=", "(", "x_scale", ",", "(", "x_scale", "/", "0.5", ")", ")", "return", "p" ]
this function generates the structure for an asymmetrical circle grid domain .
train
false
18,881
def pure2mixed(num_actions, action):
    """Convert a pure action into its mixed-action (one-hot) form.

    Returns a length-``num_actions`` vector with a single 1 at index
    ``action`` and zeros elsewhere.
    """
    profile = np.zeros(num_actions)
    profile[action] = 1
    return profile
[ "def", "pure2mixed", "(", "num_actions", ",", "action", ")", ":", "mixed_action", "=", "np", ".", "zeros", "(", "num_actions", ")", "mixed_action", "[", "action", "]", "=", "1", "return", "mixed_action" ]
convert a pure action to the corresponding mixed action .
train
true