Dataset columns:
id_within_dataset: int64, values 1 to 55.5k
snippet: string, lengths 19 to 14.2k
tokens: list of tokens, lengths 6 to 1.63k
nl: string, lengths 6 to 352
split_within_dataset: string, 1 distinct value
is_duplicated: bool, 2 classes
12,058
def write_embeddings_to_disk(config, model, sess): row_vocab_path = (config.input_base_path + '/row_vocab.txt') row_embedding_output_path = (config.output_base_path + '/row_embedding.tsv') print 'Writing row embeddings to:', row_embedding_output_path sys.stdout.flush() write_embedding_tensor_to_disk(row_vocab_path, row_embedding_output_path, sess, model.row_embedding) col_vocab_path = (config.input_base_path + '/col_vocab.txt') col_embedding_output_path = (config.output_base_path + '/col_embedding.tsv') print 'Writing column embeddings to:', col_embedding_output_path sys.stdout.flush() write_embedding_tensor_to_disk(col_vocab_path, col_embedding_output_path, sess, model.col_embedding)
[ "def", "write_embeddings_to_disk", "(", "config", ",", "model", ",", "sess", ")", ":", "row_vocab_path", "=", "(", "config", ".", "input_base_path", "+", "'/row_vocab.txt'", ")", "row_embedding_output_path", "=", "(", "config", ".", "output_base_path", "+", "'/row_embedding.tsv'", ")", "print", "'Writing row embeddings to:'", ",", "row_embedding_output_path", "sys", ".", "stdout", ".", "flush", "(", ")", "write_embedding_tensor_to_disk", "(", "row_vocab_path", ",", "row_embedding_output_path", ",", "sess", ",", "model", ".", "row_embedding", ")", "col_vocab_path", "=", "(", "config", ".", "input_base_path", "+", "'/col_vocab.txt'", ")", "col_embedding_output_path", "=", "(", "config", ".", "output_base_path", "+", "'/col_embedding.tsv'", ")", "print", "'Writing column embeddings to:'", ",", "col_embedding_output_path", "sys", ".", "stdout", ".", "flush", "(", ")", "write_embedding_tensor_to_disk", "(", "col_vocab_path", ",", "col_embedding_output_path", ",", "sess", ",", "model", ".", "col_embedding", ")" ]
writes row and column embeddings to disk .
train
false
12,059
def dup_isolate_all_roots(f, K, eps=None, inf=None, sup=None, fast=False): if ((not K.is_ZZ) and (not K.is_QQ)): raise DomainError(('isolation of real and complex roots is not supported over %s' % K)) (_, factors) = dup_sqf_list(f, K) if (len(factors) == 1): ((f, k),) = factors (real_part, complex_part) = dup_isolate_all_roots_sqf(f, K, eps=eps, inf=inf, sup=sup, fast=fast) real_part = [((a, b), k) for (a, b) in real_part] complex_part = [((a, b), k) for (a, b) in complex_part] return (real_part, complex_part) else: raise NotImplementedError('only trivial square-free polynomials are supported')
[ "def", "dup_isolate_all_roots", "(", "f", ",", "K", ",", "eps", "=", "None", ",", "inf", "=", "None", ",", "sup", "=", "None", ",", "fast", "=", "False", ")", ":", "if", "(", "(", "not", "K", ".", "is_ZZ", ")", "and", "(", "not", "K", ".", "is_QQ", ")", ")", ":", "raise", "DomainError", "(", "(", "'isolation of real and complex roots is not supported over %s'", "%", "K", ")", ")", "(", "_", ",", "factors", ")", "=", "dup_sqf_list", "(", "f", ",", "K", ")", "if", "(", "len", "(", "factors", ")", "==", "1", ")", ":", "(", "(", "f", ",", "k", ")", ",", ")", "=", "factors", "(", "real_part", ",", "complex_part", ")", "=", "dup_isolate_all_roots_sqf", "(", "f", ",", "K", ",", "eps", "=", "eps", ",", "inf", "=", "inf", ",", "sup", "=", "sup", ",", "fast", "=", "fast", ")", "real_part", "=", "[", "(", "(", "a", ",", "b", ")", ",", "k", ")", "for", "(", "a", ",", "b", ")", "in", "real_part", "]", "complex_part", "=", "[", "(", "(", "a", ",", "b", ")", ",", "k", ")", "for", "(", "a", ",", "b", ")", "in", "complex_part", "]", "return", "(", "real_part", ",", "complex_part", ")", "else", ":", "raise", "NotImplementedError", "(", "'only trivial square-free polynomials are supported'", ")" ]
isolate real and complex roots of a non-square-free polynomial f .
train
false
12,060
def list_msg_types(package, include_depends): types = roslib.resources.list_package_resources(package, include_depends, 'msg', _msg_filter) return [x[:(- len(EXT))] for x in types]
[ "def", "list_msg_types", "(", "package", ",", "include_depends", ")", ":", "types", "=", "roslib", ".", "resources", ".", "list_package_resources", "(", "package", ",", "include_depends", ",", "'msg'", ",", "_msg_filter", ")", "return", "[", "x", "[", ":", "(", "-", "len", "(", "EXT", ")", ")", "]", "for", "x", "in", "types", "]" ]
list all messages in the specified package .
train
false
12,061
def make_pre_authed_env(env, method=None, path=None, agent='Swift', query_string=None, swift_source=None): newenv = {} for name in ('eventlet.posthooks', 'HTTP_USER_AGENT', 'HTTP_HOST', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_USER', 'REQUEST_METHOD', 'SCRIPT_NAME', 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'swift.cache', 'swift.source', 'swift.trans_id'): if (name in env): newenv[name] = env[name] if method: newenv['REQUEST_METHOD'] = method if path: newenv['PATH_INFO'] = path newenv['SCRIPT_NAME'] = '' if (query_string is not None): newenv['QUERY_STRING'] = query_string if agent: newenv['HTTP_USER_AGENT'] = (agent % {'orig': env.get('HTTP_USER_AGENT', '')}).strip() elif ((agent == '') and ('HTTP_USER_AGENT' in newenv)): del newenv['HTTP_USER_AGENT'] if swift_source: newenv['swift.source'] = swift_source newenv['swift.authorize'] = (lambda req: None) newenv['swift.authorize_override'] = True newenv['REMOTE_USER'] = '.wsgi.pre_authed' newenv['wsgi.input'] = StringIO('') if ('SCRIPT_NAME' not in newenv): newenv['SCRIPT_NAME'] = '' return newenv
[ "def", "make_pre_authed_env", "(", "env", ",", "method", "=", "None", ",", "path", "=", "None", ",", "agent", "=", "'Swift'", ",", "query_string", "=", "None", ",", "swift_source", "=", "None", ")", ":", "newenv", "=", "{", "}", "for", "name", "in", "(", "'eventlet.posthooks'", ",", "'HTTP_USER_AGENT'", ",", "'HTTP_HOST'", ",", "'PATH_INFO'", ",", "'QUERY_STRING'", ",", "'REMOTE_USER'", ",", "'REQUEST_METHOD'", ",", "'SCRIPT_NAME'", ",", "'SERVER_NAME'", ",", "'SERVER_PORT'", ",", "'SERVER_PROTOCOL'", ",", "'swift.cache'", ",", "'swift.source'", ",", "'swift.trans_id'", ")", ":", "if", "(", "name", "in", "env", ")", ":", "newenv", "[", "name", "]", "=", "env", "[", "name", "]", "if", "method", ":", "newenv", "[", "'REQUEST_METHOD'", "]", "=", "method", "if", "path", ":", "newenv", "[", "'PATH_INFO'", "]", "=", "path", "newenv", "[", "'SCRIPT_NAME'", "]", "=", "''", "if", "(", "query_string", "is", "not", "None", ")", ":", "newenv", "[", "'QUERY_STRING'", "]", "=", "query_string", "if", "agent", ":", "newenv", "[", "'HTTP_USER_AGENT'", "]", "=", "(", "agent", "%", "{", "'orig'", ":", "env", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "''", ")", "}", ")", ".", "strip", "(", ")", "elif", "(", "(", "agent", "==", "''", ")", "and", "(", "'HTTP_USER_AGENT'", "in", "newenv", ")", ")", ":", "del", "newenv", "[", "'HTTP_USER_AGENT'", "]", "if", "swift_source", ":", "newenv", "[", "'swift.source'", "]", "=", "swift_source", "newenv", "[", "'swift.authorize'", "]", "=", "(", "lambda", "req", ":", "None", ")", "newenv", "[", "'swift.authorize_override'", "]", "=", "True", "newenv", "[", "'REMOTE_USER'", "]", "=", "'.wsgi.pre_authed'", "newenv", "[", "'wsgi.input'", "]", "=", "StringIO", "(", "''", ")", "if", "(", "'SCRIPT_NAME'", "not", "in", "newenv", ")", ":", "newenv", "[", "'SCRIPT_NAME'", "]", "=", "''", "return", "newenv" ]
returns a new fresh wsgi environment with escalated privileges to do backend checks .
train
false
12,062
def _get_or_create_identifiers(node): (doi, metadata) = _build_ezid_metadata(node) client = EzidClient(settings.EZID_USERNAME, settings.EZID_PASSWORD) try: resp = client.create_identifier(doi, metadata) return dict(([each.strip('/') for each in pair.strip().split(':')] for pair in resp['success'].split('|'))) except HTTPError as error: if ('identifier already exists' not in error.message.lower()): raise resp = client.get_identifier(doi) doi = resp['success'] suffix = doi.strip(settings.DOI_NAMESPACE) return {'doi': doi.replace('doi:', ''), 'ark': '{0}{1}'.format(settings.ARK_NAMESPACE.replace('ark:', ''), suffix)}
[ "def", "_get_or_create_identifiers", "(", "node", ")", ":", "(", "doi", ",", "metadata", ")", "=", "_build_ezid_metadata", "(", "node", ")", "client", "=", "EzidClient", "(", "settings", ".", "EZID_USERNAME", ",", "settings", ".", "EZID_PASSWORD", ")", "try", ":", "resp", "=", "client", ".", "create_identifier", "(", "doi", ",", "metadata", ")", "return", "dict", "(", "(", "[", "each", ".", "strip", "(", "'/'", ")", "for", "each", "in", "pair", ".", "strip", "(", ")", ".", "split", "(", "':'", ")", "]", "for", "pair", "in", "resp", "[", "'success'", "]", ".", "split", "(", "'|'", ")", ")", ")", "except", "HTTPError", "as", "error", ":", "if", "(", "'identifier already exists'", "not", "in", "error", ".", "message", ".", "lower", "(", ")", ")", ":", "raise", "resp", "=", "client", ".", "get_identifier", "(", "doi", ")", "doi", "=", "resp", "[", "'success'", "]", "suffix", "=", "doi", ".", "strip", "(", "settings", ".", "DOI_NAMESPACE", ")", "return", "{", "'doi'", ":", "doi", ".", "replace", "(", "'doi:'", ",", "''", ")", ",", "'ark'", ":", "'{0}{1}'", ".", "format", "(", "settings", ".", "ARK_NAMESPACE", ".", "replace", "(", "'ark:'", ",", "''", ")", ",", "suffix", ")", "}" ]
note: arks include a leading slash .
train
false
12,063
def income_source(): return s3_rest_controller()
[ "def", "income_source", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
income sources: restful crud controller .
train
false
12,066
def is_current_user_super_admin(): return users.is_current_user_admin()
[ "def", "is_current_user_super_admin", "(", ")", ":", "return", "users", ".", "is_current_user_admin", "(", ")" ]
checks whether the current user owns this app .
train
false
12,067
@login_required @require_http_methods(['GET', 'POST']) def edit_avatar(request): try: user_profile = Profile.objects.get(user=request.user) except Profile.DoesNotExist: user_profile = Profile.objects.create(user=request.user) if (request.method == 'POST'): old_avatar_path = None if (user_profile.avatar and os.path.isfile(user_profile.avatar.path)): old_avatar_path = user_profile.avatar.path form = AvatarForm(request.POST, request.FILES, instance=user_profile) if form.is_valid(): if old_avatar_path: os.unlink(old_avatar_path) user_profile = form.save() content = _create_image_thumbnail(user_profile.avatar.path, settings.AVATAR_SIZE, pad=True) name = (user_profile.avatar.name + '.png') user_profile.avatar.delete() user_profile.avatar.save(name, content, save=True) return HttpResponseRedirect(reverse('users.edit_my_profile')) else: form = AvatarForm(instance=user_profile) return render(request, 'users/edit_avatar.html', {'form': form, 'profile': user_profile})
[ "@", "login_required", "@", "require_http_methods", "(", "[", "'GET'", ",", "'POST'", "]", ")", "def", "edit_avatar", "(", "request", ")", ":", "try", ":", "user_profile", "=", "Profile", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ")", "except", "Profile", ".", "DoesNotExist", ":", "user_profile", "=", "Profile", ".", "objects", ".", "create", "(", "user", "=", "request", ".", "user", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "old_avatar_path", "=", "None", "if", "(", "user_profile", ".", "avatar", "and", "os", ".", "path", ".", "isfile", "(", "user_profile", ".", "avatar", ".", "path", ")", ")", ":", "old_avatar_path", "=", "user_profile", ".", "avatar", ".", "path", "form", "=", "AvatarForm", "(", "request", ".", "POST", ",", "request", ".", "FILES", ",", "instance", "=", "user_profile", ")", "if", "form", ".", "is_valid", "(", ")", ":", "if", "old_avatar_path", ":", "os", ".", "unlink", "(", "old_avatar_path", ")", "user_profile", "=", "form", ".", "save", "(", ")", "content", "=", "_create_image_thumbnail", "(", "user_profile", ".", "avatar", ".", "path", ",", "settings", ".", "AVATAR_SIZE", ",", "pad", "=", "True", ")", "name", "=", "(", "user_profile", ".", "avatar", ".", "name", "+", "'.png'", ")", "user_profile", ".", "avatar", ".", "delete", "(", ")", "user_profile", ".", "avatar", ".", "save", "(", "name", ",", "content", ",", "save", "=", "True", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'users.edit_my_profile'", ")", ")", "else", ":", "form", "=", "AvatarForm", "(", "instance", "=", "user_profile", ")", "return", "render", "(", "request", ",", "'users/edit_avatar.html'", ",", "{", "'form'", ":", "form", ",", "'profile'", ":", "user_profile", "}", ")" ]
edit group avatar .
train
false
12,068
def get_suspended(): try: ref = (ct.SSEQ_CQ_REF_URL % (ct.P_TYPE['http'], ct.DOMAINS['sse'])) clt = Client((rv.SUSPENDED_URL % (ct.P_TYPE['http'], ct.DOMAINS['sseq'], ct.PAGES['ssecq'], _random(5), _random())), ref=ref, cookie=rv.MAR_SH_COOKIESTR) lines = clt.gvalue() lines = (lines.decode('utf-8') if ct.PY3 else lines) lines = lines[19:(-1)] lines = json.loads(lines) df = pd.DataFrame(lines['result'], columns=rv.TERMINATED_T_COLS) df.columns = rv.TERMINATED_COLS return df except Exception as er: print str(er)
[ "def", "get_suspended", "(", ")", ":", "try", ":", "ref", "=", "(", "ct", ".", "SSEQ_CQ_REF_URL", "%", "(", "ct", ".", "P_TYPE", "[", "'http'", "]", ",", "ct", ".", "DOMAINS", "[", "'sse'", "]", ")", ")", "clt", "=", "Client", "(", "(", "rv", ".", "SUSPENDED_URL", "%", "(", "ct", ".", "P_TYPE", "[", "'http'", "]", ",", "ct", ".", "DOMAINS", "[", "'sseq'", "]", ",", "ct", ".", "PAGES", "[", "'ssecq'", "]", ",", "_random", "(", "5", ")", ",", "_random", "(", ")", ")", ")", ",", "ref", "=", "ref", ",", "cookie", "=", "rv", ".", "MAR_SH_COOKIESTR", ")", "lines", "=", "clt", ".", "gvalue", "(", ")", "lines", "=", "(", "lines", ".", "decode", "(", "'utf-8'", ")", "if", "ct", ".", "PY3", "else", "lines", ")", "lines", "=", "lines", "[", "19", ":", "(", "-", "1", ")", "]", "lines", "=", "json", ".", "loads", "(", "lines", ")", "df", "=", "pd", ".", "DataFrame", "(", "lines", "[", "'result'", "]", ",", "columns", "=", "rv", ".", "TERMINATED_T_COLS", ")", "df", ".", "columns", "=", "rv", ".", "TERMINATED_COLS", "return", "df", "except", "Exception", "as", "er", ":", "print", "str", "(", "er", ")" ]
return dataframe code : stock code name : stock name odate : listing date tdate : delisting date .
train
false
12,070
def _check_shape(param, param_shape, name): param = np.array(param) if (param.shape != param_shape): raise ValueError(("The parameter '%s' should have the shape of %s, but got %s" % (name, param_shape, param.shape)))
[ "def", "_check_shape", "(", "param", ",", "param_shape", ",", "name", ")", ":", "param", "=", "np", ".", "array", "(", "param", ")", "if", "(", "param", ".", "shape", "!=", "param_shape", ")", ":", "raise", "ValueError", "(", "(", "\"The parameter '%s' should have the shape of %s, but got %s\"", "%", "(", "name", ",", "param_shape", ",", "param", ".", "shape", ")", ")", ")" ]
validate the shape of the input parameter param .
train
false
12,071
def supports_color(): plat = sys.platform supported_platform = ((plat != 'Pocket PC') and ((plat != 'win32') or ('ANSICON' in os.environ))) is_a_tty = (hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()) if ((not supported_platform) or (not is_a_tty)): return False return True
[ "def", "supports_color", "(", ")", ":", "plat", "=", "sys", ".", "platform", "supported_platform", "=", "(", "(", "plat", "!=", "'Pocket PC'", ")", "and", "(", "(", "plat", "!=", "'win32'", ")", "or", "(", "'ANSICON'", "in", "os", ".", "environ", ")", ")", ")", "is_a_tty", "=", "(", "hasattr", "(", "sys", ".", "stdout", ",", "'isatty'", ")", "and", "sys", ".", "stdout", ".", "isatty", "(", ")", ")", "if", "(", "(", "not", "supported_platform", ")", "or", "(", "not", "is_a_tty", ")", ")", ":", "return", "False", "return", "True" ]
returns true if the running systems terminal supports color .
train
true
12,072
def run_package(package, tox_env): curr_dir = os.getcwd() package_dir = os.path.join(PROJECT_ROOT, package) try: os.chdir(package_dir) return_code = subprocess.call(['tox', '-e', tox_env]) return (return_code == 0) finally: os.chdir(curr_dir)
[ "def", "run_package", "(", "package", ",", "tox_env", ")", ":", "curr_dir", "=", "os", ".", "getcwd", "(", ")", "package_dir", "=", "os", ".", "path", ".", "join", "(", "PROJECT_ROOT", ",", "package", ")", "try", ":", "os", ".", "chdir", "(", "package_dir", ")", "return_code", "=", "subprocess", ".", "call", "(", "[", "'tox'", ",", "'-e'", ",", "tox_env", "]", ")", "return", "(", "return_code", "==", "0", ")", "finally", ":", "os", ".", "chdir", "(", "curr_dir", ")" ]
run tox environment for a given package .
train
false
12,073
def skipIfPyPy(message): from unittest import skipIf from .platform import is_pypy return skipIf(is_pypy(), message)
[ "def", "skipIfPyPy", "(", "message", ")", ":", "from", "unittest", "import", "skipIf", "from", ".", "platform", "import", "is_pypy", "return", "skipIf", "(", "is_pypy", "(", ")", ",", "message", ")" ]
unittest decorator to skip a test for pypy .
train
false
12,076
def build_postprocessors(md_instance, **kwargs): postprocessors = odict.OrderedDict() postprocessors[u'raw_html'] = RawHtmlPostprocessor(md_instance) postprocessors[u'amp_substitute'] = AndSubstitutePostprocessor() postprocessors[u'unescape'] = UnescapePostprocessor() return postprocessors
[ "def", "build_postprocessors", "(", "md_instance", ",", "**", "kwargs", ")", ":", "postprocessors", "=", "odict", ".", "OrderedDict", "(", ")", "postprocessors", "[", "u'raw_html'", "]", "=", "RawHtmlPostprocessor", "(", "md_instance", ")", "postprocessors", "[", "u'amp_substitute'", "]", "=", "AndSubstitutePostprocessor", "(", ")", "postprocessors", "[", "u'unescape'", "]", "=", "UnescapePostprocessor", "(", ")", "return", "postprocessors" ]
build the default postprocessors for markdown .
train
false
12,078
def _get_size_windows(): res = None try: from ctypes import create_string_buffer, windll h = windll.kernel32.GetStdHandle((-12)) csbi = create_string_buffer(22) res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) except: return None if res: data = struct.unpack('hhhhHhhhhhh', csbi.raw) cols = ((data[7] - data[5]) + 1) rows = ((data[8] - data[6]) + 1) return (cols, rows) else: return None
[ "def", "_get_size_windows", "(", ")", ":", "res", "=", "None", "try", ":", "from", "ctypes", "import", "create_string_buffer", ",", "windll", "h", "=", "windll", ".", "kernel32", ".", "GetStdHandle", "(", "(", "-", "12", ")", ")", "csbi", "=", "create_string_buffer", "(", "22", ")", "res", "=", "windll", ".", "kernel32", ".", "GetConsoleScreenBufferInfo", "(", "h", ",", "csbi", ")", "except", ":", "return", "None", "if", "res", ":", "data", "=", "struct", ".", "unpack", "(", "'hhhhHhhhhhh'", ",", "csbi", ".", "raw", ")", "cols", "=", "(", "(", "data", "[", "7", "]", "-", "data", "[", "5", "]", ")", "+", "1", ")", "rows", "=", "(", "(", "data", "[", "8", "]", "-", "data", "[", "6", "]", ")", "+", "1", ")", "return", "(", "cols", ",", "rows", ")", "else", ":", "return", "None" ]
attempt to discover the dimensions of a terminal window .
train
false
12,079
def credentials(*args, **kwargs): logging.warn(('The authomatic.credentials function is deprecated and will be removed in version 0.1.0! ' + 'Use the "credentials" method of the "Authomatic" class instead.')) return global_authomatic_instance.credentials(*args, **kwargs)
[ "def", "credentials", "(", "*", "args", ",", "**", "kwargs", ")", ":", "logging", ".", "warn", "(", "(", "'The authomatic.credentials function is deprecated and will be removed in version 0.1.0! '", "+", "'Use the \"credentials\" method of the \"Authomatic\" class instead.'", ")", ")", "return", "global_authomatic_instance", ".", "credentials", "(", "*", "args", ",", "**", "kwargs", ")" ]
retrieves credentials .
train
false
12,080
def make_template_info(filename, root_dirs): return Template(filename, [filename[(len(d) + 1):] for d in root_dirs if filename.startswith(d)])
[ "def", "make_template_info", "(", "filename", ",", "root_dirs", ")", ":", "return", "Template", "(", "filename", ",", "[", "filename", "[", "(", "len", "(", "d", ")", "+", "1", ")", ":", "]", "for", "d", "in", "root_dirs", "if", "filename", ".", "startswith", "(", "d", ")", "]", ")" ]
creates a template object for a filename .
train
false
12,081
def delete_bridge_dev(dev): if device_exists(dev): try: utils.execute('ip', 'link', 'set', dev, 'down', run_as_root=True) utils.execute('brctl', 'delbr', dev, run_as_root=True) except processutils.ProcessExecutionError: with excutils.save_and_reraise_exception(): LOG.error(_LE("Failed removing bridge device: '%s'"), dev)
[ "def", "delete_bridge_dev", "(", "dev", ")", ":", "if", "device_exists", "(", "dev", ")", ":", "try", ":", "utils", ".", "execute", "(", "'ip'", ",", "'link'", ",", "'set'", ",", "dev", ",", "'down'", ",", "run_as_root", "=", "True", ")", "utils", ".", "execute", "(", "'brctl'", ",", "'delbr'", ",", "dev", ",", "run_as_root", "=", "True", ")", "except", "processutils", ".", "ProcessExecutionError", ":", "with", "excutils", ".", "save_and_reraise_exception", "(", ")", ":", "LOG", ".", "error", "(", "_LE", "(", "\"Failed removing bridge device: '%s'\"", ")", ",", "dev", ")" ]
delete a network bridge .
train
false
12,084
@functools.lru_cache(maxsize=None) def no_style(): return make_style('nocolor')
[ "@", "functools", ".", "lru_cache", "(", "maxsize", "=", "None", ")", "def", "no_style", "(", ")", ":", "return", "make_style", "(", "'nocolor'", ")" ]
returns a style object that has no colors .
train
false
12,085
def is_ubuntu(): if (sys.platform.startswith('linux') and osp.isfile('/etc/lsb-release')): release_info = open('/etc/lsb-release').read() if ('Ubuntu' in release_info): return True else: return False else: return False
[ "def", "is_ubuntu", "(", ")", ":", "if", "(", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", "and", "osp", ".", "isfile", "(", "'/etc/lsb-release'", ")", ")", ":", "release_info", "=", "open", "(", "'/etc/lsb-release'", ")", ".", "read", "(", ")", "if", "(", "'Ubuntu'", "in", "release_info", ")", ":", "return", "True", "else", ":", "return", "False", "else", ":", "return", "False" ]
detect if we are running in an ubuntu-based distribution .
train
true
12,086
def set_context(context=None, font_scale=1, rc=None): context_object = plotting_context(context, font_scale, rc) mpl.rcParams.update(context_object)
[ "def", "set_context", "(", "context", "=", "None", ",", "font_scale", "=", "1", ",", "rc", "=", "None", ")", ":", "context_object", "=", "plotting_context", "(", "context", ",", "font_scale", ",", "rc", ")", "mpl", ".", "rcParams", ".", "update", "(", "context_object", ")" ]
set the plotting context parameters .
train
true
12,087
def gen_extractors(): return [klass() for klass in _ALL_CLASSES]
[ "def", "gen_extractors", "(", ")", ":", "return", "[", "klass", "(", ")", "for", "klass", "in", "_ALL_CLASSES", "]" ]
return a list of an instance of every supported extractor .
train
false
12,088
def index_in_children_list(module, xml=None): if hasattr(module, 'xml_attributes'): val = module.xml_attributes.get('index_in_children_list') if (val is not None): return int(val) return None if (xml is not None): create_xml_attributes(module, xml) return index_in_children_list(module) return None
[ "def", "index_in_children_list", "(", "module", ",", "xml", "=", "None", ")", ":", "if", "hasattr", "(", "module", ",", "'xml_attributes'", ")", ":", "val", "=", "module", ".", "xml_attributes", ".", "get", "(", "'index_in_children_list'", ")", "if", "(", "val", "is", "not", "None", ")", ":", "return", "int", "(", "val", ")", "return", "None", "if", "(", "xml", "is", "not", "None", ")", ":", "create_xml_attributes", "(", "module", ",", "xml", ")", "return", "index_in_children_list", "(", "module", ")", "return", "None" ]
get the index_in_children_list .
train
false
12,090
def create_in(dirname, schema, indexname=None): if (not indexname): indexname = _DEF_INDEX_NAME from whoosh.filedb.filestore import FileStorage storage = FileStorage(dirname) return storage.create_index(schema, indexname)
[ "def", "create_in", "(", "dirname", ",", "schema", ",", "indexname", "=", "None", ")", ":", "if", "(", "not", "indexname", ")", ":", "indexname", "=", "_DEF_INDEX_NAME", "from", "whoosh", ".", "filedb", ".", "filestore", "import", "FileStorage", "storage", "=", "FileStorage", "(", "dirname", ")", "return", "storage", ".", "create_index", "(", "schema", ",", "indexname", ")" ]
convenience function to create an index in a directory .
train
false
12,091
def guess_lag(x, y): if (len(x) != len(y)): return 0 diffs = [] indexes = range(len(x)) for i in indexes: if ((i + 1) not in indexes): continue diffs.append((y[(i + 1)] - y[i])) diffs = np.array(diffs) flex = x[(-1)] for i in indexes: if ((i + 1) not in indexes): continue if ((y[(i + 1)] - y[i]) > (diffs.mean() + diffs.std())): flex = x[i] break return flex
[ "def", "guess_lag", "(", "x", ",", "y", ")", ":", "if", "(", "len", "(", "x", ")", "!=", "len", "(", "y", ")", ")", ":", "return", "0", "diffs", "=", "[", "]", "indexes", "=", "range", "(", "len", "(", "x", ")", ")", "for", "i", "in", "indexes", ":", "if", "(", "(", "i", "+", "1", ")", "not", "in", "indexes", ")", ":", "continue", "diffs", ".", "append", "(", "(", "y", "[", "(", "i", "+", "1", ")", "]", "-", "y", "[", "i", "]", ")", ")", "diffs", "=", "np", ".", "array", "(", "diffs", ")", "flex", "=", "x", "[", "(", "-", "1", ")", "]", "for", "i", "in", "indexes", ":", "if", "(", "(", "i", "+", "1", ")", "not", "in", "indexes", ")", ":", "continue", "if", "(", "(", "y", "[", "(", "i", "+", "1", ")", "]", "-", "y", "[", "i", "]", ")", ">", "(", "diffs", ".", "mean", "(", ")", "+", "diffs", ".", "std", "(", ")", ")", ")", ":", "flex", "=", "x", "[", "i", "]", "break", "return", "flex" ]
given two axes returns a guess of the lag point .
train
false
12,093
def _resource_exists(context, data_dict): model = _get_or_bust(context, 'model') res_id = _get_or_bust(data_dict, 'resource_id') if (not model.Resource.get(res_id)): return False resources_sql = sqlalchemy.text(u'SELECT 1 FROM "_table_metadata"\n WHERE name = :id AND alias_of IS NULL') results = db._get_engine(data_dict).execute(resources_sql, id=res_id) return (results.rowcount > 0)
[ "def", "_resource_exists", "(", "context", ",", "data_dict", ")", ":", "model", "=", "_get_or_bust", "(", "context", ",", "'model'", ")", "res_id", "=", "_get_or_bust", "(", "data_dict", ",", "'resource_id'", ")", "if", "(", "not", "model", ".", "Resource", ".", "get", "(", "res_id", ")", ")", ":", "return", "False", "resources_sql", "=", "sqlalchemy", ".", "text", "(", "u'SELECT 1 FROM \"_table_metadata\"\\n WHERE name = :id AND alias_of IS NULL'", ")", "results", "=", "db", ".", "_get_engine", "(", "data_dict", ")", ".", "execute", "(", "resources_sql", ",", "id", "=", "res_id", ")", "return", "(", "results", ".", "rowcount", ">", "0", ")" ]
returns true if the resource exists in ckan and in the datastore .
train
false
12,094
@simple_decorator def check_localsite_admin(view_func): def _check(request, local_site_name=None, *args, **kwargs): if local_site_name: if (not request.local_site): raise Http404 local_site = request.local_site if (not local_site.is_mutable_by(request.user)): response = render_to_response('permission_denied.html', RequestContext(request)) response.status_code = 403 return response else: local_site = None return view_func(request, local_site_name=local_site_name, local_site=local_site, *args, **kwargs) return _check
[ "@", "simple_decorator", "def", "check_localsite_admin", "(", "view_func", ")", ":", "def", "_check", "(", "request", ",", "local_site_name", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "local_site_name", ":", "if", "(", "not", "request", ".", "local_site", ")", ":", "raise", "Http404", "local_site", "=", "request", ".", "local_site", "if", "(", "not", "local_site", ".", "is_mutable_by", "(", "request", ".", "user", ")", ")", ":", "response", "=", "render_to_response", "(", "'permission_denied.html'", ",", "RequestContext", "(", "request", ")", ")", "response", ".", "status_code", "=", "403", "return", "response", "else", ":", "local_site", "=", "None", "return", "view_func", "(", "request", ",", "local_site_name", "=", "local_site_name", ",", "local_site", "=", "local_site", ",", "*", "args", ",", "**", "kwargs", ")", "return", "_check" ]
checks if a user is an admin on a local site .
train
false
12,096
def generate_config_finder(get_config_paths=get_config_paths): config_paths = get_config_paths() return (lambda *args: _find_config_files(config_paths, *args))
[ "def", "generate_config_finder", "(", "get_config_paths", "=", "get_config_paths", ")", ":", "config_paths", "=", "get_config_paths", "(", ")", "return", "(", "lambda", "*", "args", ":", "_find_config_files", "(", "config_paths", ",", "*", "args", ")", ")" ]
generate find_config_files function this function will find .
train
false
12,098
def setAttributesByArguments(argumentNames, arguments, elementNode): for (argumentIndex, argument) in enumerate(arguments): elementNode.attributes[argumentNames[argumentIndex]] = argument
[ "def", "setAttributesByArguments", "(", "argumentNames", ",", "arguments", ",", "elementNode", ")", ":", "for", "(", "argumentIndex", ",", "argument", ")", "in", "enumerate", "(", "arguments", ")", ":", "elementNode", ".", "attributes", "[", "argumentNames", "[", "argumentIndex", "]", "]", "=", "argument" ]
set the attribute dictionary to the arguments .
train
false
12,099
@pytest.mark.models def test_issue587(EN): matcher = Matcher(EN.vocab) content = u'a b; c' matcher.add(entity_key=u'1', label=u'TEST', attrs={}, specs=[[{ORTH: u'a'}, {ORTH: u'b'}]]) matcher(EN(content)) matcher.add(entity_key=u'2', label=u'TEST', attrs={}, specs=[[{ORTH: u'a'}, {ORTH: u'b'}, {IS_PUNCT: True}, {ORTH: u'c'}]]) matcher(EN(content)) matcher.add(entity_key=u'3', label=u'TEST', attrs={}, specs=[[{ORTH: u'a'}, {ORTH: u'b'}, {IS_PUNCT: True}, {ORTH: u'd'}]]) matcher(EN(content))
[ "@", "pytest", ".", "mark", ".", "models", "def", "test_issue587", "(", "EN", ")", ":", "matcher", "=", "Matcher", "(", "EN", ".", "vocab", ")", "content", "=", "u'a b; c'", "matcher", ".", "add", "(", "entity_key", "=", "u'1'", ",", "label", "=", "u'TEST'", ",", "attrs", "=", "{", "}", ",", "specs", "=", "[", "[", "{", "ORTH", ":", "u'a'", "}", ",", "{", "ORTH", ":", "u'b'", "}", "]", "]", ")", "matcher", "(", "EN", "(", "content", ")", ")", "matcher", ".", "add", "(", "entity_key", "=", "u'2'", ",", "label", "=", "u'TEST'", ",", "attrs", "=", "{", "}", ",", "specs", "=", "[", "[", "{", "ORTH", ":", "u'a'", "}", ",", "{", "ORTH", ":", "u'b'", "}", ",", "{", "IS_PUNCT", ":", "True", "}", ",", "{", "ORTH", ":", "u'c'", "}", "]", "]", ")", "matcher", "(", "EN", "(", "content", ")", ")", "matcher", ".", "add", "(", "entity_key", "=", "u'3'", ",", "label", "=", "u'TEST'", ",", "attrs", "=", "{", "}", ",", "specs", "=", "[", "[", "{", "ORTH", ":", "u'a'", "}", ",", "{", "ORTH", ":", "u'b'", "}", ",", "{", "IS_PUNCT", ":", "True", "}", ",", "{", "ORTH", ":", "u'd'", "}", "]", "]", ")", "matcher", "(", "EN", "(", "content", ")", ")" ]
test that matcher doesnt segfault on particular input .
train
false
12,100
def upload_chunked(server, docs, chunksize=1000, preprocess=None): start = 0 for chunk in grouper(docs, chunksize): end = (start + len(chunk)) logger.info(('uploading documents %i-%i' % (start, (end - 1)))) if (preprocess is not None): pchunk = [] for doc in chunk: doc['tokens'] = preprocess(doc['text']) del doc['text'] pchunk.append(doc) chunk = pchunk server.buffer(chunk) start = end
[ "def", "upload_chunked", "(", "server", ",", "docs", ",", "chunksize", "=", "1000", ",", "preprocess", "=", "None", ")", ":", "start", "=", "0", "for", "chunk", "in", "grouper", "(", "docs", ",", "chunksize", ")", ":", "end", "=", "(", "start", "+", "len", "(", "chunk", ")", ")", "logger", ".", "info", "(", "(", "'uploading documents %i-%i'", "%", "(", "start", ",", "(", "end", "-", "1", ")", ")", ")", ")", "if", "(", "preprocess", "is", "not", "None", ")", ":", "pchunk", "=", "[", "]", "for", "doc", "in", "chunk", ":", "doc", "[", "'tokens'", "]", "=", "preprocess", "(", "doc", "[", "'text'", "]", ")", "del", "doc", "[", "'text'", "]", "pchunk", ".", "append", "(", "doc", ")", "chunk", "=", "pchunk", "server", ".", "buffer", "(", "chunk", ")", "start", "=", "end" ]
memory-friendly upload of documents to a simserver .
train
false
12,102
def gdal_release_date(date=False): from datetime import date as date_type rel = _version_info('RELEASE_DATE') (yy, mm, dd) = map(int, (rel[0:4], rel[4:6], rel[6:8])) d = date_type(yy, mm, dd) if date: return d else: return d.strftime('%Y/%m/%d')
[ "def", "gdal_release_date", "(", "date", "=", "False", ")", ":", "from", "datetime", "import", "date", "as", "date_type", "rel", "=", "_version_info", "(", "'RELEASE_DATE'", ")", "(", "yy", ",", "mm", ",", "dd", ")", "=", "map", "(", "int", ",", "(", "rel", "[", "0", ":", "4", "]", ",", "rel", "[", "4", ":", "6", "]", ",", "rel", "[", "6", ":", "8", "]", ")", ")", "d", "=", "date_type", "(", "yy", ",", "mm", ",", "dd", ")", "if", "date", ":", "return", "d", "else", ":", "return", "d", ".", "strftime", "(", "'%Y/%m/%d'", ")" ]
returns the release date in a string format .
train
false
12,104
@task def clean_env(): env = path(options.virtualenv.dir) if env.exists(): env.rmtree()
[ "@", "task", "def", "clean_env", "(", ")", ":", "env", "=", "path", "(", "options", ".", "virtualenv", ".", "dir", ")", "if", "env", ".", "exists", "(", ")", ":", "env", ".", "rmtree", "(", ")" ]
deletes the virtual environment .
train
false
12,105
def _turn_sigterm_into_systemexit(): try: import signal except ImportError: return def handle_term(signo, frame): raise SystemExit signal.signal(signal.SIGTERM, handle_term)
[ "def", "_turn_sigterm_into_systemexit", "(", ")", ":", "try", ":", "import", "signal", "except", "ImportError", ":", "return", "def", "handle_term", "(", "signo", ",", "frame", ")", ":", "raise", "SystemExit", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "handle_term", ")" ]
attempts to turn a sigterm exception into a systemexit exception .
train
true
12,106
def S_within_hac(x, d, nlags=1, groupidx=1): r = np.zeros(d.shape[1]) r[0] = 1 r[groupidx] = 1 weights = weights_bartlett(nlags) return aggregate_cov(x, d, r=r, weights=weights)
[ "def", "S_within_hac", "(", "x", ",", "d", ",", "nlags", "=", "1", ",", "groupidx", "=", "1", ")", ":", "r", "=", "np", ".", "zeros", "(", "d", ".", "shape", "[", "1", "]", ")", "r", "[", "0", "]", "=", "1", "r", "[", "groupidx", "]", "=", "1", "weights", "=", "weights_bartlett", "(", "nlags", ")", "return", "aggregate_cov", "(", "x", ",", "d", ",", "r", "=", "r", ",", "weights", "=", "weights", ")" ]
hac for observations within a categorical group .
train
false
12,108
def cov2corr(cov, return_std=False): cov = np.asanyarray(cov) std_ = np.sqrt(np.diag(cov)) corr = (cov / np.outer(std_, std_)) if return_std: return (corr, std_) else: return corr
[ "def", "cov2corr", "(", "cov", ",", "return_std", "=", "False", ")", ":", "cov", "=", "np", ".", "asanyarray", "(", "cov", ")", "std_", "=", "np", ".", "sqrt", "(", "np", ".", "diag", "(", "cov", ")", ")", "corr", "=", "(", "cov", "/", "np", ".", "outer", "(", "std_", ",", "std_", ")", ")", "if", "return_std", ":", "return", "(", "corr", ",", "std_", ")", "else", ":", "return", "corr" ]
convert covariance matrix to correlation matrix parameters cov : array_like .
train
false
12,109
@require_context @pick_context_manager_reader_allow_async def instance_get_all_by_filters_sort(context, filters, limit=None, marker=None, columns_to_join=None, sort_keys=None, sort_dirs=None): if (limit == 0): return [] (sort_keys, sort_dirs) = process_sort_params(sort_keys, sort_dirs, default_dir='desc') if (columns_to_join is None): columns_to_join_new = ['info_cache', 'security_groups'] manual_joins = ['metadata', 'system_metadata'] else: (manual_joins, columns_to_join_new) = _manual_join_columns(columns_to_join) query_prefix = context.session.query(models.Instance) for column in columns_to_join_new: if ('extra.' in column): query_prefix = query_prefix.options(undefer(column)) else: query_prefix = query_prefix.options(joinedload(column)) filters = filters.copy() if ('changes-since' in filters): changes_since = timeutils.normalize_time(filters['changes-since']) query_prefix = query_prefix.filter((models.Instance.updated_at >= changes_since)) if ('deleted' in filters): deleted = filters.pop('deleted') if deleted: if filters.pop('soft_deleted', True): delete = or_((models.Instance.deleted == models.Instance.id), (models.Instance.vm_state == vm_states.SOFT_DELETED)) query_prefix = query_prefix.filter(delete) else: query_prefix = query_prefix.filter((models.Instance.deleted == models.Instance.id)) else: query_prefix = query_prefix.filter_by(deleted=0) if (not filters.pop('soft_deleted', False)): not_soft_deleted = or_((models.Instance.vm_state != vm_states.SOFT_DELETED), (models.Instance.vm_state == null())) query_prefix = query_prefix.filter(not_soft_deleted) if ('cleaned' in filters): cleaned = (1 if filters.pop('cleaned') else 0) query_prefix = query_prefix.filter((models.Instance.cleaned == cleaned)) if ('tags' in filters): tags = filters.pop('tags') first_tag = tags.pop(0) query_prefix = query_prefix.join(models.Instance.tags) query_prefix = query_prefix.filter((models.Tag.tag == first_tag)) for tag in tags: tag_alias = aliased(models.Tag) query_prefix = query_prefix.join(tag_alias, models.Instance.tags) query_prefix = query_prefix.filter((tag_alias.tag == tag)) if ('tags-any' in filters): tags = filters.pop('tags-any') tag_alias = aliased(models.Tag) query_prefix = query_prefix.join(tag_alias, models.Instance.tags) query_prefix = query_prefix.filter(tag_alias.tag.in_(tags)) if ('not-tags' in filters): tags = filters.pop('not-tags') first_tag = tags.pop(0) subq = query_prefix.session.query(models.Tag.resource_id) subq = subq.join(models.Instance.tags) subq = subq.filter((models.Tag.tag == first_tag)) for tag in tags: tag_alias = aliased(models.Tag) subq = subq.join(tag_alias, models.Instance.tags) subq = subq.filter((tag_alias.tag == tag)) query_prefix = query_prefix.filter((~ models.Instance.uuid.in_(subq))) if ('not-tags-any' in filters): tags = filters.pop('not-tags-any') query_prefix = query_prefix.filter((~ models.Instance.tags.any(models.Tag.tag.in_(tags)))) if (not context.is_admin): if context.project_id: filters['project_id'] = context.project_id else: filters['user_id'] = context.user_id exact_match_filter_names = ['project_id', 'user_id', 'image_ref', 'vm_state', 'instance_type_id', 'uuid', 'metadata', 'host', 'task_state', 'system_metadata'] query_prefix = _exact_instance_filter(query_prefix, filters, exact_match_filter_names) if (query_prefix is None): return [] query_prefix = _regex_instance_filter(query_prefix, filters) query_prefix = _tag_instance_filter(context, query_prefix, filters) if (marker is not None): try: marker = 
_instance_get_by_uuid(context.elevated(read_deleted='yes'), marker) except exception.InstanceNotFound: raise exception.MarkerNotFound(marker=marker) try: query_prefix = sqlalchemyutils.paginate_query(query_prefix, models.Instance, limit, sort_keys, marker=marker, sort_dirs=sort_dirs) except db_exc.InvalidSortKey: raise exception.InvalidSortKey() return _instances_fill_metadata(context, query_prefix.all(), manual_joins)
[ "@", "require_context", "@", "pick_context_manager_reader_allow_async", "def", "instance_get_all_by_filters_sort", "(", "context", ",", "filters", ",", "limit", "=", "None", ",", "marker", "=", "None", ",", "columns_to_join", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ")", ":", "if", "(", "limit", "==", "0", ")", ":", "return", "[", "]", "(", "sort_keys", ",", "sort_dirs", ")", "=", "process_sort_params", "(", "sort_keys", ",", "sort_dirs", ",", "default_dir", "=", "'desc'", ")", "if", "(", "columns_to_join", "is", "None", ")", ":", "columns_to_join_new", "=", "[", "'info_cache'", ",", "'security_groups'", "]", "manual_joins", "=", "[", "'metadata'", ",", "'system_metadata'", "]", "else", ":", "(", "manual_joins", ",", "columns_to_join_new", ")", "=", "_manual_join_columns", "(", "columns_to_join", ")", "query_prefix", "=", "context", ".", "session", ".", "query", "(", "models", ".", "Instance", ")", "for", "column", "in", "columns_to_join_new", ":", "if", "(", "'extra.'", "in", "column", ")", ":", "query_prefix", "=", "query_prefix", ".", "options", "(", "undefer", "(", "column", ")", ")", "else", ":", "query_prefix", "=", "query_prefix", ".", "options", "(", "joinedload", "(", "column", ")", ")", "filters", "=", "filters", ".", "copy", "(", ")", "if", "(", "'changes-since'", "in", "filters", ")", ":", "changes_since", "=", "timeutils", ".", "normalize_time", "(", "filters", "[", "'changes-since'", "]", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "models", ".", "Instance", ".", "updated_at", ">=", "changes_since", ")", ")", "if", "(", "'deleted'", "in", "filters", ")", ":", "deleted", "=", "filters", ".", "pop", "(", "'deleted'", ")", "if", "deleted", ":", "if", "filters", ".", "pop", "(", "'soft_deleted'", ",", "True", ")", ":", "delete", "=", "or_", "(", "(", "models", ".", "Instance", ".", "deleted", "==", "models", ".", "Instance", ".", "id", ")", ",", "(", "models", ".", "Instance", ".", "vm_state", "==", "vm_states", ".", "SOFT_DELETED", ")", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "delete", ")", "else", ":", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "models", ".", "Instance", ".", "deleted", "==", "models", ".", "Instance", ".", "id", ")", ")", "else", ":", "query_prefix", "=", "query_prefix", ".", "filter_by", "(", "deleted", "=", "0", ")", "if", "(", "not", "filters", ".", "pop", "(", "'soft_deleted'", ",", "False", ")", ")", ":", "not_soft_deleted", "=", "or_", "(", "(", "models", ".", "Instance", ".", "vm_state", "!=", "vm_states", ".", "SOFT_DELETED", ")", ",", "(", "models", ".", "Instance", ".", "vm_state", "==", "null", "(", ")", ")", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "not_soft_deleted", ")", "if", "(", "'cleaned'", "in", "filters", ")", ":", "cleaned", "=", "(", "1", "if", "filters", ".", "pop", "(", "'cleaned'", ")", "else", "0", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "models", ".", "Instance", ".", "cleaned", "==", "cleaned", ")", ")", "if", "(", "'tags'", "in", "filters", ")", ":", "tags", "=", "filters", ".", "pop", "(", "'tags'", ")", "first_tag", "=", "tags", ".", "pop", "(", "0", ")", "query_prefix", "=", "query_prefix", ".", "join", "(", "models", ".", "Instance", ".", "tags", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "models", ".", "Tag", ".", "tag", "==", "first_tag", ")", ")", "for", "tag", "in", "tags", ":", "tag_alias", "=", "aliased", "(", "models", ".", "Tag", ")", "query_prefix", 
"=", "query_prefix", ".", "join", "(", "tag_alias", ",", "models", ".", "Instance", ".", "tags", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "tag_alias", ".", "tag", "==", "tag", ")", ")", "if", "(", "'tags-any'", "in", "filters", ")", ":", "tags", "=", "filters", ".", "pop", "(", "'tags-any'", ")", "tag_alias", "=", "aliased", "(", "models", ".", "Tag", ")", "query_prefix", "=", "query_prefix", ".", "join", "(", "tag_alias", ",", "models", ".", "Instance", ".", "tags", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "tag_alias", ".", "tag", ".", "in_", "(", "tags", ")", ")", "if", "(", "'not-tags'", "in", "filters", ")", ":", "tags", "=", "filters", ".", "pop", "(", "'not-tags'", ")", "first_tag", "=", "tags", ".", "pop", "(", "0", ")", "subq", "=", "query_prefix", ".", "session", ".", "query", "(", "models", ".", "Tag", ".", "resource_id", ")", "subq", "=", "subq", ".", "join", "(", "models", ".", "Instance", ".", "tags", ")", "subq", "=", "subq", ".", "filter", "(", "(", "models", ".", "Tag", ".", "tag", "==", "first_tag", ")", ")", "for", "tag", "in", "tags", ":", "tag_alias", "=", "aliased", "(", "models", ".", "Tag", ")", "subq", "=", "subq", ".", "join", "(", "tag_alias", ",", "models", ".", "Instance", ".", "tags", ")", "subq", "=", "subq", ".", "filter", "(", "(", "tag_alias", ".", "tag", "==", "tag", ")", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "~", "models", ".", "Instance", ".", "uuid", ".", "in_", "(", "subq", ")", ")", ")", "if", "(", "'not-tags-any'", "in", "filters", ")", ":", "tags", "=", "filters", ".", "pop", "(", "'not-tags-any'", ")", "query_prefix", "=", "query_prefix", ".", "filter", "(", "(", "~", "models", ".", "Instance", ".", "tags", ".", "any", "(", "models", ".", "Tag", ".", "tag", ".", "in_", "(", "tags", ")", ")", ")", ")", "if", "(", "not", "context", ".", "is_admin", ")", ":", "if", "context", ".", "project_id", ":", "filters", "[", "'project_id'", "]", "=", "context", ".", "project_id", "else", ":", "filters", "[", "'user_id'", "]", "=", "context", ".", "user_id", "exact_match_filter_names", "=", "[", "'project_id'", ",", "'user_id'", ",", "'image_ref'", ",", "'vm_state'", ",", "'instance_type_id'", ",", "'uuid'", ",", "'metadata'", ",", "'host'", ",", "'task_state'", ",", "'system_metadata'", "]", "query_prefix", "=", "_exact_instance_filter", "(", "query_prefix", ",", "filters", ",", "exact_match_filter_names", ")", "if", "(", "query_prefix", "is", "None", ")", ":", "return", "[", "]", "query_prefix", "=", "_regex_instance_filter", "(", "query_prefix", ",", "filters", ")", "query_prefix", "=", "_tag_instance_filter", "(", "context", ",", "query_prefix", ",", "filters", ")", "if", "(", "marker", "is", "not", "None", ")", ":", "try", ":", "marker", "=", "_instance_get_by_uuid", "(", "context", ".", "elevated", "(", "read_deleted", "=", "'yes'", ")", ",", "marker", ")", "except", "exception", ".", "InstanceNotFound", ":", "raise", "exception", ".", "MarkerNotFound", "(", "marker", "=", "marker", ")", "try", ":", "query_prefix", "=", "sqlalchemyutils", ".", "paginate_query", "(", "query_prefix", ",", "models", ".", "Instance", ",", "limit", ",", "sort_keys", ",", "marker", "=", "marker", ",", "sort_dirs", "=", "sort_dirs", ")", "except", "db_exc", ".", "InvalidSortKey", ":", "raise", "exception", ".", "InvalidSortKey", "(", ")", "return", "_instances_fill_metadata", "(", "context", ",", "query_prefix", ".", "all", "(", ")", ",", "manual_joins", ")" ]
get all instances that match all filters sorted by multiple keys .
train
false
12,110
@pytest.mark.cmd @pytest.mark.django_db def test_refresh_scores_recalculate(capfd, admin, member): member.delete() admin.delete() call_command('refresh_scores') (out, err) = capfd.readouterr() assert ('Score for user system set to' in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_refresh_scores_recalculate", "(", "capfd", ",", "admin", ",", "member", ")", ":", "member", ".", "delete", "(", ")", "admin", ".", "delete", "(", ")", "call_command", "(", "'refresh_scores'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'Score for user system set to'", "in", "out", ")" ]
recalculate scores .
train
false
12,112
def resource_create(context, data_dict): model = context['model'] user = context['user'] package_id = _get_or_bust(data_dict, 'package_id') if (not data_dict.get('url')): data_dict['url'] = '' pkg_dict = _get_action('package_show')(dict(context, return_type='dict'), {'id': package_id}) _check_access('resource_create', context, data_dict) for plugin in plugins.PluginImplementations(plugins.IResourceController): plugin.before_create(context, data_dict) if ('resources' not in pkg_dict): pkg_dict['resources'] = [] upload = uploader.get_resource_uploader(data_dict) if ('mimetype' not in data_dict): if hasattr(upload, 'mimetype'): data_dict['mimetype'] = upload.mimetype if ('size' not in data_dict): if hasattr(upload, 'filesize'): data_dict['size'] = upload.filesize pkg_dict['resources'].append(data_dict) try: context['defer_commit'] = True context['use_cache'] = False _get_action('package_update')(context, pkg_dict) context.pop('defer_commit') except ValidationError as e: errors = e.error_dict['resources'][(-1)] raise ValidationError(errors) upload.upload(context['package'].resources[(-1)].id, uploader.get_max_resource_size()) model.repo.commit() updated_pkg_dict = _get_action('package_show')(context, {'id': package_id}) resource = updated_pkg_dict['resources'][(-1)] logic.get_action('resource_create_default_resource_views')({'model': context['model'], 'user': context['user'], 'ignore_auth': True}, {'resource': resource, 'package': updated_pkg_dict}) for plugin in plugins.PluginImplementations(plugins.IResourceController): plugin.after_create(context, resource) return resource
[ "def", "resource_create", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "user", "=", "context", "[", "'user'", "]", "package_id", "=", "_get_or_bust", "(", "data_dict", ",", "'package_id'", ")", "if", "(", "not", "data_dict", ".", "get", "(", "'url'", ")", ")", ":", "data_dict", "[", "'url'", "]", "=", "''", "pkg_dict", "=", "_get_action", "(", "'package_show'", ")", "(", "dict", "(", "context", ",", "return_type", "=", "'dict'", ")", ",", "{", "'id'", ":", "package_id", "}", ")", "_check_access", "(", "'resource_create'", ",", "context", ",", "data_dict", ")", "for", "plugin", "in", "plugins", ".", "PluginImplementations", "(", "plugins", ".", "IResourceController", ")", ":", "plugin", ".", "before_create", "(", "context", ",", "data_dict", ")", "if", "(", "'resources'", "not", "in", "pkg_dict", ")", ":", "pkg_dict", "[", "'resources'", "]", "=", "[", "]", "upload", "=", "uploader", ".", "get_resource_uploader", "(", "data_dict", ")", "if", "(", "'mimetype'", "not", "in", "data_dict", ")", ":", "if", "hasattr", "(", "upload", ",", "'mimetype'", ")", ":", "data_dict", "[", "'mimetype'", "]", "=", "upload", ".", "mimetype", "if", "(", "'size'", "not", "in", "data_dict", ")", ":", "if", "hasattr", "(", "upload", ",", "'filesize'", ")", ":", "data_dict", "[", "'size'", "]", "=", "upload", ".", "filesize", "pkg_dict", "[", "'resources'", "]", ".", "append", "(", "data_dict", ")", "try", ":", "context", "[", "'defer_commit'", "]", "=", "True", "context", "[", "'use_cache'", "]", "=", "False", "_get_action", "(", "'package_update'", ")", "(", "context", ",", "pkg_dict", ")", "context", ".", "pop", "(", "'defer_commit'", ")", "except", "ValidationError", "as", "e", ":", "errors", "=", "e", ".", "error_dict", "[", "'resources'", "]", "[", "(", "-", "1", ")", "]", "raise", "ValidationError", "(", "errors", ")", "upload", ".", "upload", "(", "context", "[", "'package'", "]", ".", "resources", "[", "(", "-", "1", ")", "]", ".", "id", ",", "uploader", ".", "get_max_resource_size", "(", ")", ")", "model", ".", "repo", ".", "commit", "(", ")", "updated_pkg_dict", "=", "_get_action", "(", "'package_show'", ")", "(", "context", ",", "{", "'id'", ":", "package_id", "}", ")", "resource", "=", "updated_pkg_dict", "[", "'resources'", "]", "[", "(", "-", "1", ")", "]", "logic", ".", "get_action", "(", "'resource_create_default_resource_views'", ")", "(", "{", "'model'", ":", "context", "[", "'model'", "]", ",", "'user'", ":", "context", "[", "'user'", "]", ",", "'ignore_auth'", ":", "True", "}", ",", "{", "'resource'", ":", "resource", ",", "'package'", ":", "updated_pkg_dict", "}", ")", "for", "plugin", "in", "plugins", ".", "PluginImplementations", "(", "plugins", ".", "IResourceController", ")", ":", "plugin", ".", "after_create", "(", "context", ",", "resource", ")", "return", "resource" ]
create a resource via pcs command resource_id name for the resource resource_type resource type resource_options additional options for creating the resource cibfile use cibfile instead of the live cib for manipulation cli example: .
train
false
12,113
def _rewrite_url(url): (scheme, netloc, path, params, query, fragment) = urlparse(url) if (scheme != 'http'): return url if (':' in netloc): (host, port) = netloc.rsplit(':', 1) else: (host, port) = (netloc, str(80)) path = (path or '/') try: out = _reverse(host, port, path) except urlresolvers.NoReverseMatch as ex: LOGGER.error(("Encountered malformed URL '%s' when rewriting proxied page." % (url,))) return None if query: out = ((out + '?') + query) return out
[ "def", "_rewrite_url", "(", "url", ")", ":", "(", "scheme", ",", "netloc", ",", "path", ",", "params", ",", "query", ",", "fragment", ")", "=", "urlparse", "(", "url", ")", "if", "(", "scheme", "!=", "'http'", ")", ":", "return", "url", "if", "(", "':'", "in", "netloc", ")", ":", "(", "host", ",", "port", ")", "=", "netloc", ".", "rsplit", "(", "':'", ",", "1", ")", "else", ":", "(", "host", ",", "port", ")", "=", "(", "netloc", ",", "str", "(", "80", ")", ")", "path", "=", "(", "path", "or", "'/'", ")", "try", ":", "out", "=", "_reverse", "(", "host", ",", "port", ",", "path", ")", "except", "urlresolvers", ".", "NoReverseMatch", "as", "ex", ":", "LOGGER", ".", "error", "(", "(", "\"Encountered malformed URL '%s' when rewriting proxied page.\"", "%", "(", "url", ",", ")", ")", ")", "return", "None", "if", "query", ":", "out", "=", "(", "(", "out", "+", "'?'", ")", "+", "query", ")", "return", "out" ]
used by _rewrite_links .
train
false
12,116
def sm_volume_update(context, volume_id, values): return IMPL.sm_volume_update(context, values)
[ "def", "sm_volume_update", "(", "context", ",", "volume_id", ",", "values", ")", ":", "return", "IMPL", ".", "sm_volume_update", "(", "context", ",", "values", ")" ]
update a child zone entry .
train
false
12,117
def reset_check_cache(): global _install_fine _install_fine = False
[ "def", "reset_check_cache", "(", ")", ":", "global", "_install_fine", "_install_fine", "=", "False" ]
reset the cached data of all checks .
train
false
12,118
def subscribe_filter_to_signals(settings): for sig in settings.get('I18N_FILTER_SIGNALS', []): sig.connect(filter_contents_translations)
[ "def", "subscribe_filter_to_signals", "(", "settings", ")", ":", "for", "sig", "in", "settings", ".", "get", "(", "'I18N_FILTER_SIGNALS'", ",", "[", "]", ")", ":", "sig", ".", "connect", "(", "filter_contents_translations", ")" ]
subscribe content filter to requested signals .
train
false
12,120
def delete_lifecycle_configuration(Bucket, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.delete_bucket_lifecycle(Bucket=Bucket) return {'deleted': True, 'name': Bucket} except ClientError as e: return {'deleted': False, 'error': __utils__['boto3.get_error'](e)}
[ "def", "delete_lifecycle_configuration", "(", "Bucket", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "delete_bucket_lifecycle", "(", "Bucket", "=", "Bucket", ")", "return", "{", "'deleted'", ":", "True", ",", "'name'", ":", "Bucket", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'deleted'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
delete the lifecycle configuration for the given bucket returns {deleted: true} if lifecycle was deleted and returns {deleted: false} if lifecycle was not deleted .
train
true
12,121
def zlib_decode(input, errors='strict'): assert (errors == 'strict') output = zlib.decompress(input) return (output, len(input))
[ "def", "zlib_decode", "(", "input", ",", "errors", "=", "'strict'", ")", ":", "assert", "(", "errors", "==", "'strict'", ")", "output", "=", "zlib", ".", "decompress", "(", "input", ")", "return", "(", "output", ",", "len", "(", "input", ")", ")" ]
decodes the object input and returns a tuple .
train
false
12,122
def stage_two(options, sess): media_options = get_media_options(options, sess) if (not media_options): return False old_options = media_options.copy() media_options.update({'jform[check_mime]': 0, 'jform[restrict_uploads]': 0, 'jform[upload_extensions]': add_item(media_options, 'jform[upload_extensions]', 'pht'), 'jform[image_extensions]': add_item(media_options, 'jform[image_extensions]', 'pht'), 'jform[upload_mime]': add_item(media_options, 'jform[upload_mime]', 'application/octet-stream')}) if (not set_media_options(options, sess, media_options)): return False image_path = media_options.get('jform[image_path]', 'images') return upload_file(options, sess, image_path)
[ "def", "stage_two", "(", "options", ",", "sess", ")", ":", "media_options", "=", "get_media_options", "(", "options", ",", "sess", ")", "if", "(", "not", "media_options", ")", ":", "return", "False", "old_options", "=", "media_options", ".", "copy", "(", ")", "media_options", ".", "update", "(", "{", "'jform[check_mime]'", ":", "0", ",", "'jform[restrict_uploads]'", ":", "0", ",", "'jform[upload_extensions]'", ":", "add_item", "(", "media_options", ",", "'jform[upload_extensions]'", ",", "'pht'", ")", ",", "'jform[image_extensions]'", ":", "add_item", "(", "media_options", ",", "'jform[image_extensions]'", ",", "'pht'", ")", ",", "'jform[upload_mime]'", ":", "add_item", "(", "media_options", ",", "'jform[upload_mime]'", ",", "'application/octet-stream'", ")", "}", ")", "if", "(", "not", "set_media_options", "(", "options", ",", "sess", ",", "media_options", ")", ")", ":", "return", "False", "image_path", "=", "media_options", ".", "get", "(", "'jform[image_path]'", ",", "'images'", ")", "return", "upload_file", "(", "options", ",", "sess", ",", "image_path", ")" ]
now that we are logged in to the admin area, loosen the media upload restrictions and upload the payload file .
train
false
12,123
def register_mime(id, mimetype): MIME[id.upper()] = mimetype
[ "def", "register_mime", "(", "id", ",", "mimetype", ")", ":", "MIME", "[", "id", ".", "upper", "(", ")", "]", "=", "mimetype" ]
registers an image mime type .
train
false
12,124
def test_latex_units(): t = table.Table([table.Column(name='date', data=['a', 'b']), table.Column(name='NUV exp.time', data=[1, 2])]) latexdict = copy.deepcopy(ascii.latexdicts['AA']) latexdict['units'] = {'NUV exp.time': 's'} out = StringIO() expected = '\\begin{table}{cc}\n\\tablehead{\\colhead{date} & \\colhead{NUV exp.time}\\\\ \\colhead{ } & \\colhead{s}}\n\\startdata\na & 1 \\\\\nb & 2\n\\enddata\n\\end{table}\n'.replace('\n', os.linesep) ascii.write(t, out, format='aastex', latexdict=latexdict) assert (out.getvalue() == expected) t['NUV exp.time'].unit = units.s t['date'].unit = units.yr out = StringIO() ascii.write(t, out, format='aastex', latexdict=ascii.latexdicts['AA']) assert (out.getvalue() == expected.replace('colhead{s}', 'colhead{$\\mathrm{s}$}').replace('colhead{ }', 'colhead{$\\mathrm{yr}$}'))
[ "def", "test_latex_units", "(", ")", ":", "t", "=", "table", ".", "Table", "(", "[", "table", ".", "Column", "(", "name", "=", "'date'", ",", "data", "=", "[", "'a'", ",", "'b'", "]", ")", ",", "table", ".", "Column", "(", "name", "=", "'NUV exp.time'", ",", "data", "=", "[", "1", ",", "2", "]", ")", "]", ")", "latexdict", "=", "copy", ".", "deepcopy", "(", "ascii", ".", "latexdicts", "[", "'AA'", "]", ")", "latexdict", "[", "'units'", "]", "=", "{", "'NUV exp.time'", ":", "'s'", "}", "out", "=", "StringIO", "(", ")", "expected", "=", "'\\\\begin{table}{cc}\\n\\\\tablehead{\\\\colhead{date} & \\\\colhead{NUV exp.time}\\\\\\\\ \\\\colhead{ } & \\\\colhead{s}}\\n\\\\startdata\\na & 1 \\\\\\\\\\nb & 2\\n\\\\enddata\\n\\\\end{table}\\n'", ".", "replace", "(", "'\\n'", ",", "os", ".", "linesep", ")", "ascii", ".", "write", "(", "t", ",", "out", ",", "format", "=", "'aastex'", ",", "latexdict", "=", "latexdict", ")", "assert", "(", "out", ".", "getvalue", "(", ")", "==", "expected", ")", "t", "[", "'NUV exp.time'", "]", ".", "unit", "=", "units", ".", "s", "t", "[", "'date'", "]", ".", "unit", "=", "units", ".", "yr", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "t", ",", "out", ",", "format", "=", "'aastex'", ",", "latexdict", "=", "ascii", ".", "latexdicts", "[", "'AA'", "]", ")", "assert", "(", "out", ".", "getvalue", "(", ")", "==", "expected", ".", "replace", "(", "'colhead{s}'", ",", "'colhead{$\\\\mathrm{s}$}'", ")", ".", "replace", "(", "'colhead{ }'", ",", "'colhead{$\\\\mathrm{yr}$}'", ")", ")" ]
check to make sure that latex and aastex writers attempt to fall back on the **unit** attribute of **column** if the supplied **latexdict** does not specify units .
train
false
12,125
def _get_log_keys(common_config): return dict(((k, v) for (k, v) in common_config.items() if (k in LOG_KEYS)))
[ "def", "_get_log_keys", "(", "common_config", ")", ":", "return", "dict", "(", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "common_config", ".", "items", "(", ")", "if", "(", "k", "in", "LOG_KEYS", ")", ")", ")" ]
return a common configuration copy with only log-related config left .
train
false
12,126
def expect_log_sticks(sticks): dig_sum = psi(np.sum(sticks, 0)) ElogW = (psi(sticks[0]) - dig_sum) Elog1_W = (psi(sticks[1]) - dig_sum) n = (len(sticks[0]) + 1) Elogsticks = np.zeros(n) Elogsticks[0:(n - 1)] = ElogW Elogsticks[1:] = (Elogsticks[1:] + np.cumsum(Elog1_W)) return Elogsticks
[ "def", "expect_log_sticks", "(", "sticks", ")", ":", "dig_sum", "=", "psi", "(", "np", ".", "sum", "(", "sticks", ",", "0", ")", ")", "ElogW", "=", "(", "psi", "(", "sticks", "[", "0", "]", ")", "-", "dig_sum", ")", "Elog1_W", "=", "(", "psi", "(", "sticks", "[", "1", "]", ")", "-", "dig_sum", ")", "n", "=", "(", "len", "(", "sticks", "[", "0", "]", ")", "+", "1", ")", "Elogsticks", "=", "np", ".", "zeros", "(", "n", ")", "Elogsticks", "[", "0", ":", "(", "n", "-", "1", ")", "]", "=", "ElogW", "Elogsticks", "[", "1", ":", "]", "=", "(", "Elogsticks", "[", "1", ":", "]", "+", "np", ".", "cumsum", "(", "Elog1_W", ")", ")", "return", "Elogsticks" ]
for stick-breaking hdp .
train
false
12,127
def DALS(s): return textwrap.dedent(s).lstrip()
[ "def", "DALS", "(", "s", ")", ":", "return", "textwrap", ".", "dedent", "(", "s", ")", ".", "lstrip", "(", ")" ]
dedent and left-strip .
train
false
12,128
def next_connection_packets(piter, linktype=1): first_ft = None for (ts, raw) in piter: ft = flowtuple_from_raw(raw, linktype) if (not first_ft): first_ft = ft (sip, dip, sport, dport, proto) = ft if (not ((first_ft == ft) or (first_ft == (dip, sip, dport, sport, proto)))): break (yield {'src': sip, 'dst': dip, 'sport': sport, 'dport': dport, 'raw': payload_from_raw(raw, linktype).encode('base64'), 'direction': (first_ft == ft)})
[ "def", "next_connection_packets", "(", "piter", ",", "linktype", "=", "1", ")", ":", "first_ft", "=", "None", "for", "(", "ts", ",", "raw", ")", "in", "piter", ":", "ft", "=", "flowtuple_from_raw", "(", "raw", ",", "linktype", ")", "if", "(", "not", "first_ft", ")", ":", "first_ft", "=", "ft", "(", "sip", ",", "dip", ",", "sport", ",", "dport", ",", "proto", ")", "=", "ft", "if", "(", "not", "(", "(", "first_ft", "==", "ft", ")", "or", "(", "first_ft", "==", "(", "dip", ",", "sip", ",", "dport", ",", "sport", ",", "proto", ")", ")", ")", ")", ":", "break", "(", "yield", "{", "'src'", ":", "sip", ",", "'dst'", ":", "dip", ",", "'sport'", ":", "sport", ",", "'dport'", ":", "dport", ",", "'raw'", ":", "payload_from_raw", "(", "raw", ",", "linktype", ")", ".", "encode", "(", "'base64'", ")", ",", "'direction'", ":", "(", "first_ft", "==", "ft", ")", "}", ")" ]
extract all packets belonging to the same flow from a pcap packet iterator .
train
false
12,129
def validate_memory_size(memory_value): memory_value = int(positive_integer(memory_value)) if (memory_value not in MEMORY_VALUES): raise ValueError(('Lambda Function memory size must be one of:\n %s' % ', '.join((str(mb) for mb in MEMORY_VALUES)))) return memory_value
[ "def", "validate_memory_size", "(", "memory_value", ")", ":", "memory_value", "=", "int", "(", "positive_integer", "(", "memory_value", ")", ")", "if", "(", "memory_value", "not", "in", "MEMORY_VALUES", ")", ":", "raise", "ValueError", "(", "(", "'Lambda Function memory size must be one of:\\n %s'", "%", "', '", ".", "join", "(", "(", "str", "(", "mb", ")", "for", "mb", "in", "MEMORY_VALUES", ")", ")", ")", ")", "return", "memory_value" ]
validate memory size for lambda function .
train
true
12,130
def _GetList(a_list): if (a_list is None): return [] else: return list(a_list)
[ "def", "_GetList", "(", "a_list", ")", ":", "if", "(", "a_list", "is", "None", ")", ":", "return", "[", "]", "else", ":", "return", "list", "(", "a_list", ")" ]
utility function that converts none to the empty list .
train
false
12,132
def get_metastore(): global _METASTORE_LOC_CACHE if (not _METASTORE_LOC_CACHE): thrift_uris = get_conf().get(_CNF_METASTORE_URIS) is_local = ((thrift_uris is None) or (thrift_uris == '')) if (not is_local): use_sasl = (str(get_conf().get(_CNF_METASTORE_SASL, 'false')).lower() == 'true') thrift_uri = thrift_uris.split(',')[0] host = socket.getfqdn() match = _THRIFT_URI_RE.match(thrift_uri) if (not match): LOG.error(('Cannot understand remote metastore uri "%s"' % thrift_uri)) else: (host, port) = match.groups() kerberos_principal = security_util.get_kerberos_principal(get_conf().get(_CNF_METASTORE_KERBEROS_PRINCIPAL, None), host) _METASTORE_LOC_CACHE = {'use_sasl': use_sasl, 'thrift_uri': thrift_uri, 'kerberos_principal': kerberos_principal} else: LOG.error('Hue requires a remote metastore configuration') return _METASTORE_LOC_CACHE
[ "def", "get_metastore", "(", ")", ":", "global", "_METASTORE_LOC_CACHE", "if", "(", "not", "_METASTORE_LOC_CACHE", ")", ":", "thrift_uris", "=", "get_conf", "(", ")", ".", "get", "(", "_CNF_METASTORE_URIS", ")", "is_local", "=", "(", "(", "thrift_uris", "is", "None", ")", "or", "(", "thrift_uris", "==", "''", ")", ")", "if", "(", "not", "is_local", ")", ":", "use_sasl", "=", "(", "str", "(", "get_conf", "(", ")", ".", "get", "(", "_CNF_METASTORE_SASL", ",", "'false'", ")", ")", ".", "lower", "(", ")", "==", "'true'", ")", "thrift_uri", "=", "thrift_uris", ".", "split", "(", "','", ")", "[", "0", "]", "host", "=", "socket", ".", "getfqdn", "(", ")", "match", "=", "_THRIFT_URI_RE", ".", "match", "(", "thrift_uri", ")", "if", "(", "not", "match", ")", ":", "LOG", ".", "error", "(", "(", "'Cannot understand remote metastore uri \"%s\"'", "%", "thrift_uri", ")", ")", "else", ":", "(", "host", ",", "port", ")", "=", "match", ".", "groups", "(", ")", "kerberos_principal", "=", "security_util", ".", "get_kerberos_principal", "(", "get_conf", "(", ")", ".", "get", "(", "_CNF_METASTORE_KERBEROS_PRINCIPAL", ",", "None", ")", ",", "host", ")", "_METASTORE_LOC_CACHE", "=", "{", "'use_sasl'", ":", "use_sasl", ",", "'thrift_uri'", ":", "thrift_uri", ",", "'kerberos_principal'", ":", "kerberos_principal", "}", "else", ":", "LOG", ".", "error", "(", "'Hue requires a remote metastore configuration'", ")", "return", "_METASTORE_LOC_CACHE" ]
get first metastore information from local hive-site .
train
false
12,136
def ShellQuote(value): return pipes.quote(SmartUnicode(value))
[ "def", "ShellQuote", "(", "value", ")", ":", "return", "pipes", ".", "quote", "(", "SmartUnicode", "(", "value", ")", ")" ]
escapes the string for safe use inside a shell command line .
train
false
12,137
def get_invoiced_qty_map(delivery_note): invoiced_qty_map = {} for (dn_detail, qty) in frappe.db.sql(u'select dn_detail, qty from `tabSales Invoice Item`\n DCTB DCTB where delivery_note=%s and docstatus=1', delivery_note): if (not invoiced_qty_map.get(dn_detail)): invoiced_qty_map[dn_detail] = 0 invoiced_qty_map[dn_detail] += qty return invoiced_qty_map
[ "def", "get_invoiced_qty_map", "(", "delivery_note", ")", ":", "invoiced_qty_map", "=", "{", "}", "for", "(", "dn_detail", ",", "qty", ")", "in", "frappe", ".", "db", ".", "sql", "(", "u'select dn_detail, qty from `tabSales Invoice Item`\\n DCTB DCTB where delivery_note=%s and docstatus=1'", ",", "delivery_note", ")", ":", "if", "(", "not", "invoiced_qty_map", ".", "get", "(", "dn_detail", ")", ")", ":", "invoiced_qty_map", "[", "dn_detail", "]", "=", "0", "invoiced_qty_map", "[", "dn_detail", "]", "+=", "qty", "return", "invoiced_qty_map" ]
returns a map: {dn_detail: invoiced_qty} .
train
false
12,138
def _RequireCryptoOrDie(): if (not HAS_CRYPTO): raise CryptoUnavailableError('No crypto library available')
[ "def", "_RequireCryptoOrDie", "(", ")", ":", "if", "(", "not", "HAS_CRYPTO", ")", ":", "raise", "CryptoUnavailableError", "(", "'No crypto library available'", ")" ]
ensure we have a crypto library .
train
false
12,139
def cone(individual, position, height, width): value = 0.0 for (x, p) in zip(individual, position): value += ((x - p) ** 2) return (height - (width * math.sqrt(value)))
[ "def", "cone", "(", "individual", ",", "position", ",", "height", ",", "width", ")", ":", "value", "=", "0.0", "for", "(", "x", ",", "p", ")", "in", "zip", "(", "individual", ",", "position", ")", ":", "value", "+=", "(", "(", "x", "-", "p", ")", "**", "2", ")", "return", "(", "height", "-", "(", "width", "*", "math", ".", "sqrt", "(", "value", ")", ")", ")" ]
the cone peak function to be used with scenario 2 and 3 .
train
false
12,140
@not_implemented_for('undirected') def transitive_closure(G): TC = nx.DiGraph() TC.add_nodes_from(G.nodes()) TC.add_edges_from(G.edges()) for v in G: TC.add_edges_from(((v, u) for u in nx.dfs_preorder_nodes(G, source=v) if (v != u))) return TC
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "transitive_closure", "(", "G", ")", ":", "TC", "=", "nx", ".", "DiGraph", "(", ")", "TC", ".", "add_nodes_from", "(", "G", ".", "nodes", "(", ")", ")", "TC", ".", "add_edges_from", "(", "G", ".", "edges", "(", ")", ")", "for", "v", "in", "G", ":", "TC", ".", "add_edges_from", "(", "(", "(", "v", ",", "u", ")", "for", "u", "in", "nx", ".", "dfs_preorder_nodes", "(", "G", ",", "source", "=", "v", ")", "if", "(", "v", "!=", "u", ")", ")", ")", "return", "TC" ]
computes the transitive closure of a directed graph .
train
false
12,142
@pytest.mark.skipif((not sys.platform.startswith('win')), reason='win only') def test_get_user(monkeypatch): from _pytest.tmpdir import get_user monkeypatch.delenv('USER', raising=False) monkeypatch.delenv('USERNAME', raising=False) assert (get_user() is None)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "(", "not", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ")", ",", "reason", "=", "'win only'", ")", "def", "test_get_user", "(", "monkeypatch", ")", ":", "from", "_pytest", ".", "tmpdir", "import", "get_user", "monkeypatch", ".", "delenv", "(", "'USER'", ",", "raising", "=", "False", ")", "monkeypatch", ".", "delenv", "(", "'USERNAME'", ",", "raising", "=", "False", ")", "assert", "(", "get_user", "(", ")", "is", "None", ")" ]
test that get_user() function works even if environment variables required by getpass module are missing from the environment on windows .
train
false
12,143
def sync_before(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): self._cell_data_sync() return f(self, *args, **kwargs) return wrapper
[ "def", "sync_before", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "self", ".", "_cell_data_sync", "(", ")", "return", "f", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
use as a decorator to wrap methods that use cell information to make sure they sync the latest information from the db periodically .
train
false
12,145
def read_cookie(cls, name): string_cookie = os.environ.get('HTTP_COOKIE', '') cls.cookie = Cookie.SimpleCookie() cls.cookie.load(string_cookie) value = None if cls.cookie.get(name): value = cls.cookie[name].value return value
[ "def", "read_cookie", "(", "cls", ",", "name", ")", ":", "string_cookie", "=", "os", ".", "environ", ".", "get", "(", "'HTTP_COOKIE'", ",", "''", ")", "cls", ".", "cookie", "=", "Cookie", ".", "SimpleCookie", "(", ")", "cls", ".", "cookie", ".", "load", "(", "string_cookie", ")", "value", "=", "None", "if", "cls", ".", "cookie", ".", "get", "(", "name", ")", ":", "value", "=", "cls", ".", "cookie", "[", "name", "]", ".", "value", "return", "value" ]
reads the value of the named cookie from the http_cookie environment variable .
train
false
12,147
def cross_covariance(y, z): return CrossCovariance()(y, z)
[ "def", "cross_covariance", "(", "y", ",", "z", ")", ":", "return", "CrossCovariance", "(", ")", "(", "y", ",", "z", ")" ]
computes the sum-squared cross-covariance penalty between y and z args: y : variable holding a matrix where the first dimension corresponds to the batches .
train
false
12,148
def set_data_value(datastore, path, data): if isinstance(path, six.string_types): path = '/'.split(path) return _proxy_cmd('set_data_value', datastore, path, data)
[ "def", "set_data_value", "(", "datastore", ",", "path", ",", "data", ")", ":", "if", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", ":", "path", "=", "'/'", ".", "split", "(", "path", ")", "return", "_proxy_cmd", "(", "'set_data_value'", ",", "datastore", ",", "path", ",", "data", ")" ]
set a data entry in a datastore .
train
true
12,149
def libvlc_free(ptr): f = (_Cfunctions.get('libvlc_free', None) or _Cfunction('libvlc_free', ((1,),), None, None, ctypes.c_void_p)) return f(ptr)
[ "def", "libvlc_free", "(", "ptr", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_free'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_free'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "ctypes", ".", "c_void_p", ")", ")", "return", "f", "(", "ptr", ")" ]
frees an heap allocation returned by a libvlc function .
train
true
12,150
def make_index(name, localname): return type(('BB%sIndex' % (name.capitalize(),)), (BBIndex,), dict(name=name, localname=localname))
[ "def", "make_index", "(", "name", ",", "localname", ")", ":", "return", "type", "(", "(", "'BB%sIndex'", "%", "(", "name", ".", "capitalize", "(", ")", ",", ")", ")", ",", "(", "BBIndex", ",", ")", ",", "dict", "(", "name", "=", "name", ",", "localname", "=", "localname", ")", ")" ]
return a new BBIndex subclass with the given name and localname .
train
true
12,151
def _fmadm_action_fmri(action, fmri): ret = {} fmadm = _check_fmadm() cmd = '{cmd} {action} {fmri}'.format(cmd=fmadm, action=action, fmri=fmri) res = __salt__['cmd.run_all'](cmd) retcode = res['retcode'] result = {} if (retcode != 0): result['Error'] = res['stderr'] else: result = True return result
[ "def", "_fmadm_action_fmri", "(", "action", ",", "fmri", ")", ":", "ret", "=", "{", "}", "fmadm", "=", "_check_fmadm", "(", ")", "cmd", "=", "'{cmd} {action} {fmri}'", ".", "format", "(", "cmd", "=", "fmadm", ",", "action", "=", "action", ",", "fmri", "=", "fmri", ")", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "retcode", "=", "res", "[", "'retcode'", "]", "result", "=", "{", "}", "if", "(", "retcode", "!=", "0", ")", ":", "result", "[", "'Error'", "]", "=", "res", "[", "'stderr'", "]", "else", ":", "result", "=", "True", "return", "result" ]
internal function for fmadm .
train
true
12,152
def install_unittest_module(package_dirpath, template_type): from_filepath = path.join(TEMPLATES_DIRPATH, ('%s.py' % template_type)) if (not path.exists(from_filepath)): raise UnsupportedTemplateTypeError to_filepath = path.join(package_dirpath, ('%s_unittest.py' % template_type)) shutil.copy(from_filepath, to_filepath) from_common_filepath = path.join(TEMPLATES_DIRPATH, 'scenario_package_common.py') to_common_filepath = path.join(package_dirpath, 'common.py') shutil.copy(from_common_filepath, to_common_filepath) os.mknod(path.join(package_dirpath, '__init__.py'))
[ "def", "install_unittest_module", "(", "package_dirpath", ",", "template_type", ")", ":", "from_filepath", "=", "path", ".", "join", "(", "TEMPLATES_DIRPATH", ",", "(", "'%s.py'", "%", "template_type", ")", ")", "if", "(", "not", "path", ".", "exists", "(", "from_filepath", ")", ")", ":", "raise", "UnsupportedTemplateTypeError", "to_filepath", "=", "path", ".", "join", "(", "package_dirpath", ",", "(", "'%s_unittest.py'", "%", "template_type", ")", ")", "shutil", ".", "copy", "(", "from_filepath", ",", "to_filepath", ")", "from_common_filepath", "=", "path", ".", "join", "(", "TEMPLATES_DIRPATH", ",", "'scenario_package_common.py'", ")", "to_common_filepath", "=", "path", ".", "join", "(", "package_dirpath", ",", "'common.py'", ")", "shutil", ".", "copy", "(", "from_common_filepath", ",", "to_common_filepath", ")", "os", ".", "mknod", "(", "path", ".", "join", "(", "package_dirpath", ",", "'__init__.py'", ")", ")" ]
install specified unittest template module to package_dirpath .
train
false
12,153
@decorators.memoize def _detect_conf(): if (__grains__['os_family'] == 'RedHat'): return '/boot/grub/grub.conf' return '/boot/grub/menu.lst'
[ "@", "decorators", ".", "memoize", "def", "_detect_conf", "(", ")", ":", "if", "(", "__grains__", "[", "'os_family'", "]", "==", "'RedHat'", ")", ":", "return", "'/boot/grub/grub.conf'", "return", "'/boot/grub/menu.lst'" ]
grub conf location differs depending on distro .
train
false
12,155
def fdr_correction(pvals, alpha=0.05, method='indep'): pvals = np.asarray(pvals) shape_init = pvals.shape pvals = pvals.ravel() pvals_sortind = np.argsort(pvals) pvals_sorted = pvals[pvals_sortind] sortrevind = pvals_sortind.argsort() if (method in ['i', 'indep', 'p', 'poscorr']): ecdffactor = _ecdf(pvals_sorted) elif (method in ['n', 'negcorr']): cm = np.sum((1.0 / np.arange(1, (len(pvals_sorted) + 1)))) ecdffactor = (_ecdf(pvals_sorted) / cm) else: raise ValueError("Method should be 'indep' and 'negcorr'") reject = (pvals_sorted < (ecdffactor * alpha)) if reject.any(): rejectmax = max(np.nonzero(reject)[0]) else: rejectmax = 0 reject[:rejectmax] = True pvals_corrected_raw = (pvals_sorted / ecdffactor) pvals_corrected = np.minimum.accumulate(pvals_corrected_raw[::(-1)])[::(-1)] pvals_corrected[(pvals_corrected > 1.0)] = 1.0 pvals_corrected = pvals_corrected[sortrevind].reshape(shape_init) reject = reject[sortrevind].reshape(shape_init) return (reject, pvals_corrected)
[ "def", "fdr_correction", "(", "pvals", ",", "alpha", "=", "0.05", ",", "method", "=", "'indep'", ")", ":", "pvals", "=", "np", ".", "asarray", "(", "pvals", ")", "shape_init", "=", "pvals", ".", "shape", "pvals", "=", "pvals", ".", "ravel", "(", ")", "pvals_sortind", "=", "np", ".", "argsort", "(", "pvals", ")", "pvals_sorted", "=", "pvals", "[", "pvals_sortind", "]", "sortrevind", "=", "pvals_sortind", ".", "argsort", "(", ")", "if", "(", "method", "in", "[", "'i'", ",", "'indep'", ",", "'p'", ",", "'poscorr'", "]", ")", ":", "ecdffactor", "=", "_ecdf", "(", "pvals_sorted", ")", "elif", "(", "method", "in", "[", "'n'", ",", "'negcorr'", "]", ")", ":", "cm", "=", "np", ".", "sum", "(", "(", "1.0", "/", "np", ".", "arange", "(", "1", ",", "(", "len", "(", "pvals_sorted", ")", "+", "1", ")", ")", ")", ")", "ecdffactor", "=", "(", "_ecdf", "(", "pvals_sorted", ")", "/", "cm", ")", "else", ":", "raise", "ValueError", "(", "\"Method should be 'indep' and 'negcorr'\"", ")", "reject", "=", "(", "pvals_sorted", "<", "(", "ecdffactor", "*", "alpha", ")", ")", "if", "reject", ".", "any", "(", ")", ":", "rejectmax", "=", "max", "(", "np", ".", "nonzero", "(", "reject", ")", "[", "0", "]", ")", "else", ":", "rejectmax", "=", "0", "reject", "[", ":", "rejectmax", "]", "=", "True", "pvals_corrected_raw", "=", "(", "pvals_sorted", "/", "ecdffactor", ")", "pvals_corrected", "=", "np", ".", "minimum", ".", "accumulate", "(", "pvals_corrected_raw", "[", ":", ":", "(", "-", "1", ")", "]", ")", "[", ":", ":", "(", "-", "1", ")", "]", "pvals_corrected", "[", "(", "pvals_corrected", ">", "1.0", ")", "]", "=", "1.0", "pvals_corrected", "=", "pvals_corrected", "[", "sortrevind", "]", ".", "reshape", "(", "shape_init", ")", "reject", "=", "reject", "[", "sortrevind", "]", ".", "reshape", "(", "shape_init", ")", "return", "(", "reject", ",", "pvals_corrected", ")" ]
adjust pvalues for multiple tests using the false discovery rate method .
train
true
12,156
def lambda_sum_largest(X, k): X = Expression.cast_to_const(X) if (X.size[0] != X.size[1]): raise ValueError('First argument must be a square matrix.') elif ((int(k) != k) or (k <= 0)): raise ValueError('Second argument must be a positive integer.') '\n S_k(X) denotes lambda_sum_largest(X, k)\n t >= k S_k(X - Z) + trace(Z), Z is PSD\n implies\n t >= ks + trace(Z)\n Z is PSD\n sI >= X - Z (PSD sense)\n which implies\n t >= ks + trace(Z) >= S_k(sI + Z) >= S_k(X)\n We use the fact that\n S_k(X) = sup_{sets of k orthonormal vectors u_i}\\sum_{i}u_i^T X u_i\n and if Z >= X in PSD sense then\n \\sum_{i}u_i^T Z u_i >= \\sum_{i}u_i^T X u_i\n\n We have equality when s = lambda_k and Z diagonal\n with Z_{ii} = (lambda_i - lambda_k)_+\n ' Z = Semidef(X.size[0]) return ((k * lambda_max((X - Z))) + trace(Z))
[ "def", "lambda_sum_largest", "(", "X", ",", "k", ")", ":", "X", "=", "Expression", ".", "cast_to_const", "(", "X", ")", "if", "(", "X", ".", "size", "[", "0", "]", "!=", "X", ".", "size", "[", "1", "]", ")", ":", "raise", "ValueError", "(", "'First argument must be a square matrix.'", ")", "elif", "(", "(", "int", "(", "k", ")", "!=", "k", ")", "or", "(", "k", "<=", "0", ")", ")", ":", "raise", "ValueError", "(", "'Second argument must be a positive integer.'", ")", "'\\n S_k(X) denotes lambda_sum_largest(X, k)\\n t >= k S_k(X - Z) + trace(Z), Z is PSD\\n implies\\n t >= ks + trace(Z)\\n Z is PSD\\n sI >= X - Z (PSD sense)\\n which implies\\n t >= ks + trace(Z) >= S_k(sI + Z) >= S_k(X)\\n We use the fact that\\n S_k(X) = sup_{sets of k orthonormal vectors u_i}\\\\sum_{i}u_i^T X u_i\\n and if Z >= X in PSD sense then\\n \\\\sum_{i}u_i^T Z u_i >= \\\\sum_{i}u_i^T X u_i\\n\\n We have equality when s = lambda_k and Z diagonal\\n with Z_{ii} = (lambda_i - lambda_k)_+\\n '", "Z", "=", "Semidef", "(", "X", ".", "size", "[", "0", "]", ")", "return", "(", "(", "k", "*", "lambda_max", "(", "(", "X", "-", "Z", ")", ")", ")", "+", "trace", "(", "Z", ")", ")" ]
sum of the largest k eigenvalues .
train
false
12,157
def _restore_bool(name, value): lowercase_value = value.lower() if (lowercase_value not in ('true', 'false')): raise errors.Error('Expected True or False for {0} but found {1}'.format(name, value)) return (lowercase_value == 'true')
[ "def", "_restore_bool", "(", "name", ",", "value", ")", ":", "lowercase_value", "=", "value", ".", "lower", "(", ")", "if", "(", "lowercase_value", "not", "in", "(", "'true'", ",", "'false'", ")", ")", ":", "raise", "errors", ".", "Error", "(", "'Expected True or False for {0} but found {1}'", ".", "format", "(", "name", ",", "value", ")", ")", "return", "(", "lowercase_value", "==", "'true'", ")" ]
restores a boolean key-value pair from a renewal config file .
train
false
12,158
def _in_gae_environment(): if (SETTINGS.env_name is not None): return (SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL')) try: import google.appengine server_software = os.environ.get('SERVER_SOFTWARE', '') if server_software.startswith('Google App Engine/'): SETTINGS.env_name = 'GAE_PRODUCTION' return True elif server_software.startswith('Development/'): SETTINGS.env_name = 'GAE_LOCAL' return True except ImportError: pass return False
[ "def", "_in_gae_environment", "(", ")", ":", "if", "(", "SETTINGS", ".", "env_name", "is", "not", "None", ")", ":", "return", "(", "SETTINGS", ".", "env_name", "in", "(", "'GAE_PRODUCTION'", ",", "'GAE_LOCAL'", ")", ")", "try", ":", "import", "google", ".", "appengine", "server_software", "=", "os", ".", "environ", ".", "get", "(", "'SERVER_SOFTWARE'", ",", "''", ")", "if", "server_software", ".", "startswith", "(", "'Google App Engine/'", ")", ":", "SETTINGS", ".", "env_name", "=", "'GAE_PRODUCTION'", "return", "True", "elif", "server_software", ".", "startswith", "(", "'Development/'", ")", ":", "SETTINGS", ".", "env_name", "=", "'GAE_LOCAL'", "return", "True", "except", "ImportError", ":", "pass", "return", "False" ]
detects if the code is running in the app engine environment .
train
true
12,159
def timedelta_as_minutes(td): return (timedelta_as_seconds(td) / 60)
[ "def", "timedelta_as_minutes", "(", "td", ")", ":", "return", "(", "timedelta_as_seconds", "(", "td", ")", "/", "60", ")" ]
returns the value of the entire timedelta as integer minutes .
train
false
12,161
def roundtrip(img, plugin, suffix): if ('.' not in suffix): suffix = ('.' + suffix) temp_file = NamedTemporaryFile(suffix=suffix, delete=False) fname = temp_file.name temp_file.close() io.imsave(fname, img, plugin=plugin) new = io.imread(fname, plugin=plugin) try: os.remove(fname) except Exception: pass return new
[ "def", "roundtrip", "(", "img", ",", "plugin", ",", "suffix", ")", ":", "if", "(", "'.'", "not", "in", "suffix", ")", ":", "suffix", "=", "(", "'.'", "+", "suffix", ")", "temp_file", "=", "NamedTemporaryFile", "(", "suffix", "=", "suffix", ",", "delete", "=", "False", ")", "fname", "=", "temp_file", ".", "name", "temp_file", ".", "close", "(", ")", "io", ".", "imsave", "(", "fname", ",", "img", ",", "plugin", "=", "plugin", ")", "new", "=", "io", ".", "imread", "(", "fname", ",", "plugin", "=", "plugin", ")", "try", ":", "os", ".", "remove", "(", "fname", ")", "except", "Exception", ":", "pass", "return", "new" ]
save and read an image using a specified plugin .
train
false
12,165
def get_cleaned(mailchimp, list_id): return get_members(mailchimp, list_id, 'cleaned')
[ "def", "get_cleaned", "(", "mailchimp", ",", "list_id", ")", ":", "return", "get_members", "(", "mailchimp", ",", "list_id", ",", "'cleaned'", ")" ]
returns a set of email addresses that have been cleaned from list_id these email addresses may be invalid or have caused bounces .
train
false
12,167
def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): with _ignore_deprecated_imports(deprecated): orig_modules = {} names_to_remove = [] _save_and_remove_module(name, orig_modules) try: for fresh_name in fresh: _save_and_remove_module(fresh_name, orig_modules) for blocked_name in blocked: if (not _save_and_block_module(blocked_name, orig_modules)): names_to_remove.append(blocked_name) fresh_module = importlib.import_module(name) except ImportError: fresh_module = None finally: for (orig_name, module) in orig_modules.items(): sys.modules[orig_name] = module for name_to_remove in names_to_remove: del sys.modules[name_to_remove] return fresh_module
[ "def", "import_fresh_module", "(", "name", ",", "fresh", "=", "(", ")", ",", "blocked", "=", "(", ")", ",", "deprecated", "=", "False", ")", ":", "with", "_ignore_deprecated_imports", "(", "deprecated", ")", ":", "orig_modules", "=", "{", "}", "names_to_remove", "=", "[", "]", "_save_and_remove_module", "(", "name", ",", "orig_modules", ")", "try", ":", "for", "fresh_name", "in", "fresh", ":", "_save_and_remove_module", "(", "fresh_name", ",", "orig_modules", ")", "for", "blocked_name", "in", "blocked", ":", "if", "(", "not", "_save_and_block_module", "(", "blocked_name", ",", "orig_modules", ")", ")", ":", "names_to_remove", ".", "append", "(", "blocked_name", ")", "fresh_module", "=", "importlib", ".", "import_module", "(", "name", ")", "except", "ImportError", ":", "fresh_module", "=", "None", "finally", ":", "for", "(", "orig_name", ",", "module", ")", "in", "orig_modules", ".", "items", "(", ")", ":", "sys", ".", "modules", "[", "orig_name", "]", "=", "module", "for", "name_to_remove", "in", "names_to_remove", ":", "del", "sys", ".", "modules", "[", "name_to_remove", "]", "return", "fresh_module" ]
imports and returns a module .
train
false
12,168
def _get_cmd(): if (__grains__['os_family'] == 'RedHat'): return 'alternatives' return 'update-alternatives'
[ "def", "_get_cmd", "(", ")", ":", "if", "(", "__grains__", "[", "'os_family'", "]", "==", "'RedHat'", ")", ":", "return", "'alternatives'", "return", "'update-alternatives'" ]
return the alternatives command name for the current distro .
train
false
12,169
def test_hosts_decorator_by_itself(): host_list = ['a', 'b'] @hosts(*host_list) def command(): pass eq_hosts(command, host_list)
[ "def", "test_hosts_decorator_by_itself", "(", ")", ":", "host_list", "=", "[", "'a'", ",", "'b'", "]", "@", "hosts", "(", "*", "host_list", ")", "def", "command", "(", ")", ":", "pass", "eq_hosts", "(", "command", ",", "host_list", ")" ]
use of @hosts only .
train
false
12,171
def get_users_settings(user_ids): user_settings_models = user_models.UserSettingsModel.get_multi(user_ids) result = [] for (ind, model) in enumerate(user_settings_models): if (user_ids[ind] == feconf.SYSTEM_COMMITTER_ID): result.append(UserSettings(feconf.SYSTEM_COMMITTER_ID, email=feconf.SYSTEM_EMAIL_ADDRESS, username='admin', last_agreed_to_terms=datetime.datetime.utcnow())) elif model: result.append(UserSettings(model.id, email=model.email, username=model.username, last_agreed_to_terms=model.last_agreed_to_terms, last_started_state_editor_tutorial=model.last_started_state_editor_tutorial, last_logged_in=model.last_logged_in, last_edited_an_exploration=model.last_edited_an_exploration, last_created_an_exploration=model.last_created_an_exploration, profile_picture_data_url=model.profile_picture_data_url, user_bio=model.user_bio, subject_interests=model.subject_interests, first_contribution_msec=model.first_contribution_msec, preferred_language_codes=model.preferred_language_codes, preferred_site_language_code=model.preferred_site_language_code)) else: result.append(None) return result
[ "def", "get_users_settings", "(", "user_ids", ")", ":", "user_settings_models", "=", "user_models", ".", "UserSettingsModel", ".", "get_multi", "(", "user_ids", ")", "result", "=", "[", "]", "for", "(", "ind", ",", "model", ")", "in", "enumerate", "(", "user_settings_models", ")", ":", "if", "(", "user_ids", "[", "ind", "]", "==", "feconf", ".", "SYSTEM_COMMITTER_ID", ")", ":", "result", ".", "append", "(", "UserSettings", "(", "feconf", ".", "SYSTEM_COMMITTER_ID", ",", "email", "=", "feconf", ".", "SYSTEM_EMAIL_ADDRESS", ",", "username", "=", "'admin'", ",", "last_agreed_to_terms", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ")", ")", "elif", "model", ":", "result", ".", "append", "(", "UserSettings", "(", "model", ".", "id", ",", "email", "=", "model", ".", "email", ",", "username", "=", "model", ".", "username", ",", "last_agreed_to_terms", "=", "model", ".", "last_agreed_to_terms", ",", "last_started_state_editor_tutorial", "=", "model", ".", "last_started_state_editor_tutorial", ",", "last_logged_in", "=", "model", ".", "last_logged_in", ",", "last_edited_an_exploration", "=", "model", ".", "last_edited_an_exploration", ",", "last_created_an_exploration", "=", "model", ".", "last_created_an_exploration", ",", "profile_picture_data_url", "=", "model", ".", "profile_picture_data_url", ",", "user_bio", "=", "model", ".", "user_bio", ",", "subject_interests", "=", "model", ".", "subject_interests", ",", "first_contribution_msec", "=", "model", ".", "first_contribution_msec", ",", "preferred_language_codes", "=", "model", ".", "preferred_language_codes", ",", "preferred_site_language_code", "=", "model", ".", "preferred_site_language_code", ")", ")", "else", ":", "result", ".", "append", "(", "None", ")", "return", "result" ]
gets domain objects representing the settings for the given user_ids .
train
false
12,172
def serialize_one(out, package): if ((not package) or package[u'name'].startswith(u'_')): log(u'skipping', package) return 0 row = (package[u'package_manager'], package[u'name'], package[u'description'], arrayize(package[u'emails'])) out.writerow(row) return 1
[ "def", "serialize_one", "(", "out", ",", "package", ")", ":", "if", "(", "(", "not", "package", ")", "or", "package", "[", "u'name'", "]", ".", "startswith", "(", "u'_'", ")", ")", ":", "log", "(", "u'skipping'", ",", "package", ")", "return", "0", "row", "=", "(", "package", "[", "u'package_manager'", "]", ",", "package", "[", "u'name'", "]", ",", "package", "[", "u'description'", "]", ",", "arrayize", "(", "package", "[", "u'emails'", "]", ")", ")", "out", ".", "writerow", "(", "row", ")", "return", "1" ]
take a single package dict and emit a csv serialization suitable for postgres copy .
train
false
12,174
def change_HTTPS_KEY(https_key): if (https_key == ''): sickbeard.HTTPS_KEY = '' return True if (ek(os.path.normpath, sickbeard.HTTPS_KEY) != ek(os.path.normpath, https_key)): if helpers.makeDir(ek(os.path.dirname, ek(os.path.abspath, https_key))): sickbeard.HTTPS_KEY = ek(os.path.normpath, https_key) logger.log((u'Changed https key path to ' + https_key)) else: return False return True
[ "def", "change_HTTPS_KEY", "(", "https_key", ")", ":", "if", "(", "https_key", "==", "''", ")", ":", "sickbeard", ".", "HTTPS_KEY", "=", "''", "return", "True", "if", "(", "ek", "(", "os", ".", "path", ".", "normpath", ",", "sickbeard", ".", "HTTPS_KEY", ")", "!=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "https_key", ")", ")", ":", "if", "helpers", ".", "makeDir", "(", "ek", "(", "os", ".", "path", ".", "dirname", ",", "ek", "(", "os", ".", "path", ".", "abspath", ",", "https_key", ")", ")", ")", ":", "sickbeard", ".", "HTTPS_KEY", "=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "https_key", ")", "logger", ".", "log", "(", "(", "u'Changed https key path to '", "+", "https_key", ")", ")", "else", ":", "return", "False", "return", "True" ]
replace https key file path .
train
false
12,175
def state_path_def(*args): return os.path.join('$state_path', *args)
[ "def", "state_path_def", "(", "*", "args", ")", ":", "return", "os", ".", "path", ".", "join", "(", "'$state_path'", ",", "*", "args", ")" ]
return an uninterpolated path relative to $state_path .
train
false
12,176
def VERSION_INFO(): from django.conf import settings from kalite.shared.utils import open_json_or_yml return open_json_or_yml(os.path.join(settings.CONTENT_DATA_PATH, 'version.yml'))
[ "def", "VERSION_INFO", "(", ")", ":", "from", "django", ".", "conf", "import", "settings", "from", "kalite", ".", "shared", ".", "utils", "import", "open_json_or_yml", "return", "open_json_or_yml", "(", "os", ".", "path", ".", "join", "(", "settings", ".", "CONTENT_DATA_PATH", ",", "'version.yml'", ")", ")" ]
load a dictionary of changes between each version .
train
false
12,177
def keras_test(func): @six.wraps(func) def wrapper(*args, **kwargs): output = func(*args, **kwargs) if (K.backend() == 'tensorflow'): K.clear_session() return output return wrapper
[ "def", "keras_test", "(", "func", ")", ":", "@", "six", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "output", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "K", ".", "backend", "(", ")", "==", "'tensorflow'", ")", ":", "K", ".", "clear_session", "(", ")", "return", "output", "return", "wrapper" ]
function wrapper to clean up after tensorflow tests .
train
false
12,178
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise the serial module .
train
false
12,179
def get_all_group_types(context, inactive=0, filters=None, marker=None, limit=None, sort_keys=None, sort_dirs=None, offset=None, list_result=False): grp_types = db.group_type_get_all(context, inactive, filters=filters, marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs, offset=offset, list_result=list_result) return grp_types
[ "def", "get_all_group_types", "(", "context", ",", "inactive", "=", "0", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ",", "offset", "=", "None", ",", "list_result", "=", "False", ")", ":", "grp_types", "=", "db", ".", "group_type_get_all", "(", "context", ",", "inactive", ",", "filters", "=", "filters", ",", "marker", "=", "marker", ",", "limit", "=", "limit", ",", "sort_keys", "=", "sort_keys", ",", "sort_dirs", "=", "sort_dirs", ",", "offset", "=", "offset", ",", "list_result", "=", "list_result", ")", "return", "grp_types" ]
get all non-deleted group_types .
train
false
12,180
def non_celestial_pixel_scales(inwcs): if inwcs.is_celestial: raise ValueError(u'WCS is celestial, use celestial_pixel_scales instead') pccd = inwcs.pixel_scale_matrix if np.allclose(np.extract((1 - np.eye(*pccd.shape)), pccd), 0): return (np.abs(np.diagonal(pccd)) * u.deg) else: raise ValueError(u'WCS is rotated, cannot determine consistent pixel scales')
[ "def", "non_celestial_pixel_scales", "(", "inwcs", ")", ":", "if", "inwcs", ".", "is_celestial", ":", "raise", "ValueError", "(", "u'WCS is celestial, use celestial_pixel_scales instead'", ")", "pccd", "=", "inwcs", ".", "pixel_scale_matrix", "if", "np", ".", "allclose", "(", "np", ".", "extract", "(", "(", "1", "-", "np", ".", "eye", "(", "*", "pccd", ".", "shape", ")", ")", ",", "pccd", ")", ",", "0", ")", ":", "return", "(", "np", ".", "abs", "(", "np", ".", "diagonal", "(", "pccd", ")", ")", "*", "u", ".", "deg", ")", "else", ":", "raise", "ValueError", "(", "u'WCS is rotated, cannot determine consistent pixel scales'", ")" ]
calculate the pixel scale along each axis of a non-celestial wcs .
train
false
12,181
def is_valid_port(entry, allow_zero=False): try: value = int(entry) if (str(value) != str(entry)): return False elif (allow_zero and (value == 0)): return True else: return ((value > 0) and (value < 65536)) except TypeError: if isinstance(entry, (tuple, list)): for port in entry: if (not is_valid_port(port, allow_zero)): return False return True else: return False except ValueError: return False
[ "def", "is_valid_port", "(", "entry", ",", "allow_zero", "=", "False", ")", ":", "try", ":", "value", "=", "int", "(", "entry", ")", "if", "(", "str", "(", "value", ")", "!=", "str", "(", "entry", ")", ")", ":", "return", "False", "elif", "(", "allow_zero", "and", "(", "value", "==", "0", ")", ")", ":", "return", "True", "else", ":", "return", "(", "(", "value", ">", "0", ")", "and", "(", "value", "<", "65536", ")", ")", "except", "TypeError", ":", "if", "isinstance", "(", "entry", ",", "(", "tuple", ",", "list", ")", ")", ":", "for", "port", "in", "entry", ":", "if", "(", "not", "is_valid_port", "(", "port", ",", "allow_zero", ")", ")", ":", "return", "False", "return", "True", "else", ":", "return", "False", "except", "ValueError", ":", "return", "False" ]
check if given port is valid .
train
false
12,182
def is_team_member(name, team_name, profile='github'): return (name.lower() in list_team_members(team_name, profile=profile))
[ "def", "is_team_member", "(", "name", ",", "team_name", ",", "profile", "=", "'github'", ")", ":", "return", "(", "name", ".", "lower", "(", ")", "in", "list_team_members", "(", "team_name", ",", "profile", "=", "profile", ")", ")" ]
returns true if the github user is in the team with team_name .
train
true
12,183
def fix_command(): settings.init() with logs.debug_time('Total'): logs.debug(u'Run with settings: {}'.format(pformat(settings))) try: command = types.Command.from_raw_script(sys.argv[1:]) except EmptyCommand: logs.debug('Empty command, nothing to do') return corrected_commands = get_corrected_commands(command) selected_command = select_command(corrected_commands) if selected_command: selected_command.run(command) else: sys.exit(1)
[ "def", "fix_command", "(", ")", ":", "settings", ".", "init", "(", ")", "with", "logs", ".", "debug_time", "(", "'Total'", ")", ":", "logs", ".", "debug", "(", "u'Run with settings: {}'", ".", "format", "(", "pformat", "(", "settings", ")", ")", ")", "try", ":", "command", "=", "types", ".", "Command", ".", "from_raw_script", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "except", "EmptyCommand", ":", "logs", ".", "debug", "(", "'Empty command, nothing to do'", ")", "return", "corrected_commands", "=", "get_corrected_commands", "(", "command", ")", "selected_command", "=", "select_command", "(", "corrected_commands", ")", "if", "selected_command", ":", "selected_command", ".", "run", "(", "command", ")", "else", ":", "sys", ".", "exit", "(", "1", ")" ]
fixes previous command .
train
true
12,184
def _CheckNumberFoundAccuracy(number_found_accuracy): return _CheckInteger(number_found_accuracy, 'number_found_accuracy', zero_ok=False, upper_bound=MAXIMUM_NUMBER_FOUND_ACCURACY)
[ "def", "_CheckNumberFoundAccuracy", "(", "number_found_accuracy", ")", ":", "return", "_CheckInteger", "(", "number_found_accuracy", ",", "'number_found_accuracy'", ",", "zero_ok", "=", "False", ",", "upper_bound", "=", "MAXIMUM_NUMBER_FOUND_ACCURACY", ")" ]
checks the accuracy is an integer within range .
train
false
12,185
def should_abort(instance, now, progress_time, progress_timeout, elapsed, completion_timeout, migration_status): if (migration_status == 'running (post-copy)'): return False if ((progress_timeout != 0) and ((now - progress_time) > progress_timeout)): LOG.warning(_LW('Live migration stuck for %d sec'), (now - progress_time), instance=instance) return True if ((completion_timeout != 0) and (elapsed > completion_timeout)): LOG.warning(_LW('Live migration not completed after %d sec'), completion_timeout, instance=instance) return True return False
[ "def", "should_abort", "(", "instance", ",", "now", ",", "progress_time", ",", "progress_timeout", ",", "elapsed", ",", "completion_timeout", ",", "migration_status", ")", ":", "if", "(", "migration_status", "==", "'running (post-copy)'", ")", ":", "return", "False", "if", "(", "(", "progress_timeout", "!=", "0", ")", "and", "(", "(", "now", "-", "progress_time", ")", ">", "progress_timeout", ")", ")", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Live migration stuck for %d sec'", ")", ",", "(", "now", "-", "progress_time", ")", ",", "instance", "=", "instance", ")", "return", "True", "if", "(", "(", "completion_timeout", "!=", "0", ")", "and", "(", "elapsed", ">", "completion_timeout", ")", ")", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Live migration not completed after %d sec'", ")", ",", "completion_timeout", ",", "instance", "=", "instance", ")", "return", "True", "return", "False" ]
determine if the migration should be aborted .
train
false
12,186
def _compute_voxel(args): from dipy.sims.voxel import multi_tensor ffs = args[u'fractions'] gtab = args[u'gradients'] signal = np.zeros_like(gtab.bvals, dtype=np.float32) sf_vf = np.sum(ffs) if (sf_vf > 0.0): ffs = ((np.array(ffs) / sf_vf) * 100) snr = (args[u'snr'] if (args[u'snr'] > 0) else None) try: (signal, _) = multi_tensor(gtab, args[u'mevals'], S0=args[u'S0'], angles=args[u'sticks'], fractions=ffs, snr=snr) except Exception as e: pass return signal.tolist()
[ "def", "_compute_voxel", "(", "args", ")", ":", "from", "dipy", ".", "sims", ".", "voxel", "import", "multi_tensor", "ffs", "=", "args", "[", "u'fractions'", "]", "gtab", "=", "args", "[", "u'gradients'", "]", "signal", "=", "np", ".", "zeros_like", "(", "gtab", ".", "bvals", ",", "dtype", "=", "np", ".", "float32", ")", "sf_vf", "=", "np", ".", "sum", "(", "ffs", ")", "if", "(", "sf_vf", ">", "0.0", ")", ":", "ffs", "=", "(", "(", "np", ".", "array", "(", "ffs", ")", "/", "sf_vf", ")", "*", "100", ")", "snr", "=", "(", "args", "[", "u'snr'", "]", "if", "(", "args", "[", "u'snr'", "]", ">", "0", ")", "else", "None", ")", "try", ":", "(", "signal", ",", "_", ")", "=", "multi_tensor", "(", "gtab", ",", "args", "[", "u'mevals'", "]", ",", "S0", "=", "args", "[", "u'S0'", "]", ",", "angles", "=", "args", "[", "u'sticks'", "]", ",", "fractions", "=", "ffs", ",", "snr", "=", "snr", ")", "except", "Exception", "as", "e", ":", "pass", "return", "signal", ".", "tolist", "(", ")" ]
simulate dw signal for one voxel .
train
false
12,187
def is_emergency_number(number, region_code): return _matches_emergency_number_helper(number, region_code, False)
[ "def", "is_emergency_number", "(", "number", ",", "region_code", ")", ":", "return", "_matches_emergency_number_helper", "(", "number", ",", "region_code", ",", "False", ")" ]
returns true if the given number exactly matches an emergency service number in the given region .
train
false
12,189
def qos_specs_item_delete(context, qos_specs_id, key): return IMPL.qos_specs_item_delete(context, qos_specs_id, key)
[ "def", "qos_specs_item_delete", "(", "context", ",", "qos_specs_id", ",", "key", ")", ":", "return", "IMPL", ".", "qos_specs_item_delete", "(", "context", ",", "qos_specs_id", ",", "key", ")" ]
delete specified key in the qos_specs .
train
false
12,190
def get_dut1utc(time): try: return time.delta_ut1_utc except iers.IERSRangeError as e: _warn_iers(e) return np.zeros(time.shape)
[ "def", "get_dut1utc", "(", "time", ")", ":", "try", ":", "return", "time", ".", "delta_ut1_utc", "except", "iers", ".", "IERSRangeError", "as", "e", ":", "_warn_iers", "(", "e", ")", "return", "np", ".", "zeros", "(", "time", ".", "shape", ")" ]
this function is used to get ut1-utc in coordinates because normally it gives an error outside the iers range .
train
false
12,191
def make_fake_api_client(): api_client = docker.APIClient() mock_client = CopyReturnMagicMock(**{'build.return_value': fake_api.FAKE_IMAGE_ID, 'commit.return_value': fake_api.post_fake_commit()[1], 'containers.return_value': fake_api.get_fake_containers()[1], 'create_container.return_value': fake_api.post_fake_create_container()[1], 'create_host_config.side_effect': api_client.create_host_config, 'create_network.return_value': fake_api.post_fake_network()[1], 'exec_create.return_value': fake_api.post_fake_exec_create()[1], 'exec_start.return_value': fake_api.post_fake_exec_start()[1], 'images.return_value': fake_api.get_fake_images()[1], 'inspect_container.return_value': fake_api.get_fake_inspect_container()[1], 'inspect_image.return_value': fake_api.get_fake_inspect_image()[1], 'inspect_network.return_value': fake_api.get_fake_network()[1], 'logs.return_value': 'hello world\n', 'networks.return_value': fake_api.get_fake_network_list()[1], 'start.return_value': None, 'wait.return_value': 0}) mock_client._version = docker.constants.DEFAULT_DOCKER_API_VERSION return mock_client
[ "def", "make_fake_api_client", "(", ")", ":", "api_client", "=", "docker", ".", "APIClient", "(", ")", "mock_client", "=", "CopyReturnMagicMock", "(", "**", "{", "'build.return_value'", ":", "fake_api", ".", "FAKE_IMAGE_ID", ",", "'commit.return_value'", ":", "fake_api", ".", "post_fake_commit", "(", ")", "[", "1", "]", ",", "'containers.return_value'", ":", "fake_api", ".", "get_fake_containers", "(", ")", "[", "1", "]", ",", "'create_container.return_value'", ":", "fake_api", ".", "post_fake_create_container", "(", ")", "[", "1", "]", ",", "'create_host_config.side_effect'", ":", "api_client", ".", "create_host_config", ",", "'create_network.return_value'", ":", "fake_api", ".", "post_fake_network", "(", ")", "[", "1", "]", ",", "'exec_create.return_value'", ":", "fake_api", ".", "post_fake_exec_create", "(", ")", "[", "1", "]", ",", "'exec_start.return_value'", ":", "fake_api", ".", "post_fake_exec_start", "(", ")", "[", "1", "]", ",", "'images.return_value'", ":", "fake_api", ".", "get_fake_images", "(", ")", "[", "1", "]", ",", "'inspect_container.return_value'", ":", "fake_api", ".", "get_fake_inspect_container", "(", ")", "[", "1", "]", ",", "'inspect_image.return_value'", ":", "fake_api", ".", "get_fake_inspect_image", "(", ")", "[", "1", "]", ",", "'inspect_network.return_value'", ":", "fake_api", ".", "get_fake_network", "(", ")", "[", "1", "]", ",", "'logs.return_value'", ":", "'hello world\\n'", ",", "'networks.return_value'", ":", "fake_api", ".", "get_fake_network_list", "(", ")", "[", "1", "]", ",", "'start.return_value'", ":", "None", ",", "'wait.return_value'", ":", "0", "}", ")", "mock_client", ".", "_version", "=", "docker", ".", "constants", ".", "DEFAULT_DOCKER_API_VERSION", "return", "mock_client" ]
returns an incomplete fake api client .
train
false
12,194
@functools.lru_cache() def get_app_template_dirs(dirname): template_dirs = [] for app_config in apps.get_app_configs(): if (not app_config.path): continue template_dir = os.path.join(app_config.path, dirname) if os.path.isdir(template_dir): template_dirs.append(template_dir) return tuple(template_dirs)
[ "@", "functools", ".", "lru_cache", "(", ")", "def", "get_app_template_dirs", "(", "dirname", ")", ":", "template_dirs", "=", "[", "]", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", ":", "if", "(", "not", "app_config", ".", "path", ")", ":", "continue", "template_dir", "=", "os", ".", "path", ".", "join", "(", "app_config", ".", "path", ",", "dirname", ")", "if", "os", ".", "path", ".", "isdir", "(", "template_dir", ")", ":", "template_dirs", ".", "append", "(", "template_dir", ")", "return", "tuple", "(", "template_dirs", ")" ]
return an iterable of paths of directories to load app templates from .
train
false