Dataset columns:
    id_within_dataset      int64    1 to 55.5k
    snippet                string   19 to 14.2k characters
    tokens                 list     6 to 1.63k tokens
    nl                     string   6 to 352 characters
    split_within_dataset   string   1 class (train)
    is_duplicated          bool     2 classes (true/false)
10,713
def check_builtin_matches_remote(download_url=True):
    builtin_registry = EarthLocation._get_site_registry(force_builtin=True)
    dl_registry = EarthLocation._get_site_registry(force_download=download_url)
    in_dl = {}
    matches = {}
    for name in builtin_registry.names:
        in_dl[name] = (name in dl_registry)
        if in_dl[name]:
            matches[name] = quantity_allclose(builtin_registry[name], dl_registry[name])
        else:
            matches[name] = False
    if (not all(matches.values())):
        print(u'In builtin registry but not in download:')
        for name in in_dl:
            if (not in_dl[name]):
                print(u' ', name)
        print(u'In both but not the same value:')
        for name in matches:
            if ((not matches[name]) and in_dl[name]):
                print(u' ', name, u'builtin:', builtin_registry[name], u'download:', dl_registry[name])
        assert False, u"Builtin and download registry aren't consistent - failures printed to stdout"
this function checks that the builtin sites registry is consistent with the remote registry .
train
false
10,714
def articlesDictsForLang(lang):
    if (lang in _ART_CACHE):
        return _ART_CACHE[lang]
    artDicts = toDicts(LANG_ARTICLES.get(lang, GENERIC_ARTICLES))
    _ART_CACHE[lang] = artDicts
    return artDicts
return dictionaries of articles specific for the given language .
train
false
10,715
def literal_dumps(obj, prettyprint=False, indent=4):
    memo = {}
    NoneType = type(None)

    def check(obj):
        if (type(obj) in [int, float, bool, NoneType, str, bytes]):
            return True
        if (id(obj) in memo):
            raise ValueError('{0} is a recursive structure'.format(obj))
        memo[id(obj)] = obj
        if (type(obj) in [list, tuple]):
            return all(map(check, obj))
        elif (type(obj) is dict):
            return all(map(check, chain(iter(obj.keys()), iter(obj.values()))))
        else:
            raise TypeError('{0} can not be serialized as a python literal'.format(type(obj)))

    check(obj)
    if prettyprint:
        return pprint.pformat(obj, indent=indent)
    else:
        return repr(obj)
write obj into a string as a python literal .
train
false
10,718
def scroll_one_line_down(event):
    w = find_window_for_buffer_name(event.cli, event.cli.current_buffer_name)
    b = event.cli.current_buffer
    if w:
        if w.render_info:
            info = w.render_info
            if (w.vertical_scroll < (info.content_height - info.window_height)):
                if (info.cursor_position.y <= info.configured_scroll_offsets.top):
                    b.cursor_position += b.document.get_cursor_down_position()
                w.vertical_scroll += 1
scroll_offset += 1 .
train
true
10,719
def putInToC(document, toc):
    tocOrig = domhelpers.findElementsWithAttribute(document, 'class', 'toc')
    if tocOrig:
        tocOrig = tocOrig[0]
        tocOrig.childNodes = [toc]
insert the given table of contents into the given document .
train
false
10,721
@treeio_login_required
@handle_response_format
def category_delete(request, category_id, response_format='html'):
    category = get_object_or_404(Category, pk=category_id)
    if (not request.user.profile.has_permission(category, mode='w')):
        return user_denied(request, "You don't have access to this Category", response_format)
    if request.POST:
        if ('delete' in request.POST):
            if ('trash' in request.POST):
                category.trash = True
                category.save()
            else:
                category.delete()
            return HttpResponseRedirect(reverse('finance_settings_view'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('finance_category_view', args=[category.id]))
    return render_to_response('finance/category_delete', {'category': category},
                              context_instance=RequestContext(request),
                              response_format=response_format)
knowledge category delete .
train
false
10,722
def is_valid_fingerprint(entry, check_prefix=False):
    try:
        if check_prefix:
            if ((not entry) or (entry[0] != '$')):
                return False
            entry = entry[1:]
        return is_hex_digits(entry, 40)
    except TypeError:
        return False
checks if a string is a properly formatted relay fingerprint .
train
false
10,723
def getDescriptionMultiply(lines):
    activateMultiplyString = getSettingString(lines, 'multiply', 'Activate Multiply')
    if ((activateMultiplyString == None) or (activateMultiplyString == 'False')):
        return ''
    columnsString = getSettingString(lines, 'multiply', 'Number of Columns')
    rowsString = getSettingString(lines, 'multiply', 'Number of Rows')
    if ((columnsString == '1') and (rowsString == '1')):
        return ''
    return ('_%scx%sr' % (columnsString, rowsString))
get the description for multiply .
train
false
10,725
def _deserialize(data):
    LOG.debug(_('Deserializing: %s'), data)
    return jsonutils.loads(data)
deserialization wrapper .
train
false
10,727
def stdev(values):
    if ((not values) or (len(values) < 2)):
        return None
    key = (values[0], values[(-1)], len(values))
    if (kb.get('cache') and (key in kb.cache.stdev)):
        retVal = kb.cache.stdev[key]
    else:
        avg = average(values)
        _ = reduce((lambda x, y: (x + pow(((y or 0) - avg), 2))), values, 0.0)
        retVal = sqrt((_ / (len(values) - 1)))
        if kb.get('cache'):
            kb.cache.stdev[key] = retVal
    return retVal
return the square root of the sample variance .
train
false
10,728
@register.filter(name='to_role_name')
def to_role_name(role_id):
    # The values are UTF-8 byte strings for Chinese labels:
    # '0' -> 普通用户 (regular user), '1' -> 组管理员 (group administrator),
    # '2' -> 超级管理员 (super administrator); the default is 未知 (unknown).
    role_dict = {'0': '\xe6\x99\xae\xe9\x80\x9a\xe7\x94\xa8\xe6\x88\xb7',
                 '1': '\xe7\xbb\x84\xe7\xae\xa1\xe7\x90\x86\xe5\x91\x98',
                 '2': '\xe8\xb6\x85\xe7\xba\xa7\xe7\xae\xa1\xe7\x90\x86\xe5\x91\x98'}
    return role_dict.get(str(role_id), '\xe6\x9c\xaa\xe7\x9f\xa5')
convert role_id into a role name .
train
false
10,729
@cacheit
def _expand_delta(expr, index):
    if (not expr.is_Mul):
        return expr
    delta = None
    func = Add
    terms = [S(1)]
    for h in expr.args:
        if ((delta is None) and h.is_Add and _has_simple_delta(h, index)):
            delta = True
            func = h.func
            terms = [(terms[0] * t) for t in h.args]
        else:
            terms = [(t * h) for t in terms]
    return func(*terms)
expand the first add containing a simple kroneckerdelta .
train
false
10,731
def delete_api_resources(restApiId, path, region=None, key=None, keyid=None, profile=None):
    if (path == '/'):
        return {'deleted': False, 'error': 'use delete_api to remove the root resource'}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        r = describe_api_resource(restApiId, path, region=region, key=key,
                                  keyid=keyid, profile=profile)
        resource = r.get('resource')
        if resource:
            conn.delete_resource(restApiId=restApiId, resourceId=resource['id'])
            return {'deleted': True}
        else:
            return {'deleted': False, 'error': 'no resource found by {0}'.format(path)}
    except ClientError as e:
        return {'created': False, 'error': salt.utils.boto3.get_error(e)}
given restapiid and an absolute resource path .
train
true
10,733
def load_file_in_same_dir(ref_file, filename):
    from couchpotato.core.helpers.encoding import toUnicode
    path = (split_path(toUnicode(ref_file))[:(-1)] + [filename])
    for (i, p) in enumerate(path):
        if p.endswith(u'.zip'):
            zfilename = os.path.join(*path[:(i + 1)])
            zfile = zipfile.ZipFile(zfilename)
            return zfile.read(u'/'.join(path[(i + 1):]))
    return u(io.open(os.path.join(*path), encoding=u'utf-8').read())
load a given file .
train
false
10,734
def writelines(lines, filename, encoding='utf-8', mode='wb'):
    return write(os.linesep.join(lines), filename, encoding, mode)
write lines to file assuming encoding return encoding .
train
true
10,735
def cc_library(name, srcs=[], deps=[], visibility=None, warning='yes', defs=[], incs=[],
               export_incs=[], optimize=[], always_optimize=False, pre_build=False,
               prebuilt=False, link_all_symbols=False, deprecated=False,
               extra_cppflags=[], extra_linkflags=[], allow_undefined=False,
               secure=False, **kwargs):
    target = CcLibrary(name, srcs, deps, visibility, warning, defs, incs, export_incs,
                       optimize, always_optimize, (prebuilt or pre_build),
                       link_all_symbols, deprecated, extra_cppflags, extra_linkflags,
                       allow_undefined, secure, blade.blade, kwargs)
    if pre_build:
        console.warning(("//%s:%s: 'pre_build' has been deprecated, please use 'prebuilt'" % (target.path, target.name)))
    blade.blade.register_target(target)
cc_library target .
train
false
10,736
def get_languages_for_locale(locale):
    locale = locale.replace('-', '_')
    if (locale not in AVAILABLE_LANGUAGES):
        try:
            local_lang = babel.Locale(locale).languages
        except babel.UnknownLocaleError:
            return AVAILABLE_LANGUAGES['en']
        diff = [lc for lc in REFERENCE_LANGUAGE.keys() if (lc not in local_lang.keys())]
        for lc in diff:
            local_lang[lc] = REFERENCE_LANGUAGE[lc]
        for lang in REMOVE_LANGS:
            if (lang in local_lang):
                local_lang.pop(lang)
        local_lang = sorted([(key, value.capitalize()) for (key, value) in local_lang.items()],
                            key=(lambda language: language[1]))
        AVAILABLE_LANGUAGES[locale] = local_lang
    return AVAILABLE_LANGUAGES[locale]
this method returns available languages localized in locale .
train
false
10,737
def indices_to_mask(indices, mask_length):
    if (mask_length <= np.max(indices)):
        raise ValueError('mask_length must be greater than max(indices)')
    mask = np.zeros(mask_length, dtype=np.bool)
    mask[indices] = True
    return mask
convert list of indices to boolean mask .
train
false
10,738
@pytest.mark.network
def test_download_if_requested(script):
    result = script.pip('install', 'INITools==0.1', '-d', '.', expect_error=True)
    assert ((Path('scratch') / 'INITools-0.1.tar.gz') in result.files_created)
    assert ((script.site_packages / 'initools') not in result.files_created)
it should download and not install if requested .
train
false
10,740
def _talk2modjk(name, lbn, target, action, profile='default', tgt_type='glob'):
    ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''}
    action_map = {'worker_stop': 'STP', 'worker_disable': 'DIS', 'worker_activate': 'ACT'}
    status = _worker_status(target, name, action_map[action], profile, tgt_type)
    if (not status['result']):
        ret['result'] = False
        ret['comment'] = 'no servers answered the published command modjk.worker_status'
        return ret
    if status['errors']:
        ret['result'] = False
        ret['comment'] = 'the following balancers could not find the worker {0}: {1}'.format(name, status['errors'])
        return ret
    if (not status['wrong_state']):
        ret['comment'] = 'the worker is in the desired activation state on all the balancers'
        return ret
    else:
        ret['comment'] = 'the action {0} will be sent to the balancers {1}'.format(action, status['wrong_state'])
        ret['changes'] = {action: status['wrong_state']}
    if __opts__['test']:
        ret['result'] = None
        return ret
    response = _send_command(action, name, lbn, target, profile, tgt_type)
    ret['comment'] = response['msg']
    ret['result'] = response['code']
    return ret
wrapper function for the stop/disable/activate functions .
train
true
10,741
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
    mapping = {'$(ARCHS_STANDARD)': archs}
    if archs_including_64_bit:
        mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
    return mapping
constructs a dictionary with expansion for $ variable .
train
false
10,742
def makedirs_perms(name, user=None, group=None, mode='0755'):
    name = os.path.expanduser(name)
    path = os.path
    (head, tail) = path.split(name)
    if (not tail):
        (head, tail) = path.split(head)
    if (head and tail and (not path.exists(head))):
        try:
            makedirs_perms(head, user, group, mode)
        except OSError as exc:
            if (exc.errno != errno.EEXIST):
                raise
        if (tail == os.curdir):
            return
    os.mkdir(name)
    check_perms(name, None, user, group, (int('{0}'.format(mode)) if mode else None))
set owner and permissions for each directory created .
train
true
10,743
def maybe_ref(ref):
    if (not isinstance(ref, str)):
        return ref
    return ref_to_obj(ref)
returns the object that the given reference points to .
train
false
10,744
def __parse_binskim(bin_an_dic, output):
    current_run = output['runs'][0]
    if ('results' in current_run):
        rules = output['runs'][0]['rules']
        for res in current_run['results']:
            result = {'rule_id': res['ruleId'],
                      'status': 'Insecure',
                      'desc': rules[res['ruleId']]['shortDescription']}
            bin_an_dic['results'].append(result)
    else:
        print '[WARNING] binskim has no results.'
        warning = {'rule_id': 'No Binskim-Results',
                   'status': 'Info',
                   'desc': 'No results from Binskim.'}
        bin_an_dic['warnings'].append(warning)
    if ('configurationNotifications' in current_run):
        for warn in current_run['configurationNotifications']:
            warning = {'rule_id': warn['ruleId'],
                       'status': 'Info',
                       'desc': warn['message']}
            bin_an_dic['warnings'].append(warning)
    return bin_an_dic
parse output to results and warnings .
train
false
10,745
def get_int_len(_int):
    power = 1
    while True:
        if (_int == 0):
            return 0
        elif (_int < (10 ** power)):
            return power
        power += 1
find the number of digits of an int given an integer .
train
false
10,746
@public
def vfree_group(symbols):
    _free_group = FreeGroup(symbols)
    pollute([sym.name for sym in _free_group.symbols], _free_group.generators)
    return _free_group
construct a free group and inject f_0 .
train
false
10,748
@app.route('/account/<subscription_id>/resourcegroups/<resource_group_name>/storageaccounts/<account_name>/containers/<container_name>')
@auth.require_login
def storageaccount_container_view(subscription_id, resource_group_name, account_name, container_name):
    creds = _get_credentials()
    model = models.get_container_details(subscription_id, creds, resource_group_name,
                                         account_name, container_name)
    return render_template('storageaccount_container.html', title=container_name,
                           year=datetime.now().year, subscription_id=subscription_id,
                           resource_group_name=resource_group_name,
                           account_name=account_name, model=model)
renders the storage account container details .
train
false
10,749
def task_status_update(context, data_dict):
    model = context['model']
    session = model.meta.create_local_session()
    context['session'] = session
    user = context['user']
    id = data_dict.get('id')
    schema = (context.get('schema') or schema_.default_task_status_schema())
    if id:
        task_status = model.TaskStatus.get(id)
        context['task_status'] = task_status
        if (task_status is None):
            raise NotFound(_('TaskStatus was not found.'))
    _check_access('task_status_update', context, data_dict)
    (data, errors) = _validate(data_dict, schema, context)
    if errors:
        session.rollback()
        raise ValidationError(errors)
    task_status = model_save.task_status_dict_save(data, context)
    session.commit()
    session.close()
    return model_dictize.task_status_dictize(task_status, context)
update a task status .
train
false
10,751
def emr_ssl_host_for_region(region):
    region = _fix_region(region)
    if (not region):
        return _EMR_REGIONLESS_ENDPOINT
    else:
        return (_EMR_REGION_SSL_HOST % {'region': region})
get the host for elastic mapreduce that matches their ssl cert for the given region .
train
false
10,752
@then(u'undefined-step snippets should exist for')
def step_undefined_step_snippets_should_exist_for_table(context):
    assert context.table, 'REQUIRES: table'
    for row in context.table.rows:
        step = row['Step']
        step_undefined_step_snippet_should_exist_for(context, step)
checks if undefined-step snippets are provided .
train
false
10,754
def ulps_check(expected, got, ulps=20):
    ulps_error = (to_ulps(got) - to_ulps(expected))
    if (abs(ulps_error) <= ulps):
        return None
    return 'error = {} ulps; permitted error = {} ulps'.format(ulps_error, ulps)
given non-nan floats expected and got .
train
false
10,756
def deferred_class_factory(model, attrs):
    class Meta:
        proxy = True
        app_label = model._meta.app_label

    name = (u'%s_Deferred_%s' % (model.__name__, u'_'.join(sorted(list(attrs)))))
    name = util.truncate_name(name, 80, 32)
    overrides = dict([(attr, DeferredAttribute(attr, model)) for attr in attrs])
    overrides[u'Meta'] = Meta
    overrides[u'__module__'] = model.__module__
    overrides[u'_deferred'] = True
    return type(str(name), (model,), overrides)
returns a class object that is a copy of "model" with the specified "attrs" being replaced with deferredattribute objects .
train
false
10,757
def duration_string(duration):
    (days, hours, minutes, seconds, microseconds) = _get_duration_components(duration)
    string = '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)
    if days:
        string = ('{} '.format(days) + string)
    if microseconds:
        string += '.{:06d}'.format(microseconds)
    return string
version of str which is not english specific .
train
false
10,758
def _inject_net_into_fs(net, fs):
    LOG.debug((_('Inject key fs=%(fs)s net=%(net)s') % locals()))
    netdir = os.path.join('etc', 'network')
    fs.make_path(netdir)
    fs.set_ownership(netdir, 'root', 'root')
    fs.set_permissions(netdir, 484)
    netfile = os.path.join('etc', 'network', 'interfaces')
    _inject_file_into_fs(fs, netfile, net)
inject /etc/network/interfaces into the filesystem rooted at fs .
train
false
10,760
def monitorTest(N=3, seconds=3):
    topo = SingleSwitchTopo(N)
    net = Mininet(topo)
    net.start()
    hosts = net.hosts
    info('Starting test...\n')
    server = hosts[0]
    (outfiles, errfiles) = ({}, {})
    for h in hosts:
        outfiles[h] = ('/tmp/%s.out' % h.name)
        errfiles[h] = ('/tmp/%s.err' % h.name)
        h.cmd('echo >', outfiles[h])
        h.cmd('echo >', errfiles[h])
        h.cmdPrint('ping', server.IP(), '>', outfiles[h], '2>', errfiles[h], '&')
    info('Monitoring output for', seconds, 'seconds\n')
    for (h, line) in monitorFiles(outfiles, seconds, timeoutms=500):
        if h:
            info(('%s: %s\n' % (h.name, line)))
    for h in hosts:
        h.cmd('kill %ping')
    net.stop()
run pings and monitor multiple hosts .
train
false
10,761
def list_state_functions(*args, **kwargs):
    st_ = salt.state.State(__opts__)
    if (not args):
        return sorted(st_.states)
    names = set()
    for module in args:
        if (('*' in module) or ('.' in module)):
            for func in fnmatch.filter(st_.states, module):
                names.add(func)
        else:
            moduledot = (module + '.')
            for func in st_.states:
                if func.startswith(moduledot):
                    names.add(func)
    return sorted(names)
list the functions for all state modules .
train
true
10,762
def safe_shlex_split(text_or_binary):
    return shlex.split(ensure_binary(text_or_binary))
split a string using shell-like syntax .
train
false
10,766
def enable_task(name):
    return _run_cmd('kapacitor enable {0}'.format(name))
enable a kapacitor task .
train
false
10,767
@require_admin_context
def group_type_access_add(context, type_id, project_id):
    group_type_id = _group_type_get_id_from_group_type(context, type_id)
    access_ref = models.GroupTypeProjects()
    access_ref.update({'group_type_id': group_type_id, 'project_id': project_id})
    session = get_session()
    with session.begin():
        try:
            access_ref.save(session=session)
        except db_exc.DBDuplicateEntry:
            raise exception.GroupTypeAccessExists(group_type_id=type_id,
                                                  project_id=project_id)
        return access_ref
add given tenant to the group type access list .
train
false
10,768
@lru_cache()
def _get_firebase_db_url():
    regex = re.compile('\\bdatabaseURL\\b.*?["\\\']([^"\\\']+)')
    cwd = os.path.dirname(__file__)
    try:
        with open(os.path.join(cwd, 'templates', _FIREBASE_CONFIG)) as f:
            url = next((regex.search(line) for line in f if regex.search(line)))
    except StopIteration:
        raise ValueError('Error parsing databaseURL. Please copy Firebase web snippet into templates/{}'.format(_FIREBASE_CONFIG))
    return url.group(1)
grabs the databaseurl from the firebase config snippet .
train
false
10,769
def tenant_exists(keystone, tenant):
    return (tenant in [x.name for x in keystone.tenants.list()])
return true if tenant already exists .
train
false
10,770
def getWideAnglePointIndex(loop):
    dotProductMinimum = 9999999.9
    widestPointIndex = 0
    for pointIndex in xrange(len(loop)):
        point = loop[(pointIndex % len(loop))]
        afterPoint = loop[((pointIndex + 1) % len(loop))]
        beforePoint = loop[((pointIndex - 1) % len(loop))]
        afterSegmentNormalized = euclidean.getNormalized((afterPoint - point))
        beforeSegmentNormalized = euclidean.getNormalized((beforePoint - point))
        dotProduct = euclidean.getDotProduct(afterSegmentNormalized, beforeSegmentNormalized)
        if (dotProduct < 0.99):
            return pointIndex
        if (dotProduct < dotProductMinimum):
            dotProductMinimum = dotProduct
            widestPointIndex = pointIndex
    return widestPointIndex
get a point index which has a wide enough angle .
train
false
10,772
def stripid(text):
    if _re_stripid.search(repr(Exception)):
        return _re_stripid.sub('\\1', text)
    return text
remove the hexadecimal id from a python object representation .
train
false
10,773
def _formatPartialContent(value):
    if (value and isinstance(value, basestring)):
        try:
            value = hexdecode(value)
        except:
            pass
        finally:
            value = safecharencode(value)
    return value
prepares partial content for safe console output .
train
false
10,774
def negate_all(f):
    return (lambda *args, **kwargs: [(- y) for y in f(*args, **kwargs)])
the same when f returns a list of numbers .
train
false
10,775
def resource_status_show(context, data_dict):
    _check_access('resource_status_show', context, data_dict)
    try:
        import ckan.lib.celery_app as celery_app
    except ImportError:
        return {'message': 'queue is not installed on this instance'}
    model = context['model']
    id = _get_or_bust(data_dict, 'id')
    q = _text("""
        select status, date_done, traceback, task_status.*
        from task_status left join celery_taskmeta
        on task_status.value = celery_taskmeta.task_id
        and key = 'celery_task_id'
        where entity_id = :entity_id
        """)
    try:
        result = model.Session.connection().execute(q, entity_id=id)
    except sqlalchemy.exc.ProgrammingError:
        return {'message': 'queue tables not installed on this instance'}
    result_list = [_table_dictize(row, context) for row in result]
    return result_list
return the statuses of a resources tasks .
train
false
10,776
def test_ast_bad_lambda():
    cant_compile(u'(lambda)')
make sure ast cant compile invalid lambda .
train
false
10,777
def _resolve_layer(request, typename, permission='base.view_resourcebase',
                   msg=_PERMISSION_MSG_GENERIC, **kwargs):
    service_typename = typename.split(':', 1)
    if Service.objects.filter(name=service_typename[0]).exists():
        service = Service.objects.filter(name=service_typename[0])
        return resolve_object(request, Layer,
                              {'typename': (service_typename[1] if (service[0].method != 'C') else typename)},
                              permission=permission, permission_msg=msg, **kwargs)
    else:
        return resolve_object(request, Layer, {'typename': typename},
                              permission=permission, permission_msg=msg, **kwargs)
resolve the layer by the provided typename and check the optional permission .
train
false
10,779
def ode_order(expr, func):
    a = Wild('a', exclude=[func])
    if expr.match(a):
        return 0
    if isinstance(expr, Derivative):
        if (expr.args[0] == func):
            return len(expr.variables)
        else:
            order = 0
            for arg in expr.args[0].args:
                order = max(order, (ode_order(arg, func) + len(expr.variables)))
            return order
    else:
        order = 0
        for arg in expr.args:
            order = max(order, ode_order(arg, func))
        return order
returns the order of a given differential equation with respect to func .
train
false
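A usage sketch for ode_order, assuming the function from the snippet above is in scope together with SymPy's Function and Symbol (the names f and x are illustrative):

from sympy import Function, Symbol

x = Symbol('x')
f = Function('f')

# the order is the deepest derivative of f(x) found anywhere in the expression
print(ode_order(f(x).diff(x, 2) + f(x).diff(x) ** 3, f(x)))  # 2
print(ode_order(f(x) + x ** 5, f(x)))                        # 0 (no derivatives of f)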
10,780
def addBottomLoop(deltaZ, loops): bottomLoop = loops[0] bottomAddition = [] bottomZ = (euclidean.getBottomByPath(bottomLoop) + deltaZ) for point in bottomLoop: bottomAddition.append(Vector3Index(len(bottomAddition), point.x, point.y, bottomZ)) loops.insert(0, bottomAddition) numberOfVertexes = 0 for loop in loops: for point in loop: point.index = numberOfVertexes numberOfVertexes += 1
[ "def", "addBottomLoop", "(", "deltaZ", ",", "loops", ")", ":", "bottomLoop", "=", "loops", "[", "0", "]", "bottomAddition", "=", "[", "]", "bottomZ", "=", "(", "euclidean", ".", "getBottomByPath", "(", "bottomLoop", ")", "+", "deltaZ", ")", "for", "point", "in", "bottomLoop", ":", "bottomAddition", ".", "append", "(", "Vector3Index", "(", "len", "(", "bottomAddition", ")", ",", "point", ".", "x", ",", "point", ".", "y", ",", "bottomZ", ")", ")", "loops", ".", "insert", "(", "0", ",", "bottomAddition", ")", "numberOfVertexes", "=", "0", "for", "loop", "in", "loops", ":", "for", "point", "in", "loop", ":", "point", ".", "index", "=", "numberOfVertexes", "numberOfVertexes", "+=", "1" ]
add bottom loop to loops .
train
false
10,781
def which_prefix(path): prefix = abspath(path) while True: if isdir(join(prefix, u'conda-meta')): return prefix if (prefix == dirname(prefix)): return None prefix = dirname(prefix)
[ "def", "which_prefix", "(", "path", ")", ":", "prefix", "=", "abspath", "(", "path", ")", "while", "True", ":", "if", "isdir", "(", "join", "(", "prefix", ",", "u'conda-meta'", ")", ")", ":", "return", "prefix", "if", "(", "prefix", "==", "dirname", "(", "prefix", ")", ")", ":", "return", "None", "prefix", "=", "dirname", "(", "prefix", ")" ]
given the path (to a conda installed file) return the environment prefix in which the file is located .
train
false
10,783
def _extractCommon(string): elements = string.split(None, 2) if (len(elements) != 3): raise InvalidEntry() (hostnames, keyType, keyAndComment) = elements splitkey = keyAndComment.split(None, 1) if (len(splitkey) == 2): (keyString, comment) = splitkey comment = comment.rstrip('\n') else: keyString = splitkey[0] comment = None key = Key.fromString(keyString.decode('base64')) return (hostnames, keyType, key, comment)
[ "def", "_extractCommon", "(", "string", ")", ":", "elements", "=", "string", ".", "split", "(", "None", ",", "2", ")", "if", "(", "len", "(", "elements", ")", "!=", "3", ")", ":", "raise", "InvalidEntry", "(", ")", "(", "hostnames", ",", "keyType", ",", "keyAndComment", ")", "=", "elements", "splitkey", "=", "keyAndComment", ".", "split", "(", "None", ",", "1", ")", "if", "(", "len", "(", "splitkey", ")", "==", "2", ")", ":", "(", "keyString", ",", "comment", ")", "=", "splitkey", "comment", "=", "comment", ".", "rstrip", "(", "'\\n'", ")", "else", ":", "keyString", "=", "splitkey", "[", "0", "]", "comment", "=", "None", "key", "=", "Key", ".", "fromString", "(", "keyString", ".", "decode", "(", "'base64'", ")", ")", "return", "(", "hostnames", ",", "keyType", ",", "key", ",", "comment", ")" ]
extract common elements of base64 keys from an entry in a hosts file .
train
false
10,785
def request_cached(f): def wrapper(*args, **kwargs): '\n Wrapper function to decorate with.\n ' converted_args = map(str, args) converted_kwargs = map(str, reduce(list.__add__, map(list, sorted(kwargs.iteritems())), [])) cache_keys = (([f.__module__, f.func_name] + converted_args) + converted_kwargs) cache_key = '.'.join(cache_keys) rcache = RequestCache.get_request_cache() if (cache_key in rcache.data): return rcache.data.get(cache_key) else: result = f(*args, **kwargs) rcache.data[cache_key] = result return result return wrapper
[ "def", "request_cached", "(", "f", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "converted_args", "=", "map", "(", "str", ",", "args", ")", "converted_kwargs", "=", "map", "(", "str", ",", "reduce", "(", "list", ".", "__add__", ",", "map", "(", "list", ",", "sorted", "(", "kwargs", ".", "iteritems", "(", ")", ")", ")", ",", "[", "]", ")", ")", "cache_keys", "=", "(", "(", "[", "f", ".", "__module__", ",", "f", ".", "func_name", "]", "+", "converted_args", ")", "+", "converted_kwargs", ")", "cache_key", "=", "'.'", ".", "join", "(", "cache_keys", ")", "rcache", "=", "RequestCache", ".", "get_request_cache", "(", ")", "if", "(", "cache_key", "in", "rcache", ".", "data", ")", ":", "return", "rcache", ".", "data", ".", "get", "(", "cache_key", ")", "else", ":", "result", "=", "f", "(", "*", "args", ",", "**", "kwargs", ")", "rcache", ".", "data", "[", "cache_key", "]", "=", "result", "return", "result", "return", "wrapper" ]
a decorator that wraps a function and automatically handles caching its return value .
train
false
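A minimal, hypothetical sketch of the caching scheme request_cached implements above: the cache key joins the module path, the function name, the stringified positional args and the sorted kwargs. A plain dict stands in for edx's RequestCache:

def request_cached_sketch(f):
    cache = {}  # stand-in for RequestCache.get_request_cache().data
    def wrapper(*args, **kwargs):
        parts = [f.__module__, f.__name__]
        parts += [str(a) for a in args]
        parts += [str(x) for kv in sorted(kwargs.items()) for x in kv]
        key = '.'.join(parts)
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return wrapper

calls = []

@request_cached_sketch
def add(a, b):
    calls.append((a, b))
    return a + b

print(add(1, 2), add(1, 2), len(calls))  # 3 3 1 -- the second call hits the cache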
10,786
def cidr_to_ipv4_netmask(cidr_bits): try: cidr_bits = int(cidr_bits) if (not (1 <= cidr_bits <= 32)): return '' except ValueError: return '' netmask = '' for idx in range(4): if idx: netmask += '.' if (cidr_bits >= 8): netmask += '255' cidr_bits -= 8 else: netmask += '{0:d}'.format((256 - (2 ** (8 - cidr_bits)))) cidr_bits = 0 return netmask
[ "def", "cidr_to_ipv4_netmask", "(", "cidr_bits", ")", ":", "try", ":", "cidr_bits", "=", "int", "(", "cidr_bits", ")", "if", "(", "not", "(", "1", "<=", "cidr_bits", "<=", "32", ")", ")", ":", "return", "''", "except", "ValueError", ":", "return", "''", "netmask", "=", "''", "for", "idx", "in", "range", "(", "4", ")", ":", "if", "idx", ":", "netmask", "+=", "'.'", "if", "(", "cidr_bits", ">=", "8", ")", ":", "netmask", "+=", "'255'", "cidr_bits", "-=", "8", "else", ":", "netmask", "+=", "'{0:d}'", ".", "format", "(", "(", "256", "-", "(", "2", "**", "(", "8", "-", "cidr_bits", ")", ")", ")", ")", "cidr_bits", "=", "0", "return", "netmask" ]
returns an ipv4 netmask .
train
true
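Assuming the cidr_to_ipv4_netmask snippet above is in scope, its expected outputs can be cross-checked against the stdlib ipaddress module:

import ipaddress

for bits in (8, 20, 24, 32):
    stdlib = str(ipaddress.ip_network('0.0.0.0/%d' % bits).netmask)
    print(bits, cidr_to_ipv4_netmask(bits), stdlib)
# 8 255.0.0.0 255.0.0.0
# 20 255.255.240.0 255.255.240.0
# 24 255.255.255.0 255.255.255.0
# 32 255.255.255.255 255.255.255.255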
10,787
def chr(code): return Utf8(unichr(code))
[ "def", "chr", "(", "code", ")", ":", "return", "Utf8", "(", "unichr", "(", "code", ")", ")" ]
returns utf8-character with *code* unicode id .
train
false
10,788
def fix_indentation(code, new_indents): min_indents = find_minimum_indents(code) return indent_lines(code, (new_indents - min_indents))
[ "def", "fix_indentation", "(", "code", ",", "new_indents", ")", ":", "min_indents", "=", "find_minimum_indents", "(", "code", ")", "return", "indent_lines", "(", "code", ",", "(", "new_indents", "-", "min_indents", ")", ")" ]
change the indentation of code so that its minimum indentation becomes new_indents .
train
true
10,789
def morph_data_precomputed(subject_from, subject_to, stc_from, vertices_to, morph_mat): if (not sparse.issparse(morph_mat)): raise ValueError('morph_mat must be a sparse matrix') if ((not isinstance(vertices_to, list)) or (not (len(vertices_to) == 2))): raise ValueError('vertices_to must be a list of length 2') if (not (sum((len(v) for v in vertices_to)) == morph_mat.shape[0])): raise ValueError('number of vertices in vertices_to must match morph_mat.shape[0]') if (not (stc_from.data.shape[0] == morph_mat.shape[1])): raise ValueError('stc_from.data.shape[0] must be the same as morph_mat.shape[0]') if ((stc_from.subject is not None) and (stc_from.subject != subject_from)): raise ValueError('stc_from.subject and subject_from must match') data = (morph_mat * stc_from.data) stc_to = SourceEstimate(data, vertices_to, stc_from.tmin, stc_from.tstep, verbose=stc_from.verbose, subject=subject_to) return stc_to
[ "def", "morph_data_precomputed", "(", "subject_from", ",", "subject_to", ",", "stc_from", ",", "vertices_to", ",", "morph_mat", ")", ":", "if", "(", "not", "sparse", ".", "issparse", "(", "morph_mat", ")", ")", ":", "raise", "ValueError", "(", "'morph_mat must be a sparse matrix'", ")", "if", "(", "(", "not", "isinstance", "(", "vertices_to", ",", "list", ")", ")", "or", "(", "not", "(", "len", "(", "vertices_to", ")", "==", "2", ")", ")", ")", ":", "raise", "ValueError", "(", "'vertices_to must be a list of length 2'", ")", "if", "(", "not", "(", "sum", "(", "(", "len", "(", "v", ")", "for", "v", "in", "vertices_to", ")", ")", "==", "morph_mat", ".", "shape", "[", "0", "]", ")", ")", ":", "raise", "ValueError", "(", "'number of vertices in vertices_to must match morph_mat.shape[0]'", ")", "if", "(", "not", "(", "stc_from", ".", "data", ".", "shape", "[", "0", "]", "==", "morph_mat", ".", "shape", "[", "1", "]", ")", ")", ":", "raise", "ValueError", "(", "'stc_from.data.shape[0] must be the same as morph_mat.shape[0]'", ")", "if", "(", "(", "stc_from", ".", "subject", "is", "not", "None", ")", "and", "(", "stc_from", ".", "subject", "!=", "subject_from", ")", ")", ":", "raise", "ValueError", "(", "'stc_from.subject and subject_from must match'", ")", "data", "=", "(", "morph_mat", "*", "stc_from", ".", "data", ")", "stc_to", "=", "SourceEstimate", "(", "data", ",", "vertices_to", ",", "stc_from", ".", "tmin", ",", "stc_from", ".", "tstep", ",", "verbose", "=", "stc_from", ".", "verbose", ",", "subject", "=", "subject_to", ")", "return", "stc_to" ]
morph source estimate between subjects using a precomputed matrix .
train
false
10,790
def update_query_params(uri, params): parts = urllib.parse.urlparse(uri) query_params = parse_unique_urlencoded(parts.query) query_params.update(params) new_query = urllib.parse.urlencode(query_params) new_parts = parts._replace(query=new_query) return urllib.parse.urlunparse(new_parts)
[ "def", "update_query_params", "(", "uri", ",", "params", ")", ":", "parts", "=", "urllib", ".", "parse", ".", "urlparse", "(", "uri", ")", "query_params", "=", "parse_unique_urlencoded", "(", "parts", ".", "query", ")", "query_params", ".", "update", "(", "params", ")", "new_query", "=", "urllib", ".", "parse", ".", "urlencode", "(", "query_params", ")", "new_parts", "=", "parts", ".", "_replace", "(", "query", "=", "new_query", ")", "return", "urllib", ".", "parse", ".", "urlunparse", "(", "new_parts", ")" ]
updates a uri with new query parameters .
train
true
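update_query_params above calls a helper (parse_unique_urlencoded) that is not shown; a self-contained sketch of the same round-trip using only urllib.parse, with an illustrative URL and params:

import urllib.parse

def update_query_params_sketch(uri, params):
    parts = urllib.parse.urlparse(uri)
    query = dict(urllib.parse.parse_qsl(parts.query))  # stand-in for parse_unique_urlencoded
    query.update(params)
    return urllib.parse.urlunparse(parts._replace(query=urllib.parse.urlencode(query)))

print(update_query_params_sketch('https://example.com/cb?state=abc', {'code': 'xyz'}))
# https://example.com/cb?state=abc&code=xyz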
10,792
def copyfiles(filelist, dest, copy=False, create_new=False): outfiles = filename_to_list(dest) newfiles = [] for (i, f) in enumerate(filename_to_list(filelist)): if isinstance(f, list): newfiles.insert(i, copyfiles(f, dest, copy=copy, create_new=create_new)) else: if (len(outfiles) > 1): destfile = outfiles[i] else: destfile = fname_presuffix(f, newpath=outfiles[0]) destfile = copyfile(f, destfile, copy, create_new=create_new) newfiles.insert(i, destfile) return newfiles
[ "def", "copyfiles", "(", "filelist", ",", "dest", ",", "copy", "=", "False", ",", "create_new", "=", "False", ")", ":", "outfiles", "=", "filename_to_list", "(", "dest", ")", "newfiles", "=", "[", "]", "for", "(", "i", ",", "f", ")", "in", "enumerate", "(", "filename_to_list", "(", "filelist", ")", ")", ":", "if", "isinstance", "(", "f", ",", "list", ")", ":", "newfiles", ".", "insert", "(", "i", ",", "copyfiles", "(", "f", ",", "dest", ",", "copy", "=", "copy", ",", "create_new", "=", "create_new", ")", ")", "else", ":", "if", "(", "len", "(", "outfiles", ")", ">", "1", ")", ":", "destfile", "=", "outfiles", "[", "i", "]", "else", ":", "destfile", "=", "fname_presuffix", "(", "f", ",", "newpath", "=", "outfiles", "[", "0", "]", ")", "destfile", "=", "copyfile", "(", "f", ",", "destfile", ",", "copy", ",", "create_new", "=", "create_new", ")", "newfiles", ".", "insert", "(", "i", ",", "destfile", ")", "return", "newfiles" ]
copy or symlink files in filelist to dest directory .
train
false
10,795
def intToBytes(integer, length=None): hexString = ('%x' % (integer,)) if (length is None): n = len(hexString) else: n = (length * 2) return binascii.unhexlify(hexString.zfill((n + (n & 1))))
[ "def", "intToBytes", "(", "integer", ",", "length", "=", "None", ")", ":", "hexString", "=", "(", "'%x'", "%", "(", "integer", ",", ")", ")", "if", "(", "length", "is", "None", ")", ":", "n", "=", "len", "(", "hexString", ")", "else", ":", "n", "=", "(", "length", "*", "2", ")", "return", "binascii", ".", "unhexlify", "(", "hexString", ".", "zfill", "(", "(", "n", "+", "(", "n", "&", "1", ")", ")", ")", ")" ]
convert a python int to packed data .
train
false
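A self-contained sketch of the intToBytes logic above -- pad the hex form to an even (optionally fixed) number of digits, then unhexlify -- with Python 3's int.to_bytes as a cross-check:

import binascii

def int_to_bytes(integer, length=None):  # same logic as the snippet
    hex_string = '%x' % integer
    n = len(hex_string) if length is None else length * 2
    return binascii.unhexlify(hex_string.zfill(n + (n & 1)))

print(int_to_bytes(255))          # b'\xff'
print(int_to_bytes(1, length=4))  # b'\x00\x00\x00\x01'
print((255).to_bytes(1, 'big'))   # b'\xff' -- stdlib equivalent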
10,796
def calc_net(ipaddr, netmask=None): if (netmask is not None): ipaddr = '{0}/{1}'.format(ipaddr, netmask) return str(ipaddress.ip_network(ipaddr, strict=False))
[ "def", "calc_net", "(", "ipaddr", ",", "netmask", "=", "None", ")", ":", "if", "(", "netmask", "is", "not", "None", ")", ":", "ipaddr", "=", "'{0}/{1}'", ".", "format", "(", "ipaddr", ",", "netmask", ")", "return", "str", "(", "ipaddress", ".", "ip_network", "(", "ipaddr", ",", "strict", "=", "False", ")", ")" ]
returns the cidr of a subnet based on an ip address and optional netmask .
train
false
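calc_net above delegates to the ipaddress module, so the equivalent stdlib calls look like this (addresses are illustrative):

import ipaddress

print(str(ipaddress.ip_network('192.168.1.5/255.255.255.0', strict=False)))  # 192.168.1.0/24
print(str(ipaddress.ip_network('10.1.2.3/16', strict=False)))                # 10.1.0.0/16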
10,797
def create_imageattachment(files, user, obj): up_file = files.values()[0] check_file_size(up_file, settings.IMAGE_MAX_FILESIZE) (up_file, is_animated) = _image_to_png(up_file) image = ImageAttachment(content_object=obj, creator=user) image.file.save(up_file.name, File(up_file), save=True) generate_thumbnail.delay(image, 'file', 'thumbnail') if (not is_animated): compress_image.delay(image, 'file') (width, height) = _scale_dimensions(image.file.width, image.file.height) name = bleach.clean(up_file.name) return {'name': name, 'url': image.file.url, 'thumbnail_url': image.thumbnail_if_set().url, 'width': width, 'height': height, 'delete_url': image.get_delete_url()}
[ "def", "create_imageattachment", "(", "files", ",", "user", ",", "obj", ")", ":", "up_file", "=", "files", ".", "values", "(", ")", "[", "0", "]", "check_file_size", "(", "up_file", ",", "settings", ".", "IMAGE_MAX_FILESIZE", ")", "(", "up_file", ",", "is_animated", ")", "=", "_image_to_png", "(", "up_file", ")", "image", "=", "ImageAttachment", "(", "content_object", "=", "obj", ",", "creator", "=", "user", ")", "image", ".", "file", ".", "save", "(", "up_file", ".", "name", ",", "File", "(", "up_file", ")", ",", "save", "=", "True", ")", "generate_thumbnail", ".", "delay", "(", "image", ",", "'file'", ",", "'thumbnail'", ")", "if", "(", "not", "is_animated", ")", ":", "compress_image", ".", "delay", "(", "image", ",", "'file'", ")", "(", "width", ",", "height", ")", "=", "_scale_dimensions", "(", "image", ".", "file", ".", "width", ",", "image", ".", "file", ".", "height", ")", "name", "=", "bleach", ".", "clean", "(", "up_file", ".", "name", ")", "return", "{", "'name'", ":", "name", ",", "'url'", ":", "image", ".", "file", ".", "url", ",", "'thumbnail_url'", ":", "image", ".", "thumbnail_if_set", "(", ")", ".", "url", ",", "'width'", ":", "width", ",", "'height'", ":", "height", ",", "'delete_url'", ":", "image", ".", "get_delete_url", "(", ")", "}" ]
given an uploaded file, a user and an object, create an imageattachment owned by the user and attached to the object .
train
false
10,799
def from_array(a, mode=None, info={}): info = dict(info) bitdepth = None try: if mode.startswith('L'): gotmode = 'L' mode = mode[1:] elif mode.startswith('RGB'): gotmode = 'RGB' mode = mode[3:] else: raise Error() if mode.startswith('A'): gotmode += 'A' mode = mode[1:] while mode.startswith(';'): mode = mode[1:] if mode: try: bitdepth = int(mode) except (TypeError, ValueError): raise Error() except Error: raise Error("mode string should be 'RGB' or 'L;16' or similar.") mode = gotmode if bitdepth: if (info.get('bitdepth') and (bitdepth != info['bitdepth'])): raise Error(('mode bitdepth (%d) should match info bitdepth (%d).' % (bitdepth, info['bitdepth']))) info['bitdepth'] = bitdepth if ('size' in info): for (dimension, axis) in [('width', 0), ('height', 1)]: if (dimension in info): if (info[dimension] != info['size'][axis]): raise Error(("info[%r] should match info['size'][%r]." % (dimension, axis))) (info['width'], info['height']) = info['size'] if ('height' not in info): try: l = len(a) except TypeError: raise Error("len(a) does not work, supply info['height'] instead.") info['height'] = l if ('greyscale' in info): if (bool(info['greyscale']) != ('L' in mode)): raise Error("info['greyscale'] should match mode.") info['greyscale'] = ('L' in mode) if ('alpha' in info): if (bool(info['alpha']) != ('A' in mode)): raise Error("info['alpha'] should match mode.") info['alpha'] = ('A' in mode) planes = len(mode) if ('planes' in info): if (info['planes'] != planes): raise Error("info['planes'] should match mode.") (a, t) = itertools.tee(a) row = t.next() del t try: row[0][0] threed = True testelement = row[0] except (IndexError, TypeError): threed = False testelement = row if ('width' not in info): if threed: width = len(row) else: width = (len(row) // planes) info['width'] = width assert (not threed) if ('bitdepth' not in info): try: dtype = testelement.dtype except AttributeError: try: bitdepth = (8 * testelement.itemsize) except AttributeError: bitdepth = 8 else: if (dtype.kind == 'b'): bitdepth = 1 else: bitdepth = (8 * dtype.itemsize) info['bitdepth'] = bitdepth for thing in 'width height bitdepth greyscale alpha'.split(): assert (thing in info) return Image(a, info)
[ "def", "from_array", "(", "a", ",", "mode", "=", "None", ",", "info", "=", "{", "}", ")", ":", "info", "=", "dict", "(", "info", ")", "bitdepth", "=", "None", "try", ":", "if", "mode", ".", "startswith", "(", "'L'", ")", ":", "gotmode", "=", "'L'", "mode", "=", "mode", "[", "1", ":", "]", "elif", "mode", ".", "startswith", "(", "'RGB'", ")", ":", "gotmode", "=", "'RGB'", "mode", "=", "mode", "[", "3", ":", "]", "else", ":", "raise", "Error", "(", ")", "if", "mode", ".", "startswith", "(", "'A'", ")", ":", "gotmode", "+=", "'A'", "mode", "=", "mode", "[", "1", ":", "]", "while", "mode", ".", "startswith", "(", "';'", ")", ":", "mode", "=", "mode", "[", "1", ":", "]", "if", "mode", ":", "try", ":", "bitdepth", "=", "int", "(", "mode", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "Error", "(", ")", "except", "Error", ":", "raise", "Error", "(", "\"mode string should be 'RGB' or 'L;16' or similar.\"", ")", "mode", "=", "gotmode", "if", "bitdepth", ":", "if", "(", "info", ".", "get", "(", "'bitdepth'", ")", "and", "(", "bitdepth", "!=", "info", "[", "'bitdepth'", "]", ")", ")", ":", "raise", "Error", "(", "(", "'mode bitdepth (%d) should match info bitdepth (%d).'", "%", "(", "bitdepth", ",", "info", "[", "'bitdepth'", "]", ")", ")", ")", "info", "[", "'bitdepth'", "]", "=", "bitdepth", "if", "(", "'size'", "in", "info", ")", ":", "for", "(", "dimension", ",", "axis", ")", "in", "[", "(", "'width'", ",", "0", ")", ",", "(", "'height'", ",", "1", ")", "]", ":", "if", "(", "dimension", "in", "info", ")", ":", "if", "(", "info", "[", "dimension", "]", "!=", "info", "[", "'size'", "]", "[", "axis", "]", ")", ":", "raise", "Error", "(", "(", "\"info[%r] should match info['size'][%r].\"", "%", "(", "dimension", ",", "axis", ")", ")", ")", "(", "info", "[", "'width'", "]", ",", "info", "[", "'height'", "]", ")", "=", "info", "[", "'size'", "]", "if", "(", "'height'", "not", "in", "info", ")", ":", "try", ":", "l", "=", "len", "(", "a", ")", "except", "TypeError", ":", "raise", "Error", "(", "\"len(a) does not work, supply info['height'] instead.\"", ")", "info", "[", "'height'", "]", "=", "l", "if", "(", "'greyscale'", "in", "info", ")", ":", "if", "(", "bool", "(", "info", "[", "'greyscale'", "]", ")", "!=", "(", "'L'", "in", "mode", ")", ")", ":", "raise", "Error", "(", "\"info['greyscale'] should match mode.\"", ")", "info", "[", "'greyscale'", "]", "=", "(", "'L'", "in", "mode", ")", "if", "(", "'alpha'", "in", "info", ")", ":", "if", "(", "bool", "(", "info", "[", "'alpha'", "]", ")", "!=", "(", "'A'", "in", "mode", ")", ")", ":", "raise", "Error", "(", "\"info['alpha'] should match mode.\"", ")", "info", "[", "'alpha'", "]", "=", "(", "'A'", "in", "mode", ")", "planes", "=", "len", "(", "mode", ")", "if", "(", "'planes'", "in", "info", ")", ":", "if", "(", "info", "[", "'planes'", "]", "!=", "planes", ")", ":", "raise", "Error", "(", "\"info['planes'] should match mode.\"", ")", "(", "a", ",", "t", ")", "=", "itertools", ".", "tee", "(", "a", ")", "row", "=", "t", ".", "next", "(", ")", "del", "t", "try", ":", "row", "[", "0", "]", "[", "0", "]", "threed", "=", "True", "testelement", "=", "row", "[", "0", "]", "except", "(", "IndexError", ",", "TypeError", ")", ":", "threed", "=", "False", "testelement", "=", "row", "if", "(", "'width'", "not", "in", "info", ")", ":", "if", "threed", ":", "width", "=", "len", "(", "row", ")", "else", ":", "width", "=", "(", "len", "(", "row", ")", "//", "planes", ")", "info", "[", "'width'", "]", "=", "width", "assert", "(", "not", 
"threed", ")", "if", "(", "'bitdepth'", "not", "in", "info", ")", ":", "try", ":", "dtype", "=", "testelement", ".", "dtype", "except", "AttributeError", ":", "try", ":", "bitdepth", "=", "(", "8", "*", "testelement", ".", "itemsize", ")", "except", "AttributeError", ":", "bitdepth", "=", "8", "else", ":", "if", "(", "dtype", ".", "kind", "==", "'b'", ")", ":", "bitdepth", "=", "1", "else", ":", "bitdepth", "=", "(", "8", "*", "dtype", ".", "itemsize", ")", "info", "[", "'bitdepth'", "]", "=", "bitdepth", "for", "thing", "in", "'width height bitdepth greyscale alpha'", ".", "split", "(", ")", ":", "assert", "(", "thing", "in", "info", ")", "return", "Image", "(", "a", ",", "info", ")" ]
create a png image object from a 2- or 3-dimensional array .
train
true
10,801
def varmap(func, var, context=None, name=None): if (context is None): context = {} objid = id(var) if (objid in context): return func(name, '<...>') context[objid] = 1 if isinstance(var, dict): ret = dict(((k, varmap(func, v, context, k)) for (k, v) in iteritems(var))) elif isinstance(var, (list, tuple)): ret = [varmap(func, f, context, name) for f in var] else: ret = func(name, var) del context[objid] return ret
[ "def", "varmap", "(", "func", ",", "var", ",", "context", "=", "None", ",", "name", "=", "None", ")", ":", "if", "(", "context", "is", "None", ")", ":", "context", "=", "{", "}", "objid", "=", "id", "(", "var", ")", "if", "(", "objid", "in", "context", ")", ":", "return", "func", "(", "name", ",", "'<...>'", ")", "context", "[", "objid", "]", "=", "1", "if", "isinstance", "(", "var", ",", "dict", ")", ":", "ret", "=", "dict", "(", "(", "(", "k", ",", "varmap", "(", "func", ",", "v", ",", "context", ",", "k", ")", ")", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "var", ")", ")", ")", "elif", "isinstance", "(", "var", ",", "(", "list", ",", "tuple", ")", ")", ":", "ret", "=", "[", "varmap", "(", "func", ",", "f", ",", "context", ",", "name", ")", "for", "f", "in", "var", "]", "else", ":", "ret", "=", "func", "(", "name", ",", "var", ")", "del", "context", "[", "objid", "]", "return", "ret" ]
executes func on all values recursively, discovering dict and list scoped values .
train
true
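A Python 3 stand-in for varmap above (six-style iteritems replaced with dict.items), showing the recursive traversal over a nested structure:

def varmap_sketch(func, var, context=None, name=None):
    context = context if context is not None else {}
    objid = id(var)
    if objid in context:
        return func(name, '<...>')  # cycle guard, as in the snippet
    context[objid] = 1
    if isinstance(var, dict):
        ret = {k: varmap_sketch(func, v, context, k) for k, v in var.items()}
    elif isinstance(var, (list, tuple)):
        ret = [varmap_sketch(func, f, context, name) for f in var]
    else:
        ret = func(name, var)
    del context[objid]
    return ret

data = {'a': 1, 'b': [2, 3], 'c': {'d': 4}}
print(varmap_sketch(lambda name, v: v * 10 if isinstance(v, int) else v, data))
# {'a': 10, 'b': [20, 30], 'c': {'d': 40}}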
10,803
def memoize_faked(obj): cache = obj.cache = {} def memoizer(*args, **kwargs): key = (obj, args, frozenset(kwargs.items())) try: result = cache[key] except TypeError: return obj(*args, **kwargs) except KeyError: result = obj(*args, **kwargs) if (result is not None): cache[key] = obj(*args, **kwargs) return result else: return result return memoizer
[ "def", "memoize_faked", "(", "obj", ")", ":", "cache", "=", "obj", ".", "cache", "=", "{", "}", "def", "memoizer", "(", "*", "args", ",", "**", "kwargs", ")", ":", "key", "=", "(", "obj", ",", "args", ",", "frozenset", "(", "kwargs", ".", "items", "(", ")", ")", ")", "try", ":", "result", "=", "cache", "[", "key", "]", "except", "TypeError", ":", "return", "obj", "(", "*", "args", ",", "**", "kwargs", ")", "except", "KeyError", ":", "result", "=", "obj", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "result", "is", "not", "None", ")", ":", "cache", "[", "key", "]", "=", "obj", "(", "*", "args", ",", "**", "kwargs", ")", "return", "result", "else", ":", "return", "result", "return", "memoizer" ]
a typical memoize function that ignores issues with non hashable results .
train
false
10,804
def CDLDRAGONFLYDOJI(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLDRAGONFLYDOJI)
[ "def", "CDLDRAGONFLYDOJI", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLDRAGONFLYDOJI", ")" ]
dragonfly doji .
train
false
10,805
def deploy_password(username, password, host=None, admin_username=None, admin_password=None, module=None): return __execute_cmd('deploy -u {0} -p {1}'.format(username, password), host=host, admin_username=admin_username, admin_password=admin_password, module=module)
[ "def", "deploy_password", "(", "username", ",", "password", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ",", "module", "=", "None", ")", ":", "return", "__execute_cmd", "(", "'deploy -u {0} -p {1}'", ".", "format", "(", "username", ",", "password", ")", ",", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "admin_password", "=", "admin_password", ",", "module", "=", "module", ")" ]
change the quickdeploy password .
train
true
10,806
def ls_remote(cwd=None, remote='origin', ref=None, opts='', user=None, password=None, identity=None, https_user=None, https_pass=None, ignore_retcode=False, saltenv='base'): if (cwd is not None): cwd = _expand_path(cwd, user) try: remote = salt.utils.url.add_http_basic_auth(remote, https_user, https_pass, https_only=True) except ValueError as exc: raise SaltInvocationError(exc.__str__()) command = ['git', 'ls-remote'] command.extend(_format_opts(opts)) if (not isinstance(remote, six.string_types)): remote = str(remote) command.extend([remote]) if ref: if (not isinstance(ref, six.string_types)): ref = str(ref) command.extend([ref]) output = _git_run(command, cwd=cwd, user=user, password=password, identity=identity, ignore_retcode=ignore_retcode, saltenv=saltenv)['stdout'] ret = {} for line in output.splitlines(): try: (ref_sha1, ref_name) = line.split(None, 1) except IndexError: continue ret[ref_name] = ref_sha1 return ret
[ "def", "ls_remote", "(", "cwd", "=", "None", ",", "remote", "=", "'origin'", ",", "ref", "=", "None", ",", "opts", "=", "''", ",", "user", "=", "None", ",", "password", "=", "None", ",", "identity", "=", "None", ",", "https_user", "=", "None", ",", "https_pass", "=", "None", ",", "ignore_retcode", "=", "False", ",", "saltenv", "=", "'base'", ")", ":", "if", "(", "cwd", "is", "not", "None", ")", ":", "cwd", "=", "_expand_path", "(", "cwd", ",", "user", ")", "try", ":", "remote", "=", "salt", ".", "utils", ".", "url", ".", "add_http_basic_auth", "(", "remote", ",", "https_user", ",", "https_pass", ",", "https_only", "=", "True", ")", "except", "ValueError", "as", "exc", ":", "raise", "SaltInvocationError", "(", "exc", ".", "__str__", "(", ")", ")", "command", "=", "[", "'git'", ",", "'ls-remote'", "]", "command", ".", "extend", "(", "_format_opts", "(", "opts", ")", ")", "if", "(", "not", "isinstance", "(", "remote", ",", "six", ".", "string_types", ")", ")", ":", "remote", "=", "str", "(", "remote", ")", "command", ".", "extend", "(", "[", "remote", "]", ")", "if", "ref", ":", "if", "(", "not", "isinstance", "(", "ref", ",", "six", ".", "string_types", ")", ")", ":", "ref", "=", "str", "(", "ref", ")", "command", ".", "extend", "(", "[", "ref", "]", ")", "output", "=", "_git_run", "(", "command", ",", "cwd", "=", "cwd", ",", "user", "=", "user", ",", "password", "=", "password", ",", "identity", "=", "identity", ",", "ignore_retcode", "=", "ignore_retcode", ",", "saltenv", "=", "saltenv", ")", "[", "'stdout'", "]", "ret", "=", "{", "}", "for", "line", "in", "output", ".", "splitlines", "(", ")", ":", "try", ":", "(", "ref_sha1", ",", "ref_name", ")", "=", "line", ".", "split", "(", "None", ",", "1", ")", "except", "IndexError", ":", "continue", "ret", "[", "ref_name", "]", "=", "ref_sha1", "return", "ret" ]
list the refs in a remote .
train
true
10,811
@task def show_files(file, print_=True): if (file == 'source'): ret = local('tar tf release/{source}'.format(**tarball_formatter()), capture=True) elif (file == 'win'): raise NotImplementedError('Windows installers') elif (file == 'html'): ret = local('unzip -l release/{html}'.format(**tarball_formatter()), capture=True) else: raise ValueError((file + ' is not valid')) if print_: print(ret) return ret
[ "@", "task", "def", "show_files", "(", "file", ",", "print_", "=", "True", ")", ":", "if", "(", "file", "==", "'source'", ")", ":", "ret", "=", "local", "(", "'tar tf release/{source}'", ".", "format", "(", "**", "tarball_formatter", "(", ")", ")", ",", "capture", "=", "True", ")", "elif", "(", "file", "==", "'win'", ")", ":", "raise", "NotImplementedError", "(", "'Windows installers'", ")", "elif", "(", "file", "==", "'html'", ")", ":", "ret", "=", "local", "(", "'unzip -l release/{html}'", ".", "format", "(", "**", "tarball_formatter", "(", ")", ")", ",", "capture", "=", "True", ")", "else", ":", "raise", "ValueError", "(", "(", "file", "+", "' is not valid'", ")", ")", "if", "print_", ":", "print", "(", "ret", ")", "return", "ret" ]
show the contents of a tarball .
train
false
10,812
def _objects_eq(manager, list_): eq_(set(manager.all()), set(list_))
[ "def", "_objects_eq", "(", "manager", ",", "list_", ")", ":", "eq_", "(", "set", "(", "manager", ".", "all", "(", ")", ")", ",", "set", "(", "list_", ")", ")" ]
assert that the objects contained by manager are those in list_ .
train
false
10,814
def test_ensure_list_csv(): schema = vol.Schema(cv.ensure_list_csv) options = (None, 12, [], ['string'], 'string1,string2') for value in options: schema(value) assert (schema('string1, string2 ') == ['string1', 'string2'])
[ "def", "test_ensure_list_csv", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "ensure_list_csv", ")", "options", "=", "(", "None", ",", "12", ",", "[", "]", ",", "[", "'string'", "]", ",", "'string1,string2'", ")", "for", "value", "in", "options", ":", "schema", "(", "value", ")", "assert", "(", "schema", "(", "'string1, string2 '", ")", "==", "[", "'string1'", ",", "'string2'", "]", ")" ]
test ensure_list_csv .
train
false
10,815
def cluster_leave(username, hostname): ret = {'comment': '', 'success': False} cmd = __execute_cmd('riak-admin', 'cluster leave {0}@{1}'.format(username, hostname)) if (cmd['retcode'] != 0): ret['comment'] = cmd['stdout'] else: ret['comment'] = cmd['stdout'] ret['success'] = True return ret
[ "def", "cluster_leave", "(", "username", ",", "hostname", ")", ":", "ret", "=", "{", "'comment'", ":", "''", ",", "'success'", ":", "False", "}", "cmd", "=", "__execute_cmd", "(", "'riak-admin'", ",", "'cluster leave {0}@{1}'", ".", "format", "(", "username", ",", "hostname", ")", ")", "if", "(", "cmd", "[", "'retcode'", "]", "!=", "0", ")", ":", "ret", "[", "'comment'", "]", "=", "cmd", "[", "'stdout'", "]", "else", ":", "ret", "[", "'comment'", "]", "=", "cmd", "[", "'stdout'", "]", "ret", "[", "'success'", "]", "=", "True", "return", "ret" ]
leave a riak cluster .
train
true
10,821
def intcurve_series(vector_field, param, start_point, n=6, coord_sys=None, coeffs=False): if ((contravariant_order(vector_field) != 1) or covariant_order(vector_field)): raise ValueError('The supplied field was not a vector field.') def iter_vfield(scalar_field, i): 'Return ``vector_field`` called `i` times on ``scalar_field``.' return reduce((lambda s, v: v.rcall(s)), ([vector_field] * i), scalar_field) def taylor_terms_per_coord(coord_function): 'Return the series for one of the coordinates.' return [(((param ** i) * iter_vfield(coord_function, i).rcall(start_point)) / factorial(i)) for i in range(n)] coord_sys = (coord_sys if coord_sys else start_point._coord_sys) coord_functions = coord_sys.coord_functions() taylor_terms = [taylor_terms_per_coord(f) for f in coord_functions] if coeffs: return [Matrix(t) for t in zip(*taylor_terms)] else: return Matrix([sum(c) for c in taylor_terms])
[ "def", "intcurve_series", "(", "vector_field", ",", "param", ",", "start_point", ",", "n", "=", "6", ",", "coord_sys", "=", "None", ",", "coeffs", "=", "False", ")", ":", "if", "(", "(", "contravariant_order", "(", "vector_field", ")", "!=", "1", ")", "or", "covariant_order", "(", "vector_field", ")", ")", ":", "raise", "ValueError", "(", "'The supplied field was not a vector field.'", ")", "def", "iter_vfield", "(", "scalar_field", ",", "i", ")", ":", "return", "reduce", "(", "(", "lambda", "s", ",", "v", ":", "v", ".", "rcall", "(", "s", ")", ")", ",", "(", "[", "vector_field", "]", "*", "i", ")", ",", "scalar_field", ")", "def", "taylor_terms_per_coord", "(", "coord_function", ")", ":", "return", "[", "(", "(", "(", "param", "**", "i", ")", "*", "iter_vfield", "(", "coord_function", ",", "i", ")", ".", "rcall", "(", "start_point", ")", ")", "/", "factorial", "(", "i", ")", ")", "for", "i", "in", "range", "(", "n", ")", "]", "coord_sys", "=", "(", "coord_sys", "if", "coord_sys", "else", "start_point", ".", "_coord_sys", ")", "coord_functions", "=", "coord_sys", ".", "coord_functions", "(", ")", "taylor_terms", "=", "[", "taylor_terms_per_coord", "(", "f", ")", "for", "f", "in", "coord_functions", "]", "if", "coeffs", ":", "return", "[", "Matrix", "(", "t", ")", "for", "t", "in", "zip", "(", "*", "taylor_terms", ")", "]", "else", ":", "return", "Matrix", "(", "[", "sum", "(", "c", ")", "for", "c", "in", "taylor_terms", "]", ")" ]
return the series expansion for an integral curve of the field .
train
false
10,822
def _af_commutes_with(a, b): return (not any(((a[b[i]] != b[a[i]]) for i in range((len(a) - 1)))))
[ "def", "_af_commutes_with", "(", "a", ",", "b", ")", ":", "return", "(", "not", "any", "(", "(", "(", "a", "[", "b", "[", "i", "]", "]", "!=", "b", "[", "a", "[", "i", "]", "]", ")", "for", "i", "in", "range", "(", "(", "len", "(", "a", ")", "-", "1", ")", ")", ")", ")", ")" ]
checks if the two permutations with array forms given by a and b commute .
train
false
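A self-contained sketch of the array-form commutation check above; a[b[i]] applies b then a, so equality with b[a[i]] at every position means the two compositions agree (the last index can be skipped because a permutation that agrees on all other points is forced to agree there too):

def af_commutes_with(a, b):  # same check as the snippet
    return not any(a[b[i]] != b[a[i]] for i in range(len(a) - 1))

print(af_commutes_with([0, 1, 2], [2, 0, 1]))  # True  -- the identity commutes with anything
print(af_commutes_with([1, 0, 2], [0, 2, 1]))  # False -- the transpositions (0 1) and (1 2) do not commute
print(af_commutes_with([1, 0, 2], [1, 0, 2]))  # True  -- every permutation commutes with itself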
10,823
def transfer_get_all(context): return IMPL.transfer_get_all(context)
[ "def", "transfer_get_all", "(", "context", ")", ":", "return", "IMPL", ".", "transfer_get_all", "(", "context", ")" ]
get all volume transfer records .
train
false
10,825
def deregister_instances(name, instances, region=None, key=None, keyid=None, profile=None): if (isinstance(instances, str) or isinstance(instances, six.text_type)): instances = [instances] conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: registered_instances = conn.deregister_instances(name, instances) except boto.exception.BotoServerError as error: if (error.error_code == 'InvalidInstance'): log.warning('One or more of instance(s) {0} are not part of ELB {1}. deregister_instances not performed.'.format(instances, name)) return None else: log.warning(error) return False registered_instance_ids = [instance.id for instance in registered_instances] deregister_failures = set(instances).intersection(set(registered_instance_ids)) if deregister_failures: log.warning('Instance(s): {0} not deregistered from ELB {1}.'.format(list(deregister_failures), name)) deregister_result = False else: deregister_result = True return deregister_result
[ "def", "deregister_instances", "(", "name", ",", "instances", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "(", "isinstance", "(", "instances", ",", "str", ")", "or", "isinstance", "(", "instances", ",", "six", ".", "text_type", ")", ")", ":", "instances", "=", "[", "instances", "]", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "registered_instances", "=", "conn", ".", "deregister_instances", "(", "name", ",", "instances", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "error", ":", "if", "(", "error", ".", "error_code", "==", "'InvalidInstance'", ")", ":", "log", ".", "warning", "(", "'One or more of instance(s) {0} are not part of ELB {1}. deregister_instances not performed.'", ".", "format", "(", "instances", ",", "name", ")", ")", "return", "None", "else", ":", "log", ".", "warning", "(", "error", ")", "return", "False", "registered_instance_ids", "=", "[", "instance", ".", "id", "for", "instance", "in", "registered_instances", "]", "deregister_failures", "=", "set", "(", "instances", ")", ".", "intersection", "(", "set", "(", "registered_instance_ids", ")", ")", "if", "deregister_failures", ":", "log", ".", "warning", "(", "'Instance(s): {0} not deregistered from ELB {1}.'", ".", "format", "(", "list", "(", "deregister_failures", ")", ",", "name", ")", ")", "deregister_result", "=", "False", "else", ":", "deregister_result", "=", "True", "return", "deregister_result" ]
deregister instances with an elb .
train
true
10,828
def parseRequest_window_change(data): (cols, rows, xpixel, ypixel) = struct.unpack('>4L', data) return (rows, cols, xpixel, ypixel)
[ "def", "parseRequest_window_change", "(", "data", ")", ":", "(", "cols", ",", "rows", ",", "xpixel", ",", "ypixel", ")", "=", "struct", ".", "unpack", "(", "'>4L'", ",", "data", ")", "return", "(", "rows", ",", "cols", ",", "xpixel", ",", "ypixel", ")" ]
parse the data from a window-change request into usable data .
train
false
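A runnable round-trip sketch: the SSH window-change payload packs four big-endian 32-bit fields (columns, rows, xpixel, ypixel), and the snippet reorders them to (rows, cols, xpixel, ypixel):

import struct

def parse_window_change(data):  # same unpacking as the snippet
    cols, rows, xpixel, ypixel = struct.unpack('>4L', data)
    return rows, cols, xpixel, ypixel

payload = struct.pack('>4L', 80, 24, 640, 480)  # illustrative terminal geometry
print(parse_window_change(payload))  # (24, 80, 640, 480)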
10,829
def default_access_fn(user, url_name, url_args=None, url_kwargs=None): exception = ImproperlyConfigured("Please follow Oscar's default dashboard app layout or set a custom access_fn") if (url_name is None): return True try: url = reverse(url_name, args=url_args, kwargs=url_kwargs) view_module = resolve(url).func.__module__ except (NoReverseMatch, Http404): return False match = re.search('(dashboard[\\w\\.]*)\\.views$', view_module) if (not match): raise exception app_label_str = (match.groups()[0] + '.app') try: app_instance = get_class(app_label_str, 'application') except AppNotFoundError: raise exception if (':' in url_name): view_name = url_name.split(':')[1] else: view_name = url_name permissions = app_instance.get_permissions(view_name) return check_permissions(user, permissions)
[ "def", "default_access_fn", "(", "user", ",", "url_name", ",", "url_args", "=", "None", ",", "url_kwargs", "=", "None", ")", ":", "exception", "=", "ImproperlyConfigured", "(", "\"Please follow Oscar's default dashboard app layout or set a custom access_fn\"", ")", "if", "(", "url_name", "is", "None", ")", ":", "return", "True", "try", ":", "url", "=", "reverse", "(", "url_name", ",", "args", "=", "url_args", ",", "kwargs", "=", "url_kwargs", ")", "view_module", "=", "resolve", "(", "url", ")", ".", "func", ".", "__module__", "except", "(", "NoReverseMatch", ",", "Http404", ")", ":", "return", "False", "match", "=", "re", ".", "search", "(", "'(dashboard[\\\\w\\\\.]*)\\\\.views$'", ",", "view_module", ")", "if", "(", "not", "match", ")", ":", "raise", "exception", "app_label_str", "=", "(", "match", ".", "groups", "(", ")", "[", "0", "]", "+", "'.app'", ")", "try", ":", "app_instance", "=", "get_class", "(", "app_label_str", ",", "'application'", ")", "except", "AppNotFoundError", ":", "raise", "exception", "if", "(", "':'", "in", "url_name", ")", ":", "view_name", "=", "url_name", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "view_name", "=", "url_name", "permissions", "=", "app_instance", ".", "get_permissions", "(", "view_name", ")", "return", "check_permissions", "(", "user", ",", "permissions", ")" ]
given a url_name and a user, assess whether the user has permission to access the corresponding dashboard view .
train
false
10,830
def marshal_write_response(document, resource): resource_def = app.config['DOMAIN'][resource] if (app.config['BANDWIDTH_SAVER'] is True): fields = (auto_fields(resource) + resource_def['extra_response_fields']) document = dict(((k, v) for (k, v) in document.items() if (k in fields))) else: auth_field = resource_def.get('auth_field') if (auth_field and (auth_field not in resource_def['schema'])): try: del document[auth_field] except: pass return document
[ "def", "marshal_write_response", "(", "document", ",", "resource", ")", ":", "resource_def", "=", "app", ".", "config", "[", "'DOMAIN'", "]", "[", "resource", "]", "if", "(", "app", ".", "config", "[", "'BANDWIDTH_SAVER'", "]", "is", "True", ")", ":", "fields", "=", "(", "auto_fields", "(", "resource", ")", "+", "resource_def", "[", "'extra_response_fields'", "]", ")", "document", "=", "dict", "(", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "document", ".", "items", "(", ")", "if", "(", "k", "in", "fields", ")", ")", ")", "else", ":", "auth_field", "=", "resource_def", ".", "get", "(", "'auth_field'", ")", "if", "(", "auth_field", "and", "(", "auth_field", "not", "in", "resource_def", "[", "'schema'", "]", ")", ")", ":", "try", ":", "del", "document", "[", "auth_field", "]", "except", ":", "pass", "return", "document" ]
limit response document to minimize bandwidth when client supports it .
train
false
10,833
def get_log_for_svn_txn(repo_path, txn_id): return subproc_check_output(['svnlook', 'log', repo_path, '-t', txn_id])
[ "def", "get_log_for_svn_txn", "(", "repo_path", ",", "txn_id", ")", ":", "return", "subproc_check_output", "(", "[", "'svnlook'", ",", "'log'", ",", "repo_path", ",", "'-t'", ",", "txn_id", "]", ")" ]
returns svn log for an svn transaction .
train
false
10,834
def get_all_formatters(): for formatter in FORMATTERS: (yield formatter) for (_, formatter) in find_plugin_formatters(): (yield formatter)
[ "def", "get_all_formatters", "(", ")", ":", "for", "formatter", "in", "FORMATTERS", ":", "(", "yield", "formatter", ")", "for", "(", "_", ",", "formatter", ")", "in", "find_plugin_formatters", "(", ")", ":", "(", "yield", "formatter", ")" ]
return a generator for all formatters .
train
false
10,835
def CurrentColumn(): return vim.current.window.cursor[1]
[ "def", "CurrentColumn", "(", ")", ":", "return", "vim", ".", "current", ".", "window", ".", "cursor", "[", "1", "]" ]
returns the 0-based current column .
train
false
10,836
def safe_make_node(op, *inputs): node = op(*inputs) if isinstance(node, list): return node[0].owner else: return node.owner
[ "def", "safe_make_node", "(", "op", ",", "*", "inputs", ")", ":", "node", "=", "op", "(", "*", "inputs", ")", "if", "isinstance", "(", "node", ",", "list", ")", ":", "return", "node", "[", "0", "]", ".", "owner", "else", ":", "return", "node", ".", "owner" ]
emulate the behaviour of make_node when op is a function .
train
false
10,837
@gen.coroutine def GetFileList(merged_store, marker): base_path = 'processed_data/device_details/' marker = os.path.join(base_path, marker) file_list = (yield gen.Task(store_utils.ListAllKeys, merged_store, prefix=base_path, marker=marker)) file_list.sort() raise gen.Return(file_list)
[ "@", "gen", ".", "coroutine", "def", "GetFileList", "(", "merged_store", ",", "marker", ")", ":", "base_path", "=", "'processed_data/device_details/'", "marker", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "marker", ")", "file_list", "=", "(", "yield", "gen", ".", "Task", "(", "store_utils", ".", "ListAllKeys", ",", "merged_store", ",", "prefix", "=", "base_path", ",", "marker", "=", "marker", ")", ")", "file_list", ".", "sort", "(", ")", "raise", "gen", ".", "Return", "(", "file_list", ")" ]
fetch the list of file names from s3 .
train
false
10,838
def html_escape(s): if (s is None): return '' if (not isinstance(s, basestring)): if hasattr(s, '__unicode__'): s = unicode(s) else: s = str(s) s = cgi.escape(s, True) if isinstance(s, unicode): s = s.encode('ascii', 'xmlcharrefreplace') return s
[ "def", "html_escape", "(", "s", ")", ":", "if", "(", "s", "is", "None", ")", ":", "return", "''", "if", "(", "not", "isinstance", "(", "s", ",", "basestring", ")", ")", ":", "if", "hasattr", "(", "s", ",", "'__unicode__'", ")", ":", "s", "=", "unicode", "(", "s", ")", "else", ":", "s", "=", "str", "(", "s", ")", "s", "=", "cgi", ".", "escape", "(", "s", ",", "True", ")", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "s", "=", "s", ".", "encode", "(", "'ascii'", ",", "'xmlcharrefreplace'", ")", "return", "s" ]
an html escape function that behaves the same in both python 2 and 3 .
train
false
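html_escape above leans on Python 2's cgi.escape; on Python 3 the closest stdlib equivalent is html.escape, which quotes the same characters plus single quotes. An illustrative call:

import html

print(html.escape('<a href="x">&</a>', quote=True))
# &lt;a href=&quot;x&quot;&gt;&amp;&lt;/a&gt;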
10,839
def HTTPResponse__getheaders(self): if (self.msg is None): raise httplib.ResponseNotReady() return self.msg.items()
[ "def", "HTTPResponse__getheaders", "(", "self", ")", ":", "if", "(", "self", ".", "msg", "is", "None", ")", ":", "raise", "httplib", ".", "ResponseNotReady", "(", ")", "return", "self", ".", "msg", ".", "items", "(", ")" ]
return the response headers as a list of (header, value) tuples .
train
false
10,840
def hexists(key, field, host=None, port=None, db=None, password=None): server = _connect(host, port, db, password) return server.hexists(key, field)
[ "def", "hexists", "(", "key", ",", "field", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "hexists", "(", "key", ",", "field", ")" ]
determine if a hash field exists .
train
true
10,842
def serialize_entry(data=None, **kwargs): data = (data or {}) entry_dict = copy.copy(data) entry_dict.update(**kwargs) result = {} for (k, v) in entry_dict.items(): if (k == 'source'): qualifier = encode_unicode(('f:s_%s' % v)) result[qualifier] = dump('1') elif (k == 'meter'): for (meter, ts) in v.items(): qualifier = encode_unicode(('f:m_%s' % meter)) result[qualifier] = dump(ts) elif (k == 'resource_metadata'): flattened_meta = dump_metadata(v) for (key, m) in flattened_meta.items(): metadata_qualifier = encode_unicode(('f:r_metadata.' + key)) result[metadata_qualifier] = dump(m) result['f:resource_metadata'] = dump(v) else: result[('f:' + quote(k, ':'))] = dump(v) return result
[ "def", "serialize_entry", "(", "data", "=", "None", ",", "**", "kwargs", ")", ":", "data", "=", "(", "data", "or", "{", "}", ")", "entry_dict", "=", "copy", ".", "copy", "(", "data", ")", "entry_dict", ".", "update", "(", "**", "kwargs", ")", "result", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "entry_dict", ".", "items", "(", ")", ":", "if", "(", "k", "==", "'source'", ")", ":", "qualifier", "=", "encode_unicode", "(", "(", "'f:s_%s'", "%", "v", ")", ")", "result", "[", "qualifier", "]", "=", "dump", "(", "'1'", ")", "elif", "(", "k", "==", "'meter'", ")", ":", "for", "(", "meter", ",", "ts", ")", "in", "v", ".", "items", "(", ")", ":", "qualifier", "=", "encode_unicode", "(", "(", "'f:m_%s'", "%", "meter", ")", ")", "result", "[", "qualifier", "]", "=", "dump", "(", "ts", ")", "elif", "(", "k", "==", "'resource_metadata'", ")", ":", "flattened_meta", "=", "dump_metadata", "(", "v", ")", "for", "(", "key", ",", "m", ")", "in", "flattened_meta", ".", "items", "(", ")", ":", "metadata_qualifier", "=", "encode_unicode", "(", "(", "'f:r_metadata.'", "+", "key", ")", ")", "result", "[", "metadata_qualifier", "]", "=", "dump", "(", "m", ")", "result", "[", "'f:resource_metadata'", "]", "=", "dump", "(", "v", ")", "else", ":", "result", "[", "(", "'f:'", "+", "quote", "(", "k", ",", "':'", ")", ")", "]", "=", "dump", "(", "v", ")", "return", "result" ]
return a dict that is ready to be stored to hbase .
train
false
10,843
@condition(etag_func=(lambda r: None)) def etag_view_none(request): return HttpResponse(FULL_RESPONSE)
[ "@", "condition", "(", "etag_func", "=", "(", "lambda", "r", ":", "None", ")", ")", "def", "etag_view_none", "(", "request", ")", ":", "return", "HttpResponse", "(", "FULL_RESPONSE", ")" ]
use an etag_func() that returns none .
train
false
10,845
def run_matlab_job(job): log('Running matlab job.') job_file = job_file_for(job) function_call = ("matlab_wrapper('%s'),quit;" % job_file) matlab_cmd = ('matlab -nosplash -nodesktop -r "%s"' % function_call) log(matlab_cmd) sh(matlab_cmd)
[ "def", "run_matlab_job", "(", "job", ")", ":", "log", "(", "'Running matlab job.'", ")", "job_file", "=", "job_file_for", "(", "job", ")", "function_call", "=", "(", "\"matlab_wrapper('%s'),quit;\"", "%", "job_file", ")", "matlab_cmd", "=", "(", "'matlab -nosplash -nodesktop -r \"%s\"'", "%", "function_call", ")", "log", "(", "matlab_cmd", ")", "sh", "(", "matlab_cmd", ")" ]
run the job as a matlab function .
train
false
10,846
def settrace(func): assert 0, 'threading.settrace not supported in Pyston yet' global _trace_hook _trace_hook = func
[ "def", "settrace", "(", "func", ")", ":", "assert", "0", ",", "'threading.settrace not supported in Pyston yet'", "global", "_trace_hook", "_trace_hook", "=", "func" ]
set a trace function for all threads started from the threading module .
train
false
10,847
def N(x, n=15, **options): return sympify(x).evalf(n, **options)
[ "def", "N", "(", "x", ",", "n", "=", "15", ",", "**", "options", ")", ":", "return", "sympify", "(", "x", ")", ".", "evalf", "(", "n", ",", "**", "options", ")" ]
calls x.evalf(n, **options) .
train
false
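A short usage sketch of SymPy's N, which is just sympify(x).evalf(n, **options):

from sympy import N, pi, sqrt

print(N(pi, 10))      # 3.141592654
print(N(sqrt(2), 5))  # 1.4142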
10,848
@decorator def rollback_open_connections(fn, *args, **kw): try: fn(*args, **kw) finally: testing_reaper.rollback_all()
[ "@", "decorator", "def", "rollback_open_connections", "(", "fn", ",", "*", "args", ",", "**", "kw", ")", ":", "try", ":", "fn", "(", "*", "args", ",", "**", "kw", ")", "finally", ":", "testing_reaper", ".", "rollback_all", "(", ")" ]
decorator that rolls back all open connections after fn execution .
train
false
10,849
def get_iter_string_reader(stdin): bufsize = 1024 iter_str = (stdin[i:(i + bufsize)] for i in range(0, len(stdin), bufsize)) return get_iter_chunk_reader(iter_str)
[ "def", "get_iter_string_reader", "(", "stdin", ")", ":", "bufsize", "=", "1024", "iter_str", "=", "(", "stdin", "[", "i", ":", "(", "i", "+", "bufsize", ")", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "stdin", ")", ",", "bufsize", ")", ")", "return", "get_iter_chunk_reader", "(", "iter_str", ")" ]
return an iterator that returns a chunk of a string every time it is called .
train
true
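The generator get_iter_string_reader feeds into get_iter_chunk_reader is plain fixed-size slicing; a standalone sketch of that chunking pattern (bufsize shrunk from the snippet's 1024 for display):

stdin = 'abcdefghij'
bufsize = 4
chunks = (stdin[i:i + bufsize] for i in range(0, len(stdin), bufsize))
print(list(chunks))  # ['abcd', 'efgh', 'ij']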
10,851
@pytest.mark.parametrize('args', [['dir1', 'dir2', '-v'], ['dir1', '-v', 'dir2'], ['dir2', '-v', 'dir1'], ['-v', 'dir2', 'dir1']]) def test_consider_args_after_options_for_rootdir_and_inifile(testdir, args): root = testdir.tmpdir.mkdir('myroot') d1 = root.mkdir('dir1') d2 = root.mkdir('dir2') for (i, arg) in enumerate(args): if (arg == 'dir1'): args[i] = d1 elif (arg == 'dir2'): args[i] = d2 with root.as_cwd(): result = testdir.runpytest(*args) result.stdout.fnmatch_lines(['*rootdir: *myroot, inifile: '])
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'args'", ",", "[", "[", "'dir1'", ",", "'dir2'", ",", "'-v'", "]", ",", "[", "'dir1'", ",", "'-v'", ",", "'dir2'", "]", ",", "[", "'dir2'", ",", "'-v'", ",", "'dir1'", "]", ",", "[", "'-v'", ",", "'dir2'", ",", "'dir1'", "]", "]", ")", "def", "test_consider_args_after_options_for_rootdir_and_inifile", "(", "testdir", ",", "args", ")", ":", "root", "=", "testdir", ".", "tmpdir", ".", "mkdir", "(", "'myroot'", ")", "d1", "=", "root", ".", "mkdir", "(", "'dir1'", ")", "d2", "=", "root", ".", "mkdir", "(", "'dir2'", ")", "for", "(", "i", ",", "arg", ")", "in", "enumerate", "(", "args", ")", ":", "if", "(", "arg", "==", "'dir1'", ")", ":", "args", "[", "i", "]", "=", "d1", "elif", "(", "arg", "==", "'dir2'", ")", ":", "args", "[", "i", "]", "=", "d2", "with", "root", ".", "as_cwd", "(", ")", ":", "result", "=", "testdir", ".", "runpytest", "(", "*", "args", ")", "result", ".", "stdout", ".", "fnmatch_lines", "(", "[", "'*rootdir: *myroot, inifile: '", "]", ")" ]
consider all arguments in the command-line for rootdir and inifile discovery .
train
false
10,852
@contextmanager def warnings_state(module): global __warningregistry__ for to_clear in (sys, warning_tests): try: to_clear.__warningregistry__.clear() except AttributeError: pass try: __warningregistry__.clear() except NameError: pass original_warnings = warning_tests.warnings try: warning_tests.warnings = module (yield) finally: warning_tests.warnings = original_warnings
[ "@", "contextmanager", "def", "warnings_state", "(", "module", ")", ":", "global", "__warningregistry__", "for", "to_clear", "in", "(", "sys", ",", "warning_tests", ")", ":", "try", ":", "to_clear", ".", "__warningregistry__", ".", "clear", "(", ")", "except", "AttributeError", ":", "pass", "try", ":", "__warningregistry__", ".", "clear", "(", ")", "except", "NameError", ":", "pass", "original_warnings", "=", "warning_tests", ".", "warnings", "try", ":", "warning_tests", ".", "warnings", "=", "module", "(", "yield", ")", "finally", ":", "warning_tests", ".", "warnings", "=", "original_warnings" ]
use a specific warnings implementation in warning_tests .
train
false
10,855
def getNextVersion(version, now=None): if (now is None): now = date.today() major = (now.year - VERSION_OFFSET) if (major != version.major): minor = 0 else: minor = (version.minor + 1) return Version(version.package, major, minor, 0)
[ "def", "getNextVersion", "(", "version", ",", "now", "=", "None", ")", ":", "if", "(", "now", "is", "None", ")", ":", "now", "=", "date", ".", "today", "(", ")", "major", "=", "(", "now", ".", "year", "-", "VERSION_OFFSET", ")", "if", "(", "major", "!=", "version", ".", "major", ")", ":", "minor", "=", "0", "else", ":", "minor", "=", "(", "version", ".", "minor", "+", "1", ")", "return", "Version", "(", "version", ".", "package", ",", "major", ",", "minor", ",", "0", ")" ]
calculate the version number for a new release of twisted based on the previous version number .
train
false
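A self-contained sketch of getNextVersion's date-based rule; Version is modeled as a namedtuple and VERSION_OFFSET = 2000 is an assumption (Twisted numbers majors as year minus an offset):

from collections import namedtuple
from datetime import date

Version = namedtuple('Version', 'package major minor micro')
VERSION_OFFSET = 2000  # assumed offset

def get_next_version(version, now=None):  # same rule as the snippet
    now = now or date.today()
    major = now.year - VERSION_OFFSET
    minor = 0 if major != version.major else version.minor + 1
    return Version(version.package, major, minor, 0)

print(get_next_version(Version('Twisted', 12, 3, 0), date(2013, 5, 1)))  # new year -> major 13, minor 0
print(get_next_version(Version('Twisted', 13, 0, 0), date(2013, 8, 1)))  # same year -> major 13, minor 1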
10,857
@image_comparison(baseline_images=[u'legend_auto2'], remove_text=True) def test_legend_auto2(): fig = plt.figure() ax = fig.add_subplot(111) x = np.arange(100) b1 = ax.bar(x, x, color=u'm') b2 = ax.bar(x, x[::(-1)], color=u'g') ax.legend([b1[0], b2[0]], [u'up', u'down'], loc=0)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'legend_auto2'", "]", ",", "remove_text", "=", "True", ")", "def", "test_legend_auto2", "(", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "x", "=", "np", ".", "arange", "(", "100", ")", "b1", "=", "ax", ".", "bar", "(", "x", ",", "x", ",", "color", "=", "u'm'", ")", "b2", "=", "ax", ".", "bar", "(", "x", ",", "x", "[", ":", ":", "(", "-", "1", ")", "]", ",", "color", "=", "u'g'", ")", "ax", ".", "legend", "(", "[", "b1", "[", "0", "]", ",", "b2", "[", "0", "]", "]", ",", "[", "u'up'", ",", "u'down'", "]", ",", "loc", "=", "0", ")" ]
test automatic legend placement .
train
false
10,859
def budget_staff(): s3.prep = (lambda r: (r.representation == 's3json')) return s3_rest_controller()
[ "def", "budget_staff", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", "return", "s3_rest_controller", "(", ")" ]
rest controller to retrieve budget_budget_staff field options .
train
false
10,860
def coverage_error(y_true, y_score, sample_weight=None): y_true = check_array(y_true, ensure_2d=False) y_score = check_array(y_score, ensure_2d=False) check_consistent_length(y_true, y_score, sample_weight) y_type = type_of_target(y_true) if (y_type != 'multilabel-indicator'): raise ValueError('{0} format is not supported'.format(y_type)) if (y_true.shape != y_score.shape): raise ValueError('y_true and y_score have different shape') y_score_mask = np.ma.masked_array(y_score, mask=np.logical_not(y_true)) y_min_relevant = y_score_mask.min(axis=1).reshape(((-1), 1)) coverage = (y_score >= y_min_relevant).sum(axis=1) coverage = coverage.filled(0) return np.average(coverage, weights=sample_weight)
[ "def", "coverage_error", "(", "y_true", ",", "y_score", ",", "sample_weight", "=", "None", ")", ":", "y_true", "=", "check_array", "(", "y_true", ",", "ensure_2d", "=", "False", ")", "y_score", "=", "check_array", "(", "y_score", ",", "ensure_2d", "=", "False", ")", "check_consistent_length", "(", "y_true", ",", "y_score", ",", "sample_weight", ")", "y_type", "=", "type_of_target", "(", "y_true", ")", "if", "(", "y_type", "!=", "'multilabel-indicator'", ")", ":", "raise", "ValueError", "(", "'{0} format is not supported'", ".", "format", "(", "y_type", ")", ")", "if", "(", "y_true", ".", "shape", "!=", "y_score", ".", "shape", ")", ":", "raise", "ValueError", "(", "'y_true and y_score have different shape'", ")", "y_score_mask", "=", "np", ".", "ma", ".", "masked_array", "(", "y_score", ",", "mask", "=", "np", ".", "logical_not", "(", "y_true", ")", ")", "y_min_relevant", "=", "y_score_mask", ".", "min", "(", "axis", "=", "1", ")", ".", "reshape", "(", "(", "(", "-", "1", ")", ",", "1", ")", ")", "coverage", "=", "(", "y_score", ">=", "y_min_relevant", ")", ".", "sum", "(", "axis", "=", "1", ")", "coverage", "=", "coverage", ".", "filled", "(", "0", ")", "return", "np", ".", "average", "(", "coverage", ",", "weights", "=", "sample_weight", ")" ]
the coverage error measure computes how far we need to go through the ranked scores to cover all true labels .
train
false
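A usage sketch with scikit-learn's public coverage_error; in this classic two-sample example the value 2.5 means that, on average, the top 2.5 ranked labels must be inspected to cover every true label:

import numpy as np
from sklearn.metrics import coverage_error

y_true = np.array([[1, 0, 0], [0, 0, 1]])
y_score = np.array([[0.75, 0.5, 1.0], [1.0, 0.2, 0.1]])
print(coverage_error(y_true, y_score))  # 2.5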