id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
14,910
def redirected(url, location):
    """Return True if *url* answers with a 301/302 redirect to *location*."""
    response = requests.get(url, allow_redirects=False)
    is_redirect_status = response.status_code in (301, 302)
    points_at_target = ('location' in response.headers
                        and response.headers['location'] == location)
    return is_redirect_status and points_at_target
[ "def", "redirected", "(", "url", ",", "location", ")", ":", "r", "=", "requests", ".", "get", "(", "url", ",", "allow_redirects", "=", "False", ")", "status", "=", "(", "r", ".", "status_code", "in", "(", "301", ",", "302", ")", ")", "redirect", "=", "(", "(", "'location'", "in", "r", ".", "headers", ")", "and", "(", "r", ".", "headers", "[", "'location'", "]", "==", "location", ")", ")", "return", "all", "(", "[", "status", ",", "redirect", "]", ")" ]
return true if url is redirected to location .
train
false
14,911
def split_domain(domain, split_label):
    """Split *domain* into sub-domains grouped by the value of *split_label*.

    Returns a list of (value, Domain) pairs, ordered by the first appearance
    of each label value among the domain's attributes.  All groups are
    asserted to have the same size.
    """
    groups = defaultdict(list)
    for attr in domain.attributes:
        groups[attr.attributes.get(split_label)].append(attr)
    # Order of first appearance, used for the final sort.
    attr_values = [attr.attributes.get(split_label) for attr in domain.attributes]
    domains = [(value, Domain(attrs, domain.class_vars, domain.metas))
               for value, attrs in groups.items()]
    if domains:
        first_len = len(domains[0][1])
        assert all(len(dom) == first_len for _, dom in domains)
    return sorted(domains, key=lambda pair: attr_values.index(pair[0]))
[ "def", "split_domain", "(", "domain", ",", "split_label", ")", ":", "groups", "=", "defaultdict", "(", "list", ")", "for", "attr", "in", "domain", ".", "attributes", ":", "groups", "[", "attr", ".", "attributes", ".", "get", "(", "split_label", ")", "]", ".", "append", "(", "attr", ")", "attr_values", "=", "[", "attr", ".", "attributes", ".", "get", "(", "split_label", ")", "for", "attr", "in", "domain", ".", "attributes", "]", "domains", "=", "[", "]", "for", "(", "value", ",", "attrs", ")", "in", "groups", ".", "items", "(", ")", ":", "group_domain", "=", "Domain", "(", "attrs", ",", "domain", ".", "class_vars", ",", "domain", ".", "metas", ")", "domains", ".", "append", "(", "(", "value", ",", "group_domain", ")", ")", "if", "domains", ":", "assert", "all", "(", "(", "(", "len", "(", "dom", ")", "==", "len", "(", "domains", "[", "0", "]", "[", "1", "]", ")", ")", "for", "(", "_", ",", "dom", ")", "in", "domains", ")", ")", "return", "sorted", "(", "domains", ",", "key", "=", "(", "lambda", "t", ":", "attr_values", ".", "index", "(", "t", "[", "0", "]", ")", ")", ")" ]
split the domain based on the values of split_label .
train
false
14,912
def is_darwin():
    """Return True when the host platform is Darwin (macOS)."""
    return u'darwin' == sys.platform
[ "def", "is_darwin", "(", ")", ":", "return", "(", "sys", ".", "platform", "==", "u'darwin'", ")" ]
simple function to return if a host is darwin or not .
train
false
14,914
@register(u'delete-horizontal-space')
def delete_horizontal_space(event):
    """Delete all spaces and tabs around point."""
    buff = event.current_buffer
    before = buff.document.text_before_cursor
    after = buff.document.text_after_cursor
    # Count trailing whitespace before the cursor and leading after it.
    n_before = len(before) - len(before.rstrip(u' DCTB '))
    n_after = len(after) - len(after.lstrip(u' DCTB '))
    buff.delete_before_cursor(count=n_before)
    buff.delete(count=n_after)
[ "@", "register", "(", "u'delete-horizontal-space'", ")", "def", "delete_horizontal_space", "(", "event", ")", ":", "buff", "=", "event", ".", "current_buffer", "text_before_cursor", "=", "buff", ".", "document", ".", "text_before_cursor", "text_after_cursor", "=", "buff", ".", "document", ".", "text_after_cursor", "delete_before", "=", "(", "len", "(", "text_before_cursor", ")", "-", "len", "(", "text_before_cursor", ".", "rstrip", "(", "u' DCTB '", ")", ")", ")", "delete_after", "=", "(", "len", "(", "text_after_cursor", ")", "-", "len", "(", "text_after_cursor", ".", "lstrip", "(", "u' DCTB '", ")", ")", ")", "buff", ".", "delete_before_cursor", "(", "count", "=", "delete_before", ")", "buff", ".", "delete", "(", "count", "=", "delete_after", ")" ]
delete all spaces and tabs around point .
train
true
14,915
def condor_submit(submit_file):
    """Submit a condor job described by *submit_file*.

    Returns a ``(external_id, message)`` tuple; ``external_id`` is None when
    submission or parsing fails, and ``message`` then carries the error.
    """
    external_id = None
    try:
        proc = Popen(('condor_submit', submit_file), stdout=PIPE, stderr=STDOUT)
        message, _ = proc.communicate()
        if proc.returncode == 0:
            external_id = parse_external_id(message, type='condor')
        else:
            message = PROBLEM_PARSING_EXTERNAL_ID
    except Exception as e:
        message = str(e)
    return (external_id, message)
[ "def", "condor_submit", "(", "submit_file", ")", ":", "external_id", "=", "None", "try", ":", "submit", "=", "Popen", "(", "(", "'condor_submit'", ",", "submit_file", ")", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "STDOUT", ")", "(", "message", ",", "_", ")", "=", "submit", ".", "communicate", "(", ")", "if", "(", "submit", ".", "returncode", "==", "0", ")", ":", "external_id", "=", "parse_external_id", "(", "message", ",", "type", "=", "'condor'", ")", "else", ":", "message", "=", "PROBLEM_PARSING_EXTERNAL_ID", "except", "Exception", "as", "e", ":", "message", "=", "str", "(", "e", ")", "return", "(", "external_id", ",", "message", ")" ]
submit a condor job described by the given file .
train
true
14,916
def list_files(targets=None, ftypes=None, use_shebang=True, modified_only=False, exclude=None, group_by_ftype=False, extless_only=False):
    """List git-tracked files, optionally filtered by file type.

    Args:
        targets: extra path arguments passed to ``git ls-files``.
        ftypes: file types to include (leading dots are stripped); empty
            means all types.
        use_shebang: also inspect shebang lines when detecting file type.
        modified_only: only include files modified in the working tree.
        exclude: paths (relative to the repository root) to skip.
        group_by_ftype: return a dict mapping file type -> list of paths
            instead of a flat list.
        extless_only: only include files without an extension.

    Returns:
        A list of paths, or a dict of lists when ``group_by_ftype`` is True.
    """
    # BUG FIX: the original used mutable default arguments ([]), which are
    # shared across calls; replaced with the None-sentinel idiom.
    targets = [] if targets is None else targets
    exclude = [] if exclude is None else exclude
    ftypes = [x.strip('.') for x in (ftypes or [])]
    ftypes_set = set(ftypes)
    repository_root = subprocess.check_output(
        ['git', 'rev-parse', '--show-toplevel']).strip().decode('utf-8')
    exclude_abspaths = [os.path.join(repository_root, fpath).rstrip('/')
                        for fpath in exclude]
    cmdline = ['git', 'ls-files'] + targets
    if modified_only:
        cmdline.append('-m')
    files_gen = (x.strip() for x in
                 subprocess.check_output(cmdline, universal_newlines=True).split('\n'))
    # Drop directories / deleted entries; keep only regular files.
    files = list(filter(os.path.isfile, files_gen))
    result_dict = defaultdict(list)
    result_list = []
    for fpath in files:
        ext = os.path.splitext(fpath)[1]
        if extless_only and ext:
            continue
        absfpath = abspath(fpath)
        if any(absfpath == expath or absfpath.startswith(expath + '/')
               for expath in exclude_abspaths):
            continue
        if ftypes or group_by_ftype:
            try:
                filetype = get_ftype(fpath, use_shebang)
            except (OSError, UnicodeDecodeError) as e:
                # Best effort: report unreadable files but keep going.
                etype = e.__class__.__name__
                print(('Error: %s while determining type of file "%s":' % (etype, fpath)),
                      file=sys.stderr)
                print(e, file=sys.stderr)
                filetype = ''
            if ftypes and filetype not in ftypes_set:
                continue
        if group_by_ftype:
            result_dict[filetype].append(fpath)
        else:
            result_list.append(fpath)
    if group_by_ftype:
        return result_dict
    return result_list
[ "def", "list_files", "(", "targets", "=", "[", "]", ",", "ftypes", "=", "[", "]", ",", "use_shebang", "=", "True", ",", "modified_only", "=", "False", ",", "exclude", "=", "[", "]", ",", "group_by_ftype", "=", "False", ",", "extless_only", "=", "False", ")", ":", "ftypes", "=", "[", "x", ".", "strip", "(", "'.'", ")", "for", "x", "in", "ftypes", "]", "ftypes_set", "=", "set", "(", "ftypes", ")", "repository_root", "=", "subprocess", ".", "check_output", "(", "[", "'git'", ",", "'rev-parse'", ",", "'--show-toplevel'", "]", ")", ".", "strip", "(", ")", ".", "decode", "(", "'utf-8'", ")", "exclude_abspaths", "=", "[", "os", ".", "path", ".", "join", "(", "repository_root", ",", "fpath", ")", ".", "rstrip", "(", "'/'", ")", "for", "fpath", "in", "exclude", "]", "cmdline", "=", "(", "[", "'git'", ",", "'ls-files'", "]", "+", "targets", ")", "if", "modified_only", ":", "cmdline", ".", "append", "(", "'-m'", ")", "files_gen", "=", "(", "x", ".", "strip", "(", ")", "for", "x", "in", "subprocess", ".", "check_output", "(", "cmdline", ",", "universal_newlines", "=", "True", ")", ".", "split", "(", "'\\n'", ")", ")", "files", "=", "list", "(", "filter", "(", "os", ".", "path", ".", "isfile", ",", "files_gen", ")", ")", "result_dict", "=", "defaultdict", "(", "list", ")", "result_list", "=", "[", "]", "for", "fpath", "in", "files", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "fpath", ")", "[", "1", "]", "if", "(", "extless_only", "and", "ext", ")", ":", "continue", "absfpath", "=", "abspath", "(", "fpath", ")", "if", "any", "(", "(", "(", "(", "absfpath", "==", "expath", ")", "or", "absfpath", ".", "startswith", "(", "(", "expath", "+", "'/'", ")", ")", ")", "for", "expath", "in", "exclude_abspaths", ")", ")", ":", "continue", "if", "(", "ftypes", "or", "group_by_ftype", ")", ":", "try", ":", "filetype", "=", "get_ftype", "(", "fpath", ",", "use_shebang", ")", "except", "(", "OSError", ",", "UnicodeDecodeError", ")", "as", "e", ":", "etype", "=", "e", ".", 
"__class__", ".", "__name__", "print", "(", "(", "'Error: %s while determining type of file \"%s\":'", "%", "(", "etype", ",", "fpath", ")", ")", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "e", ",", "file", "=", "sys", ".", "stderr", ")", "filetype", "=", "''", "if", "(", "ftypes", "and", "(", "filetype", "not", "in", "ftypes_set", ")", ")", ":", "continue", "if", "group_by_ftype", ":", "result_dict", "[", "filetype", "]", ".", "append", "(", "fpath", ")", "else", ":", "result_list", ".", "append", "(", "fpath", ")", "if", "group_by_ftype", ":", "return", "result_dict", "else", ":", "return", "result_list" ]
returns a list of all files in pylearn2 with the given suffix .
train
false
14,917
@memoized
def get_commits_not_in_prs(start_ref, end_ref):
    """Return a tuple of non-merge, first-parent commits between
    *start_ref* and *end_ref*."""
    rev_range = u'{start}..{end}'.format(start=start_ref, end=end_ref)
    return tuple(Commit.iter_items(repo, rev_range, first_parent=True,
                                   no_merges=True))
[ "@", "memoized", "def", "get_commits_not_in_prs", "(", "start_ref", ",", "end_ref", ")", ":", "return", "tuple", "(", "Commit", ".", "iter_items", "(", "repo", ",", "u'{start}..{end}'", ".", "format", "(", "start", "=", "start_ref", ",", "end", "=", "end_ref", ")", ",", "first_parent", "=", "True", ",", "no_merges", "=", "True", ")", ")" ]
return a tuple of commits that exist between start_ref and end_ref .
train
false
14,919
def masquerade_as_group_member(user, course, partition_id, group_id):
    """Install a masquerade for *user* in *course* as a member of the given
    partition group; return the ajax call's HTTP status code."""
    payload = {'role': 'student',
               'user_partition_id': partition_id,
               'group_id': group_id}
    request = _create_mock_json_request(user, data=payload)
    response = handle_ajax(request, unicode(course.id))
    setup_masquerade(request, course.id, True)
    return response.status_code
[ "def", "masquerade_as_group_member", "(", "user", ",", "course", ",", "partition_id", ",", "group_id", ")", ":", "request", "=", "_create_mock_json_request", "(", "user", ",", "data", "=", "{", "'role'", ":", "'student'", ",", "'user_partition_id'", ":", "partition_id", ",", "'group_id'", ":", "group_id", "}", ")", "response", "=", "handle_ajax", "(", "request", ",", "unicode", "(", "course", ".", "id", ")", ")", "setup_masquerade", "(", "request", ",", "course", ".", "id", ",", "True", ")", "return", "response", ".", "status_code" ]
installs a masquerade for the specified user and course .
train
false
14,920
# Round-trips an inverse operator through disk I/O: checks repr and the
# noise_cov type, exercises .gz reading/writing, verifies that a
# non-conventional filename triggers a naming warning, and confirms that
# prepare_inverse_operator() commutes with write/read (prepared-then-written
# equals written-then-prepared, and preparing twice is a no-op).
@slow_test @testing.requires_testing_data def test_io_inverse_operator(): tempdir = _TempDir() inverse_operator = read_inverse_operator(fname_inv) x = repr(inverse_operator) assert_true(x) assert_true(isinstance(inverse_operator['noise_cov'], Covariance)) _compare_io(inverse_operator, '.gz') with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') inv_badname = op.join(tempdir, 'test-bad-name.fif.gz') write_inverse_operator(inv_badname, inverse_operator) read_inverse_operator(inv_badname) assert_naming(w, 'test_inverse.py', 2) inv_fname = op.join(tempdir, 'test-inv.fif') args = (10, (1.0 / 9.0), 'dSPM') inv_prep = prepare_inverse_operator(inverse_operator, *args) write_inverse_operator(inv_fname, inv_prep) inv_read = read_inverse_operator(inv_fname) _compare(inverse_operator, inv_read) inv_read_prep = prepare_inverse_operator(inv_read, *args) _compare(inv_prep, inv_read_prep) inv_prep_prep = prepare_inverse_operator(inv_prep, *args) _compare(inv_prep, inv_prep_prep)
[ "@", "slow_test", "@", "testing", ".", "requires_testing_data", "def", "test_io_inverse_operator", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "inverse_operator", "=", "read_inverse_operator", "(", "fname_inv", ")", "x", "=", "repr", "(", "inverse_operator", ")", "assert_true", "(", "x", ")", "assert_true", "(", "isinstance", "(", "inverse_operator", "[", "'noise_cov'", "]", ",", "Covariance", ")", ")", "_compare_io", "(", "inverse_operator", ",", "'.gz'", ")", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "w", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "inv_badname", "=", "op", ".", "join", "(", "tempdir", ",", "'test-bad-name.fif.gz'", ")", "write_inverse_operator", "(", "inv_badname", ",", "inverse_operator", ")", "read_inverse_operator", "(", "inv_badname", ")", "assert_naming", "(", "w", ",", "'test_inverse.py'", ",", "2", ")", "inv_fname", "=", "op", ".", "join", "(", "tempdir", ",", "'test-inv.fif'", ")", "args", "=", "(", "10", ",", "(", "1.0", "/", "9.0", ")", ",", "'dSPM'", ")", "inv_prep", "=", "prepare_inverse_operator", "(", "inverse_operator", ",", "*", "args", ")", "write_inverse_operator", "(", "inv_fname", ",", "inv_prep", ")", "inv_read", "=", "read_inverse_operator", "(", "inv_fname", ")", "_compare", "(", "inverse_operator", ",", "inv_read", ")", "inv_read_prep", "=", "prepare_inverse_operator", "(", "inv_read", ",", "*", "args", ")", "_compare", "(", "inv_prep", ",", "inv_read_prep", ")", "inv_prep_prep", "=", "prepare_inverse_operator", "(", "inv_prep", ",", "*", "args", ")", "_compare", "(", "inv_prep", ",", "inv_prep_prep", ")" ]
test io of inverse_operator .
train
false
14,921
def setup_cluster_ssl(client_auth=False):
    """Restart the single-node CCM cluster with client SSL enabled.

    When *client_auth* is True, client certificate authentication is also
    required (the server truststore is configured).
    """
    use_single_node(start=False)
    ccm_cluster = get_cluster()
    ccm_cluster.stop()
    keystore = os.path.abspath(SERVER_KEYSTORE_PATH)
    config_options = {
        'client_encryption_options': {
            'enabled': True,
            'keystore': keystore,
            'keystore_password': DEFAULT_PASSWORD,
        }
    }
    if client_auth:
        truststore = os.path.abspath(SERVER_TRUSTSTORE_PATH)
        encryption_options = config_options['client_encryption_options']
        encryption_options['require_client_auth'] = True
        encryption_options['truststore'] = truststore
        encryption_options['truststore_password'] = DEFAULT_PASSWORD
    ccm_cluster.set_configuration_options(config_options)
    ccm_cluster.start(wait_for_binary_proto=True, wait_other_notice=True)
[ "def", "setup_cluster_ssl", "(", "client_auth", "=", "False", ")", ":", "use_single_node", "(", "start", "=", "False", ")", "ccm_cluster", "=", "get_cluster", "(", ")", "ccm_cluster", ".", "stop", "(", ")", "abs_path_server_keystore_path", "=", "os", ".", "path", ".", "abspath", "(", "SERVER_KEYSTORE_PATH", ")", "config_options", "=", "{", "'client_encryption_options'", ":", "{", "'enabled'", ":", "True", ",", "'keystore'", ":", "abs_path_server_keystore_path", ",", "'keystore_password'", ":", "DEFAULT_PASSWORD", "}", "}", "if", "client_auth", ":", "abs_path_server_truststore_path", "=", "os", ".", "path", ".", "abspath", "(", "SERVER_TRUSTSTORE_PATH", ")", "client_encyrption_options", "=", "config_options", "[", "'client_encryption_options'", "]", "client_encyrption_options", "[", "'require_client_auth'", "]", "=", "True", "client_encyrption_options", "[", "'truststore'", "]", "=", "abs_path_server_truststore_path", "client_encyrption_options", "[", "'truststore_password'", "]", "=", "DEFAULT_PASSWORD", "ccm_cluster", ".", "set_configuration_options", "(", "config_options", ")", "ccm_cluster", ".", "start", "(", "wait_for_binary_proto", "=", "True", ",", "wait_other_notice", "=", "True", ")" ]
we need some custom setup for this module .
train
false
14,922
def rel_path(*subpaths):
    """Construct the absolute path under BASE_DIR from relative *subpaths*."""
    parts = (BASE_DIR,) + subpaths
    return path.join(*parts)
[ "def", "rel_path", "(", "*", "subpaths", ")", ":", "return", "path", ".", "join", "(", "BASE_DIR", ",", "*", "subpaths", ")" ]
construct the full path given a relative path .
train
false
14,923
def _strip_header(doc): hdr = u'NIPYPE' cruft = u'\x1b' try: index = doc.index(hdr) index += len(hdr) index += 1 doc = doc[index:] try: index = doc.index(cruft) except ValueError: index = len(doc) return doc[:index] except KeyError as e: raise_from(IOError(u'This docstring was not generated by Nipype!\n'), e)
[ "def", "_strip_header", "(", "doc", ")", ":", "hdr", "=", "u'NIPYPE'", "cruft", "=", "u'\\x1b'", "try", ":", "index", "=", "doc", ".", "index", "(", "hdr", ")", "index", "+=", "len", "(", "hdr", ")", "index", "+=", "1", "doc", "=", "doc", "[", "index", ":", "]", "try", ":", "index", "=", "doc", ".", "index", "(", "cruft", ")", "except", "ValueError", ":", "index", "=", "len", "(", "doc", ")", "return", "doc", "[", ":", "index", "]", "except", "KeyError", "as", "e", ":", "raise_from", "(", "IOError", "(", "u'This docstring was not generated by Nipype!\\n'", ")", ",", "e", ")" ]
strip matlab header and splash info off doc .
train
false
14,924
def _from_qemu_format(fmt):
    """Translate the qemu format name *fmt* via FROM_QEMU_FORMAT_MAP."""
    return FROM_QEMU_FORMAT_MAP[fmt]
[ "def", "_from_qemu_format", "(", "fmt", ")", ":", "return", "FROM_QEMU_FORMAT_MAP", "[", "fmt", "]" ]
convert to qemu format name param fmt: conventional format name .
train
false
14,926
def register_blueprints(app, package_name, package_path):
    """Register every Blueprint instance found in the package's modules.

    Returns the list of blueprints that were registered on *app*.
    """
    registered = []
    for _, module_name, _ in pkgutil.iter_modules(package_path):
        module = importlib.import_module(('%s.%s' % (package_name, module_name)))
        for attr_name in dir(module):
            candidate = getattr(module, attr_name)
            if isinstance(candidate, Blueprint):
                app.register_blueprint(candidate)
                registered.append(candidate)
    return registered
[ "def", "register_blueprints", "(", "app", ",", "package_name", ",", "package_path", ")", ":", "rv", "=", "[", "]", "for", "(", "_", ",", "name", ",", "_", ")", "in", "pkgutil", ".", "iter_modules", "(", "package_path", ")", ":", "m", "=", "importlib", ".", "import_module", "(", "(", "'%s.%s'", "%", "(", "package_name", ",", "name", ")", ")", ")", "for", "item", "in", "dir", "(", "m", ")", ":", "item", "=", "getattr", "(", "m", ",", "item", ")", "if", "isinstance", "(", "item", ",", "Blueprint", ")", ":", "app", ".", "register_blueprint", "(", "item", ")", "rv", ".", "append", "(", "item", ")", "return", "rv" ]
register all blueprint instances on the specified flask application found in all modules for the specified package .
train
false
14,927
def os_disk_stats(folder):
    """Return ``(free_bytes, total_bytes)`` for the drive holding *folder*."""
    if os.name == u'nt':
        # Windows has no statvfs; query the Win32 API directly.
        import ctypes
        free_bytes = ctypes.c_ulonglong(0)
        total_bytes = ctypes.c_ulonglong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(
            ctypes.c_wchar_p(folder), None,
            ctypes.pointer(total_bytes), ctypes.pointer(free_bytes))
        return (free_bytes.value, total_bytes.value)
    stats = os.statvfs(folder)
    return (stats.f_bavail * stats.f_frsize, stats.f_blocks * stats.f_frsize)
[ "def", "os_disk_stats", "(", "folder", ")", ":", "if", "(", "os", ".", "name", "==", "u'nt'", ")", ":", "import", "ctypes", "free_bytes", "=", "ctypes", ".", "c_ulonglong", "(", "0", ")", "total_bytes", "=", "ctypes", ".", "c_ulonglong", "(", "0", ")", "ctypes", ".", "windll", ".", "kernel32", ".", "GetDiskFreeSpaceExW", "(", "ctypes", ".", "c_wchar_p", "(", "folder", ")", ",", "None", ",", "ctypes", ".", "pointer", "(", "total_bytes", ")", ",", "ctypes", ".", "pointer", "(", "free_bytes", ")", ")", "return", "(", "free_bytes", ".", "value", ",", "total_bytes", ".", "value", ")", "else", ":", "stats", "=", "os", ".", "statvfs", "(", "folder", ")", "return", "(", "(", "stats", ".", "f_bavail", "*", "stats", ".", "f_frsize", ")", ",", "(", "stats", ".", "f_blocks", "*", "stats", ".", "f_frsize", ")", ")" ]
return drive free and total bytes .
train
false
14,930
def DfpClassType(value):
    """Return the class name of the given suds object *value*."""
    return value.__class__.__name__
[ "def", "DfpClassType", "(", "value", ")", ":", "return", "value", ".", "__class__", ".", "__name__" ]
returns the class type for the suds object .
train
false
14,932
def securitygroup(vm_):
    """Return the security group configured for the VM profile *vm_*."""
    return config.get_cloud_config_value(
        'securitygroup', vm_, __opts__, search_global=False)
[ "def", "securitygroup", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'securitygroup'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")" ]
return the security group .
train
false
14,933
def inverse_mercator(xy):
    """Convert spherical-mercator ``(x, y)`` metres to ``(lon, lat)`` degrees."""
    lon = (xy[0] / 20037508.34) * 180
    # First scale y linearly to degrees, then undo the mercator projection.
    lat = (xy[1] / 20037508.34) * 180
    lat = (180 / math.pi) * (2 * math.atan(math.exp((lat * math.pi) / 180)) - (math.pi / 2))
    return (lon, lat)
[ "def", "inverse_mercator", "(", "xy", ")", ":", "lon", "=", "(", "(", "xy", "[", "0", "]", "/", "20037508.34", ")", "*", "180", ")", "lat", "=", "(", "(", "xy", "[", "1", "]", "/", "20037508.34", ")", "*", "180", ")", "lat", "=", "(", "(", "180", "/", "math", ".", "pi", ")", "*", "(", "(", "2", "*", "math", ".", "atan", "(", "math", ".", "exp", "(", "(", "(", "lat", "*", "math", ".", "pi", ")", "/", "180", ")", ")", ")", ")", "-", "(", "math", ".", "pi", "/", "2", ")", ")", ")", "return", "(", "lon", ",", "lat", ")" ]
given coordinates in spherical mercator .
train
true
14,934
def _toc_pattern(indexname): return re.compile(('^_%s_([0-9]+).toc$' % indexname))
[ "def", "_toc_pattern", "(", "indexname", ")", ":", "return", "re", ".", "compile", "(", "(", "'^_%s_([0-9]+).toc$'", "%", "indexname", ")", ")" ]
returns a regular expression object that matches toc filenames .
train
false
14,937
def get_profile_image_urls_for_user(user, request=None):
    """Return a ``{size: url}`` dict of profile image URLs for *user*.

    Falls back to the default images when the user has no uploaded image or
    has no profile at all.  When *request* is given, every URL is rewritten
    to an absolute URI.
    """
    try:
        if user.profile.has_profile_image:
            version = user.profile.profile_image_uploaded_at.strftime('%s')
            urls = _get_profile_image_urls(
                _make_profile_image_name(user.username),
                get_profile_image_storage(),
                version=version)
        else:
            urls = _get_default_profile_image_urls()
    except UserProfile.DoesNotExist:
        urls = _get_default_profile_image_urls()
    if request:
        for key, value in urls.items():
            urls[key] = request.build_absolute_uri(value)
    return urls
[ "def", "get_profile_image_urls_for_user", "(", "user", ",", "request", "=", "None", ")", ":", "try", ":", "if", "user", ".", "profile", ".", "has_profile_image", ":", "urls", "=", "_get_profile_image_urls", "(", "_make_profile_image_name", "(", "user", ".", "username", ")", ",", "get_profile_image_storage", "(", ")", ",", "version", "=", "user", ".", "profile", ".", "profile_image_uploaded_at", ".", "strftime", "(", "'%s'", ")", ")", "else", ":", "urls", "=", "_get_default_profile_image_urls", "(", ")", "except", "UserProfile", ".", "DoesNotExist", ":", "urls", "=", "_get_default_profile_image_urls", "(", ")", "if", "request", ":", "for", "(", "key", ",", "value", ")", "in", "urls", ".", "items", "(", ")", ":", "urls", "[", "key", "]", "=", "request", ".", "build_absolute_uri", "(", "value", ")", "return", "urls" ]
return a dict {size:url} for each profile image for a given user .
train
false
14,938
# Remote-control inspect command: list the app's registered tasks, skipping
# the built-in celery.* tasks unless `builtins` is set.  Each task is rendered
# as "name [attr=value ...]" for the requested taskinfoitems attributes
# (DEFAULT_TASK_INFO_ITEMS when none given); attributes that are None are
# omitted, and a task with no requested attributes set is rendered by name
# alone.  Output is sorted by task name.
@inspect_command(alias=u'dump_tasks', variadic=u'taskinfoitems', signature=u'[attr1 [attr2 [... [attrN]]]]') def registered(state, taskinfoitems=None, builtins=False, **kwargs): reg = state.app.tasks taskinfoitems = (taskinfoitems or DEFAULT_TASK_INFO_ITEMS) tasks = (reg if builtins else (task for task in reg if (not task.startswith(u'celery.')))) def _extract_info(task): fields = {field: str(getattr(task, field, None)) for field in taskinfoitems if (getattr(task, field, None) is not None)} if fields: info = [u'='.join(f) for f in items(fields)] return u'{0} [{1}]'.format(task.name, u' '.join(info)) return task.name return [_extract_info(reg[task]) for task in sorted(tasks)]
[ "@", "inspect_command", "(", "alias", "=", "u'dump_tasks'", ",", "variadic", "=", "u'taskinfoitems'", ",", "signature", "=", "u'[attr1 [attr2 [... [attrN]]]]'", ")", "def", "registered", "(", "state", ",", "taskinfoitems", "=", "None", ",", "builtins", "=", "False", ",", "**", "kwargs", ")", ":", "reg", "=", "state", ".", "app", ".", "tasks", "taskinfoitems", "=", "(", "taskinfoitems", "or", "DEFAULT_TASK_INFO_ITEMS", ")", "tasks", "=", "(", "reg", "if", "builtins", "else", "(", "task", "for", "task", "in", "reg", "if", "(", "not", "task", ".", "startswith", "(", "u'celery.'", ")", ")", ")", ")", "def", "_extract_info", "(", "task", ")", ":", "fields", "=", "{", "field", ":", "str", "(", "getattr", "(", "task", ",", "field", ",", "None", ")", ")", "for", "field", "in", "taskinfoitems", "if", "(", "getattr", "(", "task", ",", "field", ",", "None", ")", "is", "not", "None", ")", "}", "if", "fields", ":", "info", "=", "[", "u'='", ".", "join", "(", "f", ")", "for", "f", "in", "items", "(", "fields", ")", "]", "return", "u'{0} [{1}]'", ".", "format", "(", "task", ".", "name", ",", "u' '", ".", "join", "(", "info", ")", ")", "return", "task", ".", "name", "return", "[", "_extract_info", "(", "reg", "[", "task", "]", ")", "for", "task", "in", "sorted", "(", "tasks", ")", "]" ]
list of registered tasks .
train
false
14,939
def listing(output_lines):
    """Return a list of dicts with basic item info parsed from CLI output."""
    parsed = table(output_lines)
    headers = parsed['headers']
    return [{key: row[idx] for idx, key in enumerate(headers)}
            for row in parsed['values']]
[ "def", "listing", "(", "output_lines", ")", ":", "items", "=", "[", "]", "table_", "=", "table", "(", "output_lines", ")", "for", "row", "in", "table_", "[", "'values'", "]", ":", "item", "=", "{", "}", "for", "(", "col_idx", ",", "col_key", ")", "in", "enumerate", "(", "table_", "[", "'headers'", "]", ")", ":", "item", "[", "col_key", "]", "=", "row", "[", "col_idx", "]", "items", ".", "append", "(", "item", ")", "return", "items" ]
return list of dicts with basic item info parsed from cli output .
train
false
14,940
def real_path(path):
    """Return the canonicalized absolute pathname of *path*."""
    resolved = os.path.realpath(path)
    return os.path.normpath(os.path.normcase(resolved))
[ "def", "real_path", "(", "path", ")", ":", "return", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "realpath", "(", "path", ")", ")", ")" ]
returns: the canonicalized absolute pathname .
train
false
14,941
def test_projection_layer_yaml():
    """Load and run a composite model with ProjectionLayer from YAML."""
    yaml_path = os.path.join(os.path.dirname(__file__), 'composite.yaml')
    with open(yaml_path) as f:
        train = yaml_parse.load(f.read())
    train.main_loop()
[ "def", "test_projection_layer_yaml", "(", ")", ":", "test_dir", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "test_dir", ",", "'composite.yaml'", ")", ")", "as", "f", ":", "train", "=", "yaml_parse", ".", "load", "(", "f", ".", "read", "(", ")", ")", "train", ".", "main_loop", "(", ")" ]
test loading and running a complex model with projectionlayer .
train
false
14,943
def enter_text_feedback(context, text_feedback):
    """Type *text_feedback* into the feedback form's text input field."""
    find_css_class_with_wait(context, TEXT_INPUT_CLASS).send_keys(text_feedback)
[ "def", "enter_text_feedback", "(", "context", ",", "text_feedback", ")", ":", "input_field", "=", "find_css_class_with_wait", "(", "context", ",", "TEXT_INPUT_CLASS", ")", "input_field", ".", "send_keys", "(", "text_feedback", ")" ]
enter text feedback into feedback form .
train
false
14,944
# Return True if the service is enabled according to `chkconfig --list`,
# otherwise False.  When runlevel is None the system's current runlevel is
# used.  Two row shapes are recognised: a standard sysv row whose first field
# is the service name and which contains "<runlevel>:on", and (presumably —
# the flattened source leaves the if/elif nesting ambiguous; upstream salt
# pairs the elif with the outer if) an xinetd-style row of exactly
# "<name> on".  NOTE(review): any non-zero chkconfig exit code yields False.
def _chkconfig_is_enabled(name, runlevel=None): cmdline = '/sbin/chkconfig --list {0}'.format(name) result = __salt__['cmd.run_all'](cmdline, python_shell=False) if (runlevel is None): runlevel = _runlevel() if (result['retcode'] == 0): for row in result['stdout'].splitlines(): if ('{0}:on'.format(runlevel) in row): if (row.split()[0] == name): return True elif (row.split() == [name, 'on']): return True return False
[ "def", "_chkconfig_is_enabled", "(", "name", ",", "runlevel", "=", "None", ")", ":", "cmdline", "=", "'/sbin/chkconfig --list {0}'", ".", "format", "(", "name", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmdline", ",", "python_shell", "=", "False", ")", "if", "(", "runlevel", "is", "None", ")", ":", "runlevel", "=", "_runlevel", "(", ")", "if", "(", "result", "[", "'retcode'", "]", "==", "0", ")", ":", "for", "row", "in", "result", "[", "'stdout'", "]", ".", "splitlines", "(", ")", ":", "if", "(", "'{0}:on'", ".", "format", "(", "runlevel", ")", "in", "row", ")", ":", "if", "(", "row", ".", "split", "(", ")", "[", "0", "]", "==", "name", ")", ":", "return", "True", "elif", "(", "row", ".", "split", "(", ")", "==", "[", "name", ",", "'on'", "]", ")", ":", "return", "True", "return", "False" ]
return true if the service is enabled according to chkconfig; otherwise return false .
train
true
14,946
def redirect_by_default():
    """Ask the user whether HTTP requests should redirect to HTTPS.

    Returns True only when the user explicitly picks the "Secure" option;
    picking "Easy" or cancelling the dialog returns False.
    """
    choices = [
        ('Easy', 'Allow both HTTP and HTTPS access to these sites'),
        ('Secure', 'Make all requests redirect to secure HTTPS access'),
    ]
    code, selection = util(interfaces.IDisplay).menu(
        'Please choose whether HTTPS access is required or optional.',
        choices, default=0, cli_flag='--redirect / --no-redirect',
        force_interactive=True)
    if code != display_util.OK:
        return False
    return selection == 1
[ "def", "redirect_by_default", "(", ")", ":", "choices", "=", "[", "(", "'Easy'", ",", "'Allow both HTTP and HTTPS access to these sites'", ")", ",", "(", "'Secure'", ",", "'Make all requests redirect to secure HTTPS access'", ")", "]", "(", "code", ",", "selection", ")", "=", "util", "(", "interfaces", ".", "IDisplay", ")", ".", "menu", "(", "'Please choose whether HTTPS access is required or optional.'", ",", "choices", ",", "default", "=", "0", ",", "cli_flag", "=", "'--redirect / --no-redirect'", ",", "force_interactive", "=", "True", ")", "if", "(", "code", "!=", "display_util", ".", "OK", ")", ":", "return", "False", "return", "(", "selection", "==", "1", ")" ]
determines whether the user would like to redirect to https .
train
false
14,948
def _log_or_execute(content, func, *args, **kwargs):
    """Call ``func(*args)``, or only log the intent when DRY_RUN is set.

    The optional ``subject`` keyword overrides the action name used in the
    log message (defaults to the function's name).
    """
    subject = kwargs.get('subject', func.__name__)
    if DRY_RUN:
        logging.info('Would %s: %s', subject, content)
        return
    func(*args)
[ "def", "_log_or_execute", "(", "content", ",", "func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "subject", "=", "kwargs", ".", "get", "(", "'subject'", ",", "func", ".", "__name__", ")", "if", "DRY_RUN", ":", "logging", ".", "info", "(", "'Would %s: %s'", ",", "subject", ",", "content", ")", "else", ":", "func", "(", "*", "args", ")" ]
log a message if dry_run is enabled .
train
false
14,949
def rand_alpha(length=0):
    """Return a random string of letters; length defaults to a random 10-30."""
    count = length or randint(10, 30)
    return ''.join(choice(letters) for _ in xrange(count))
[ "def", "rand_alpha", "(", "length", "=", "0", ")", ":", "return", "''", ".", "join", "(", "(", "choice", "(", "letters", ")", "for", "_", "in", "xrange", "(", "(", "length", "or", "randint", "(", "10", ",", "30", ")", ")", ")", ")", ")" ]
create a random string only with letters :return: a random string only composed by letters .
train
false
14,951
def configure_app(app, config):
    """Layer the app configuration: defaults, then *config*, then env var.

    *config* may be a path to an existing pyfile or a config object; the
    ``FLASKBB_SETTINGS`` environment variable, when set, is applied last.
    """
    app.config.from_object('flaskbb.configs.default.DefaultConfig')
    if isinstance(config, string_types) and os.path.exists(os.path.abspath(config)):
        app.config.from_pyfile(os.path.abspath(config))
    else:
        app.config.from_object(config)
    app.config.from_envvar('FLASKBB_SETTINGS', silent=True)
[ "def", "configure_app", "(", "app", ",", "config", ")", ":", "app", ".", "config", ".", "from_object", "(", "'flaskbb.configs.default.DefaultConfig'", ")", "if", "(", "isinstance", "(", "config", ",", "string_types", ")", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "abspath", "(", "config", ")", ")", ")", ":", "app", ".", "config", ".", "from_pyfile", "(", "os", ".", "path", ".", "abspath", "(", "config", ")", ")", "else", ":", "app", ".", "config", ".", "from_object", "(", "config", ")", "app", ".", "config", ".", "from_envvar", "(", "'FLASKBB_SETTINGS'", ",", "silent", "=", "True", ")" ]
different ways of configurations .
train
false
14,952
@image_comparison(baseline_images=[u'grayscale_alpha'], extensions=[u'pdf']) def test_grayscale_alpha(): (x, y) = np.ogrid[(-2):2:0.1, (-2):2:0.1] dd = np.exp((- ((x ** 2) + (y ** 2)))) dd[(dd < 0.1)] = np.nan (fig, ax) = plt.subplots() ax.imshow(dd, interpolation=u'none', cmap=u'gray_r') ax.set_xticks([]) ax.set_yticks([])
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'grayscale_alpha'", "]", ",", "extensions", "=", "[", "u'pdf'", "]", ")", "def", "test_grayscale_alpha", "(", ")", ":", "(", "x", ",", "y", ")", "=", "np", ".", "ogrid", "[", "(", "-", "2", ")", ":", "2", ":", "0.1", ",", "(", "-", "2", ")", ":", "2", ":", "0.1", "]", "dd", "=", "np", ".", "exp", "(", "(", "-", "(", "(", "x", "**", "2", ")", "+", "(", "y", "**", "2", ")", ")", ")", ")", "dd", "[", "(", "dd", "<", "0.1", ")", "]", "=", "np", ".", "nan", "(", "fig", ",", "ax", ")", "=", "plt", ".", "subplots", "(", ")", "ax", ".", "imshow", "(", "dd", ",", "interpolation", "=", "u'none'", ",", "cmap", "=", "u'gray_r'", ")", "ax", ".", "set_xticks", "(", "[", "]", ")", "ax", ".", "set_yticks", "(", "[", "]", ")" ]
masking images with nan did not work for grayscale images .
train
false
14,953
def hide_me(tb, g=globals()): base_tb = tb try: while (tb and (tb.tb_frame.f_globals is not g)): tb = tb.tb_next while (tb and (tb.tb_frame.f_globals is g)): tb = tb.tb_next except Exception as e: logging.exception(e) tb = base_tb if (not tb): tb = base_tb return tb
[ "def", "hide_me", "(", "tb", ",", "g", "=", "globals", "(", ")", ")", ":", "base_tb", "=", "tb", "try", ":", "while", "(", "tb", "and", "(", "tb", ".", "tb_frame", ".", "f_globals", "is", "not", "g", ")", ")", ":", "tb", "=", "tb", ".", "tb_next", "while", "(", "tb", "and", "(", "tb", ".", "tb_frame", ".", "f_globals", "is", "g", ")", ")", ":", "tb", "=", "tb", ".", "tb_next", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "tb", "=", "base_tb", "if", "(", "not", "tb", ")", ":", "tb", "=", "base_tb", "return", "tb" ]
hide stack traceback of given stack .
train
true
14,954
def string_to_list(string): args = [] if string: string = str(string) if (',' not in string): string += ',' for arg in string.split(','): arg = arg.strip() if (arg == ''): continue args.append(arg) return args
[ "def", "string_to_list", "(", "string", ")", ":", "args", "=", "[", "]", "if", "string", ":", "string", "=", "str", "(", "string", ")", "if", "(", "','", "not", "in", "string", ")", ":", "string", "+=", "','", "for", "arg", "in", "string", ".", "split", "(", "','", ")", ":", "arg", "=", "arg", ".", "strip", "(", ")", "if", "(", "arg", "==", "''", ")", ":", "continue", "args", ".", "append", "(", "arg", ")", "return", "args" ]
usage: {{ url|thumbnail:"width .
train
false
14,955
def to_int_be(data): return reduce((lambda a, b: ((a << 8) + b)), bytearray(data), 0)
[ "def", "to_int_be", "(", "data", ")", ":", "return", "reduce", "(", "(", "lambda", "a", ",", "b", ":", "(", "(", "a", "<<", "8", ")", "+", "b", ")", ")", ",", "bytearray", "(", "data", ")", ",", "0", ")" ]
convert an arbitrarily-long string to a long using big-endian byte order .
train
false
14,956
@task def patch_version(vs, path=pjoin(here, '..')): v = parse_vs(vs) version_py = pjoin(path, 'jupyterhub', 'version.py') print ('patching %s with %s' % (version_py, vs)) with open(version_py) as f: pre_lines = [] post_lines = [] for line in f: pre_lines.append(line) if line.startswith('version_info'): break for line in f: if line.startswith(')'): post_lines.append(line) break for line in f: post_lines.append(line) with open(version_py, 'w') as f: for line in pre_lines: f.write(line) for part in v: f.write((' %r,\n' % part)) for line in post_lines: f.write(line) ns = {} with open(version_py) as f: exec f.read() in {}, ns assert (ns['__version__'] == vs), ('%r != %r' % (ns['__version__'], vs))
[ "@", "task", "def", "patch_version", "(", "vs", ",", "path", "=", "pjoin", "(", "here", ",", "'..'", ")", ")", ":", "v", "=", "parse_vs", "(", "vs", ")", "version_py", "=", "pjoin", "(", "path", ",", "'jupyterhub'", ",", "'version.py'", ")", "print", "(", "'patching %s with %s'", "%", "(", "version_py", ",", "vs", ")", ")", "with", "open", "(", "version_py", ")", "as", "f", ":", "pre_lines", "=", "[", "]", "post_lines", "=", "[", "]", "for", "line", "in", "f", ":", "pre_lines", ".", "append", "(", "line", ")", "if", "line", ".", "startswith", "(", "'version_info'", ")", ":", "break", "for", "line", "in", "f", ":", "if", "line", ".", "startswith", "(", "')'", ")", ":", "post_lines", ".", "append", "(", "line", ")", "break", "for", "line", "in", "f", ":", "post_lines", ".", "append", "(", "line", ")", "with", "open", "(", "version_py", ",", "'w'", ")", "as", "f", ":", "for", "line", "in", "pre_lines", ":", "f", ".", "write", "(", "line", ")", "for", "part", "in", "v", ":", "f", ".", "write", "(", "(", "' %r,\\n'", "%", "part", ")", ")", "for", "line", "in", "post_lines", ":", "f", ".", "write", "(", "line", ")", "ns", "=", "{", "}", "with", "open", "(", "version_py", ")", "as", "f", ":", "exec", "f", ".", "read", "(", ")", "in", "{", "}", ",", "ns", "assert", "(", "ns", "[", "'__version__'", "]", "==", "vs", ")", ",", "(", "'%r != %r'", "%", "(", "ns", "[", "'__version__'", "]", ",", "vs", ")", ")" ]
patch zmq/sugar/version .
train
false
14,957
def convert_id3(id_64): _id = ((id_64 - ID_BASE) * 2) if ((_id % 2) == 0): _id += 0 else: _id += 1 actual = str((_id // 2)) return 'U:1:{}'.format(actual)
[ "def", "convert_id3", "(", "id_64", ")", ":", "_id", "=", "(", "(", "id_64", "-", "ID_BASE", ")", "*", "2", ")", "if", "(", "(", "_id", "%", "2", ")", "==", "0", ")", ":", "_id", "+=", "0", "else", ":", "_id", "+=", "1", "actual", "=", "str", "(", "(", "_id", "//", "2", ")", ")", "return", "'U:1:{}'", ".", "format", "(", "actual", ")" ]
takes a steam id_64 formatted id and returns a id_3 formatted id :typetype id_64: int :return: str .
train
false
14,958
def _find_value(ret_dict, key, path=None): if (path is None): path = key else: path = '{0}:{1}'.format(path, key) ret = [] for (ikey, val) in six.iteritems(ret_dict): if (ikey == key): ret.append({path: val}) if isinstance(val, list): for item in val: if isinstance(item, dict): ret = (ret + _find_value(item, key, path)) if isinstance(val, dict): ret = (ret + _find_value(val, key, path)) return ret
[ "def", "_find_value", "(", "ret_dict", ",", "key", ",", "path", "=", "None", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "key", "else", ":", "path", "=", "'{0}:{1}'", ".", "format", "(", "path", ",", "key", ")", "ret", "=", "[", "]", "for", "(", "ikey", ",", "val", ")", "in", "six", ".", "iteritems", "(", "ret_dict", ")", ":", "if", "(", "ikey", "==", "key", ")", ":", "ret", ".", "append", "(", "{", "path", ":", "val", "}", ")", "if", "isinstance", "(", "val", ",", "list", ")", ":", "for", "item", "in", "val", ":", "if", "isinstance", "(", "item", ",", "dict", ")", ":", "ret", "=", "(", "ret", "+", "_find_value", "(", "item", ",", "key", ",", "path", ")", ")", "if", "isinstance", "(", "val", ",", "dict", ")", ":", "ret", "=", "(", "ret", "+", "_find_value", "(", "val", ",", "key", ",", "path", ")", ")", "return", "ret" ]
private method traverses a dictionary of dictionaries/lists to find key and return the value stored .
train
true
14,959
def find_parents(candidate, branches): for branch in branches: try: idx = branch.index(candidate.lower()) return list(reversed(branch[:(idx + 1)])) except ValueError: continue return [candidate]
[ "def", "find_parents", "(", "candidate", ",", "branches", ")", ":", "for", "branch", "in", "branches", ":", "try", ":", "idx", "=", "branch", ".", "index", "(", "candidate", ".", "lower", "(", ")", ")", "return", "list", "(", "reversed", "(", "branch", "[", ":", "(", "idx", "+", "1", ")", "]", ")", ")", "except", "ValueError", ":", "continue", "return", "[", "candidate", "]" ]
find parents genre of a given genre .
train
false
14,960
def PersistentTemporaryDirectory(suffix='', prefix='', dir=None): if (dir is None): dir = base_dir() tdir = _make_dir(suffix, prefix, dir) atexit.register(remove_dir, tdir) return tdir
[ "def", "PersistentTemporaryDirectory", "(", "suffix", "=", "''", ",", "prefix", "=", "''", ",", "dir", "=", "None", ")", ":", "if", "(", "dir", "is", "None", ")", ":", "dir", "=", "base_dir", "(", ")", "tdir", "=", "_make_dir", "(", "suffix", ",", "prefix", ",", "dir", ")", "atexit", ".", "register", "(", "remove_dir", ",", "tdir", ")", "return", "tdir" ]
return the path to a newly created temporary directory that will be automatically deleted on application exit .
train
false
14,961
def _covar_mstep_tied(gmm, X, responsibilities, weighted_X_sum, norm, min_covar): avg_X2 = np.dot(X.T, X) avg_means2 = np.dot(gmm.means_.T, weighted_X_sum) out = (avg_X2 - avg_means2) out *= (1.0 / X.shape[0]) out.flat[::(len(out) + 1)] += min_covar return out
[ "def", "_covar_mstep_tied", "(", "gmm", ",", "X", ",", "responsibilities", ",", "weighted_X_sum", ",", "norm", ",", "min_covar", ")", ":", "avg_X2", "=", "np", ".", "dot", "(", "X", ".", "T", ",", "X", ")", "avg_means2", "=", "np", ".", "dot", "(", "gmm", ".", "means_", ".", "T", ",", "weighted_X_sum", ")", "out", "=", "(", "avg_X2", "-", "avg_means2", ")", "out", "*=", "(", "1.0", "/", "X", ".", "shape", "[", "0", "]", ")", "out", ".", "flat", "[", ":", ":", "(", "len", "(", "out", ")", "+", "1", ")", "]", "+=", "min_covar", "return", "out" ]
perform the covariance m step for tied cases .
train
true
14,962
@first_event_received.connect(weak=False) def record_first_event(project, group, **kwargs): (rows_affected, created) = OrganizationOnboardingTask.objects.create_or_update(organization_id=project.organization_id, task=OnboardingTask.FIRST_EVENT, status=OnboardingTaskStatus.PENDING, values={'status': OnboardingTaskStatus.COMPLETE, 'project_id': project.id, 'date_completed': project.first_event, 'data': {'platform': group.platform}}) if ((not rows_affected) and (not created)): try: oot = OrganizationOnboardingTask.objects.filter(organization_id=project.organization_id, task=OnboardingTask.FIRST_EVENT)[0] except IndexError: return if ((oot.project_id != project.id) and (oot.data.get('platform', group.platform) != group.platform)): OrganizationOnboardingTask.objects.create_or_update(organization_id=project.organization_id, task=OnboardingTask.SECOND_PLATFORM, status=OnboardingTaskStatus.PENDING, values={'status': OnboardingTaskStatus.COMPLETE, 'project_id': project.id, 'date_completed': project.first_event, 'data': {'platform': group.platform}})
[ "@", "first_event_received", ".", "connect", "(", "weak", "=", "False", ")", "def", "record_first_event", "(", "project", ",", "group", ",", "**", "kwargs", ")", ":", "(", "rows_affected", ",", "created", ")", "=", "OrganizationOnboardingTask", ".", "objects", ".", "create_or_update", "(", "organization_id", "=", "project", ".", "organization_id", ",", "task", "=", "OnboardingTask", ".", "FIRST_EVENT", ",", "status", "=", "OnboardingTaskStatus", ".", "PENDING", ",", "values", "=", "{", "'status'", ":", "OnboardingTaskStatus", ".", "COMPLETE", ",", "'project_id'", ":", "project", ".", "id", ",", "'date_completed'", ":", "project", ".", "first_event", ",", "'data'", ":", "{", "'platform'", ":", "group", ".", "platform", "}", "}", ")", "if", "(", "(", "not", "rows_affected", ")", "and", "(", "not", "created", ")", ")", ":", "try", ":", "oot", "=", "OrganizationOnboardingTask", ".", "objects", ".", "filter", "(", "organization_id", "=", "project", ".", "organization_id", ",", "task", "=", "OnboardingTask", ".", "FIRST_EVENT", ")", "[", "0", "]", "except", "IndexError", ":", "return", "if", "(", "(", "oot", ".", "project_id", "!=", "project", ".", "id", ")", "and", "(", "oot", ".", "data", ".", "get", "(", "'platform'", ",", "group", ".", "platform", ")", "!=", "group", ".", "platform", ")", ")", ":", "OrganizationOnboardingTask", ".", "objects", ".", "create_or_update", "(", "organization_id", "=", "project", ".", "organization_id", ",", "task", "=", "OnboardingTask", ".", "SECOND_PLATFORM", ",", "status", "=", "OnboardingTaskStatus", ".", "PENDING", ",", "values", "=", "{", "'status'", ":", "OnboardingTaskStatus", ".", "COMPLETE", ",", "'project_id'", ":", "project", ".", "id", ",", "'date_completed'", ":", "project", ".", "first_event", ",", "'data'", ":", "{", "'platform'", ":", "group", ".", "platform", "}", "}", ")" ]
requires up to 2 database calls .
train
false
14,964
def load_function(path): (module_path, _, name) = path.rpartition('.') return getattr(import_module(module_path), name)
[ "def", "load_function", "(", "path", ")", ":", "(", "module_path", ",", "_", ",", "name", ")", "=", "path", ".", "rpartition", "(", "'.'", ")", "return", "getattr", "(", "import_module", "(", "module_path", ")", ",", "name", ")" ]
load a function by name .
train
false
14,965
def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, **domains): from cherrypy.lib import httputil def vhost_dispatch(path_info): request = cherrypy.serving.request header = request.headers.get domain = header('Host', '') if use_x_forwarded_host: domain = header('X-Forwarded-Host', domain) prefix = domains.get(domain, '') if prefix: path_info = httputil.urljoin(prefix, path_info) result = next_dispatcher(path_info) section = request.config.get('tools.staticdir.section') if section: section = section[len(prefix):] request.config['tools.staticdir.section'] = section return result return vhost_dispatch
[ "def", "VirtualHost", "(", "next_dispatcher", "=", "Dispatcher", "(", ")", ",", "use_x_forwarded_host", "=", "True", ",", "**", "domains", ")", ":", "from", "cherrypy", ".", "lib", "import", "httputil", "def", "vhost_dispatch", "(", "path_info", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "header", "=", "request", ".", "headers", ".", "get", "domain", "=", "header", "(", "'Host'", ",", "''", ")", "if", "use_x_forwarded_host", ":", "domain", "=", "header", "(", "'X-Forwarded-Host'", ",", "domain", ")", "prefix", "=", "domains", ".", "get", "(", "domain", ",", "''", ")", "if", "prefix", ":", "path_info", "=", "httputil", ".", "urljoin", "(", "prefix", ",", "path_info", ")", "result", "=", "next_dispatcher", "(", "path_info", ")", "section", "=", "request", ".", "config", ".", "get", "(", "'tools.staticdir.section'", ")", "if", "section", ":", "section", "=", "section", "[", "len", "(", "prefix", ")", ":", "]", "request", ".", "config", "[", "'tools.staticdir.section'", "]", "=", "section", "return", "result", "return", "vhost_dispatch" ]
select a different handler based on the host header .
train
false
14,966
@register_hook def bitbucket_hook_helper(data): if ('push' in data): return bitbucket_webhook_helper(data) owner = data['repository']['owner'] slug = data['repository']['slug'] if data['commits']: branch = data['commits'][(-1)]['branch'] else: branch = None params = {'owner': owner, 'slug': slug} if (data['repository']['scm'] == 'git'): repos = [(repo % params) for repo in BITBUCKET_GIT_REPOS] elif (data['repository']['scm'] == 'hg'): repos = [(repo % params) for repo in BITBUCKET_HG_REPOS] else: LOGGER.error('unsupported repository: %s', repr(data['repository'])) raise ValueError('unsupported repository') return {'service_long_name': 'Bitbucket', 'repo_url': ''.join([data['canon_url'], data['repository']['absolute_url']]), 'repos': repos, 'branch': branch}
[ "@", "register_hook", "def", "bitbucket_hook_helper", "(", "data", ")", ":", "if", "(", "'push'", "in", "data", ")", ":", "return", "bitbucket_webhook_helper", "(", "data", ")", "owner", "=", "data", "[", "'repository'", "]", "[", "'owner'", "]", "slug", "=", "data", "[", "'repository'", "]", "[", "'slug'", "]", "if", "data", "[", "'commits'", "]", ":", "branch", "=", "data", "[", "'commits'", "]", "[", "(", "-", "1", ")", "]", "[", "'branch'", "]", "else", ":", "branch", "=", "None", "params", "=", "{", "'owner'", ":", "owner", ",", "'slug'", ":", "slug", "}", "if", "(", "data", "[", "'repository'", "]", "[", "'scm'", "]", "==", "'git'", ")", ":", "repos", "=", "[", "(", "repo", "%", "params", ")", "for", "repo", "in", "BITBUCKET_GIT_REPOS", "]", "elif", "(", "data", "[", "'repository'", "]", "[", "'scm'", "]", "==", "'hg'", ")", ":", "repos", "=", "[", "(", "repo", "%", "params", ")", "for", "repo", "in", "BITBUCKET_HG_REPOS", "]", "else", ":", "LOGGER", ".", "error", "(", "'unsupported repository: %s'", ",", "repr", "(", "data", "[", "'repository'", "]", ")", ")", "raise", "ValueError", "(", "'unsupported repository'", ")", "return", "{", "'service_long_name'", ":", "'Bitbucket'", ",", "'repo_url'", ":", "''", ".", "join", "(", "[", "data", "[", "'canon_url'", "]", ",", "data", "[", "'repository'", "]", "[", "'absolute_url'", "]", "]", ")", ",", "'repos'", ":", "repos", ",", "'branch'", ":", "branch", "}" ]
api to handle service hooks from bitbucket .
train
false
14,967
def _kill(req, image_id, from_state): registry.update_image_metadata(req.context, image_id, {'status': 'killed'}, from_state=from_state)
[ "def", "_kill", "(", "req", ",", "image_id", ",", "from_state", ")", ":", "registry", ".", "update_image_metadata", "(", "req", ".", "context", ",", "image_id", ",", "{", "'status'", ":", "'killed'", "}", ",", "from_state", "=", "from_state", ")" ]
marks the image status to killed .
train
false
14,968
def is_degenerate(identity_set, gate_identity): for an_id in identity_set: if (gate_identity in an_id.equivalent_ids): return True return False
[ "def", "is_degenerate", "(", "identity_set", ",", "gate_identity", ")", ":", "for", "an_id", "in", "identity_set", ":", "if", "(", "gate_identity", "in", "an_id", ".", "equivalent_ids", ")", ":", "return", "True", "return", "False" ]
checks if a gate identity is a permutation of another identity .
train
false
14,969
def get_permissions(FunctionName, Qualifier=None, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) kwargs = {} if (Qualifier is not None): kwargs['Qualifier'] = Qualifier policy = conn.get_policy(FunctionName=FunctionName, **kwargs) policy = policy.get('Policy', {}) if isinstance(policy, six.string_types): policy = json.loads(policy) if (policy is None): policy = {} permissions = {} for statement in policy.get('Statement', []): condition = statement.get('Condition', {}) principal = statement.get('Principal', {}) if ('AWS' in principal): principal = principal['AWS'].split(':')[4] else: principal = principal.get('Service') permission = {'Action': statement.get('Action'), 'Principal': principal} if ('ArnLike' in condition): permission['SourceArn'] = condition['ArnLike'].get('AWS:SourceArn') if ('StringEquals' in condition): permission['SourceAccount'] = condition['StringEquals'].get('AWS:SourceAccount') permissions[statement.get('Sid')] = permission return {'permissions': permissions} except ClientError as e: err = salt.utils.boto3.get_error(e) if (e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException'): return {'permissions': None} return {'permissions': None, 'error': err}
[ "def", "get_permissions", "(", "FunctionName", ",", "Qualifier", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "kwargs", "=", "{", "}", "if", "(", "Qualifier", "is", "not", "None", ")", ":", "kwargs", "[", "'Qualifier'", "]", "=", "Qualifier", "policy", "=", "conn", ".", "get_policy", "(", "FunctionName", "=", "FunctionName", ",", "**", "kwargs", ")", "policy", "=", "policy", ".", "get", "(", "'Policy'", ",", "{", "}", ")", "if", "isinstance", "(", "policy", ",", "six", ".", "string_types", ")", ":", "policy", "=", "json", ".", "loads", "(", "policy", ")", "if", "(", "policy", "is", "None", ")", ":", "policy", "=", "{", "}", "permissions", "=", "{", "}", "for", "statement", "in", "policy", ".", "get", "(", "'Statement'", ",", "[", "]", ")", ":", "condition", "=", "statement", ".", "get", "(", "'Condition'", ",", "{", "}", ")", "principal", "=", "statement", ".", "get", "(", "'Principal'", ",", "{", "}", ")", "if", "(", "'AWS'", "in", "principal", ")", ":", "principal", "=", "principal", "[", "'AWS'", "]", ".", "split", "(", "':'", ")", "[", "4", "]", "else", ":", "principal", "=", "principal", ".", "get", "(", "'Service'", ")", "permission", "=", "{", "'Action'", ":", "statement", ".", "get", "(", "'Action'", ")", ",", "'Principal'", ":", "principal", "}", "if", "(", "'ArnLike'", "in", "condition", ")", ":", "permission", "[", "'SourceArn'", "]", "=", "condition", "[", "'ArnLike'", "]", ".", "get", "(", "'AWS:SourceArn'", ")", "if", "(", "'StringEquals'", "in", "condition", ")", ":", "permission", "[", "'SourceAccount'", "]", "=", "condition", "[", "'StringEquals'", "]", ".", "get", "(", "'AWS:SourceAccount'", ")", "permissions", "[", "statement", ".", "get", "(", "'Sid'", ")", "]", "=", "permission", "return", "{", 
"'permissions'", ":", "permissions", "}", "except", "ClientError", "as", "e", ":", "err", "=", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "if", "(", "e", ".", "response", ".", "get", "(", "'Error'", ",", "{", "}", ")", ".", "get", "(", "'Code'", ")", "==", "'ResourceNotFoundException'", ")", ":", "return", "{", "'permissions'", ":", "None", "}", "return", "{", "'permissions'", ":", "None", ",", "'error'", ":", "err", "}" ]
get the permissions for the passed object args: obj_name : the name of or path to the object .
train
true
14,970
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError]) def request_password_change(email, orig_host, is_secure): form = PasswordResetFormNoActive({'email': email}) if form.is_valid(): form.save(from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL), domain_override=orig_host, use_https=is_secure) else: raise UserNotFound
[ "@", "intercept_errors", "(", "UserAPIInternalError", ",", "ignore_errors", "=", "[", "UserAPIRequestError", "]", ")", "def", "request_password_change", "(", "email", ",", "orig_host", ",", "is_secure", ")", ":", "form", "=", "PasswordResetFormNoActive", "(", "{", "'email'", ":", "email", "}", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", ".", "save", "(", "from_email", "=", "configuration_helpers", ".", "get_value", "(", "'email_from_address'", ",", "settings", ".", "DEFAULT_FROM_EMAIL", ")", ",", "domain_override", "=", "orig_host", ",", "use_https", "=", "is_secure", ")", "else", ":", "raise", "UserNotFound" ]
email a single-use link for performing a password reset .
train
false
14,971
@deprecated('The function log_multivariate_normal_density is deprecated in 0.18 and will be removed in 0.20.') def log_multivariate_normal_density(X, means, covars, covariance_type='diag'): log_multivariate_normal_density_dict = {'spherical': _log_multivariate_normal_density_spherical, 'tied': _log_multivariate_normal_density_tied, 'diag': _log_multivariate_normal_density_diag, 'full': _log_multivariate_normal_density_full} return log_multivariate_normal_density_dict[covariance_type](X, means, covars)
[ "@", "deprecated", "(", "'The function log_multivariate_normal_density is deprecated in 0.18 and will be removed in 0.20.'", ")", "def", "log_multivariate_normal_density", "(", "X", ",", "means", ",", "covars", ",", "covariance_type", "=", "'diag'", ")", ":", "log_multivariate_normal_density_dict", "=", "{", "'spherical'", ":", "_log_multivariate_normal_density_spherical", ",", "'tied'", ":", "_log_multivariate_normal_density_tied", ",", "'diag'", ":", "_log_multivariate_normal_density_diag", ",", "'full'", ":", "_log_multivariate_normal_density_full", "}", "return", "log_multivariate_normal_density_dict", "[", "covariance_type", "]", "(", "X", ",", "means", ",", "covars", ")" ]
compute the log probability under a multivariate gaussian distribution .
train
true
14,972
def is_extension_enabled(extension_name, service): config_dict = {'compute': CONF.compute_feature_enabled.api_extensions, 'volume': CONF.volume_feature_enabled.api_extensions, 'network': CONF.network_feature_enabled.api_extensions, 'object': CONF.object_storage_feature_enabled.discoverable_apis, 'identity': CONF.identity_feature_enabled.api_extensions} if (len(config_dict[service]) == 0): return False if (config_dict[service][0] == 'all'): return True if (extension_name in config_dict[service]): return True return False
[ "def", "is_extension_enabled", "(", "extension_name", ",", "service", ")", ":", "config_dict", "=", "{", "'compute'", ":", "CONF", ".", "compute_feature_enabled", ".", "api_extensions", ",", "'volume'", ":", "CONF", ".", "volume_feature_enabled", ".", "api_extensions", ",", "'network'", ":", "CONF", ".", "network_feature_enabled", ".", "api_extensions", ",", "'object'", ":", "CONF", ".", "object_storage_feature_enabled", ".", "discoverable_apis", ",", "'identity'", ":", "CONF", ".", "identity_feature_enabled", ".", "api_extensions", "}", "if", "(", "len", "(", "config_dict", "[", "service", "]", ")", "==", "0", ")", ":", "return", "False", "if", "(", "config_dict", "[", "service", "]", "[", "0", "]", "==", "'all'", ")", ":", "return", "True", "if", "(", "extension_name", "in", "config_dict", "[", "service", "]", ")", ":", "return", "True", "return", "False" ]
a function that will check the list of enabled extensions from config .
train
false
14,973
def syslog(server, enable=True): if (enable and __execute_cmd('config -g cfgRemoteHosts -o cfgRhostsSyslogEnable 1')): return __execute_cmd('config -g cfgRemoteHosts -o cfgRhostsSyslogServer1 {0}'.format(server)) return __execute_cmd('config -g cfgRemoteHosts -o cfgRhostsSyslogEnable 0')
[ "def", "syslog", "(", "server", ",", "enable", "=", "True", ")", ":", "if", "(", "enable", "and", "__execute_cmd", "(", "'config -g cfgRemoteHosts -o cfgRhostsSyslogEnable 1'", ")", ")", ":", "return", "__execute_cmd", "(", "'config -g cfgRemoteHosts -o cfgRhostsSyslogServer1 {0}'", ".", "format", "(", "server", ")", ")", "return", "__execute_cmd", "(", "'config -g cfgRemoteHosts -o cfgRhostsSyslogEnable 0'", ")" ]
configure syslog remote logging .
train
true
14,974
def group_columns(A, order=0): if issparse(A): A = csc_matrix(A) else: A = np.atleast_2d(A) A = (A != 0).astype(np.int32) if (A.ndim != 2): raise ValueError('`A` must be 2-dimensional.') (m, n) = A.shape if ((order is None) or np.isscalar(order)): rng = np.random.RandomState(order) order = rng.permutation(n) else: order = np.asarray(order) if (order.shape != (n,)): raise ValueError('`order` has incorrect shape.') A = A[:, order] if issparse(A): groups = group_sparse(m, n, A.indices, A.indptr) else: groups = group_dense(m, n, A) groups[order] = groups.copy() return groups
[ "def", "group_columns", "(", "A", ",", "order", "=", "0", ")", ":", "if", "issparse", "(", "A", ")", ":", "A", "=", "csc_matrix", "(", "A", ")", "else", ":", "A", "=", "np", ".", "atleast_2d", "(", "A", ")", "A", "=", "(", "A", "!=", "0", ")", ".", "astype", "(", "np", ".", "int32", ")", "if", "(", "A", ".", "ndim", "!=", "2", ")", ":", "raise", "ValueError", "(", "'`A` must be 2-dimensional.'", ")", "(", "m", ",", "n", ")", "=", "A", ".", "shape", "if", "(", "(", "order", "is", "None", ")", "or", "np", ".", "isscalar", "(", "order", ")", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "order", ")", "order", "=", "rng", ".", "permutation", "(", "n", ")", "else", ":", "order", "=", "np", ".", "asarray", "(", "order", ")", "if", "(", "order", ".", "shape", "!=", "(", "n", ",", ")", ")", ":", "raise", "ValueError", "(", "'`order` has incorrect shape.'", ")", "A", "=", "A", "[", ":", ",", "order", "]", "if", "issparse", "(", "A", ")", ":", "groups", "=", "group_sparse", "(", "m", ",", "n", ",", "A", ".", "indices", ",", "A", ".", "indptr", ")", "else", ":", "groups", "=", "group_dense", "(", "m", ",", "n", ",", "A", ")", "groups", "[", "order", "]", "=", "groups", ".", "copy", "(", ")", "return", "groups" ]
group columns of a 2-d matrix for sparse finite differencing [1]_ .
train
false
14,978
def tes_mkdir_raises_when_path_is_not_a_dir(): mox = Mox() mox.StubOutWithMock(io, 'os') mox.StubOutWithMock(io.os, 'path') class MyFs(io.FileSystem, ): pass oserror = OSError() oserror.errno = 17 io.os.makedirs('/make/all/those/subdirs').AndRaise(oserror) io.os.isdir('/make/all/those/subdirs').AndReturn(False) mox.ReplayAll() try: assert_raises(OSError, MyFs.mkdir, '/make/all/those/subdirs') mox.VerifyAll() finally: mox.UnsetStubs()
[ "def", "tes_mkdir_raises_when_path_is_not_a_dir", "(", ")", ":", "mox", "=", "Mox", "(", ")", "mox", ".", "StubOutWithMock", "(", "io", ",", "'os'", ")", "mox", ".", "StubOutWithMock", "(", "io", ".", "os", ",", "'path'", ")", "class", "MyFs", "(", "io", ".", "FileSystem", ",", ")", ":", "pass", "oserror", "=", "OSError", "(", ")", "oserror", ".", "errno", "=", "17", "io", ".", "os", ".", "makedirs", "(", "'/make/all/those/subdirs'", ")", ".", "AndRaise", "(", "oserror", ")", "io", ".", "os", ".", "isdir", "(", "'/make/all/those/subdirs'", ")", ".", "AndReturn", "(", "False", ")", "mox", ".", "ReplayAll", "(", ")", "try", ":", "assert_raises", "(", "OSError", ",", "MyFs", ".", "mkdir", ",", "'/make/all/those/subdirs'", ")", "mox", ".", "VerifyAll", "(", ")", "finally", ":", "mox", ".", "UnsetStubs", "(", ")" ]
test mkdir raises when path is not a dir .
train
false
14,980
@retry_on_failure def test_getprotobyname(): proto_map = {'icmp': socket.IPPROTO_ICMP, 'ip': socket.IPPROTO_IP, 'tcp': socket.IPPROTO_TCP, 'udp': socket.IPPROTO_UDP} if is_cli: proto_map.update({'dstopts': socket.IPPROTO_DSTOPTS, 'none': socket.IPPROTO_NONE, 'raw': socket.IPPROTO_RAW, 'ipv4': socket.IPPROTO_IPV4, 'ipv6': socket.IPPROTO_IPV6, 'esp': socket.IPPROTO_ESP, 'fragment': socket.IPPROTO_FRAGMENT, 'nd': socket.IPPROTO_ND, 'icmpv6': socket.IPPROTO_ICMPV6, 'routing': socket.IPPROTO_ROUTING, 'pup': socket.IPPROTO_PUP, 'ggp': socket.IPPROTO_GGP}) for (proto_name, good_val) in proto_map.iteritems(): temp_val = socket.getprotobyname(proto_name) AreEqual(temp_val, good_val) bad_list = ['', 'blah', 'i'] for name in bad_list: AssertError(socket.error, socket.getprotobyname, name)
[ "@", "retry_on_failure", "def", "test_getprotobyname", "(", ")", ":", "proto_map", "=", "{", "'icmp'", ":", "socket", ".", "IPPROTO_ICMP", ",", "'ip'", ":", "socket", ".", "IPPROTO_IP", ",", "'tcp'", ":", "socket", ".", "IPPROTO_TCP", ",", "'udp'", ":", "socket", ".", "IPPROTO_UDP", "}", "if", "is_cli", ":", "proto_map", ".", "update", "(", "{", "'dstopts'", ":", "socket", ".", "IPPROTO_DSTOPTS", ",", "'none'", ":", "socket", ".", "IPPROTO_NONE", ",", "'raw'", ":", "socket", ".", "IPPROTO_RAW", ",", "'ipv4'", ":", "socket", ".", "IPPROTO_IPV4", ",", "'ipv6'", ":", "socket", ".", "IPPROTO_IPV6", ",", "'esp'", ":", "socket", ".", "IPPROTO_ESP", ",", "'fragment'", ":", "socket", ".", "IPPROTO_FRAGMENT", ",", "'nd'", ":", "socket", ".", "IPPROTO_ND", ",", "'icmpv6'", ":", "socket", ".", "IPPROTO_ICMPV6", ",", "'routing'", ":", "socket", ".", "IPPROTO_ROUTING", ",", "'pup'", ":", "socket", ".", "IPPROTO_PUP", ",", "'ggp'", ":", "socket", ".", "IPPROTO_GGP", "}", ")", "for", "(", "proto_name", ",", "good_val", ")", "in", "proto_map", ".", "iteritems", "(", ")", ":", "temp_val", "=", "socket", ".", "getprotobyname", "(", "proto_name", ")", "AreEqual", "(", "temp_val", ",", "good_val", ")", "bad_list", "=", "[", "''", ",", "'blah'", ",", "'i'", "]", "for", "name", "in", "bad_list", ":", "AssertError", "(", "socket", ".", "error", ",", "socket", ".", "getprotobyname", ",", "name", ")" ]
tests socket .
train
false
14,981
def bulk_benchmark_estimator(estimator, X_test, n_bulk_repeats, verbose): n_instances = X_test.shape[0] runtimes = np.zeros(n_bulk_repeats, dtype=np.float) for i in range(n_bulk_repeats): start = time.time() estimator.predict(X_test) runtimes[i] = (time.time() - start) runtimes = np.array(list(map((lambda x: (x / float(n_instances))), runtimes))) if verbose: print('bulk_benchmark runtimes:', min(runtimes), scoreatpercentile(runtimes, 50), max(runtimes)) return runtimes
[ "def", "bulk_benchmark_estimator", "(", "estimator", ",", "X_test", ",", "n_bulk_repeats", ",", "verbose", ")", ":", "n_instances", "=", "X_test", ".", "shape", "[", "0", "]", "runtimes", "=", "np", ".", "zeros", "(", "n_bulk_repeats", ",", "dtype", "=", "np", ".", "float", ")", "for", "i", "in", "range", "(", "n_bulk_repeats", ")", ":", "start", "=", "time", ".", "time", "(", ")", "estimator", ".", "predict", "(", "X_test", ")", "runtimes", "[", "i", "]", "=", "(", "time", ".", "time", "(", ")", "-", "start", ")", "runtimes", "=", "np", ".", "array", "(", "list", "(", "map", "(", "(", "lambda", "x", ":", "(", "x", "/", "float", "(", "n_instances", ")", ")", ")", ",", "runtimes", ")", ")", ")", "if", "verbose", ":", "print", "(", "'bulk_benchmark runtimes:'", ",", "min", "(", "runtimes", ")", ",", "scoreatpercentile", "(", "runtimes", ",", "50", ")", ",", "max", "(", "runtimes", ")", ")", "return", "runtimes" ]
measure runtime prediction of the whole input .
train
false
14,982
def basedir_rel(*args): return os.path.join(CONF.pybasedir, *args)
[ "def", "basedir_rel", "(", "*", "args", ")", ":", "return", "os", ".", "path", ".", "join", "(", "CONF", ".", "pybasedir", ",", "*", "args", ")" ]
return a path relative to $pybasedir .
train
false
14,984
def to_camelcase(string): string = re.sub('(\\s)', (lambda match: '_'), string) string = re.sub('^(_*)(.)', (lambda match: (match.group(1) + match.group(2).lower())), string) return re.sub('(?<=[^_])_+([^_])', (lambda match: match.group(1).upper()), string)
[ "def", "to_camelcase", "(", "string", ")", ":", "string", "=", "re", ".", "sub", "(", "'(\\\\s)'", ",", "(", "lambda", "match", ":", "'_'", ")", ",", "string", ")", "string", "=", "re", ".", "sub", "(", "'^(_*)(.)'", ",", "(", "lambda", "match", ":", "(", "match", ".", "group", "(", "1", ")", "+", "match", ".", "group", "(", "2", ")", ".", "lower", "(", ")", ")", ")", ",", "string", ")", "return", "re", ".", "sub", "(", "'(?<=[^_])_+([^_])'", ",", "(", "lambda", "match", ":", "match", ".", "group", "(", "1", ")", ".", "upper", "(", ")", ")", ",", "string", ")" ]
converts the given string to camel-case .
train
false
14,986
def last_word(text, include='alphanum_underscore'): if (not text): return '' if text[(-1)].isspace(): return '' else: regex = cleanup_regex[include] matches = regex.search(text) if matches: return matches.group(0) else: return ''
[ "def", "last_word", "(", "text", ",", "include", "=", "'alphanum_underscore'", ")", ":", "if", "(", "not", "text", ")", ":", "return", "''", "if", "text", "[", "(", "-", "1", ")", "]", ".", "isspace", "(", ")", ":", "return", "''", "else", ":", "regex", "=", "cleanup_regex", "[", "include", "]", "matches", "=", "regex", ".", "search", "(", "text", ")", "if", "matches", ":", "return", "matches", ".", "group", "(", "0", ")", "else", ":", "return", "''" ]
find the last word in a sentence .
train
true
14,987
def linkify_only_full_urls(attrs, new=False): if (not new): return attrs if (not attrs['_text'].startswith(('http:', 'https:'))): return None return attrs
[ "def", "linkify_only_full_urls", "(", "attrs", ",", "new", "=", "False", ")", ":", "if", "(", "not", "new", ")", ":", "return", "attrs", "if", "(", "not", "attrs", "[", "'_text'", "]", ".", "startswith", "(", "(", "'http:'", ",", "'https:'", ")", ")", ")", ":", "return", "None", "return", "attrs" ]
linkify only full links .
train
false
14,988
def setup_behave(): from behave.configuration import Configuration Configuration.defaults['show_timings'] = False
[ "def", "setup_behave", "(", ")", ":", "from", "behave", ".", "configuration", "import", "Configuration", "Configuration", ".", "defaults", "[", "'show_timings'", "]", "=", "False" ]
apply tweaks .
train
false
14,989
def get_ip(data): try: ip = data.public_ips[0] except Exception: ip = data.private_ips[0] return ip
[ "def", "get_ip", "(", "data", ")", ":", "try", ":", "ip", "=", "data", ".", "public_ips", "[", "0", "]", "except", "Exception", ":", "ip", "=", "data", ".", "private_ips", "[", "0", "]", "return", "ip" ]
return the ip associated with the named host cli example: .
train
true
14,991
def generate_client_login_request_body(email, password, service, source, account_type='HOSTED_OR_GOOGLE', captcha_token=None, captcha_response=None): request_fields = {'Email': email, 'Passwd': password, 'accountType': account_type, 'service': service, 'source': source} if (captcha_token and captcha_response): request_fields['logintoken'] = captcha_token request_fields['logincaptcha'] = captcha_response return urllib.urlencode(request_fields)
[ "def", "generate_client_login_request_body", "(", "email", ",", "password", ",", "service", ",", "source", ",", "account_type", "=", "'HOSTED_OR_GOOGLE'", ",", "captcha_token", "=", "None", ",", "captcha_response", "=", "None", ")", ":", "request_fields", "=", "{", "'Email'", ":", "email", ",", "'Passwd'", ":", "password", ",", "'accountType'", ":", "account_type", ",", "'service'", ":", "service", ",", "'source'", ":", "source", "}", "if", "(", "captcha_token", "and", "captcha_response", ")", ":", "request_fields", "[", "'logintoken'", "]", "=", "captcha_token", "request_fields", "[", "'logincaptcha'", "]", "=", "captcha_response", "return", "urllib", ".", "urlencode", "(", "request_fields", ")" ]
creates the body of the autentication request see URL#request for more details .
train
false
14,992
def _get_indentation(line): if line.strip(): non_whitespace_index = (len(line) - len(line.lstrip())) return line[:non_whitespace_index] else: return u''
[ "def", "_get_indentation", "(", "line", ")", ":", "if", "line", ".", "strip", "(", ")", ":", "non_whitespace_index", "=", "(", "len", "(", "line", ")", "-", "len", "(", "line", ".", "lstrip", "(", ")", ")", ")", "return", "line", "[", ":", "non_whitespace_index", "]", "else", ":", "return", "u''" ]
return leading whitespace .
train
true
14,995
def _iter_tests(names): for name in names: suite = _load_tests(name) for test in iterate_tests(suite): (yield test)
[ "def", "_iter_tests", "(", "names", ")", ":", "for", "name", "in", "names", ":", "suite", "=", "_load_tests", "(", "name", ")", "for", "test", "in", "iterate_tests", "(", "suite", ")", ":", "(", "yield", "test", ")" ]
given a list of names .
train
false
14,996
def figure(num=None, figsize=None, dpi=None, facecolor=None, edgecolor=None, frameon=True, FigureClass=Figure, **kwargs): if (figsize is None): figsize = rcParams[u'figure.figsize'] if (dpi is None): dpi = rcParams[u'figure.dpi'] if (facecolor is None): facecolor = rcParams[u'figure.facecolor'] if (edgecolor is None): edgecolor = rcParams[u'figure.edgecolor'] allnums = get_fignums() next_num = ((max(allnums) + 1) if allnums else 1) figLabel = u'' if (num is None): num = next_num elif is_string_like(num): figLabel = num allLabels = get_figlabels() if (figLabel not in allLabels): if (figLabel == u'all'): warnings.warn(u"close('all') closes all existing figures") num = next_num else: inum = allLabels.index(figLabel) num = allnums[inum] else: num = int(num) figManager = _pylab_helpers.Gcf.get_fig_manager(num) if (figManager is None): max_open_warning = rcParams[u'figure.max_open_warning'] if ((max_open_warning >= 1) and (len(allnums) >= max_open_warning)): warnings.warn((u'More than %d figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).' % max_open_warning), RuntimeWarning) if (get_backend().lower() == u'ps'): dpi = 72 figManager = new_figure_manager(num, figsize=figsize, dpi=dpi, facecolor=facecolor, edgecolor=edgecolor, frameon=frameon, FigureClass=FigureClass, **kwargs) if figLabel: figManager.set_window_title(figLabel) figManager.canvas.figure.set_label(figLabel) def make_active(event): _pylab_helpers.Gcf.set_active(figManager) cid = figManager.canvas.mpl_connect(u'button_press_event', make_active) figManager._cidgcf = cid _pylab_helpers.Gcf.set_active(figManager) fig = figManager.canvas.figure fig.number = num if matplotlib.is_interactive(): draw_if_interactive() if _INSTALL_FIG_OBSERVER: fig.stale_callback = _auto_draw_if_interactive return figManager.canvas.figure
[ "def", "figure", "(", "num", "=", "None", ",", "figsize", "=", "None", ",", "dpi", "=", "None", ",", "facecolor", "=", "None", ",", "edgecolor", "=", "None", ",", "frameon", "=", "True", ",", "FigureClass", "=", "Figure", ",", "**", "kwargs", ")", ":", "if", "(", "figsize", "is", "None", ")", ":", "figsize", "=", "rcParams", "[", "u'figure.figsize'", "]", "if", "(", "dpi", "is", "None", ")", ":", "dpi", "=", "rcParams", "[", "u'figure.dpi'", "]", "if", "(", "facecolor", "is", "None", ")", ":", "facecolor", "=", "rcParams", "[", "u'figure.facecolor'", "]", "if", "(", "edgecolor", "is", "None", ")", ":", "edgecolor", "=", "rcParams", "[", "u'figure.edgecolor'", "]", "allnums", "=", "get_fignums", "(", ")", "next_num", "=", "(", "(", "max", "(", "allnums", ")", "+", "1", ")", "if", "allnums", "else", "1", ")", "figLabel", "=", "u''", "if", "(", "num", "is", "None", ")", ":", "num", "=", "next_num", "elif", "is_string_like", "(", "num", ")", ":", "figLabel", "=", "num", "allLabels", "=", "get_figlabels", "(", ")", "if", "(", "figLabel", "not", "in", "allLabels", ")", ":", "if", "(", "figLabel", "==", "u'all'", ")", ":", "warnings", ".", "warn", "(", "u\"close('all') closes all existing figures\"", ")", "num", "=", "next_num", "else", ":", "inum", "=", "allLabels", ".", "index", "(", "figLabel", ")", "num", "=", "allnums", "[", "inum", "]", "else", ":", "num", "=", "int", "(", "num", ")", "figManager", "=", "_pylab_helpers", ".", "Gcf", ".", "get_fig_manager", "(", "num", ")", "if", "(", "figManager", "is", "None", ")", ":", "max_open_warning", "=", "rcParams", "[", "u'figure.max_open_warning'", "]", "if", "(", "(", "max_open_warning", ">=", "1", ")", "and", "(", "len", "(", "allnums", ")", ">=", "max_open_warning", ")", ")", ":", "warnings", ".", "warn", "(", "(", "u'More than %d figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).'", "%", "max_open_warning", ")", ",", "RuntimeWarning", ")", "if", "(", "get_backend", "(", ")", ".", "lower", "(", ")", "==", "u'ps'", ")", ":", "dpi", "=", "72", "figManager", "=", "new_figure_manager", "(", "num", ",", "figsize", "=", "figsize", ",", "dpi", "=", "dpi", ",", "facecolor", "=", "facecolor", ",", "edgecolor", "=", "edgecolor", ",", "frameon", "=", "frameon", ",", "FigureClass", "=", "FigureClass", ",", "**", "kwargs", ")", "if", "figLabel", ":", "figManager", ".", "set_window_title", "(", "figLabel", ")", "figManager", ".", "canvas", ".", "figure", ".", "set_label", "(", "figLabel", ")", "def", "make_active", "(", "event", ")", ":", "_pylab_helpers", ".", "Gcf", ".", "set_active", "(", "figManager", ")", "cid", "=", "figManager", ".", "canvas", ".", "mpl_connect", "(", "u'button_press_event'", ",", "make_active", ")", "figManager", ".", "_cidgcf", "=", "cid", "_pylab_helpers", ".", "Gcf", ".", "set_active", "(", "figManager", ")", "fig", "=", "figManager", ".", "canvas", ".", "figure", "fig", ".", "number", "=", "num", "if", "matplotlib", ".", "is_interactive", "(", ")", ":", "draw_if_interactive", "(", ")", "if", "_INSTALL_FIG_OBSERVER", ":", "fig", ".", "stale_callback", "=", "_auto_draw_if_interactive", "return", "figManager", ".", "canvas", ".", "figure" ]
create a new :class:~bokeh .
train
false
14,997
def get_default_resolver(): global default_resolver if (default_resolver is None): default_resolver = Resolver() return default_resolver
[ "def", "get_default_resolver", "(", ")", ":", "global", "default_resolver", "if", "(", "default_resolver", "is", "None", ")", ":", "default_resolver", "=", "Resolver", "(", ")", "return", "default_resolver" ]
get the default resolver .
train
false
14,998
def translate_exception(req, e): if (not hasattr(req, 'best_match_language')): return e locale = req.best_match_language() if isinstance(e, webob.exc.HTTPError): e.explanation = i18n.translate(e.explanation, locale) e.detail = i18n.translate(e.detail, locale) if getattr(e, 'body_template', None): e.body_template = i18n.translate(e.body_template, locale) return e
[ "def", "translate_exception", "(", "req", ",", "e", ")", ":", "if", "(", "not", "hasattr", "(", "req", ",", "'best_match_language'", ")", ")", ":", "return", "e", "locale", "=", "req", ".", "best_match_language", "(", ")", "if", "isinstance", "(", "e", ",", "webob", ".", "exc", ".", "HTTPError", ")", ":", "e", ".", "explanation", "=", "i18n", ".", "translate", "(", "e", ".", "explanation", ",", "locale", ")", "e", ".", "detail", "=", "i18n", ".", "translate", "(", "e", ".", "detail", ",", "locale", ")", "if", "getattr", "(", "e", ",", "'body_template'", ",", "None", ")", ":", "e", ".", "body_template", "=", "i18n", ".", "translate", "(", "e", ".", "body_template", ",", "locale", ")", "return", "e" ]
if passed an exc_info it will automatically rewrite the exceptions all the way down to the correct line numbers and frames .
train
false
14,999
def XML2Node(xml): return NodeBuilder(xml).getDom()
[ "def", "XML2Node", "(", "xml", ")", ":", "return", "NodeBuilder", "(", "xml", ")", ".", "getDom", "(", ")" ]
converts supplied textual string into xml node .
train
false
15,000
def _Logger_notice(self, msg, *args, **kwargs): if self.isEnabledFor(logging.NOTICE): self._log(logging.NOTICE, msg, args, **kwargs)
[ "def", "_Logger_notice", "(", "self", ",", "msg", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "self", ".", "isEnabledFor", "(", "logging", ".", "NOTICE", ")", ":", "self", ".", "_log", "(", "logging", ".", "NOTICE", ",", "msg", ",", "args", ",", "**", "kwargs", ")" ]
log msg % args with severity notice .
train
true
15,001
def chain_test(n): global planner planner = Planner() (prev, first, last) = (None, None, None) for i in range((n + 1)): name = ('v%s' % i) v = Variable(name) if (prev is not None): EqualityConstraint(prev, v, Strength.REQUIRED) if (i == 0): first = v if (i == n): last = v prev = v StayConstraint(last, Strength.STRONG_DEFAULT) edit = EditConstraint(first, Strength.PREFERRED) edits = OrderedCollection() edits.append(edit) plan = planner.extract_plan_from_constraints(edits) for i in range(100): first.value = i plan.execute() if (last.value != i): print('Chain test failed.')
[ "def", "chain_test", "(", "n", ")", ":", "global", "planner", "planner", "=", "Planner", "(", ")", "(", "prev", ",", "first", ",", "last", ")", "=", "(", "None", ",", "None", ",", "None", ")", "for", "i", "in", "range", "(", "(", "n", "+", "1", ")", ")", ":", "name", "=", "(", "'v%s'", "%", "i", ")", "v", "=", "Variable", "(", "name", ")", "if", "(", "prev", "is", "not", "None", ")", ":", "EqualityConstraint", "(", "prev", ",", "v", ",", "Strength", ".", "REQUIRED", ")", "if", "(", "i", "==", "0", ")", ":", "first", "=", "v", "if", "(", "i", "==", "n", ")", ":", "last", "=", "v", "prev", "=", "v", "StayConstraint", "(", "last", ",", "Strength", ".", "STRONG_DEFAULT", ")", "edit", "=", "EditConstraint", "(", "first", ",", "Strength", ".", "PREFERRED", ")", "edits", "=", "OrderedCollection", "(", ")", "edits", ".", "append", "(", "edit", ")", "plan", "=", "planner", ".", "extract_plan_from_constraints", "(", "edits", ")", "for", "i", "in", "range", "(", "100", ")", ":", "first", ".", "value", "=", "i", "plan", ".", "execute", "(", ")", "if", "(", "last", ".", "value", "!=", "i", ")", ":", "print", "(", "'Chain test failed.'", ")" ]
this is the standard deltablue benchmark .
train
true
15,002
def artifactory(registry, xml_parent, data): artifactory = XML.SubElement(xml_parent, 'org.jfrog.hudson.ArtifactoryRedeployPublisher') helpers.artifactory_optional_props(artifactory, data, 'publishers') XML.SubElement(artifactory, 'matrixParams').text = ','.join(data.get('matrix-params', [])) details = XML.SubElement(artifactory, 'details') helpers.artifactory_common_details(details, data) XML.SubElement(details, 'repositoryKey').text = data.get('release-repo-key', '') XML.SubElement(details, 'snapshotsRepositoryKey').text = data.get('snapshot-repo-key', '') plugin = XML.SubElement(details, 'stagingPlugin') XML.SubElement(plugin, 'pluginName').text = 'None' helpers.artifactory_deployment_patterns(artifactory, data) helpers.artifactory_env_vars_patterns(artifactory, data)
[ "def", "artifactory", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "artifactory", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'org.jfrog.hudson.ArtifactoryRedeployPublisher'", ")", "helpers", ".", "artifactory_optional_props", "(", "artifactory", ",", "data", ",", "'publishers'", ")", "XML", ".", "SubElement", "(", "artifactory", ",", "'matrixParams'", ")", ".", "text", "=", "','", ".", "join", "(", "data", ".", "get", "(", "'matrix-params'", ",", "[", "]", ")", ")", "details", "=", "XML", ".", "SubElement", "(", "artifactory", ",", "'details'", ")", "helpers", ".", "artifactory_common_details", "(", "details", ",", "data", ")", "XML", ".", "SubElement", "(", "details", ",", "'repositoryKey'", ")", ".", "text", "=", "data", ".", "get", "(", "'release-repo-key'", ",", "''", ")", "XML", ".", "SubElement", "(", "details", ",", "'snapshotsRepositoryKey'", ")", ".", "text", "=", "data", ".", "get", "(", "'snapshot-repo-key'", ",", "''", ")", "plugin", "=", "XML", ".", "SubElement", "(", "details", ",", "'stagingPlugin'", ")", "XML", ".", "SubElement", "(", "plugin", ",", "'pluginName'", ")", ".", "text", "=", "'None'", "helpers", ".", "artifactory_deployment_patterns", "(", "artifactory", ",", "data", ")", "helpers", ".", "artifactory_env_vars_patterns", "(", "artifactory", ",", "data", ")" ]
yaml: artifactory uses/requires the artifactory plugin to deploy artifacts to artifactory server .
train
false
15,003
def timedelta2seconds(delta): return (((delta.microseconds / 1000000.0) + delta.seconds) + (((delta.days * 60) * 60) * 24))
[ "def", "timedelta2seconds", "(", "delta", ")", ":", "return", "(", "(", "(", "delta", ".", "microseconds", "/", "1000000.0", ")", "+", "delta", ".", "seconds", ")", "+", "(", "(", "(", "delta", ".", "days", "*", "60", ")", "*", "60", ")", "*", "24", ")", ")" ]
convert a datetime .
train
false
15,004
def FastqPhredIterator(handle, alphabet=single_letter_alphabet, title2ids=None): assert (SANGER_SCORE_OFFSET == ord('!')) q_mapping = dict() for letter in range(0, 255): q_mapping[chr(letter)] = (letter - SANGER_SCORE_OFFSET) for (title_line, seq_string, quality_string) in FastqGeneralIterator(handle): if title2ids: (id, name, descr) = title2ids(title_line) else: descr = title_line id = descr.split()[0] name = id record = SeqRecord(Seq(seq_string, alphabet), id=id, name=name, description=descr) qualities = [q_mapping[letter] for letter in quality_string] if (qualities and ((min(qualities) < 0) or (max(qualities) > 93))): raise ValueError('Invalid character in quality string') dict.__setitem__(record._per_letter_annotations, 'phred_quality', qualities) (yield record)
[ "def", "FastqPhredIterator", "(", "handle", ",", "alphabet", "=", "single_letter_alphabet", ",", "title2ids", "=", "None", ")", ":", "assert", "(", "SANGER_SCORE_OFFSET", "==", "ord", "(", "'!'", ")", ")", "q_mapping", "=", "dict", "(", ")", "for", "letter", "in", "range", "(", "0", ",", "255", ")", ":", "q_mapping", "[", "chr", "(", "letter", ")", "]", "=", "(", "letter", "-", "SANGER_SCORE_OFFSET", ")", "for", "(", "title_line", ",", "seq_string", ",", "quality_string", ")", "in", "FastqGeneralIterator", "(", "handle", ")", ":", "if", "title2ids", ":", "(", "id", ",", "name", ",", "descr", ")", "=", "title2ids", "(", "title_line", ")", "else", ":", "descr", "=", "title_line", "id", "=", "descr", ".", "split", "(", ")", "[", "0", "]", "name", "=", "id", "record", "=", "SeqRecord", "(", "Seq", "(", "seq_string", ",", "alphabet", ")", ",", "id", "=", "id", ",", "name", "=", "name", ",", "description", "=", "descr", ")", "qualities", "=", "[", "q_mapping", "[", "letter", "]", "for", "letter", "in", "quality_string", "]", "if", "(", "qualities", "and", "(", "(", "min", "(", "qualities", ")", "<", "0", ")", "or", "(", "max", "(", "qualities", ")", ">", "93", ")", ")", ")", ":", "raise", "ValueError", "(", "'Invalid character in quality string'", ")", "dict", ".", "__setitem__", "(", "record", ".", "_per_letter_annotations", ",", "'phred_quality'", ",", "qualities", ")", "(", "yield", "record", ")" ]
generator function to iterate over fastq records .
train
false
15,005
def getSegmentPath(loop, path, pointIndex, segmentCenter): centerBegin = loop[pointIndex] centerEnd = loop[((pointIndex + 1) % len(loop))] centerEndMinusBegin = (centerEnd - centerBegin) if (abs(centerEndMinusBegin) <= 0.0): return [centerBegin] if (segmentCenter != None): return getRadialPath(centerBegin, centerEnd, path, segmentCenter) begin = loop[(((pointIndex + len(loop)) - 1) % len(loop))] end = loop[((pointIndex + 2) % len(loop))] return getWedgePath(begin, centerBegin, centerEnd, centerEndMinusBegin, end, path)
[ "def", "getSegmentPath", "(", "loop", ",", "path", ",", "pointIndex", ",", "segmentCenter", ")", ":", "centerBegin", "=", "loop", "[", "pointIndex", "]", "centerEnd", "=", "loop", "[", "(", "(", "pointIndex", "+", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "centerEndMinusBegin", "=", "(", "centerEnd", "-", "centerBegin", ")", "if", "(", "abs", "(", "centerEndMinusBegin", ")", "<=", "0.0", ")", ":", "return", "[", "centerBegin", "]", "if", "(", "segmentCenter", "!=", "None", ")", ":", "return", "getRadialPath", "(", "centerBegin", ",", "centerEnd", ",", "path", ",", "segmentCenter", ")", "begin", "=", "loop", "[", "(", "(", "(", "pointIndex", "+", "len", "(", "loop", ")", ")", "-", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "end", "=", "loop", "[", "(", "(", "pointIndex", "+", "2", ")", "%", "len", "(", "loop", ")", ")", "]", "return", "getWedgePath", "(", "begin", ",", "centerBegin", ",", "centerEnd", ",", "centerEndMinusBegin", ",", "end", ",", "path", ")" ]
get segment path .
train
false
15,006
def check_config_ajax(request): if (not request.user.is_superuser): return HttpResponse('') error_list = _get_config_errors(request) if (not error_list): return HttpResponse('') return render('config_alert_dock.mako', request, dict(error_list=error_list), force_template=True)
[ "def", "check_config_ajax", "(", "request", ")", ":", "if", "(", "not", "request", ".", "user", ".", "is_superuser", ")", ":", "return", "HttpResponse", "(", "''", ")", "error_list", "=", "_get_config_errors", "(", "request", ")", "if", "(", "not", "error_list", ")", ":", "return", "HttpResponse", "(", "''", ")", "return", "render", "(", "'config_alert_dock.mako'", ",", "request", ",", "dict", "(", "error_list", "=", "error_list", ")", ",", "force_template", "=", "True", ")" ]
alert administrators about configuration problems .
train
false
15,007
@login_required def commit_poll(request): return HttpResponse(json.dumps(view_helpers.mission_completed(request.user.get_profile(), 'svn_commit')))
[ "@", "login_required", "def", "commit_poll", "(", "request", ")", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "view_helpers", ".", "mission_completed", "(", "request", ".", "user", ".", "get_profile", "(", ")", ",", "'svn_commit'", ")", ")", ")" ]
determines if svn commit mission is completed .
train
false
15,008
def getRoundedPoint(point): return Vector3(round(point.x), round(point.y), round(point.z))
[ "def", "getRoundedPoint", "(", "point", ")", ":", "return", "Vector3", "(", "round", "(", "point", ".", "x", ")", ",", "round", "(", "point", ".", "y", ")", ",", "round", "(", "point", ".", "z", ")", ")" ]
get point with each component rounded .
train
false
15,009
def darken(color, factor=0.7): newcol = color_to_reportlab(color) for a in ['red', 'green', 'blue']: setattr(newcol, a, (factor * getattr(newcol, a))) return newcol
[ "def", "darken", "(", "color", ",", "factor", "=", "0.7", ")", ":", "newcol", "=", "color_to_reportlab", "(", "color", ")", "for", "a", "in", "[", "'red'", ",", "'green'", ",", "'blue'", "]", ":", "setattr", "(", "newcol", ",", "a", ",", "(", "factor", "*", "getattr", "(", "newcol", ",", "a", ")", ")", ")", "return", "newcol" ]
darken a color by decreasing its lightness by percent .
train
false
15,011
@when(u'we delete a named query') def step_delete_named_query(context): context.cli.sendline(u'\\nd foo')
[ "@", "when", "(", "u'we delete a named query'", ")", "def", "step_delete_named_query", "(", "context", ")", ":", "context", ".", "cli", ".", "sendline", "(", "u'\\\\nd foo'", ")" ]
send d command .
train
false
15,012
def test_scharr_h_zeros(): result = filters.scharr_h(np.zeros((10, 10)), np.ones((10, 10), bool)) assert_allclose(result, 0)
[ "def", "test_scharr_h_zeros", "(", ")", ":", "result", "=", "filters", ".", "scharr_h", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose", "(", "result", ",", "0", ")" ]
horizontal scharr on an array of all zeros .
train
false
15,013
def store_emails(recipient_ids, notification_type, event, user, node, timestamp, **context): if (notification_type == 'none'): return template = (event + '.html.mako') context['user'] = user node_lineage_ids = (get_node_lineage(node) if node else []) for user_id in recipient_ids: if (user_id == user._id): continue recipient = OSFUser.load(user_id) context['localized_timestamp'] = localize_timestamp(timestamp, recipient) message = mails.render_message(template, **context) digest = NotificationDigest(timestamp=timestamp, send_type=notification_type, event=event, user=recipient, message=message, node_lineage=node_lineage_ids) digest.save()
[ "def", "store_emails", "(", "recipient_ids", ",", "notification_type", ",", "event", ",", "user", ",", "node", ",", "timestamp", ",", "**", "context", ")", ":", "if", "(", "notification_type", "==", "'none'", ")", ":", "return", "template", "=", "(", "event", "+", "'.html.mako'", ")", "context", "[", "'user'", "]", "=", "user", "node_lineage_ids", "=", "(", "get_node_lineage", "(", "node", ")", "if", "node", "else", "[", "]", ")", "for", "user_id", "in", "recipient_ids", ":", "if", "(", "user_id", "==", "user", ".", "_id", ")", ":", "continue", "recipient", "=", "OSFUser", ".", "load", "(", "user_id", ")", "context", "[", "'localized_timestamp'", "]", "=", "localize_timestamp", "(", "timestamp", ",", "recipient", ")", "message", "=", "mails", ".", "render_message", "(", "template", ",", "**", "context", ")", "digest", "=", "NotificationDigest", "(", "timestamp", "=", "timestamp", ",", "send_type", "=", "notification_type", ",", "event", "=", "event", ",", "user", "=", "recipient", ",", "message", "=", "message", ",", "node_lineage", "=", "node_lineage_ids", ")", "digest", ".", "save", "(", ")" ]
store notification emails emails are sent via celery beat as digests .
train
false
15,014
def primer_exceeds_mismatches(primer_seq, all_primer_seqs, max_primer_mm): if (primer_seq not in all_primer_seqs): if (not ok_mm_primer(primer_seq, all_primer_seqs, max_primer_mm)): return True return False
[ "def", "primer_exceeds_mismatches", "(", "primer_seq", ",", "all_primer_seqs", ",", "max_primer_mm", ")", ":", "if", "(", "primer_seq", "not", "in", "all_primer_seqs", ")", ":", "if", "(", "not", "ok_mm_primer", "(", "primer_seq", ",", "all_primer_seqs", ",", "max_primer_mm", ")", ")", ":", "return", "True", "return", "False" ]
returns true if primer exceeds allowed mismatches .
train
false
15,015
@task def consume_email(email_text, **kwargs): res = save_from_email_reply(email_text) if (not res): log.error('Failed to save email.')
[ "@", "task", "def", "consume_email", "(", "email_text", ",", "**", "kwargs", ")", ":", "res", "=", "save_from_email_reply", "(", "email_text", ")", "if", "(", "not", "res", ")", ":", "log", ".", "error", "(", "'Failed to save email.'", ")" ]
parse emails and save notes .
train
false
15,016
def attachable(name, path=None): cachekey = 'lxc.attachable{0}{1}'.format(name, path) try: return __context__[cachekey] except KeyError: _ensure_exists(name, path=path) log.debug('Checking if LXC container {0} is attachable'.format(name)) cmd = 'lxc-attach' if path: cmd += ' -P {0}'.format(pipes.quote(path)) cmd += ' --clear-env -n {0} -- /usr/bin/env'.format(name) result = (__salt__['cmd.retcode'](cmd, python_shell=False, output_loglevel='quiet', ignore_retcode=True) == 0) __context__[cachekey] = result return __context__[cachekey]
[ "def", "attachable", "(", "name", ",", "path", "=", "None", ")", ":", "cachekey", "=", "'lxc.attachable{0}{1}'", ".", "format", "(", "name", ",", "path", ")", "try", ":", "return", "__context__", "[", "cachekey", "]", "except", "KeyError", ":", "_ensure_exists", "(", "name", ",", "path", "=", "path", ")", "log", ".", "debug", "(", "'Checking if LXC container {0} is attachable'", ".", "format", "(", "name", ")", ")", "cmd", "=", "'lxc-attach'", "if", "path", ":", "cmd", "+=", "' -P {0}'", ".", "format", "(", "pipes", ".", "quote", "(", "path", ")", ")", "cmd", "+=", "' --clear-env -n {0} -- /usr/bin/env'", ".", "format", "(", "name", ")", "result", "=", "(", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ",", "python_shell", "=", "False", ",", "output_loglevel", "=", "'quiet'", ",", "ignore_retcode", "=", "True", ")", "==", "0", ")", "__context__", "[", "cachekey", "]", "=", "result", "return", "__context__", "[", "cachekey", "]" ]
return true if the named container can be attached to via the lxc-attach command path path to the container parent default: /var/lib/lxc .
train
true
15,017
def verbose_process_run_results_f(f): (infiles_lists, out_filepaths) = parse_tmp_to_final_filepath_map_file(f) for (infiles_list, out_filepath) in zip(infiles_lists, out_filepaths): try: of = open(out_filepath, 'w') print ('Final result file (%s) contains temp files:' % out_filepath) except IOError: raise IOError((("Poller can't open final output file: %s" % out_filepath) + '\nLeaving individual jobs output.\n Do you have write access?')) for fp in infiles_list: print (' DCTB %s' % fp) for line in open(fp): of.write(line) of.close() return True
[ "def", "verbose_process_run_results_f", "(", "f", ")", ":", "(", "infiles_lists", ",", "out_filepaths", ")", "=", "parse_tmp_to_final_filepath_map_file", "(", "f", ")", "for", "(", "infiles_list", ",", "out_filepath", ")", "in", "zip", "(", "infiles_lists", ",", "out_filepaths", ")", ":", "try", ":", "of", "=", "open", "(", "out_filepath", ",", "'w'", ")", "print", "(", "'Final result file (%s) contains temp files:'", "%", "out_filepath", ")", "except", "IOError", ":", "raise", "IOError", "(", "(", "(", "\"Poller can't open final output file: %s\"", "%", "out_filepath", ")", "+", "'\\nLeaving individual jobs output.\\n Do you have write access?'", ")", ")", "for", "fp", "in", "infiles_list", ":", "print", "(", "' DCTB %s'", "%", "fp", ")", "for", "line", "in", "open", "(", "fp", ")", ":", "of", ".", "write", "(", "line", ")", "of", ".", "close", "(", ")", "return", "True" ]
copy each list of infiles to each outfile and delete infiles f: file containing one set of mapping instructions per line example f: f1 .
train
false
15,019
def nonisomorphic_trees(order, create='graph'): if (order < 2): raise ValueError layout = (list(range(((order // 2) + 1))) + list(range(1, ((order + 1) // 2)))) while (layout is not None): layout = _next_tree(layout) if (layout is not None): if (create == 'graph'): (yield _layout_to_graph(layout)) elif (create == 'matrix'): (yield _layout_to_matrix(layout)) layout = _next_rooted_tree(layout)
[ "def", "nonisomorphic_trees", "(", "order", ",", "create", "=", "'graph'", ")", ":", "if", "(", "order", "<", "2", ")", ":", "raise", "ValueError", "layout", "=", "(", "list", "(", "range", "(", "(", "(", "order", "//", "2", ")", "+", "1", ")", ")", ")", "+", "list", "(", "range", "(", "1", ",", "(", "(", "order", "+", "1", ")", "//", "2", ")", ")", ")", ")", "while", "(", "layout", "is", "not", "None", ")", ":", "layout", "=", "_next_tree", "(", "layout", ")", "if", "(", "layout", "is", "not", "None", ")", ":", "if", "(", "create", "==", "'graph'", ")", ":", "(", "yield", "_layout_to_graph", "(", "layout", ")", ")", "elif", "(", "create", "==", "'matrix'", ")", ":", "(", "yield", "_layout_to_matrix", "(", "layout", ")", ")", "layout", "=", "_next_rooted_tree", "(", "layout", ")" ]
returns a list of nonisomporphic trees parameters order : int order of the desired tree(s) create : graph or matrix if graph is selected a list of trees will be returned .
train
false
15,020
def paths_to_3d_segments(paths, zs=0, zdir=u'z'): if (not iterable(zs)): zs = (np.ones(len(paths)) * zs) segments = [] for (path, pathz) in zip(paths, zs): segments.append(path_to_3d_segment(path, pathz, zdir)) return segments
[ "def", "paths_to_3d_segments", "(", "paths", ",", "zs", "=", "0", ",", "zdir", "=", "u'z'", ")", ":", "if", "(", "not", "iterable", "(", "zs", ")", ")", ":", "zs", "=", "(", "np", ".", "ones", "(", "len", "(", "paths", ")", ")", "*", "zs", ")", "segments", "=", "[", "]", "for", "(", "path", ",", "pathz", ")", "in", "zip", "(", "paths", ",", "zs", ")", ":", "segments", ".", "append", "(", "path_to_3d_segment", "(", "path", ",", "pathz", ",", "zdir", ")", ")", "return", "segments" ]
convert paths from a collection object to 3d segments .
train
false
15,021
def directory_to_samples(directory, flags=None, filter_channel=False): samples = [] targets = [] label = 0 try: classes = sorted(os.walk(directory).next()[1]) except Exception: classes = sorted(os.walk(directory).__next__()[1]) for c in classes: c_dir = os.path.join(directory, c) try: walk = os.walk(c_dir).next() except Exception: walk = os.walk(c_dir).__next__() for sample in walk[2]: if ((not flags) or any(((flag in sample) for flag in flags))): if filter_channel: if (get_img_channel(os.path.join(c_dir, sample)) != 3): continue samples.append(os.path.join(c_dir, sample)) targets.append(label) label += 1 return (samples, targets)
[ "def", "directory_to_samples", "(", "directory", ",", "flags", "=", "None", ",", "filter_channel", "=", "False", ")", ":", "samples", "=", "[", "]", "targets", "=", "[", "]", "label", "=", "0", "try", ":", "classes", "=", "sorted", "(", "os", ".", "walk", "(", "directory", ")", ".", "next", "(", ")", "[", "1", "]", ")", "except", "Exception", ":", "classes", "=", "sorted", "(", "os", ".", "walk", "(", "directory", ")", ".", "__next__", "(", ")", "[", "1", "]", ")", "for", "c", "in", "classes", ":", "c_dir", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "c", ")", "try", ":", "walk", "=", "os", ".", "walk", "(", "c_dir", ")", ".", "next", "(", ")", "except", "Exception", ":", "walk", "=", "os", ".", "walk", "(", "c_dir", ")", ".", "__next__", "(", ")", "for", "sample", "in", "walk", "[", "2", "]", ":", "if", "(", "(", "not", "flags", ")", "or", "any", "(", "(", "(", "flag", "in", "sample", ")", "for", "flag", "in", "flags", ")", ")", ")", ":", "if", "filter_channel", ":", "if", "(", "get_img_channel", "(", "os", ".", "path", ".", "join", "(", "c_dir", ",", "sample", ")", ")", "!=", "3", ")", ":", "continue", "samples", ".", "append", "(", "os", ".", "path", ".", "join", "(", "c_dir", ",", "sample", ")", ")", "targets", ".", "append", "(", "label", ")", "label", "+=", "1", "return", "(", "samples", ",", "targets", ")" ]
read a directory .
train
false
15,022
def test_lex_expression_complex(): objs = tokenize('(foo 2.j)') assert (objs == [HyExpression([HySymbol('foo'), HyComplex(2j)])]) objs = tokenize('(foo -0.5j)') assert (objs == [HyExpression([HySymbol('foo'), HyComplex((-0.5j))])]) objs = tokenize('(foo 1.e7j)') assert (objs == [HyExpression([HySymbol('foo'), HyComplex(10000000j)])]) objs = tokenize('(foo j)') assert (objs == [HyExpression([HySymbol('foo'), HySymbol('j')])])
[ "def", "test_lex_expression_complex", "(", ")", ":", "objs", "=", "tokenize", "(", "'(foo 2.j)'", ")", "assert", "(", "objs", "==", "[", "HyExpression", "(", "[", "HySymbol", "(", "'foo'", ")", ",", "HyComplex", "(", "2j", ")", "]", ")", "]", ")", "objs", "=", "tokenize", "(", "'(foo -0.5j)'", ")", "assert", "(", "objs", "==", "[", "HyExpression", "(", "[", "HySymbol", "(", "'foo'", ")", ",", "HyComplex", "(", "(", "-", "0.5j", ")", ")", "]", ")", "]", ")", "objs", "=", "tokenize", "(", "'(foo 1.e7j)'", ")", "assert", "(", "objs", "==", "[", "HyExpression", "(", "[", "HySymbol", "(", "'foo'", ")", ",", "HyComplex", "(", "10000000j", ")", "]", ")", "]", ")", "objs", "=", "tokenize", "(", "'(foo j)'", ")", "assert", "(", "objs", "==", "[", "HyExpression", "(", "[", "HySymbol", "(", "'foo'", ")", ",", "HySymbol", "(", "'j'", ")", "]", ")", "]", ")" ]
make sure expressions can produce complex .
train
false
15,025
def _fraud_email(body, kind): Email.handler.add_to_queue(None, g.fraud_email, g.domain, g.fraud_email, kind, body=body)
[ "def", "_fraud_email", "(", "body", ",", "kind", ")", ":", "Email", ".", "handler", ".", "add_to_queue", "(", "None", ",", "g", ".", "fraud_email", ",", "g", ".", "domain", ",", "g", ".", "fraud_email", ",", "kind", ",", "body", "=", "body", ")" ]
for sending email to the fraud mailbox .
train
false
15,026
@testing.requires_testing_data def test_min_distance_fit_dipole(): subject = 'sample' raw = read_raw_fif(fname_raw, preload=True) picks = pick_types(raw.info, meg=False, eeg=True, exclude='bads') info = pick_info(raw.info, picks) cov = read_cov(fname_cov) cov['data'] = np.eye(cov['data'].shape[0]) simulated_scalp_map = np.zeros(picks.shape[0]) simulated_scalp_map[27:34] = 1 simulated_scalp_map = simulated_scalp_map[:, None] evoked = EvokedArray(simulated_scalp_map, info, tmin=0) min_dist = 5.0 bem = read_bem_solution(fname_bem) (dip, residual) = fit_dipole(evoked, cov, bem, fname_trans, min_dist=min_dist) dist = _compute_depth(dip, fname_bem, fname_trans, subject, subjects_dir) assert_true(((min_dist - 0.1) < (dist[0] * 1000.0) < (min_dist + 1.0))) assert_raises(ValueError, fit_dipole, evoked, cov, fname_bem, fname_trans, (-1.0))
[ "@", "testing", ".", "requires_testing_data", "def", "test_min_distance_fit_dipole", "(", ")", ":", "subject", "=", "'sample'", "raw", "=", "read_raw_fif", "(", "fname_raw", ",", "preload", "=", "True", ")", "picks", "=", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "False", ",", "eeg", "=", "True", ",", "exclude", "=", "'bads'", ")", "info", "=", "pick_info", "(", "raw", ".", "info", ",", "picks", ")", "cov", "=", "read_cov", "(", "fname_cov", ")", "cov", "[", "'data'", "]", "=", "np", ".", "eye", "(", "cov", "[", "'data'", "]", ".", "shape", "[", "0", "]", ")", "simulated_scalp_map", "=", "np", ".", "zeros", "(", "picks", ".", "shape", "[", "0", "]", ")", "simulated_scalp_map", "[", "27", ":", "34", "]", "=", "1", "simulated_scalp_map", "=", "simulated_scalp_map", "[", ":", ",", "None", "]", "evoked", "=", "EvokedArray", "(", "simulated_scalp_map", ",", "info", ",", "tmin", "=", "0", ")", "min_dist", "=", "5.0", "bem", "=", "read_bem_solution", "(", "fname_bem", ")", "(", "dip", ",", "residual", ")", "=", "fit_dipole", "(", "evoked", ",", "cov", ",", "bem", ",", "fname_trans", ",", "min_dist", "=", "min_dist", ")", "dist", "=", "_compute_depth", "(", "dip", ",", "fname_bem", ",", "fname_trans", ",", "subject", ",", "subjects_dir", ")", "assert_true", "(", "(", "(", "min_dist", "-", "0.1", ")", "<", "(", "dist", "[", "0", "]", "*", "1000.0", ")", "<", "(", "min_dist", "+", "1.0", ")", ")", ")", "assert_raises", "(", "ValueError", ",", "fit_dipole", ",", "evoked", ",", "cov", ",", "fname_bem", ",", "fname_trans", ",", "(", "-", "1.0", ")", ")" ]
test dipole min_dist to inner_skull .
train
false
15,027
def get_tenant_network(creds_provider, compute_networks_client, shared_network_name): caller = test_utils.find_test_caller() net_creds = creds_provider.get_primary_creds() network = getattr(net_creds, 'network', None) if ((not network) or (not network.get('name'))): if shared_network_name: msg = 'No valid network provided or created, defaulting to fixed_network_name' if caller: msg = ('(%s) %s' % (caller, msg)) LOG.debug(msg) try: network = get_network_from_name(shared_network_name, compute_networks_client) except exceptions.InvalidTestResource: network = {} msg = ('Found network %s available for tenant' % network) if caller: msg = ('(%s) %s' % (caller, msg)) LOG.info(msg) return network
[ "def", "get_tenant_network", "(", "creds_provider", ",", "compute_networks_client", ",", "shared_network_name", ")", ":", "caller", "=", "test_utils", ".", "find_test_caller", "(", ")", "net_creds", "=", "creds_provider", ".", "get_primary_creds", "(", ")", "network", "=", "getattr", "(", "net_creds", ",", "'network'", ",", "None", ")", "if", "(", "(", "not", "network", ")", "or", "(", "not", "network", ".", "get", "(", "'name'", ")", ")", ")", ":", "if", "shared_network_name", ":", "msg", "=", "'No valid network provided or created, defaulting to fixed_network_name'", "if", "caller", ":", "msg", "=", "(", "'(%s) %s'", "%", "(", "caller", ",", "msg", ")", ")", "LOG", ".", "debug", "(", "msg", ")", "try", ":", "network", "=", "get_network_from_name", "(", "shared_network_name", ",", "compute_networks_client", ")", "except", "exceptions", ".", "InvalidTestResource", ":", "network", "=", "{", "}", "msg", "=", "(", "'Found network %s available for tenant'", "%", "network", ")", "if", "caller", ":", "msg", "=", "(", "'(%s) %s'", "%", "(", "caller", ",", "msg", ")", ")", "LOG", ".", "info", "(", "msg", ")", "return", "network" ]
get a network usable by the primary tenant .
train
false
15,028
@core_helper def get_facet_items_dict(facet, limit=None, exclude_active=False): if ((not c.search_facets) or (not c.search_facets.get(facet)) or (not c.search_facets.get(facet).get('items'))): return [] facets = [] for facet_item in c.search_facets.get(facet)['items']: if (not len(facet_item['name'].strip())): continue if (not ((facet, facet_item['name']) in request.params.items())): facets.append(dict(active=False, **facet_item)) elif (not exclude_active): facets.append(dict(active=True, **facet_item)) facets.sort(key=(lambda it: ((- it['count']), it['display_name'].lower()))) if (c.search_facets_limits and (limit is None)): limit = c.search_facets_limits.get(facet) if ((limit is not None) and (limit > 0)): return facets[:limit] return facets
[ "@", "core_helper", "def", "get_facet_items_dict", "(", "facet", ",", "limit", "=", "None", ",", "exclude_active", "=", "False", ")", ":", "if", "(", "(", "not", "c", ".", "search_facets", ")", "or", "(", "not", "c", ".", "search_facets", ".", "get", "(", "facet", ")", ")", "or", "(", "not", "c", ".", "search_facets", ".", "get", "(", "facet", ")", ".", "get", "(", "'items'", ")", ")", ")", ":", "return", "[", "]", "facets", "=", "[", "]", "for", "facet_item", "in", "c", ".", "search_facets", ".", "get", "(", "facet", ")", "[", "'items'", "]", ":", "if", "(", "not", "len", "(", "facet_item", "[", "'name'", "]", ".", "strip", "(", ")", ")", ")", ":", "continue", "if", "(", "not", "(", "(", "facet", ",", "facet_item", "[", "'name'", "]", ")", "in", "request", ".", "params", ".", "items", "(", ")", ")", ")", ":", "facets", ".", "append", "(", "dict", "(", "active", "=", "False", ",", "**", "facet_item", ")", ")", "elif", "(", "not", "exclude_active", ")", ":", "facets", ".", "append", "(", "dict", "(", "active", "=", "True", ",", "**", "facet_item", ")", ")", "facets", ".", "sort", "(", "key", "=", "(", "lambda", "it", ":", "(", "(", "-", "it", "[", "'count'", "]", ")", ",", "it", "[", "'display_name'", "]", ".", "lower", "(", ")", ")", ")", ")", "if", "(", "c", ".", "search_facets_limits", "and", "(", "limit", "is", "None", ")", ")", ":", "limit", "=", "c", ".", "search_facets_limits", ".", "get", "(", "facet", ")", "if", "(", "(", "limit", "is", "not", "None", ")", "and", "(", "limit", ">", "0", ")", ")", ":", "return", "facets", "[", ":", "limit", "]", "return", "facets" ]
return the list of unselected facet items for the given facet .
train
false
15,029
def path_to_url(path): path = os.path.normcase(os.path.abspath(path)) if _drive_re.match(path): path = ((path[0] + '|') + path[2:]) url = urllib.quote(path) url = url.replace(os.path.sep, '/') url = url.lstrip('/') return ('file:///' + url)
[ "def", "path_to_url", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "abspath", "(", "path", ")", ")", "if", "_drive_re", ".", "match", "(", "path", ")", ":", "path", "=", "(", "(", "path", "[", "0", "]", "+", "'|'", ")", "+", "path", "[", "2", ":", "]", ")", "url", "=", "urllib", ".", "quote", "(", "path", ")", "url", "=", "url", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "'/'", ")", "url", "=", "url", ".", "lstrip", "(", "'/'", ")", "return", "(", "'file:///'", "+", "url", ")" ]
convert a system path to a url .
train
true
15,030
def _diff(state_data, resource_object): objects_differ = None for (k, v) in state_data['service'].items(): if (k == 'escalation_policy_id'): resource_value = resource_object['escalation_policy']['id'] elif (k == 'service_key'): resource_value = resource_object['service_key'] if ('@' in resource_value): resource_value = resource_value[0:resource_value.find('@')] else: resource_value = resource_object[k] if (v != resource_value): objects_differ = '{0} {1} {2}'.format(k, v, resource_value) break if objects_differ: return state_data else: return {}
[ "def", "_diff", "(", "state_data", ",", "resource_object", ")", ":", "objects_differ", "=", "None", "for", "(", "k", ",", "v", ")", "in", "state_data", "[", "'service'", "]", ".", "items", "(", ")", ":", "if", "(", "k", "==", "'escalation_policy_id'", ")", ":", "resource_value", "=", "resource_object", "[", "'escalation_policy'", "]", "[", "'id'", "]", "elif", "(", "k", "==", "'service_key'", ")", ":", "resource_value", "=", "resource_object", "[", "'service_key'", "]", "if", "(", "'@'", "in", "resource_value", ")", ":", "resource_value", "=", "resource_value", "[", "0", ":", "resource_value", ".", "find", "(", "'@'", ")", "]", "else", ":", "resource_value", "=", "resource_object", "[", "k", "]", "if", "(", "v", "!=", "resource_value", ")", ":", "objects_differ", "=", "'{0} {1} {2}'", ".", "format", "(", "k", ",", "v", ",", "resource_value", ")", "break", "if", "objects_differ", ":", "return", "state_data", "else", ":", "return", "{", "}" ]
diff of strings; returns a generator .
train
true
15,032
def save_on_signal(obj, trans): signal = models.signals.pre_save def cb(sender, instance, **kw): if (instance is obj): is_new = (trans.autoid is None) trans.save(force_insert=is_new, force_update=(not is_new)) signal.disconnect(cb) signal.connect(cb, sender=obj.__class__, weak=False)
[ "def", "save_on_signal", "(", "obj", ",", "trans", ")", ":", "signal", "=", "models", ".", "signals", ".", "pre_save", "def", "cb", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "if", "(", "instance", "is", "obj", ")", ":", "is_new", "=", "(", "trans", ".", "autoid", "is", "None", ")", "trans", ".", "save", "(", "force_insert", "=", "is_new", ",", "force_update", "=", "(", "not", "is_new", ")", ")", "signal", ".", "disconnect", "(", "cb", ")", "signal", ".", "connect", "(", "cb", ",", "sender", "=", "obj", ".", "__class__", ",", "weak", "=", "False", ")" ]
connect signals so the translation gets saved during obj .
train
false
15,033
def launch_server(app, webapp_factory, kwargs, prefix='GALAXY', config_object=None): name = prefix.lower() host_env_key = ('%s_TEST_HOST' % prefix) port_env_key = ('%s_TEST_PORT' % prefix) default_web_host = getattr(config_object, 'default_web_host', DEFAULT_WEB_HOST) host = os.environ.get(host_env_key, default_web_host) port = os.environ.get(port_env_key, None) webapp = webapp_factory(kwargs['global_conf'], app=app, use_translogger=False, static_enabled=True) (server, port) = serve_webapp(webapp, host=host, port=port) os.environ[host_env_key] = host os.environ[port_env_key] = port wait_for_http_server(host, port) log.info(('Embedded web server for %s started' % name)) return ServerWrapper(app, server, name, host, port)
[ "def", "launch_server", "(", "app", ",", "webapp_factory", ",", "kwargs", ",", "prefix", "=", "'GALAXY'", ",", "config_object", "=", "None", ")", ":", "name", "=", "prefix", ".", "lower", "(", ")", "host_env_key", "=", "(", "'%s_TEST_HOST'", "%", "prefix", ")", "port_env_key", "=", "(", "'%s_TEST_PORT'", "%", "prefix", ")", "default_web_host", "=", "getattr", "(", "config_object", ",", "'default_web_host'", ",", "DEFAULT_WEB_HOST", ")", "host", "=", "os", ".", "environ", ".", "get", "(", "host_env_key", ",", "default_web_host", ")", "port", "=", "os", ".", "environ", ".", "get", "(", "port_env_key", ",", "None", ")", "webapp", "=", "webapp_factory", "(", "kwargs", "[", "'global_conf'", "]", ",", "app", "=", "app", ",", "use_translogger", "=", "False", ",", "static_enabled", "=", "True", ")", "(", "server", ",", "port", ")", "=", "serve_webapp", "(", "webapp", ",", "host", "=", "host", ",", "port", "=", "port", ")", "os", ".", "environ", "[", "host_env_key", "]", "=", "host", "os", ".", "environ", "[", "port_env_key", "]", "=", "port", "wait_for_http_server", "(", "host", ",", "port", ")", "log", ".", "info", "(", "(", "'Embedded web server for %s started'", "%", "name", ")", ")", "return", "ServerWrapper", "(", "app", ",", "server", ",", "name", ",", "host", ",", "port", ")" ]
launch a web server for a given app using supplied factory .
train
false
15,034
def parse_unique_urlencoded(content): urlencoded_params = urllib.parse.parse_qs(content) params = {} for (key, value) in six.iteritems(urlencoded_params): if (len(value) != 1): msg = ('URL-encoded content contains a repeated value:%s -> %s' % (key, ', '.join(value))) raise ValueError(msg) params[key] = value[0] return params
[ "def", "parse_unique_urlencoded", "(", "content", ")", ":", "urlencoded_params", "=", "urllib", ".", "parse", ".", "parse_qs", "(", "content", ")", "params", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "urlencoded_params", ")", ":", "if", "(", "len", "(", "value", ")", "!=", "1", ")", ":", "msg", "=", "(", "'URL-encoded content contains a repeated value:%s -> %s'", "%", "(", "key", ",", "', '", ".", "join", "(", "value", ")", ")", ")", "raise", "ValueError", "(", "msg", ")", "params", "[", "key", "]", "=", "value", "[", "0", "]", "return", "params" ]
parses unique key-value parameters from urlencoded content .
train
true
15,036
def clean_up(): for filename in ['test_file', 'Phylip/opuntia.phy', 'Phylip/hedgehog.phy']: if os.path.isfile(filename): os.remove(filename)
[ "def", "clean_up", "(", ")", ":", "for", "filename", "in", "[", "'test_file'", ",", "'Phylip/opuntia.phy'", ",", "'Phylip/hedgehog.phy'", "]", ":", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "os", ".", "remove", "(", "filename", ")" ]
fallback clean up method to remove temp files .
train
false
15,037
def _install_handlers(cp, formatters): hlist = cp.get('handlers', 'keys') if (not len(hlist)): return {} hlist = hlist.split(',') hlist = _strip_spaces(hlist) handlers = {} fixups = [] for hand in hlist: sectname = ('handler_%s' % hand) klass = cp.get(sectname, 'class') opts = cp.options(sectname) if ('formatter' in opts): fmt = cp.get(sectname, 'formatter') else: fmt = '' try: klass = eval(klass, vars(logging)) except (AttributeError, NameError): klass = _resolve(klass) args = cp.get(sectname, 'args') args = eval(args, vars(logging)) h = klass(*args) if ('level' in opts): level = cp.get(sectname, 'level') h.setLevel(logging._levelNames[level]) if len(fmt): h.setFormatter(formatters[fmt]) if issubclass(klass, logging.handlers.MemoryHandler): if ('target' in opts): target = cp.get(sectname, 'target') else: target = '' if len(target): fixups.append((h, target)) handlers[hand] = h for (h, t) in fixups: h.setTarget(handlers[t]) return handlers
[ "def", "_install_handlers", "(", "cp", ",", "formatters", ")", ":", "hlist", "=", "cp", ".", "get", "(", "'handlers'", ",", "'keys'", ")", "if", "(", "not", "len", "(", "hlist", ")", ")", ":", "return", "{", "}", "hlist", "=", "hlist", ".", "split", "(", "','", ")", "hlist", "=", "_strip_spaces", "(", "hlist", ")", "handlers", "=", "{", "}", "fixups", "=", "[", "]", "for", "hand", "in", "hlist", ":", "sectname", "=", "(", "'handler_%s'", "%", "hand", ")", "klass", "=", "cp", ".", "get", "(", "sectname", ",", "'class'", ")", "opts", "=", "cp", ".", "options", "(", "sectname", ")", "if", "(", "'formatter'", "in", "opts", ")", ":", "fmt", "=", "cp", ".", "get", "(", "sectname", ",", "'formatter'", ")", "else", ":", "fmt", "=", "''", "try", ":", "klass", "=", "eval", "(", "klass", ",", "vars", "(", "logging", ")", ")", "except", "(", "AttributeError", ",", "NameError", ")", ":", "klass", "=", "_resolve", "(", "klass", ")", "args", "=", "cp", ".", "get", "(", "sectname", ",", "'args'", ")", "args", "=", "eval", "(", "args", ",", "vars", "(", "logging", ")", ")", "h", "=", "klass", "(", "*", "args", ")", "if", "(", "'level'", "in", "opts", ")", ":", "level", "=", "cp", ".", "get", "(", "sectname", ",", "'level'", ")", "h", ".", "setLevel", "(", "logging", ".", "_levelNames", "[", "level", "]", ")", "if", "len", "(", "fmt", ")", ":", "h", ".", "setFormatter", "(", "formatters", "[", "fmt", "]", ")", "if", "issubclass", "(", "klass", ",", "logging", ".", "handlers", ".", "MemoryHandler", ")", ":", "if", "(", "'target'", "in", "opts", ")", ":", "target", "=", "cp", ".", "get", "(", "sectname", ",", "'target'", ")", "else", ":", "target", "=", "''", "if", "len", "(", "target", ")", ":", "fixups", ".", "append", "(", "(", "h", ",", "target", ")", ")", "handlers", "[", "hand", "]", "=", "h", "for", "(", "h", ",", "t", ")", "in", "fixups", ":", "h", ".", "setTarget", "(", "handlers", "[", "t", "]", ")", "return", "handlers" ]
install and return handlers .
train
false