Dataset columns:

id_within_dataset     int64    values 1 to 55.5k
snippet               string   lengths 19 to 14.2k
tokens                list     lengths 6 to 1.63k
nl                    string   lengths 6 to 352
split_within_dataset  string   1 class
is_duplicated         bool     2 classes
52,948
def UrlCheck(url):
    uri = urlparse(url)
    if (not (uri.scheme in URI_SCHEMES)):
        raise ValueError((_('Not a valid url: %s') % url))
    else:
        return url
[ "def", "UrlCheck", "(", "url", ")", ":", "uri", "=", "urlparse", "(", "url", ")", "if", "(", "not", "(", "uri", ".", "scheme", "in", "URI_SCHEMES", ")", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "'Not a valid url: %s'", ")", "%", "url", ")", ")", "else", ":", "return", "url" ]
verify that a url parsed string has a valid uri scheme .
train
false
52,949
def exe(pid):
    return psutil.Process(pid).exe()
[ "def", "exe", "(", "pid", ")", ":", "return", "psutil", ".", "Process", "(", "pid", ")", ".", "exe", "(", ")" ]
exe -> str arguments: pid : pid of the process .
train
false
52,952
def drop_database(name):
    _run_as_pg(('dropdb %(name)s' % locals()))
[ "def", "drop_database", "(", "name", ")", ":", "_run_as_pg", "(", "(", "'dropdb %(name)s'", "%", "locals", "(", ")", ")", ")" ]
delete a postgresql database .
train
false
52,953
def undistort_unproject_pts(pts_uv, camera_matrix, dist_coefs):
    pts_uv = np.array(pts_uv)
    num_pts = (pts_uv.size / 2)
    pts_uv.shape = (int(num_pts), 1, 2)
    pts_uv = cv2.undistortPoints(pts_uv, camera_matrix, dist_coefs)
    pts_3d = cv2.convertPointsToHomogeneous(np.float32(pts_uv))
    pts_3d.shape = (int(num_pts), 3)
    return pts_3d
[ "def", "undistort_unproject_pts", "(", "pts_uv", ",", "camera_matrix", ",", "dist_coefs", ")", ":", "pts_uv", "=", "np", ".", "array", "(", "pts_uv", ")", "num_pts", "=", "(", "pts_uv", ".", "size", "/", "2", ")", "pts_uv", ".", "shape", "=", "(", "int", "(", "num_pts", ")", ",", "1", ",", "2", ")", "pts_uv", "=", "cv2", ".", "undistortPoints", "(", "pts_uv", ",", "camera_matrix", ",", "dist_coefs", ")", "pts_3d", "=", "cv2", ".", "convertPointsToHomogeneous", "(", "np", ".", "float32", "(", "pts_uv", ")", ")", "pts_3d", ".", "shape", "=", "(", "int", "(", "num_pts", ")", ",", "3", ")", "return", "pts_3d" ]
this function converts a set of 2d image coordinates to vectors in pinhole camera space .
train
false
52,955
def find_kernel(path):
    if (path is None):
        return None
    if os.path.isfile(path):
        return path
    elif os.path.isdir(path):
        return find_highest_files(path, 'vmlinuz', _re_kernel)
    elif (file_is_remote(path) and remote_file_exists(path)):
        return path
    return None
[ "def", "find_kernel", "(", "path", ")", ":", "if", "(", "path", "is", "None", ")", ":", "return", "None", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "return", "path", "elif", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "find_highest_files", "(", "path", ",", "'vmlinuz'", ",", "_re_kernel", ")", "elif", "(", "file_is_remote", "(", "path", ")", "and", "remote_file_exists", "(", "path", ")", ")", ":", "return", "path", "return", "None" ]
given a directory or a filename .
train
false
52,956
def transaction_guard(f):
    @functools.wraps(f)
    def inner(self, context, *args, **kwargs):
        if (context.session.is_active and getattr(context, 'GUARD_TRANSACTION', True)):
            raise RuntimeError((_('Method %s cannot be called within a transaction.') % f))
        return f(self, context, *args, **kwargs)
    return inner
[ "def", "transaction_guard", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "inner", "(", "self", ",", "context", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "context", ".", "session", ".", "is_active", "and", "getattr", "(", "context", ",", "'GUARD_TRANSACTION'", ",", "True", ")", ")", ":", "raise", "RuntimeError", "(", "(", "_", "(", "'Method %s cannot be called within a transaction.'", ")", "%", "f", ")", ")", "return", "f", "(", "self", ",", "context", ",", "*", "args", ",", "**", "kwargs", ")", "return", "inner" ]
ensures that the context passed in is not in a transaction .
train
false
52,957
def trim_join_unit(join_unit, length):
    if (0 not in join_unit.indexers):
        extra_indexers = join_unit.indexers
        if (join_unit.block is None):
            extra_block = None
        else:
            extra_block = join_unit.block.getitem_block(slice(length, None))
            join_unit.block = join_unit.block.getitem_block(slice(length))
    else:
        extra_block = join_unit.block
        extra_indexers = copy.copy(join_unit.indexers)
        extra_indexers[0] = extra_indexers[0][length:]
        join_unit.indexers[0] = join_unit.indexers[0][:length]
    extra_shape = (((join_unit.shape[0] - length),) + join_unit.shape[1:])
    join_unit.shape = ((length,) + join_unit.shape[1:])
    return JoinUnit(block=extra_block, indexers=extra_indexers, shape=extra_shape)
[ "def", "trim_join_unit", "(", "join_unit", ",", "length", ")", ":", "if", "(", "0", "not", "in", "join_unit", ".", "indexers", ")", ":", "extra_indexers", "=", "join_unit", ".", "indexers", "if", "(", "join_unit", ".", "block", "is", "None", ")", ":", "extra_block", "=", "None", "else", ":", "extra_block", "=", "join_unit", ".", "block", ".", "getitem_block", "(", "slice", "(", "length", ",", "None", ")", ")", "join_unit", ".", "block", "=", "join_unit", ".", "block", ".", "getitem_block", "(", "slice", "(", "length", ")", ")", "else", ":", "extra_block", "=", "join_unit", ".", "block", "extra_indexers", "=", "copy", ".", "copy", "(", "join_unit", ".", "indexers", ")", "extra_indexers", "[", "0", "]", "=", "extra_indexers", "[", "0", "]", "[", "length", ":", "]", "join_unit", ".", "indexers", "[", "0", "]", "=", "join_unit", ".", "indexers", "[", "0", "]", "[", ":", "length", "]", "extra_shape", "=", "(", "(", "(", "join_unit", ".", "shape", "[", "0", "]", "-", "length", ")", ",", ")", "+", "join_unit", ".", "shape", "[", "1", ":", "]", ")", "join_unit", ".", "shape", "=", "(", "(", "length", ",", ")", "+", "join_unit", ".", "shape", "[", "1", ":", "]", ")", "return", "JoinUnit", "(", "block", "=", "extra_block", ",", "indexers", "=", "extra_indexers", ",", "shape", "=", "extra_shape", ")" ]
reduce join_units shape along item axis to length .
train
true
52,960
def assertIsInstance(obj, cls, msg=''):
    if (not isinstance(obj, cls)):
        err_msg = '{0}Expected type {1}, found {2} instead'
        raise AssertionError(err_msg.format(msg, cls, type(obj)))
[ "def", "assertIsInstance", "(", "obj", ",", "cls", ",", "msg", "=", "''", ")", ":", "if", "(", "not", "isinstance", "(", "obj", ",", "cls", ")", ")", ":", "err_msg", "=", "'{0}Expected type {1}, found {2} instead'", "raise", "AssertionError", "(", "err_msg", ".", "format", "(", "msg", ",", "cls", ",", "type", "(", "obj", ")", ")", ")" ]
test that obj is an instance of cls (which can be a class or a tuple of classes .
train
false
52,961
def _new_request():
    if (RESERVED_MARKER in os.environ):
        return False
    os.environ[RESERVED_MARKER] = RESERVED_MARKER
    return True
[ "def", "_new_request", "(", ")", ":", "if", "(", "RESERVED_MARKER", "in", "os", ".", "environ", ")", ":", "return", "False", "os", ".", "environ", "[", "RESERVED_MARKER", "]", "=", "RESERVED_MARKER", "return", "True" ]
test if this is the first call to this function in the current request .
train
false
52,964
def getRandomNBitInteger(N, randfunc=None):
    value = getRandomInteger((N - 1), randfunc)
    value |= (2 ** (N - 1))
    assert (size(value) >= N)
    return value
[ "def", "getRandomNBitInteger", "(", "N", ",", "randfunc", "=", "None", ")", ":", "value", "=", "getRandomInteger", "(", "(", "N", "-", "1", ")", ",", "randfunc", ")", "value", "|=", "(", "2", "**", "(", "N", "-", "1", ")", ")", "assert", "(", "size", "(", "value", ")", ">=", "N", ")", "return", "value" ]
getrandominteger:long return a random number with exactly n-bits .
train
false
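The OR with 2 ** (N - 1) is what pins the result to exactly N bits. A minimal stand-in sketch using only the standard library (the snippet itself relies on PyCrypto's getRandomInteger and size helpers):

import random

def random_nbit_integer(n):
    # Stand-in sketch: draw at most n-1 random bits, then force the top
    # bit so the value is exactly n bits wide.
    value = random.getrandbits(n - 1)
    value |= 1 << (n - 1)
    assert value.bit_length() == n
    return value

print(random_nbit_integer(8))  # always in [128, 255]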
52,965
@transaction.commit_on_success
def load_data_for_offline_install(in_file):
    assert os.path.exists(in_file), 'in_file must exist.'
    with open(in_file, 'r') as fp:
        models = deserialize(fp.read())
    try:
        central_server = models.next().object
    except Exception as e:
        logging.debug(('Exception loading central server object: %s' % e))
        return
    logging.debug(('Saving object %s' % central_server))
    assert isinstance(central_server, Device)
    central_server.save(imported=True, is_trusted=True)
    invitation = None
    for model in models:
        try:
            logging.debug(('Saving object %s' % model.object))
            model.object.save(imported=True)
            if isinstance(model.object, ZoneInvitation):
                invitation = model.object
                if (invitation.used_by is None):
                    invitation.claim(used_by=Device.get_own_device())
        except ValidationError as e:
            logging.error(('Failed to import model %s' % model))
    return invitation
[ "@", "transaction", ".", "commit_on_success", "def", "load_data_for_offline_install", "(", "in_file", ")", ":", "assert", "os", ".", "path", ".", "exists", "(", "in_file", ")", ",", "'in_file must exist.'", "with", "open", "(", "in_file", ",", "'r'", ")", "as", "fp", ":", "models", "=", "deserialize", "(", "fp", ".", "read", "(", ")", ")", "try", ":", "central_server", "=", "models", ".", "next", "(", ")", ".", "object", "except", "Exception", "as", "e", ":", "logging", ".", "debug", "(", "(", "'Exception loading central server object: %s'", "%", "e", ")", ")", "return", "logging", ".", "debug", "(", "(", "'Saving object %s'", "%", "central_server", ")", ")", "assert", "isinstance", "(", "central_server", ",", "Device", ")", "central_server", ".", "save", "(", "imported", "=", "True", ",", "is_trusted", "=", "True", ")", "invitation", "=", "None", "for", "model", "in", "models", ":", "try", ":", "logging", ".", "debug", "(", "(", "'Saving object %s'", "%", "model", ".", "object", ")", ")", "model", ".", "object", ".", "save", "(", "imported", "=", "True", ")", "if", "isinstance", "(", "model", ".", "object", ",", "ZoneInvitation", ")", ":", "invitation", "=", "model", ".", "object", "if", "(", "invitation", ".", "used_by", "is", "None", ")", ":", "invitation", ".", "claim", "(", "used_by", "=", "Device", ".", "get_own_device", "(", ")", ")", "except", "ValidationError", "as", "e", ":", "logging", ".", "error", "(", "(", "'Failed to import model %s'", "%", "model", ")", ")", "return", "invitation" ]
receives a serialized file for import .
train
false
52,967
def print_vm_info(vm, depth=1, max_depth=10):
    if hasattr(vm, 'childEntity'):
        if (depth > max_depth):
            return
        vmList = vm.childEntity
        for c in vmList:
            print_vm_info(c, (depth + 1))
        return
    summary = vm.summary
    print 'Name : ', summary.config.name
    print 'Path : ', summary.config.vmPathName
    print 'Guest : ', summary.config.guestFullName
    annotation = summary.config.annotation
    if annotation:
        print 'Annotation : ', annotation
    print 'State : ', summary.runtime.powerState
    if (summary.guest is not None):
        ip = summary.guest.ipAddress
        if ip:
            print 'IP : ', ip
    if (summary.runtime.question is not None):
        print 'Question : ', summary.runtime.question.text
    print ''
[ "def", "print_vm_info", "(", "vm", ",", "depth", "=", "1", ",", "max_depth", "=", "10", ")", ":", "if", "hasattr", "(", "vm", ",", "'childEntity'", ")", ":", "if", "(", "depth", ">", "max_depth", ")", ":", "return", "vmList", "=", "vm", ".", "childEntity", "for", "c", "in", "vmList", ":", "print_vm_info", "(", "c", ",", "(", "depth", "+", "1", ")", ")", "return", "summary", "=", "vm", ".", "summary", "print", "'Name : '", ",", "summary", ".", "config", ".", "name", "print", "'Path : '", ",", "summary", ".", "config", ".", "vmPathName", "print", "'Guest : '", ",", "summary", ".", "config", ".", "guestFullName", "annotation", "=", "summary", ".", "config", ".", "annotation", "if", "annotation", ":", "print", "'Annotation : '", ",", "annotation", "print", "'State : '", ",", "summary", ".", "runtime", ".", "powerState", "if", "(", "summary", ".", "guest", "is", "not", "None", ")", ":", "ip", "=", "summary", ".", "guest", ".", "ipAddress", "if", "ip", ":", "print", "'IP : '", ",", "ip", "if", "(", "summary", ".", "runtime", ".", "question", "is", "not", "None", ")", ":", "print", "'Question : '", ",", "summary", ".", "runtime", ".", "question", ".", "text", "print", "''" ]
print information for a particular virtual machine or recurse into a folder with depth protection .
train
false
52,969
def addFunctionsToDictionary(dictionary, functions, prefix):
    for function in functions:
        dictionary[function.__name__[len(prefix):]] = function
[ "def", "addFunctionsToDictionary", "(", "dictionary", ",", "functions", ",", "prefix", ")", ":", "for", "function", "in", "functions", ":", "dictionary", "[", "function", ".", "__name__", "[", "len", "(", "prefix", ")", ":", "]", "]", "=", "function" ]
add functions to dictionary .
train
false
52,970
def split_every(n, iterable, piece_maker=tuple):
    iterator = iter(iterable)
    piece = piece_maker(islice(iterator, n))
    while piece:
        (yield piece)
        piece = piece_maker(islice(iterator, n))
[ "def", "split_every", "(", "n", ",", "iterable", ",", "piece_maker", "=", "tuple", ")", ":", "iterator", "=", "iter", "(", "iterable", ")", "piece", "=", "piece_maker", "(", "islice", "(", "iterator", ",", "n", ")", ")", "while", "piece", ":", "(", "yield", "piece", ")", "piece", "=", "piece_maker", "(", "islice", "(", "iterator", ",", "n", ")", ")" ]
splits an iterable into length-n pieces .
train
false
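A brief usage sketch for split_every above, assuming its definition and its islice dependency (from itertools import islice) are in scope:

print(list(split_every(3, range(8))))
# -> [(0, 1, 2), (3, 4, 5), (6, 7)]
print(list(split_every(2, 'abcde', piece_maker=list)))
# -> [['a', 'b'], ['c', 'd'], ['e']]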
52,972
def getprime(nbits):
    while True:
        integer = rsa.randnum.read_random_int(nbits)
        integer |= 1
        if is_prime(integer):
            return integer
[ "def", "getprime", "(", "nbits", ")", ":", "while", "True", ":", "integer", "=", "rsa", ".", "randnum", ".", "read_random_int", "(", "nbits", ")", "integer", "|=", "1", "if", "is_prime", "(", "integer", ")", ":", "return", "integer" ]
returns a prime number that can be stored in nbits bits .
train
false
52,973
def make_aware(value, timezone):
    if hasattr(timezone, 'localize'):
        return timezone.localize(value, is_dst=None)
    else:
        return value.replace(tzinfo=timezone)
[ "def", "make_aware", "(", "value", ",", "timezone", ")", ":", "if", "hasattr", "(", "timezone", ",", "'localize'", ")", ":", "return", "timezone", ".", "localize", "(", "value", ",", "is_dst", "=", "None", ")", "else", ":", "return", "value", ".", "replace", "(", "tzinfo", "=", "timezone", ")" ]
makes a naive datetime .
train
false
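A short sketch of the two branches in make_aware above, assuming its definition is in scope; using pytz here is an assumption about the DST-aware library the localize branch targets:

import datetime
import pytz  # assumption: a pytz-style zone object providing .localize()

naive = datetime.datetime(2020, 1, 1, 12, 0)
print(make_aware(naive, pytz.timezone('Europe/Paris')))  # localize branch, DST-aware
print(make_aware(naive, datetime.timezone.utc))          # fallback: replace(tzinfo=...)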
52,975
def removeElementsFromDictionary(dictionary, keys):
    for key in keys:
        removeElementFromDictionary(dictionary, key)
[ "def", "removeElementsFromDictionary", "(", "dictionary", ",", "keys", ")", ":", "for", "key", "in", "keys", ":", "removeElementFromDictionary", "(", "dictionary", ",", "key", ")" ]
remove list from the dictionary .
train
false
52,976
def all_refs(split=False, git=git):
    local_branches = []
    remote_branches = []
    tags = []
    triple = _triple
    query = (triple(u'refs/tags', tags), triple(u'refs/heads', local_branches), triple(u'refs/remotes', remote_branches))
    out = git.for_each_ref(format=u'%(refname)')[STDOUT]
    for ref in out.splitlines():
        for (prefix, prefix_len, dst) in query:
            if (ref.startswith(prefix) and (not ref.endswith(u'/HEAD'))):
                dst.append(ref[prefix_len:])
                continue
    if split:
        return (local_branches, remote_branches, tags)
    else:
        return ((local_branches + remote_branches) + tags)
[ "def", "all_refs", "(", "split", "=", "False", ",", "git", "=", "git", ")", ":", "local_branches", "=", "[", "]", "remote_branches", "=", "[", "]", "tags", "=", "[", "]", "triple", "=", "_triple", "query", "=", "(", "triple", "(", "u'refs/tags'", ",", "tags", ")", ",", "triple", "(", "u'refs/heads'", ",", "local_branches", ")", ",", "triple", "(", "u'refs/remotes'", ",", "remote_branches", ")", ")", "out", "=", "git", ".", "for_each_ref", "(", "format", "=", "u'%(refname)'", ")", "[", "STDOUT", "]", "for", "ref", "in", "out", ".", "splitlines", "(", ")", ":", "for", "(", "prefix", ",", "prefix_len", ",", "dst", ")", "in", "query", ":", "if", "(", "ref", ".", "startswith", "(", "prefix", ")", "and", "(", "not", "ref", ".", "endswith", "(", "u'/HEAD'", ")", ")", ")", ":", "dst", ".", "append", "(", "ref", "[", "prefix_len", ":", "]", ")", "continue", "if", "split", ":", "return", "(", "local_branches", ",", "remote_branches", ",", "tags", ")", "else", ":", "return", "(", "(", "local_branches", "+", "remote_branches", ")", "+", "tags", ")" ]
return a tuple of .
train
false
52,977
def templated_docstring(**docs):
    def decorator(f):
        f.__doc__ = format_docstring(f.__name__, f.__doc__, docs)
        return f
    return decorator
[ "def", "templated_docstring", "(", "**", "docs", ")", ":", "def", "decorator", "(", "f", ")", ":", "f", ".", "__doc__", "=", "format_docstring", "(", "f", ".", "__name__", ",", "f", ".", "__doc__", ",", "docs", ")", "return", "f", "return", "decorator" ]
decorator allowing the use of templated docstrings .
train
true
52,979
def trueValues(T, R, discountFactor):
    assert (discountFactor < 1)
    distr = T.copy()
    res = dot(T, R)
    for i in range(1, int((10 / (1.0 - discountFactor)))):
        distr = dot(distr, T)
        res += ((discountFactor ** i) * dot(distr, R))
    return res
[ "def", "trueValues", "(", "T", ",", "R", ",", "discountFactor", ")", ":", "assert", "(", "discountFactor", "<", "1", ")", "distr", "=", "T", ".", "copy", "(", ")", "res", "=", "dot", "(", "T", ",", "R", ")", "for", "i", "in", "range", "(", "1", ",", "int", "(", "(", "10", "/", "(", "1.0", "-", "discountFactor", ")", ")", ")", ")", ":", "distr", "=", "dot", "(", "distr", ",", "T", ")", "res", "+=", "(", "(", "discountFactor", "**", "i", ")", "*", "dot", "(", "distr", ",", "R", ")", ")", "return", "res" ]
compute the true discounted value function for each state .
train
false
52,980
def verify(user, password):
    def verify_user(user_name, user_password):
        if ((user_name == user) and (user_password == password)):
            return user_name
        return False
    return verify_user
[ "def", "verify", "(", "user", ",", "password", ")", ":", "def", "verify_user", "(", "user_name", ",", "user_password", ")", ":", "if", "(", "(", "user_name", "==", "user", ")", "and", "(", "user_password", "==", "password", ")", ")", ":", "return", "user_name", "return", "False", "return", "verify_user" ]
verifies that the signature matches the message .
train
true
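Despite its nl description, verify above builds a credential-checking closure rather than checking a message signature. A quick usage sketch, assuming the definition is in scope:

check = verify('alice', 's3cret')
print(check('alice', 's3cret'))  # 'alice'
print(check('alice', 'wrong'))   # False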
52,981
def git_version(git_path=_DEFAULT_GIT):
    try:
        output = run_git_or_fail(['--version'], git_path=git_path)
    except OSError:
        return None
    version_prefix = 'git version '
    if (not output.startswith(version_prefix)):
        return None
    parts = output[len(version_prefix):].split('.')
    nums = []
    for part in parts:
        try:
            nums.append(int(part))
        except ValueError:
            break
    while (len(nums) < _VERSION_LEN):
        nums.append(0)
    return tuple(nums[:_VERSION_LEN])
[ "def", "git_version", "(", "git_path", "=", "_DEFAULT_GIT", ")", ":", "try", ":", "output", "=", "run_git_or_fail", "(", "[", "'--version'", "]", ",", "git_path", "=", "git_path", ")", "except", "OSError", ":", "return", "None", "version_prefix", "=", "'git version '", "if", "(", "not", "output", ".", "startswith", "(", "version_prefix", ")", ")", ":", "return", "None", "parts", "=", "output", "[", "len", "(", "version_prefix", ")", ":", "]", ".", "split", "(", "'.'", ")", "nums", "=", "[", "]", "for", "part", "in", "parts", ":", "try", ":", "nums", ".", "append", "(", "int", "(", "part", ")", ")", "except", "ValueError", ":", "break", "while", "(", "len", "(", "nums", ")", "<", "_VERSION_LEN", ")", ":", "nums", ".", "append", "(", "0", ")", "return", "tuple", "(", "nums", "[", ":", "_VERSION_LEN", "]", ")" ]
attempt to determine the version of git currently installed .
train
false
52,983
def get_datasource(name, orgname=None, profile='grafana'):
    data = get_datasources(orgname=orgname, profile=profile)
    for datasource in data:
        if (datasource['name'] == name):
            return datasource
    return None
[ "def", "get_datasource", "(", "name", ",", "orgname", "=", "None", ",", "profile", "=", "'grafana'", ")", ":", "data", "=", "get_datasources", "(", "orgname", "=", "orgname", ",", "profile", "=", "profile", ")", "for", "datasource", "in", "data", ":", "if", "(", "datasource", "[", "'name'", "]", "==", "name", ")", ":", "return", "datasource", "return", "None" ]
show a single datasource in an organisation .
train
true
52,984
def generate_all_classes(modules_list=[], launcher=[], redirect_x=False, mipav_hacks=False):
    all_code = {}
    for module in modules_list:
        print((u'=' * 80))
        print(u'Generating Definition for module {0}'.format(module))
        print((u'^' * 80))
        (package, code, module) = generate_class(module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks)
        cur_package = all_code
        module_name = package.strip().split(u' ')[0].split(u'.')[(-1)]
        for package in package.strip().split(u' ')[0].split(u'.')[:(-1)]:
            if (package not in cur_package):
                cur_package[package] = {}
            cur_package = cur_package[package]
        if (module_name not in cur_package):
            cur_package[module_name] = {}
        cur_package[module_name][module] = code
    if os.path.exists(u'__init__.py'):
        os.unlink(u'__init__.py')
    crawl_code_struct(all_code, os.getcwd())
[ "def", "generate_all_classes", "(", "modules_list", "=", "[", "]", ",", "launcher", "=", "[", "]", ",", "redirect_x", "=", "False", ",", "mipav_hacks", "=", "False", ")", ":", "all_code", "=", "{", "}", "for", "module", "in", "modules_list", ":", "print", "(", "(", "u'='", "*", "80", ")", ")", "print", "(", "u'Generating Definition for module {0}'", ".", "format", "(", "module", ")", ")", "print", "(", "(", "u'^'", "*", "80", ")", ")", "(", "package", ",", "code", ",", "module", ")", "=", "generate_class", "(", "module", ",", "launcher", ",", "redirect_x", "=", "redirect_x", ",", "mipav_hacks", "=", "mipav_hacks", ")", "cur_package", "=", "all_code", "module_name", "=", "package", ".", "strip", "(", ")", ".", "split", "(", "u' '", ")", "[", "0", "]", ".", "split", "(", "u'.'", ")", "[", "(", "-", "1", ")", "]", "for", "package", "in", "package", ".", "strip", "(", ")", ".", "split", "(", "u' '", ")", "[", "0", "]", ".", "split", "(", "u'.'", ")", "[", ":", "(", "-", "1", ")", "]", ":", "if", "(", "package", "not", "in", "cur_package", ")", ":", "cur_package", "[", "package", "]", "=", "{", "}", "cur_package", "=", "cur_package", "[", "package", "]", "if", "(", "module_name", "not", "in", "cur_package", ")", ":", "cur_package", "[", "module_name", "]", "=", "{", "}", "cur_package", "[", "module_name", "]", "[", "module", "]", "=", "code", "if", "os", ".", "path", ".", "exists", "(", "u'__init__.py'", ")", ":", "os", ".", "unlink", "(", "u'__init__.py'", ")", "crawl_code_struct", "(", "all_code", ",", "os", ".", "getcwd", "(", ")", ")" ]
modules_list contains all the sem compliant tools that should have wrappers created for them .
train
false
52,986
def seq_exceeds_homopolymers(curr_seq, max_len=6):
    for base in 'ATGC':
        curr = (base * (max_len + 1))
        if (curr in curr_seq):
            return True
    return False
[ "def", "seq_exceeds_homopolymers", "(", "curr_seq", ",", "max_len", "=", "6", ")", ":", "for", "base", "in", "'ATGC'", ":", "curr", "=", "(", "base", "*", "(", "max_len", "+", "1", ")", ")", "if", "(", "curr", "in", "curr_seq", ")", ":", "return", "True", "return", "False" ]
returns false if primer contains any homopolymer > allowed length .
train
false
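A quick check of the boundary behaviour of seq_exceeds_homopolymers above, assuming the definition is in scope: a run of exactly max_len bases is allowed, one more is not.

print(seq_exceeds_homopolymers('ATGC' + 'A' * 6, max_len=6))  # False: run of 6 is allowed
print(seq_exceeds_homopolymers('ATGC' + 'A' * 7, max_len=6))  # True: run of 7 exceeds it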
52,988
def list_subscriptions(document_class, sub_id_start='', topic=None, max_results=DEFAULT_LIST_SUBSCRIPTIONS_MAX_RESULTS, expires_before=None):
    from google.appengine.ext import db
    if issubclass(document_class, db.Model):
        topic = _get_document_topic(document_class, topic)
    elif issubclass(document_class, datastore.Entity):
        if (not topic):
            raise TopicNotSpecified()
    else:
        raise DocumentTypeError()
    return prospective_search_admin.list_subscriptions(topic, max_results, None, sub_id_start, expires_before)
[ "def", "list_subscriptions", "(", "document_class", ",", "sub_id_start", "=", "''", ",", "topic", "=", "None", ",", "max_results", "=", "DEFAULT_LIST_SUBSCRIPTIONS_MAX_RESULTS", ",", "expires_before", "=", "None", ")", ":", "from", "google", ".", "appengine", ".", "ext", "import", "db", "if", "issubclass", "(", "document_class", ",", "db", ".", "Model", ")", ":", "topic", "=", "_get_document_topic", "(", "document_class", ",", "topic", ")", "elif", "issubclass", "(", "document_class", ",", "datastore", ".", "Entity", ")", ":", "if", "(", "not", "topic", ")", ":", "raise", "TopicNotSpecified", "(", ")", "else", ":", "raise", "DocumentTypeError", "(", ")", "return", "prospective_search_admin", ".", "list_subscriptions", "(", "topic", ",", "max_results", ",", "None", ",", "sub_id_start", ",", "expires_before", ")" ]
list subscriptions on a topic .
train
false
52,989
def _get_service_exec():
    contextkey = 'systemd._get_service_exec'
    if (contextkey not in __context__):
        executables = ('update-rc.d', 'chkconfig')
        for executable in executables:
            service_exec = salt.utils.which(executable)
            if (service_exec is not None):
                break
        else:
            raise CommandExecutionError('Unable to find sysv service manager (tried {0})'.format(', '.join(executables)))
        __context__[contextkey] = service_exec
    return __context__[contextkey]
[ "def", "_get_service_exec", "(", ")", ":", "contextkey", "=", "'systemd._get_service_exec'", "if", "(", "contextkey", "not", "in", "__context__", ")", ":", "executables", "=", "(", "'update-rc.d'", ",", "'chkconfig'", ")", "for", "executable", "in", "executables", ":", "service_exec", "=", "salt", ".", "utils", ".", "which", "(", "executable", ")", "if", "(", "service_exec", "is", "not", "None", ")", ":", "break", "else", ":", "raise", "CommandExecutionError", "(", "'Unable to find sysv service manager (tried {0})'", ".", "format", "(", "', '", ".", "join", "(", "executables", ")", ")", ")", "__context__", "[", "contextkey", "]", "=", "service_exec", "return", "__context__", "[", "contextkey", "]" ]
returns the path to the sysv service manager .
train
true
52,991
def _StripSeparators(value):
    return re.sub(' [ ]*', ' ', re.sub(_WORD_SEPARATOR_RE, ' ', value))
[ "def", "_StripSeparators", "(", "value", ")", ":", "return", "re", ".", "sub", "(", "' [ ]*'", ",", "' '", ",", "re", ".", "sub", "(", "_WORD_SEPARATOR_RE", ",", "' '", ",", "value", ")", ")" ]
remove special characters and collapse spaces .
train
false
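_WORD_SEPARATOR_RE is not shown in the snippet above; with a hypothetical pattern standing in for it, the two nested re.sub calls behave like this:

import re

# Hypothetical separator pattern; the real _WORD_SEPARATOR_RE is not shown.
_WORD_SEPARATOR_RE = re.compile(r'[-_/]')

def strip_separators(value):
    # Replace separators with spaces, then collapse runs of spaces to one.
    return re.sub(' [ ]*', ' ', re.sub(_WORD_SEPARATOR_RE, ' ', value))

print(strip_separators('foo_bar--baz'))  # 'foo bar baz'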
52,992
def py_test(name, srcs=[], deps=[], main=None, base=None, testdata=[], **kwargs):
    target = PythonTest(name, srcs, deps, main, base, testdata, kwargs)
    blade.blade.register_target(target)
[ "def", "py_test", "(", "name", ",", "srcs", "=", "[", "]", ",", "deps", "=", "[", "]", ",", "main", "=", "None", ",", "base", "=", "None", ",", "testdata", "=", "[", "]", ",", "**", "kwargs", ")", ":", "target", "=", "PythonTest", "(", "name", ",", "srcs", ",", "deps", ",", "main", ",", "base", ",", "testdata", ",", "kwargs", ")", "blade", ".", "blade", ".", "register_target", "(", "target", ")" ]
python test .
train
false
52,994
def permitted(func):
    @functools.wraps(func)
    def wrapper(request, *args, **kwargs):
        '\n Wrapper for the view that only calls the view if the user is authorized.\n '
        def fetch_content():
            '\n Extract the forum object from the keyword arguments to the view.\n '
            if ('thread_id' in kwargs):
                content = cc.Thread.find(kwargs['thread_id']).to_dict()
            elif ('comment_id' in kwargs):
                content = cc.Comment.find(kwargs['comment_id']).to_dict()
            elif ('commentable_id' in kwargs):
                content = cc.Commentable.find(kwargs['commentable_id']).to_dict()
            else:
                content = None
            return content
        course_key = CourseKey.from_string(kwargs['course_id'])
        if check_permissions_by_view(request.user, course_key, fetch_content(), request.view_name):
            return func(request, *args, **kwargs)
        else:
            return JsonError('unauthorized', status=401)
    return wrapper
[ "def", "permitted", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "def", "fetch_content", "(", ")", ":", "if", "(", "'thread_id'", "in", "kwargs", ")", ":", "content", "=", "cc", ".", "Thread", ".", "find", "(", "kwargs", "[", "'thread_id'", "]", ")", ".", "to_dict", "(", ")", "elif", "(", "'comment_id'", "in", "kwargs", ")", ":", "content", "=", "cc", ".", "Comment", ".", "find", "(", "kwargs", "[", "'comment_id'", "]", ")", ".", "to_dict", "(", ")", "elif", "(", "'commentable_id'", "in", "kwargs", ")", ":", "content", "=", "cc", ".", "Commentable", ".", "find", "(", "kwargs", "[", "'commentable_id'", "]", ")", ".", "to_dict", "(", ")", "else", ":", "content", "=", "None", "return", "content", "course_key", "=", "CourseKey", ".", "from_string", "(", "kwargs", "[", "'course_id'", "]", ")", "if", "check_permissions_by_view", "(", "request", ".", "user", ",", "course_key", ",", "fetch_content", "(", ")", ",", "request", ".", "view_name", ")", ":", "return", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "JsonError", "(", "'unauthorized'", ",", "status", "=", "401", ")", "return", "wrapper" ]
view decorator to verify the user is authorized to access this endpoint .
train
false
52,995
def count_discussions_handler(sender, **kwargs):
    if (kwargs.get('instance') and kwargs.get('created')):
        return
    comment = ((('comment' in kwargs) and kwargs['comment']) or kwargs['instance'])
    entry = comment.content_object
    if isinstance(entry, Entry):
        entry.comment_count = entry.comments.count()
        entry.pingback_count = entry.pingbacks.count()
        entry.trackback_count = entry.trackbacks.count()
        entry.save(update_fields=['comment_count', 'pingback_count', 'trackback_count'])
[ "def", "count_discussions_handler", "(", "sender", ",", "**", "kwargs", ")", ":", "if", "(", "kwargs", ".", "get", "(", "'instance'", ")", "and", "kwargs", ".", "get", "(", "'created'", ")", ")", ":", "return", "comment", "=", "(", "(", "(", "'comment'", "in", "kwargs", ")", "and", "kwargs", "[", "'comment'", "]", ")", "or", "kwargs", "[", "'instance'", "]", ")", "entry", "=", "comment", ".", "content_object", "if", "isinstance", "(", "entry", ",", "Entry", ")", ":", "entry", ".", "comment_count", "=", "entry", ".", "comments", ".", "count", "(", ")", "entry", ".", "pingback_count", "=", "entry", ".", "pingbacks", ".", "count", "(", ")", "entry", ".", "trackback_count", "=", "entry", ".", "trackbacks", ".", "count", "(", ")", "entry", ".", "save", "(", "update_fields", "=", "[", "'comment_count'", ",", "'pingback_count'", ",", "'trackback_count'", "]", ")" ]
update the count of each type of discussion on an entry .
train
true
52,996
def list_grants(key_id, limit=None, marker=None, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if key_id.startswith('alias/'):
        key_id = _get_key_id(key_id)
    r = {}
    try:
        _grants = []
        next_marker = None
        while True:
            grants = conn.list_grants(key_id, limit=limit, marker=next_marker)
            for grant in grants['Grants']:
                _grants.append(grant)
            if ('NextMarker' in grants):
                next_marker = grants['NextMarker']
            else:
                break
        r['grants'] = _grants
    except boto.exception.BotoServerError as e:
        r['error'] = __utils__['boto.get_error'](e)
    return r
[ "def", "list_grants", "(", "key_id", ",", "limit", "=", "None", ",", "marker", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "key_id", ".", "startswith", "(", "'alias/'", ")", ":", "key_id", "=", "_get_key_id", "(", "key_id", ")", "r", "=", "{", "}", "try", ":", "_grants", "=", "[", "]", "next_marker", "=", "None", "while", "True", ":", "grants", "=", "conn", ".", "list_grants", "(", "key_id", ",", "limit", "=", "limit", ",", "marker", "=", "next_marker", ")", "for", "grant", "in", "grants", "[", "'Grants'", "]", ":", "_grants", ".", "append", "(", "grant", ")", "if", "(", "'NextMarker'", "in", "grants", ")", ":", "next_marker", "=", "grants", "[", "'NextMarker'", "]", "else", ":", "break", "r", "[", "'grants'", "]", "=", "_grants", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "r", "[", "'error'", "]", "=", "__utils__", "[", "'boto.get_error'", "]", "(", "e", ")", "return", "r" ]
list grants for the specified key .
train
true
52,997
def Event():
    from multiprocessing.synchronize import Event
    return Event()
[ "def", "Event", "(", ")", ":", "from", "multiprocessing", ".", "synchronize", "import", "Event", "return", "Event", "(", ")" ]
returns an event object .
train
false
52,998
def parse_flags_subpattern(source, info):
    (flags_on, flags_off) = parse_flags(source, info)
    if (flags_off & GLOBAL_FLAGS):
        raise error('bad inline flags: cannot turn off global flag', source.string, source.pos)
    if (flags_on & flags_off):
        raise error('bad inline flags: flag turned on and off', source.string, source.pos)
    new_global_flags = ((flags_on & (~ info.global_flags)) & GLOBAL_FLAGS)
    if new_global_flags:
        info.global_flags |= new_global_flags
        raise _UnscopedFlagSet(info.global_flags)
    flags_on &= (~ GLOBAL_FLAGS)
    if source.match(':'):
        return parse_subpattern(source, info, flags_on, flags_off)
    if source.match(')'):
        parse_positional_flags(source, info, flags_on, flags_off)
        return FLAGS
    raise error('unknown extension', source.string, source.pos)
[ "def", "parse_flags_subpattern", "(", "source", ",", "info", ")", ":", "(", "flags_on", ",", "flags_off", ")", "=", "parse_flags", "(", "source", ",", "info", ")", "if", "(", "flags_off", "&", "GLOBAL_FLAGS", ")", ":", "raise", "error", "(", "'bad inline flags: cannot turn off global flag'", ",", "source", ".", "string", ",", "source", ".", "pos", ")", "if", "(", "flags_on", "&", "flags_off", ")", ":", "raise", "error", "(", "'bad inline flags: flag turned on and off'", ",", "source", ".", "string", ",", "source", ".", "pos", ")", "new_global_flags", "=", "(", "(", "flags_on", "&", "(", "~", "info", ".", "global_flags", ")", ")", "&", "GLOBAL_FLAGS", ")", "if", "new_global_flags", ":", "info", ".", "global_flags", "|=", "new_global_flags", "raise", "_UnscopedFlagSet", "(", "info", ".", "global_flags", ")", "flags_on", "&=", "(", "~", "GLOBAL_FLAGS", ")", "if", "source", ".", "match", "(", "':'", ")", ":", "return", "parse_subpattern", "(", "source", ",", "info", ",", "flags_on", ",", "flags_off", ")", "if", "source", ".", "match", "(", "')'", ")", ":", "parse_positional_flags", "(", "source", ",", "info", ",", "flags_on", ",", "flags_off", ")", "return", "FLAGS", "raise", "error", "(", "'unknown extension'", ",", "source", ".", "string", ",", "source", ".", "pos", ")" ]
parses a flags subpattern .
train
false
52,999
def all_dependencies(target, dep_context):
    for dep in target.closure(bfs=True, **dep_context.target_closure_kwargs):
        (yield dep)
[ "def", "all_dependencies", "(", "target", ",", "dep_context", ")", ":", "for", "dep", "in", "target", ".", "closure", "(", "bfs", "=", "True", ",", "**", "dep_context", ".", "target_closure_kwargs", ")", ":", "(", "yield", "dep", ")" ]
all transitive dependencies of the contexts target .
train
false
53,000
def path_to_3d_segment(path, zs=0, zdir=u'z'):
    if (not iterable(zs)):
        zs = (np.ones(len(path)) * zs)
    seg = []
    pathsegs = path.iter_segments(simplify=False, curves=False)
    for (((x, y), code), z) in zip(pathsegs, zs):
        seg.append((x, y, z))
    seg3d = [juggle_axes(x, y, z, zdir) for (x, y, z) in seg]
    return seg3d
[ "def", "path_to_3d_segment", "(", "path", ",", "zs", "=", "0", ",", "zdir", "=", "u'z'", ")", ":", "if", "(", "not", "iterable", "(", "zs", ")", ")", ":", "zs", "=", "(", "np", ".", "ones", "(", "len", "(", "path", ")", ")", "*", "zs", ")", "seg", "=", "[", "]", "pathsegs", "=", "path", ".", "iter_segments", "(", "simplify", "=", "False", ",", "curves", "=", "False", ")", "for", "(", "(", "(", "x", ",", "y", ")", ",", "code", ")", ",", "z", ")", "in", "zip", "(", "pathsegs", ",", "zs", ")", ":", "seg", ".", "append", "(", "(", "x", ",", "y", ",", "z", ")", ")", "seg3d", "=", "[", "juggle_axes", "(", "x", ",", "y", ",", "z", ",", "zdir", ")", "for", "(", "x", ",", "y", ",", "z", ")", "in", "seg", "]", "return", "seg3d" ]
convert a path to a 3d segment .
train
false
53,001
def save_managed_config(directory, base_config, cluster):
    managed_config_file = directory.child('managed.yaml')
    managed_config = create_managed_config(base_config, cluster)
    managed_config_file.setContent(yaml.safe_dump(managed_config, default_flow_style=False))
[ "def", "save_managed_config", "(", "directory", ",", "base_config", ",", "cluster", ")", ":", "managed_config_file", "=", "directory", ".", "child", "(", "'managed.yaml'", ")", "managed_config", "=", "create_managed_config", "(", "base_config", ",", "cluster", ")", "managed_config_file", ".", "setContent", "(", "yaml", ".", "safe_dump", "(", "managed_config", ",", "default_flow_style", "=", "False", ")", ")" ]
create and save a configuration file for the given cluster .
train
false
53,002
def modified_pelican_run(self):
    context = self.settings.copy()
    context['filenames'] = {}
    context['localsiteurl'] = self.settings['SITEURL']
    generators = [cls(context=context, settings=self.settings, path=self.path, theme=self.theme, output_path=self.output_path) for cls in self.get_generator_classes()]
    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()
    writer = self.get_writer()
    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)
    next((g for g in generators if isinstance(g, ArticlesGenerator)))
    next((g for g in generators if isinstance(g, PagesGenerator)))
    return context
[ "def", "modified_pelican_run", "(", "self", ")", ":", "context", "=", "self", ".", "settings", ".", "copy", "(", ")", "context", "[", "'filenames'", "]", "=", "{", "}", "context", "[", "'localsiteurl'", "]", "=", "self", ".", "settings", "[", "'SITEURL'", "]", "generators", "=", "[", "cls", "(", "context", "=", "context", ",", "settings", "=", "self", ".", "settings", ",", "path", "=", "self", ".", "path", ",", "theme", "=", "self", ".", "theme", ",", "output_path", "=", "self", ".", "output_path", ")", "for", "cls", "in", "self", ".", "get_generator_classes", "(", ")", "]", "for", "p", "in", "generators", ":", "if", "hasattr", "(", "p", ",", "'generate_context'", ")", ":", "p", ".", "generate_context", "(", ")", "writer", "=", "self", ".", "get_writer", "(", ")", "for", "p", "in", "generators", ":", "if", "hasattr", "(", "p", ",", "'generate_output'", ")", ":", "p", ".", "generate_output", "(", "writer", ")", "next", "(", "(", "g", "for", "g", "in", "generators", "if", "isinstance", "(", "g", ",", "ArticlesGenerator", ")", ")", ")", "next", "(", "(", "g", "for", "g", "in", "generators", "if", "isinstance", "(", "g", ",", "PagesGenerator", ")", ")", ")", "return", "context" ]
runs the generators and returns the context object modified from the pelican objects run methods .
train
false
53,008
def configure_environment():
    if ('HTTP_ACCEPT_LANGUAGE' not in os.environ):
        os.environ['HTTP_ACCEPT_LANGUAGE'] = DEFAULT_LOCALES
    if ('TOOL_SHED_TEST_FILE_DIR' not in os.environ):
        os.environ['TOOL_SHED_TEST_FILE_DIR'] = TOOL_SHED_TEST_DATA
    os.environ['GALAXY_TEST_ENVIRONMENT_CONFIGURED'] = '1'
[ "def", "configure_environment", "(", ")", ":", "if", "(", "'HTTP_ACCEPT_LANGUAGE'", "not", "in", "os", ".", "environ", ")", ":", "os", ".", "environ", "[", "'HTTP_ACCEPT_LANGUAGE'", "]", "=", "DEFAULT_LOCALES", "if", "(", "'TOOL_SHED_TEST_FILE_DIR'", "not", "in", "os", ".", "environ", ")", ":", "os", ".", "environ", "[", "'TOOL_SHED_TEST_FILE_DIR'", "]", "=", "TOOL_SHED_TEST_DATA", "os", ".", "environ", "[", "'GALAXY_TEST_ENVIRONMENT_CONFIGURED'", "]", "=", "'1'" ]
hack up environment for test cases .
train
false
53,009
@conf.commands.register
def fletcher16_checkbytes(binbuf, offset):
    if (len(binbuf) < offset):
        raise Exception(('Packet too short for checkbytes %d' % len(binbuf)))
    binbuf = ((binbuf[:offset] + '\x00\x00') + binbuf[(offset + 2):])
    (c0, c1) = _fletcher16(binbuf)
    x = (((((len(binbuf) - offset) - 1) * c0) - c1) % 255)
    if (x <= 0):
        x += 255
    y = ((510 - c0) - x)
    if (y > 255):
        y -= 255
    return (chr(x) + chr(y))
[ "@", "conf", ".", "commands", ".", "register", "def", "fletcher16_checkbytes", "(", "binbuf", ",", "offset", ")", ":", "if", "(", "len", "(", "binbuf", ")", "<", "offset", ")", ":", "raise", "Exception", "(", "(", "'Packet too short for checkbytes %d'", "%", "len", "(", "binbuf", ")", ")", ")", "binbuf", "=", "(", "(", "binbuf", "[", ":", "offset", "]", "+", "'\\x00\\x00'", ")", "+", "binbuf", "[", "(", "offset", "+", "2", ")", ":", "]", ")", "(", "c0", ",", "c1", ")", "=", "_fletcher16", "(", "binbuf", ")", "x", "=", "(", "(", "(", "(", "(", "len", "(", "binbuf", ")", "-", "offset", ")", "-", "1", ")", "*", "c0", ")", "-", "c1", ")", "%", "255", ")", "if", "(", "x", "<=", "0", ")", ":", "x", "+=", "255", "y", "=", "(", "(", "510", "-", "c0", ")", "-", "x", ")", "if", "(", "y", ">", "255", ")", ":", "y", "-=", "255", "return", "(", "chr", "(", "x", ")", "+", "chr", "(", "y", ")", ")" ]
calculates the fletcher-16 checkbytes returned as 2 byte binary-string .
train
false
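The snippet above leans on a _fletcher16 helper that is not shown. The classic running-sum form of Fletcher-16 would look like the following sketch (an assumption about the helper, written for Python 2 byte strings to match the snippet's use of chr and '\x00'):

def _fletcher16(binbuf):
    # Assumed helper: standard Fletcher-16 running sums over byte values.
    # The checkbytes computed from (c0, c1) are chosen so that a buffer
    # containing them yields an overall checksum of zero.
    c0 = c1 = 0
    for ch in binbuf:
        c0 = (c0 + ord(ch)) % 255
        c1 = (c1 + c0) % 255
    return (c0, c1)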
53,010
def _specialized_from_args(signatures, args, kwargs):
    raise Exception('yet to be implemented')
[ "def", "_specialized_from_args", "(", "signatures", ",", "args", ",", "kwargs", ")", ":", "raise", "Exception", "(", "'yet to be implemented'", ")" ]
perhaps this should be implemented in a treefragment in cython code .
train
false
53,011
def parse_socket_string(socket_string, default_port):
    port = default_port
    if socket_string.startswith('['):
        match = IPV6_RE.match(socket_string)
        if (not match):
            raise ValueError(('Invalid IPv6 address: %s' % socket_string))
        host = match.group('address')
        port = (match.group('port') or port)
    elif (':' in socket_string):
        tokens = socket_string.split(':')
        if (len(tokens) > 2):
            raise ValueError("IPv6 addresses must be between '[]'")
        (host, port) = tokens
    else:
        host = socket_string
    return (host, port)
[ "def", "parse_socket_string", "(", "socket_string", ",", "default_port", ")", ":", "port", "=", "default_port", "if", "socket_string", ".", "startswith", "(", "'['", ")", ":", "match", "=", "IPV6_RE", ".", "match", "(", "socket_string", ")", "if", "(", "not", "match", ")", ":", "raise", "ValueError", "(", "(", "'Invalid IPv6 address: %s'", "%", "socket_string", ")", ")", "host", "=", "match", ".", "group", "(", "'address'", ")", "port", "=", "(", "match", ".", "group", "(", "'port'", ")", "or", "port", ")", "elif", "(", "':'", "in", "socket_string", ")", ":", "tokens", "=", "socket_string", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "tokens", ")", ">", "2", ")", ":", "raise", "ValueError", "(", "\"IPv6 addresses must be between '[]'\"", ")", "(", "host", ",", "port", ")", "=", "tokens", "else", ":", "host", "=", "socket_string", "return", "(", "host", ",", "port", ")" ]
given a string representing a socket .
train
false
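Expected behaviour of parse_socket_string above, assuming its definition and an IPV6_RE that matches '[address]' and '[address]:port' forms are in scope; note a parsed port comes back as a string, while the default passes through unchanged:

print(parse_socket_string('example.com:8080', 80))  # ('example.com', '8080')
print(parse_socket_string('example.com', 80))       # ('example.com', 80)
print(parse_socket_string('[::1]:8080', 80))        # ('::1', '8080')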
53,013
def _runlevel():
    if ('upstart._runlevel' in __context__):
        return __context__['upstart._runlevel']
    out = __salt__['cmd.run'](['runlevel', '{0}'.format(_find_utmp())], python_shell=False)
    try:
        ret = out.split()[1]
    except IndexError:
        ret = _default_runlevel()
    __context__['upstart._runlevel'] = ret
    return ret
[ "def", "_runlevel", "(", ")", ":", "if", "(", "'upstart._runlevel'", "in", "__context__", ")", ":", "return", "__context__", "[", "'upstart._runlevel'", "]", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "[", "'runlevel'", ",", "'{0}'", ".", "format", "(", "_find_utmp", "(", ")", ")", "]", ",", "python_shell", "=", "False", ")", "try", ":", "ret", "=", "out", ".", "split", "(", ")", "[", "1", "]", "except", "IndexError", ":", "ret", "=", "_default_runlevel", "(", ")", "__context__", "[", "'upstart._runlevel'", "]", "=", "ret", "return", "ret" ]
return the current runlevel .
train
true
53,014
def _relative_object_uri(resource_name, object_uri):
    obj_parts = object_uri.split('/')
    for length in range((len(obj_parts) + 1)):
        parent_uri = '/'.join(obj_parts[:length])
        (parent_resource_name, _) = _resource_endpoint(parent_uri)
        if (resource_name == parent_resource_name):
            return parent_uri
    error_msg = 'Cannot get URL of resource %r from parent %r.'
    raise ValueError((error_msg % (resource_name, object_uri)))
[ "def", "_relative_object_uri", "(", "resource_name", ",", "object_uri", ")", ":", "obj_parts", "=", "object_uri", ".", "split", "(", "'/'", ")", "for", "length", "in", "range", "(", "(", "len", "(", "obj_parts", ")", "+", "1", ")", ")", ":", "parent_uri", "=", "'/'", ".", "join", "(", "obj_parts", "[", ":", "length", "]", ")", "(", "parent_resource_name", ",", "_", ")", "=", "_resource_endpoint", "(", "parent_uri", ")", "if", "(", "resource_name", "==", "parent_resource_name", ")", ":", "return", "parent_uri", "error_msg", "=", "'Cannot get URL of resource %r from parent %r.'", "raise", "ValueError", "(", "(", "error_msg", "%", "(", "resource_name", ",", "object_uri", ")", ")", ")" ]
returns object_uri .
train
false
53,015
def kdd_apk(actual, predicted, k=10):
    if (len(predicted) > k):
        predicted = predicted[:k]
    score = 0.0
    num_hits = 0.0
    for (i, p) in enumerate(predicted):
        if ((p in actual) and (p not in predicted[:i])):
            num_hits += 1.0
            score += (num_hits / (i + 1.0))
    if (not actual):
        return 0.0
    return (score / len(actual))
[ "def", "kdd_apk", "(", "actual", ",", "predicted", ",", "k", "=", "10", ")", ":", "if", "(", "len", "(", "predicted", ")", ">", "k", ")", ":", "predicted", "=", "predicted", "[", ":", "k", "]", "score", "=", "0.0", "num_hits", "=", "0.0", "for", "(", "i", ",", "p", ")", "in", "enumerate", "(", "predicted", ")", ":", "if", "(", "(", "p", "in", "actual", ")", "and", "(", "p", "not", "in", "predicted", "[", ":", "i", "]", ")", ")", ":", "num_hits", "+=", "1.0", "score", "+=", "(", "num_hits", "/", "(", "i", "+", "1.0", ")", ")", "if", "(", "not", "actual", ")", ":", "return", "0.0", "return", "(", "score", "/", "len", "(", "actual", ")", ")" ]
computes the average precision at k for track 1 of the 2012 kdd cup .
train
false
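A worked example of kdd_apk above, assuming its definition is in scope: hits at ranks 1 and 3 contribute precisions 1/1 and 2/3, averaged over the two relevant items.

actual = ['a', 'b']
predicted = ['a', 'x', 'b', 'y']
# (1/1 + 2/3) / 2 = 0.8333...
print(kdd_apk(actual, predicted, k=10))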
53,016
def lookupText(name, timeout=None):
    return getResolver().lookupText(name, timeout)
[ "def", "lookupText", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupText", "(", "name", ",", "timeout", ")" ]
perform a txt record lookup .
train
false
53,017
def parametrize_ternary_quadratic(eq):
    (var, coeff, diop_type) = classify_diop(eq, _dict=False)
    if (diop_type in ('homogeneous_ternary_quadratic', 'homogeneous_ternary_quadratic_normal')):
        (x_0, y_0, z_0) = _diop_ternary_quadratic(var, coeff)
        return _parametrize_ternary_quadratic((x_0, y_0, z_0), var, coeff)
[ "def", "parametrize_ternary_quadratic", "(", "eq", ")", ":", "(", "var", ",", "coeff", ",", "diop_type", ")", "=", "classify_diop", "(", "eq", ",", "_dict", "=", "False", ")", "if", "(", "diop_type", "in", "(", "'homogeneous_ternary_quadratic'", ",", "'homogeneous_ternary_quadratic_normal'", ")", ")", ":", "(", "x_0", ",", "y_0", ",", "z_0", ")", "=", "_diop_ternary_quadratic", "(", "var", ",", "coeff", ")", "return", "_parametrize_ternary_quadratic", "(", "(", "x_0", ",", "y_0", ",", "z_0", ")", ",", "var", ",", "coeff", ")" ]
returns the parametrized general solution for the ternary quadratic equation eq which has the form ax^2 + by^2 + cz^2 + fxy + gyz + hxz = 0 .
train
false
53,018
def request_deferred(request):
    d = Deferred()
    if request.callback:
        d.addCallbacks(request.callback, request.errback)
    (request.callback, request.errback) = (d.callback, d.errback)
    return d
[ "def", "request_deferred", "(", "request", ")", ":", "d", "=", "Deferred", "(", ")", "if", "request", ".", "callback", ":", "d", ".", "addCallbacks", "(", "request", ".", "callback", ",", "request", ".", "errback", ")", "(", "request", ".", "callback", ",", "request", ".", "errback", ")", "=", "(", "d", ".", "callback", ",", "d", ".", "errback", ")", "return", "d" ]
wrap a request inside a deferred .
train
false
53,020
def import_document(document_type):
    if (document_type in ('map', 'image')):
        image = True
        doc_table = s3db.doc_image
    else:
        image = False
        doc_table = s3db.doc_document
    post_vars = request.post_vars
    file = post_vars.file
    real_filename = file.filename
    new_filename = doc_table.file.store(file, real_filename)
    date = request.utcnow
    location_id = post_vars.location
    vdoc_table = s3db.vulnerability_document
    id = vdoc_table.insert(document_type=document_type, date=date, location_id=location_id)
    record = dict(id=id)
    s3db.update_super(vdoc_table, record)
    doc_table.insert(doc_id=record['doc_id'], file=new_filename, name=real_filename, date=date, comments=post_vars.desc, location_id=location_id)
    if image:
        s3db.pr_image_modify(file.file, new_filename, real_filename, size=(250, 250))
[ "def", "import_document", "(", "document_type", ")", ":", "if", "(", "document_type", "in", "(", "'map'", ",", "'image'", ")", ")", ":", "image", "=", "True", "doc_table", "=", "s3db", ".", "doc_image", "else", ":", "image", "=", "False", "doc_table", "=", "s3db", ".", "doc_document", "post_vars", "=", "request", ".", "post_vars", "file", "=", "post_vars", ".", "file", "real_filename", "=", "file", ".", "filename", "new_filename", "=", "doc_table", ".", "file", ".", "store", "(", "file", ",", "real_filename", ")", "date", "=", "request", ".", "utcnow", "location_id", "=", "post_vars", ".", "location", "vdoc_table", "=", "s3db", ".", "vulnerability_document", "id", "=", "vdoc_table", ".", "insert", "(", "document_type", "=", "document_type", ",", "date", "=", "date", ",", "location_id", "=", "location_id", ")", "record", "=", "dict", "(", "id", "=", "id", ")", "s3db", ".", "update_super", "(", "vdoc_table", ",", "record", ")", "doc_table", ".", "insert", "(", "doc_id", "=", "record", "[", "'doc_id'", "]", ",", "file", "=", "new_filename", ",", "name", "=", "real_filename", ",", "date", "=", "date", ",", "comments", "=", "post_vars", ".", "desc", ",", "location_id", "=", "location_id", ")", "if", "image", ":", "s3db", ".", "pr_image_modify", "(", "file", ".", "file", ",", "new_filename", ",", "real_filename", ",", "size", "=", "(", "250", ",", "250", ")", ")" ]
controller to store a document .
train
false
53,022
def check_meta_data(facility):
    check_fields = ['user_count', 'latitude', 'longitude', 'address', 'contact_name', 'contact_phone', 'contact_email']
    return any([((getattr(facility, field, None) is None) or (getattr(facility, field) == '')) for field in check_fields])
[ "def", "check_meta_data", "(", "facility", ")", ":", "check_fields", "=", "[", "'user_count'", ",", "'latitude'", ",", "'longitude'", ",", "'address'", ",", "'contact_name'", ",", "'contact_phone'", ",", "'contact_email'", "]", "return", "any", "(", "[", "(", "(", "getattr", "(", "facility", ",", "field", ",", "None", ")", "is", "None", ")", "or", "(", "getattr", "(", "facility", ",", "field", ")", "==", "''", ")", ")", "for", "field", "in", "check_fields", "]", ")" ]
checks whether any metadata is missing for the specified facility .
train
false
53,024
def physicalInformation():
    a = TpPd(pd=6)
    b = MessageType(mesType=45)
    c = TimingAdvance()
    packet = ((a / b) / c)
    return packet
[ "def", "physicalInformation", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "6", ")", "b", "=", "MessageType", "(", "mesType", "=", "45", ")", "c", "=", "TimingAdvance", "(", ")", "packet", "=", "(", "(", "a", "/", "b", ")", "/", "c", ")", "return", "packet" ]
physical information section 9 .
train
true
53,025
@get('/option/<taskid>/list')
def option_list(taskid):
    if (taskid not in DataStore.tasks):
        logger.warning(('[%s] Invalid task ID provided to option_list()' % taskid))
        return jsonize({'success': False, 'message': 'Invalid task ID'})
    logger.debug(('[%s] Listed task options' % taskid))
    return jsonize({'success': True, 'options': DataStore.tasks[taskid].get_options()})
[ "@", "get", "(", "'/option/<taskid>/list'", ")", "def", "option_list", "(", "taskid", ")", ":", "if", "(", "taskid", "not", "in", "DataStore", ".", "tasks", ")", ":", "logger", ".", "warning", "(", "(", "'[%s] Invalid task ID provided to option_list()'", "%", "taskid", ")", ")", "return", "jsonize", "(", "{", "'success'", ":", "False", ",", "'message'", ":", "'Invalid task ID'", "}", ")", "logger", ".", "debug", "(", "(", "'[%s] Listed task options'", "%", "taskid", ")", ")", "return", "jsonize", "(", "{", "'success'", ":", "True", ",", "'options'", ":", "DataStore", ".", "tasks", "[", "taskid", "]", ".", "get_options", "(", ")", "}", ")" ]
list options for a certain task id .
train
false
53,026
def has_fully_registered(user_id):
    if (user_id is None):
        return False
    user_settings = get_user_settings(user_id, strict=True)
    return (user_settings.username and user_settings.last_agreed_to_terms and (user_settings.last_agreed_to_terms >= feconf.REGISTRATION_PAGE_LAST_UPDATED_UTC))
[ "def", "has_fully_registered", "(", "user_id", ")", ":", "if", "(", "user_id", "is", "None", ")", ":", "return", "False", "user_settings", "=", "get_user_settings", "(", "user_id", ",", "strict", "=", "True", ")", "return", "(", "user_settings", ".", "username", "and", "user_settings", ".", "last_agreed_to_terms", "and", "(", "user_settings", ".", "last_agreed_to_terms", ">=", "feconf", ".", "REGISTRATION_PAGE_LAST_UPDATED_UTC", ")", ")" ]
checks if a user has fully registered .
train
false
53,028
def write_pack_index_v1(f, entries, pack_checksum):
    f = SHA1Writer(f)
    fan_out_table = defaultdict((lambda : 0))
    for (name, offset, entry_checksum) in entries:
        fan_out_table[ord(name[:1])] += 1
    for i in range(256):
        f.write(struct.pack('>L', fan_out_table[i]))
        fan_out_table[(i + 1)] += fan_out_table[i]
    for (name, offset, entry_checksum) in entries:
        if (not (offset <= 4294967295)):
            raise TypeError('pack format 1 only supports offsets < 2Gb')
        f.write(struct.pack('>L20s', offset, name))
    assert (len(pack_checksum) == 20)
    f.write(pack_checksum)
    return f.write_sha()
[ "def", "write_pack_index_v1", "(", "f", ",", "entries", ",", "pack_checksum", ")", ":", "f", "=", "SHA1Writer", "(", "f", ")", "fan_out_table", "=", "defaultdict", "(", "(", "lambda", ":", "0", ")", ")", "for", "(", "name", ",", "offset", ",", "entry_checksum", ")", "in", "entries", ":", "fan_out_table", "[", "ord", "(", "name", "[", ":", "1", "]", ")", "]", "+=", "1", "for", "i", "in", "range", "(", "256", ")", ":", "f", ".", "write", "(", "struct", ".", "pack", "(", "'>L'", ",", "fan_out_table", "[", "i", "]", ")", ")", "fan_out_table", "[", "(", "i", "+", "1", ")", "]", "+=", "fan_out_table", "[", "i", "]", "for", "(", "name", ",", "offset", ",", "entry_checksum", ")", "in", "entries", ":", "if", "(", "not", "(", "offset", "<=", "4294967295", ")", ")", ":", "raise", "TypeError", "(", "'pack format 1 only supports offsets < 2Gb'", ")", "f", ".", "write", "(", "struct", ".", "pack", "(", "'>L20s'", ",", "offset", ",", "name", ")", ")", "assert", "(", "len", "(", "pack_checksum", ")", "==", "20", ")", "f", ".", "write", "(", "pack_checksum", ")", "return", "f", ".", "write_sha", "(", ")" ]
write a new pack index file .
train
false
53,029
def cookie_signature(seed, *parts):
    sha1 = hmac.new(seed, digestmod=hashlib.sha1)
    for part in parts:
        if part:
            sha1.update(part)
    return sha1.hexdigest()
[ "def", "cookie_signature", "(", "seed", ",", "*", "parts", ")", ":", "sha1", "=", "hmac", ".", "new", "(", "seed", ",", "digestmod", "=", "hashlib", ".", "sha1", ")", "for", "part", "in", "parts", ":", "if", "part", ":", "sha1", ".", "update", "(", "part", ")", "return", "sha1", ".", "hexdigest", "(", ")" ]
generates a cookie signature .
train
true
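A usage sketch for cookie_signature above, assuming its definition and the hmac/hashlib imports it needs are in scope; on Python 3 the seed and parts must be bytes:

sig = cookie_signature(b'server-secret', b'user=42', None, b'expires=9999')
print(sig)  # 40-character hex SHA-1 HMAC; falsy parts such as None are skipped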
53,030
def set_interface(dev, bInterfaceNumber, bAlternateSetting):
    dev.set_interface_altsetting(bInterfaceNumber, bAlternateSetting)
[ "def", "set_interface", "(", "dev", ",", "bInterfaceNumber", ",", "bAlternateSetting", ")", ":", "dev", ".", "set_interface_altsetting", "(", "bInterfaceNumber", ",", "bAlternateSetting", ")" ]
set the alternate setting of the interface .
train
false
53,031
@require_level('staff')
@require_POST
def sale_validation(request, course_id):
    try:
        invoice_number = request.POST['invoice_number']
    except KeyError:
        return HttpResponseBadRequest('Missing required invoice_number parameter')
    try:
        invoice_number = int(invoice_number)
    except ValueError:
        return HttpResponseBadRequest('invoice_number must be an integer, {value} provided'.format(value=invoice_number))
    try:
        event_type = request.POST['event_type']
    except KeyError:
        return HttpResponseBadRequest('Missing required event_type parameter')
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        obj_invoice = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(invoice_id=invoice_number, course_id=course_id)
        obj_invoice = obj_invoice.invoice
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=invoice_number))
    if (event_type == 'invalidate'):
        return invalidate_invoice(obj_invoice)
    else:
        return re_validate_invoice(obj_invoice)
[ "@", "require_level", "(", "'staff'", ")", "@", "require_POST", "def", "sale_validation", "(", "request", ",", "course_id", ")", ":", "try", ":", "invoice_number", "=", "request", ".", "POST", "[", "'invoice_number'", "]", "except", "KeyError", ":", "return", "HttpResponseBadRequest", "(", "'Missing required invoice_number parameter'", ")", "try", ":", "invoice_number", "=", "int", "(", "invoice_number", ")", "except", "ValueError", ":", "return", "HttpResponseBadRequest", "(", "'invoice_number must be an integer, {value} provided'", ".", "format", "(", "value", "=", "invoice_number", ")", ")", "try", ":", "event_type", "=", "request", ".", "POST", "[", "'event_type'", "]", "except", "KeyError", ":", "return", "HttpResponseBadRequest", "(", "'Missing required event_type parameter'", ")", "course_id", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "try", ":", "obj_invoice", "=", "CourseRegistrationCodeInvoiceItem", ".", "objects", ".", "select_related", "(", "'invoice'", ")", ".", "get", "(", "invoice_id", "=", "invoice_number", ",", "course_id", "=", "course_id", ")", "obj_invoice", "=", "obj_invoice", ".", "invoice", "except", "CourseRegistrationCodeInvoiceItem", ".", "DoesNotExist", ":", "return", "HttpResponseNotFound", "(", "_", "(", "\"Invoice number '{num}' does not exist.\"", ")", ".", "format", "(", "num", "=", "invoice_number", ")", ")", "if", "(", "event_type", "==", "'invalidate'", ")", ":", "return", "invalidate_invoice", "(", "obj_invoice", ")", "else", ":", "return", "re_validate_invoice", "(", "obj_invoice", ")" ]
this method either invalidates or re-validates the sale against the invoice number depending upon the event type .
train
false
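A hedged sketch of exercising sale_validation through Django's test client; the URL route and course id are placeholders and a configured edX project is assumed, so this shows the request shape only, not a verified call.

    from django.test import Client

    client = Client()
    resp = client.post(
        '/courses/org/num/run/instructor/api/sale_validation',  # assumed route
        {'invoice_number': '42', 'event_type': 'invalidate'},
    )
    # resp.status_code: 400 for a non-integer invoice_number, 404 for an unknown invoice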
53,032
def ConvertCloudMetadataResponsesToCloudInstance(metadata_responses): if (metadata_responses.instance_type == 'GOOGLE'): cloud_instance = GoogleCloudInstance() result = CloudInstance(cloud_type='GOOGLE', google=cloud_instance) elif (metadata_responses.instance_type == 'AMAZON'): cloud_instance = AmazonCloudInstance() result = CloudInstance(cloud_type='AMAZON', amazon=cloud_instance) else: raise ValueError(('Unknown cloud instance type: %s' % metadata_responses.instance_type)) for cloud_metadata in metadata_responses.responses: setattr(cloud_instance, cloud_metadata.label, cloud_metadata.text) if (result.cloud_type == 'GOOGLE'): cloud_instance.unique_id = MakeGoogleUniqueID(cloud_instance) return result
[ "def", "ConvertCloudMetadataResponsesToCloudInstance", "(", "metadata_responses", ")", ":", "if", "(", "metadata_responses", ".", "instance_type", "==", "'GOOGLE'", ")", ":", "cloud_instance", "=", "GoogleCloudInstance", "(", ")", "result", "=", "CloudInstance", "(", "cloud_type", "=", "'GOOGLE'", ",", "google", "=", "cloud_instance", ")", "elif", "(", "metadata_responses", ".", "instance_type", "==", "'AMAZON'", ")", ":", "cloud_instance", "=", "AmazonCloudInstance", "(", ")", "result", "=", "CloudInstance", "(", "cloud_type", "=", "'AMAZON'", ",", "amazon", "=", "cloud_instance", ")", "else", ":", "raise", "ValueError", "(", "(", "'Unknown cloud instance type: %s'", "%", "metadata_responses", ".", "instance_type", ")", ")", "for", "cloud_metadata", "in", "metadata_responses", ".", "responses", ":", "setattr", "(", "cloud_instance", ",", "cloud_metadata", ".", "label", ",", "cloud_metadata", ".", "text", ")", "if", "(", "result", ".", "cloud_type", "==", "'GOOGLE'", ")", ":", "cloud_instance", ".", "unique_id", "=", "MakeGoogleUniqueID", "(", "cloud_instance", ")", "return", "result" ]
convert cloudmetadataresponses to cloudinstance proto .
train
true
53,033
def lookup_template(namespace, name): return LOOKUP[namespace].get_template(name)
[ "def", "lookup_template", "(", "namespace", ",", "name", ")", ":", "return", "LOOKUP", "[", "namespace", "]", ".", "get_template", "(", "name", ")" ]
look up a mako template by namespace and name .
train
false
53,034
def UrnStringToClientId(urn): if urn.startswith(AFF4_PREFIX): urn = urn[len(AFF4_PREFIX):] components = urn.split('/') return components[0]
[ "def", "UrnStringToClientId", "(", "urn", ")", ":", "if", "urn", ".", "startswith", "(", "AFF4_PREFIX", ")", ":", "urn", "=", "urn", "[", "len", "(", "AFF4_PREFIX", ")", ":", "]", "components", "=", "urn", ".", "split", "(", "'/'", ")", "return", "components", "[", "0", "]" ]
converts given urn string to a client id string .
train
true
53,037
def getconsole(buffer=1): c = Console(buffer) return c
[ "def", "getconsole", "(", "buffer", "=", "1", ")", ":", "c", "=", "Console", "(", "buffer", ")", "return", "c" ]
get a console handle .
train
false
53,038
def register_account_page_class(cls): warn(u'register_account_page_class is deprecated in Review Board 3.0 and will be removed; use AccountPage.registry.register instead.', DeprecationWarning) AccountPage.registry.register(cls)
[ "def", "register_account_page_class", "(", "cls", ")", ":", "warn", "(", "u'register_account_page_class is deprecated in Review Board 3.0 and will be removed; use AccountPage.registry.register instead.'", ",", "DeprecationWarning", ")", "AccountPage", ".", "registry", ".", "register", "(", "cls", ")" ]
register a custom account page class .
train
false
53,039
def sh_chebyt(n, monic=False): base = sh_jacobi(n, 0.0, 0.5, monic=monic) if monic: return base if (n > 0): factor = ((4 ** n) / 2.0) else: factor = 1.0 base._scale(factor) return base
[ "def", "sh_chebyt", "(", "n", ",", "monic", "=", "False", ")", ":", "base", "=", "sh_jacobi", "(", "n", ",", "0.0", ",", "0.5", ",", "monic", "=", "monic", ")", "if", "monic", ":", "return", "base", "if", "(", "n", ">", "0", ")", ":", "factor", "=", "(", "(", "4", "**", "n", ")", "/", "2.0", ")", "else", ":", "factor", "=", "1.0", "base", ".", "_scale", "(", "factor", ")", "return", "base" ]
shifted chebyshev polynomial of the first kind .
train
false
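A quick numeric spot check using the identity T*_n(x) = T_n(2x - 1); this sketch assumes the same function as exposed publicly by scipy.special.sh_chebyt.

    from scipy.special import sh_chebyt

    T2 = sh_chebyt(2)      # shifted Chebyshev polynomial T2*
    print(T2(0.5))         # T2*(0.5) = T2(2*0.5 - 1) = T2(0) = -1.0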
53,042
def q_stat(x, nobs, type='ljungbox'): x = np.asarray(x) if (type == 'ljungbox'): ret = ((nobs * (nobs + 2)) * np.cumsum(((1.0 / (nobs - np.arange(1, (len(x) + 1)))) * (x ** 2)))) chi2 = stats.chi2.sf(ret, np.arange(1, (len(x) + 1))) return (ret, chi2)
[ "def", "q_stat", "(", "x", ",", "nobs", ",", "type", "=", "'ljungbox'", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "(", "type", "==", "'ljungbox'", ")", ":", "ret", "=", "(", "(", "nobs", "*", "(", "nobs", "+", "2", ")", ")", "*", "np", ".", "cumsum", "(", "(", "(", "1.0", "/", "(", "nobs", "-", "np", ".", "arange", "(", "1", ",", "(", "len", "(", "x", ")", "+", "1", ")", ")", ")", ")", "*", "(", "x", "**", "2", ")", ")", ")", ")", "chi2", "=", "stats", ".", "chi2", ".", "sf", "(", "ret", ",", "np", ".", "arange", "(", "1", ",", "(", "len", "(", "x", ")", "+", "1", ")", ")", ")", "return", "(", "ret", ",", "chi2", ")" ]
returns the ljung-box q statistic . x : array-like , array of autocorrelation coefficients .
train
false
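A worked call, assuming the snippet and its numpy/scipy.stats imports are in scope; the autocorrelation values are made up for illustration.

    import numpy as np

    acf = np.array([0.10, -0.05, 0.02])   # illustrative autocorrelations at lags 1..3
    q, pvals = q_stat(acf, nobs=100)
    # q[k-1] accumulates lags 1..k; pvals are the chi-squared tail probabilities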
53,043
def libdoc(library_or_resource, outfile, name='', version='', format=None): LibDoc().execute(library_or_resource, outfile, name=name, version=version, format=format)
[ "def", "libdoc", "(", "library_or_resource", ",", "outfile", ",", "name", "=", "''", ",", "version", "=", "''", ",", "format", "=", "None", ")", ":", "LibDoc", "(", ")", ".", "execute", "(", "library_or_resource", ",", "outfile", ",", "name", "=", "name", ",", "version", "=", "version", ",", "format", "=", "format", ")" ]
executes libdoc .
train
false
53,044
def unjelly(sexp, taster=DummySecurityOptions(), persistentLoad=None, invoker=None): return _Unjellier(taster, persistentLoad, invoker).unjellyFull(sexp)
[ "def", "unjelly", "(", "sexp", ",", "taster", "=", "DummySecurityOptions", "(", ")", ",", "persistentLoad", "=", "None", ",", "invoker", "=", "None", ")", ":", "return", "_Unjellier", "(", "taster", ",", "persistentLoad", ",", "invoker", ")", ".", "unjellyFull", "(", "sexp", ")" ]
unserialize from s-expression .
train
false
53,045
def test_require_multiple_existing_keys(): require('version', 'sudo_prompt')
[ "def", "test_require_multiple_existing_keys", "(", ")", ":", "require", "(", "'version'", ",", "'sudo_prompt'", ")" ]
tests that require() does not abort when given multiple existing keys .
train
false
53,046
def get_latest_downloadable_changeset_revision_via_api(url, name, owner): error_message = '' parts = ['api', 'repositories', 'get_ordered_installable_revisions'] params = dict(name=name, owner=owner) api_url = get_api_url(base=url, parts=parts, params=params) (changeset_revisions, error_message) = json_from_url(api_url) if ((changeset_revisions is None) or error_message): return (None, error_message) if (len(changeset_revisions) >= 1): return (changeset_revisions[(-1)], error_message) return (hg_util.INITIAL_CHANGELOG_HASH, error_message)
[ "def", "get_latest_downloadable_changeset_revision_via_api", "(", "url", ",", "name", ",", "owner", ")", ":", "error_message", "=", "''", "parts", "=", "[", "'api'", ",", "'repositories'", ",", "'get_ordered_installable_revisions'", "]", "params", "=", "dict", "(", "name", "=", "name", ",", "owner", "=", "owner", ")", "api_url", "=", "get_api_url", "(", "base", "=", "url", ",", "parts", "=", "parts", ",", "params", "=", "params", ")", "(", "changeset_revisions", ",", "error_message", ")", "=", "json_from_url", "(", "api_url", ")", "if", "(", "(", "changeset_revisions", "is", "None", ")", "or", "error_message", ")", ":", "return", "(", "None", ",", "error_message", ")", "if", "(", "len", "(", "changeset_revisions", ")", ">=", "1", ")", ":", "return", "(", "changeset_revisions", "[", "(", "-", "1", ")", "]", ",", "error_message", ")", "return", "(", "hg_util", ".", "INITIAL_CHANGELOG_HASH", ",", "error_message", ")" ]
return the latest downloadable changeset revision for the repository defined by the received name and owner .
train
false
53,047
def is_flat(var, outdim=1): return (var.ndim == outdim)
[ "def", "is_flat", "(", "var", ",", "outdim", "=", "1", ")", ":", "return", "(", "var", ".", "ndim", "==", "outdim", ")" ]
verifies that the dimensionality of var is equal to outdim .
train
false
53,048
def adjust_processing_time(num_cores, workload, timing, epoch): sigma = 0.0 for i in range(num_cores): timing[i] = (workload[i] / (timing[i] - epoch)) sigma += timing[i] spread = [None for x in range(num_cores)] for i in range(num_cores): spread[i] = ((timing[i] * num_cores) / sigma) return spread
[ "def", "adjust_processing_time", "(", "num_cores", ",", "workload", ",", "timing", ",", "epoch", ")", ":", "sigma", "=", "0.0", "for", "i", "in", "range", "(", "num_cores", ")", ":", "timing", "[", "i", "]", "=", "(", "workload", "[", "i", "]", "/", "(", "timing", "[", "i", "]", "-", "epoch", ")", ")", "sigma", "+=", "timing", "[", "i", "]", "spread", "=", "[", "None", "for", "x", "in", "range", "(", "num_cores", ")", "]", "for", "i", "in", "range", "(", "num_cores", ")", ":", "spread", "[", "i", "]", "=", "(", "(", "timing", "[", "i", "]", "*", "num_cores", ")", "/", "sigma", ")", "return", "spread" ]
adjust_processing_time computes the normalized relative worker throughput .
train
false
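A worked example, assuming the snippet is in scope; note that timing is overwritten in place with per-worker throughputs, and the returned spread averages to 1.0 across workers.

    epoch = 0.0
    workload = [100.0, 200.0, 300.0, 400.0]
    timing = [10.0, 10.0, 10.0, 10.0]   # finish timestamps; equal elapsed time here
    spread = adjust_processing_time(4, workload, timing, epoch)
    print(spread)   # equal elapsed time => proportional to workload: [0.4, 0.8, 1.2, 1.6]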
53,049
def get_timestamp_from_url(url): obj = urllib2.urlopen(url) return time.strptime(obj.info()['Last-Modified'], '%a, %d %b %Y %H:%M:%S GMT')
[ "def", "get_timestamp_from_url", "(", "url", ")", ":", "obj", "=", "urllib2", ".", "urlopen", "(", "url", ")", "return", "time", ".", "strptime", "(", "obj", ".", "info", "(", ")", "[", "'Last-Modified'", "]", ",", "'%a, %d %b %Y %H:%M:%S GMT'", ")" ]
gets the last-modified field from the http header associated with the file pointed to by the url .
train
false
53,050
def _addAccountRights(sidObject, user_right): try: if sidObject: _polHandle = win32security.LsaOpenPolicy(None, win32security.POLICY_ALL_ACCESS) user_rights_list = [user_right] _ret = win32security.LsaAddAccountRights(_polHandle, sidObject, user_rights_list) return True except Exception as e: log.error('Error attempting to add account right, exception was {0}'.format(e)) return False
[ "def", "_addAccountRights", "(", "sidObject", ",", "user_right", ")", ":", "try", ":", "if", "sidObject", ":", "_polHandle", "=", "win32security", ".", "LsaOpenPolicy", "(", "None", ",", "win32security", ".", "POLICY_ALL_ACCESS", ")", "user_rights_list", "=", "[", "user_right", "]", "_ret", "=", "win32security", ".", "LsaAddAccountRights", "(", "_polHandle", ",", "sidObject", ",", "user_rights_list", ")", "return", "True", "except", "Exception", "as", "e", ":", "log", ".", "error", "(", "'Error attempting to add account right, exception was {0}'", ".", "format", "(", "e", ")", ")", "return", "False" ]
helper function to add an account right to a user .
train
true
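A Windows-only sketch, assuming pywin32 plus the snippet and its log object are in scope; the account name and privilege string are placeholders.

    import win32security

    # look up the SID for a local account (name is a placeholder)
    sid, _domain, _type = win32security.LookupAccountName(None, 'svc_account')
    ok = _addAccountRights(sid, 'SeServiceLogonRight')  # grant "log on as a service"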
53,051
def delete_tags(filesystemid, tags, keyid=None, key=None, profile=None, region=None, **kwargs): client = _get_conn(key=key, keyid=keyid, profile=profile, region=region) client.delete_tags(FileSystemId=filesystemid, TagKeys=tags)
[ "def", "delete_tags", "(", "filesystemid", ",", "tags", ",", "keyid", "=", "None", ",", "key", "=", "None", ",", "profile", "=", "None", ",", "region", "=", "None", ",", "**", "kwargs", ")", ":", "client", "=", "_get_conn", "(", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ",", "region", "=", "region", ")", "client", ".", "delete_tags", "(", "FileSystemId", "=", "filesystemid", ",", "TagKeys", "=", "tags", ")" ]
deletes the specified tags from a file system .
train
true
53,052
def matching_details(field): our_args = [] our_kwargs = {} for (classes, args, kwargs) in introspection_details: if any([isinstance(field, x) for x in classes]): our_args.extend(args) our_kwargs.update(kwargs) return (our_args, our_kwargs)
[ "def", "matching_details", "(", "field", ")", ":", "our_args", "=", "[", "]", "our_kwargs", "=", "{", "}", "for", "(", "classes", ",", "args", ",", "kwargs", ")", "in", "introspection_details", ":", "if", "any", "(", "[", "isinstance", "(", "field", ",", "x", ")", "for", "x", "in", "classes", "]", ")", ":", "our_args", ".", "extend", "(", "args", ")", "our_kwargs", ".", "update", "(", "kwargs", ")", "return", "(", "our_args", ",", "our_kwargs", ")" ]
returns the union of all matching entries in introspection_details for the field .
train
false
53,053
def test_override_column_class_names(): class MyTable(tables.Table, ): population = tables.Column(verbose_name=u'Population') def get_column_class_names(self, classes_set, bound_column): classes_set.add((u'prefix-%s' % bound_column.name)) return classes_set TEST_DATA = [{u'name': u'Belgium', u'population': 11200000}, {u'name': u'Luxembourgh', u'population': 540000}, {u'name': u'France', u'population': 66000000}] html = MyTable(TEST_DATA).as_html(build_request()) assert (u'<td class="prefix-population">11200000</td>' in html)
[ "def", "test_override_column_class_names", "(", ")", ":", "class", "MyTable", "(", "tables", ".", "Table", ",", ")", ":", "population", "=", "tables", ".", "Column", "(", "verbose_name", "=", "u'Population'", ")", "def", "get_column_class_names", "(", "self", ",", "classes_set", ",", "bound_column", ")", ":", "classes_set", ".", "add", "(", "(", "u'prefix-%s'", "%", "bound_column", ".", "name", ")", ")", "return", "classes_set", "TEST_DATA", "=", "[", "{", "u'name'", ":", "u'Belgium'", ",", "u'population'", ":", "11200000", "}", ",", "{", "u'name'", ":", "u'Luxembourgh'", ",", "u'population'", ":", "540000", "}", ",", "{", "u'name'", ":", "u'France'", ",", "u'population'", ":", "66000000", "}", "]", "html", "=", "MyTable", "(", "TEST_DATA", ")", ".", "as_html", "(", "build_request", "(", ")", ")", "assert", "(", "u'<td class=\"prefix-population\">11200000</td>'", "in", "html", ")" ]
we control the output of css class names for a column by overriding get_column_class_names .
train
false
53,055
def _mask_crc(crc): return ((((crc >> 15) | (crc << 17)) + _CRC_MASK_DELTA) & 4294967295L)
[ "def", "_mask_crc", "(", "crc", ")", ":", "return", "(", "(", "(", "(", "crc", ">>", "15", ")", "|", "(", "crc", "<<", "17", ")", ")", "+", "_CRC_MASK_DELTA", ")", "&", "4294967295", "L", ")" ]
mask crc .
train
false
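A self-contained demonstration; _CRC_MASK_DELTA is assumed to be 0xa282ead8, the constant used by the leveldb/snappy crc32c masking scheme, and the Python 2 long suffix is dropped since the final mask keeps the value in 32 bits.

    _CRC_MASK_DELTA = 0xa282ead8   # assumed value of the module constant

    def _mask_crc(crc):            # restated from the snippet above
        return (((crc >> 15) | (crc << 17)) + _CRC_MASK_DELTA) & 0xffffffff

    print(hex(_mask_crc(0x12345678)))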
53,056
def is_readable_gs_handle(gs_handle): try: with files.open(gs_handle) as bak_file: bak_file.read(1) except files.PermissionDeniedError: return False return True
[ "def", "is_readable_gs_handle", "(", "gs_handle", ")", ":", "try", ":", "with", "files", ".", "open", "(", "gs_handle", ")", "as", "bak_file", ":", "bak_file", ".", "read", "(", "1", ")", "except", "files", ".", "PermissionDeniedError", ":", "return", "False", "return", "True" ]
return true if the application can read the specified gs_handle .
train
false
53,057
@flake8ext def check_no_sqlalchemy_event_import(logical_line, filename, noqa): if noqa: return is_import = (logical_line.startswith('import') or logical_line.startswith('from')) if (not is_import): return for kw in ('sqlalchemy', 'event'): if (kw not in logical_line): return (yield (0, 'N346: Register sqlalchemy events through neutron.db.api.sqla_listen so they can be cleaned up between unit tests'))
[ "@", "flake8ext", "def", "check_no_sqlalchemy_event_import", "(", "logical_line", ",", "filename", ",", "noqa", ")", ":", "if", "noqa", ":", "return", "is_import", "=", "(", "logical_line", ".", "startswith", "(", "'import'", ")", "or", "logical_line", ".", "startswith", "(", "'from'", ")", ")", "if", "(", "not", "is_import", ")", ":", "return", "for", "kw", "in", "(", "'sqlalchemy'", ",", "'event'", ")", ":", "if", "(", "kw", "not", "in", "logical_line", ")", ":", "return", "(", "yield", "(", "0", ",", "'N346: Register sqlalchemy events through neutron.db.api.sqla_listen so they can be cleaned up between unit tests'", ")", ")" ]
n346 - register sqlalchemy events through neutron.db.api.sqla_listen rather than importing sqlalchemy event directly .
train
false
53,058
def optimize_images(pelican): for (dirpath, _, filenames) in os.walk(pelican.settings['OUTPUT_PATH']): for name in filenames: if (os.path.splitext(name)[1] in COMMANDS.keys()): optimize(dirpath, name)
[ "def", "optimize_images", "(", "pelican", ")", ":", "for", "(", "dirpath", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "pelican", ".", "settings", "[", "'OUTPUT_PATH'", "]", ")", ":", "for", "name", "in", "filenames", ":", "if", "(", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "1", "]", "in", "COMMANDS", ".", "keys", "(", ")", ")", ":", "optimize", "(", "dirpath", ",", "name", ")" ]
optimizes jpg and png images .
train
true
53,059
def dump_traceback(file=sys.stderr, all_threads=True): current_thread = threading.current_thread() if all_threads: threads = threading.enumerate() else: threads = [current_thread] for thread in threads: if (thread == current_thread): name = 'Current thread' else: name = 'Thread' print >>file, ('%s %s %s (most recent call last):' % (name, thread.name, thread.ident)) frame = sys._current_frames()[thread.ident] traceback.print_stack(frame, file=file) print >>file
[ "def", "dump_traceback", "(", "file", "=", "sys", ".", "stderr", ",", "all_threads", "=", "True", ")", ":", "current_thread", "=", "threading", ".", "current_thread", "(", ")", "if", "all_threads", ":", "threads", "=", "threading", ".", "enumerate", "(", ")", "else", ":", "threads", "=", "[", "current_thread", "]", "for", "thread", "in", "threads", ":", "if", "(", "thread", "==", "current_thread", ")", ":", "name", "=", "'Current thread'", "else", ":", "name", "=", "'Thread'", "print", ">>", "file", ",", "(", "'%s %s %s (most recent call last):'", "%", "(", "name", ",", "thread", ".", "name", ",", "thread", ".", "ident", ")", ")", "frame", "=", "sys", ".", "_current_frames", "(", ")", "[", "thread", ".", "ident", "]", "traceback", ".", "print_stack", "(", "frame", ",", "file", "=", "file", ")", "print", ">>", "file" ]
print a thread stacktrace .
train
false
53,060
def _get_spyderplugins(plugin_path, is_io, modnames, modlist): if (not osp.isdir(plugin_path)): return for name in os.listdir(plugin_path): if (is_io and (not name.startswith(IO_PREFIX))): continue if ((not name.startswith(PLUGIN_PREFIX)) or name.startswith(IO_PREFIX)): continue _import_plugin(name, plugin_path, modnames, modlist)
[ "def", "_get_spyderplugins", "(", "plugin_path", ",", "is_io", ",", "modnames", ",", "modlist", ")", ":", "if", "(", "not", "osp", ".", "isdir", "(", "plugin_path", ")", ")", ":", "return", "for", "name", "in", "os", ".", "listdir", "(", "plugin_path", ")", ":", "if", "(", "is_io", "and", "(", "not", "name", ".", "startswith", "(", "IO_PREFIX", ")", ")", ")", ":", "continue", "if", "(", "(", "not", "name", ".", "startswith", "(", "PLUGIN_PREFIX", ")", ")", "or", "name", ".", "startswith", "(", "IO_PREFIX", ")", ")", ":", "continue", "_import_plugin", "(", "name", ",", "plugin_path", ",", "modnames", ",", "modlist", ")" ]
scans the directory plugin_path for plugin packages and loads them .
train
false
53,061
def test_cos_dataset(): skip_if_no_data() dataset = CosDataset() sample_batch = dataset.get_batch_design(batch_size=10000) assert (sample_batch.shape == (10000, 2)) assert (sample_batch[:, 0].min() >= dataset.min_x) assert (sample_batch[:, 0].max() <= dataset.max_x)
[ "def", "test_cos_dataset", "(", ")", ":", "skip_if_no_data", "(", ")", "dataset", "=", "CosDataset", "(", ")", "sample_batch", "=", "dataset", ".", "get_batch_design", "(", "batch_size", "=", "10000", ")", "assert", "(", "sample_batch", ".", "shape", "==", "(", "10000", ",", "2", ")", ")", "assert", "(", "sample_batch", "[", ":", ",", "0", "]", ".", "min", "(", ")", ">=", "dataset", ".", "min_x", ")", "assert", "(", "sample_batch", "[", ":", ",", "0", "]", ".", "max", "(", ")", "<=", "dataset", ".", "max_x", ")" ]
tests if the dataset generator yields the desired value .
train
false
53,062
def network_list_for_tenant(request, tenant_id, **params): LOG.debug(('network_list_for_tenant(): tenant_id=%s, params=%s' % (tenant_id, params))) networks = network_list(request, tenant_id=tenant_id, shared=False, **params) networks += network_list(request, shared=True, **params) return networks
[ "def", "network_list_for_tenant", "(", "request", ",", "tenant_id", ",", "**", "params", ")", ":", "LOG", ".", "debug", "(", "(", "'network_list_for_tenant(): tenant_id=%s, params=%s'", "%", "(", "tenant_id", ",", "params", ")", ")", ")", "networks", "=", "network_list", "(", "request", ",", "tenant_id", "=", "tenant_id", ",", "shared", "=", "False", ",", "**", "params", ")", "networks", "+=", "network_list", "(", "request", ",", "shared", "=", "True", ",", "**", "params", ")", "return", "networks" ]
return a network list available for the tenant .
train
false
53,063
def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False, can_delete=False, max_num=None, validate_max=False, min_num=None, validate_min=False): if (min_num is None): min_num = DEFAULT_MIN_NUM if (max_num is None): max_num = DEFAULT_MAX_NUM absolute_max = (max_num + DEFAULT_MAX_NUM) attrs = {'form': form, 'extra': extra, 'can_order': can_order, 'can_delete': can_delete, 'min_num': min_num, 'max_num': max_num, 'absolute_max': absolute_max, 'validate_min': validate_min, 'validate_max': validate_max} return type((form.__name__ + 'FormSet'), (formset,), attrs)
[ "def", "formset_factory", "(", "form", ",", "formset", "=", "BaseFormSet", ",", "extra", "=", "1", ",", "can_order", "=", "False", ",", "can_delete", "=", "False", ",", "max_num", "=", "None", ",", "validate_max", "=", "False", ",", "min_num", "=", "None", ",", "validate_min", "=", "False", ")", ":", "if", "(", "min_num", "is", "None", ")", ":", "min_num", "=", "DEFAULT_MIN_NUM", "if", "(", "max_num", "is", "None", ")", ":", "max_num", "=", "DEFAULT_MAX_NUM", "absolute_max", "=", "(", "max_num", "+", "DEFAULT_MAX_NUM", ")", "attrs", "=", "{", "'form'", ":", "form", ",", "'extra'", ":", "extra", ",", "'can_order'", ":", "can_order", ",", "'can_delete'", ":", "can_delete", ",", "'min_num'", ":", "min_num", ",", "'max_num'", ":", "max_num", ",", "'absolute_max'", ":", "absolute_max", ",", "'validate_min'", ":", "validate_min", ",", "'validate_max'", ":", "validate_max", "}", "return", "type", "(", "(", "form", ".", "__name__", "+", "'FormSet'", ")", ",", "(", "formset", ",", ")", ",", "attrs", ")" ]
return a formset for the given form class .
train
true
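Typical usage, mirroring Django's public django.forms.formset_factory; ArticleForm is a hypothetical form class, and rendering the formset requires configured Django settings.

    from django import forms
    from django.forms import formset_factory

    class ArticleForm(forms.Form):
        title = forms.CharField()

    ArticleFormSet = formset_factory(ArticleForm, extra=2, can_delete=True)
    formset = ArticleFormSet()   # two blank forms, each with a DELETE checkbox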
53,064
def is_indvar(expr): assert isinstance(expr, string_types), (u'%s is not a string' % expr) return (re.match(u'^[a-df-z]\\d*$', expr) is not None)
[ "def", "is_indvar", "(", "expr", ")", ":", "assert", "isinstance", "(", "expr", ",", "string_types", ")", ",", "(", "u'%s is not a string'", "%", "expr", ")", "return", "(", "re", ".", "match", "(", "u'^[a-df-z]\\\\d*$'", ",", "expr", ")", "is", "not", "None", ")" ]
an individual variable must be a single lowercase character other than e , followed by zero or more digits .
train
false
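Behavior implied by the regex ^[a-df-z]\d*$, assuming the snippet is in scope: one lowercase letter other than 'e' (which nltk reserves for event variables), then an optional digit suffix.

    assert is_indvar('x')        # plain individual variable
    assert is_indvar('z12')      # digit suffix is allowed
    assert not is_indvar('e')    # 'e' is excluded by the character class
    assert not is_indvar('X1')   # uppercase is rejected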
53,065
def always_yes(zcontext, in_url, out_url): isock = zcontext.socket(zmq.SUB) isock.connect(in_url) isock.setsockopt(zmq.SUBSCRIBE, '00') osock = zcontext.socket(zmq.PUSH) osock.connect(out_url) while True: isock.recv_string() osock.send_string('Y')
[ "def", "always_yes", "(", "zcontext", ",", "in_url", ",", "out_url", ")", ":", "isock", "=", "zcontext", ".", "socket", "(", "zmq", ".", "SUB", ")", "isock", ".", "connect", "(", "in_url", ")", "isock", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "'00'", ")", "osock", "=", "zcontext", ".", "socket", "(", "zmq", ".", "PUSH", ")", "osock", ".", "connect", "(", "out_url", ")", "while", "True", ":", "isock", ".", "recv_string", "(", ")", "osock", ".", "send_string", "(", "'Y'", ")" ]
coordinates in the lower-left quadrant are always inside the unit circle , so this worker answers 'Y' without doing any computation .
train
false
53,066
def _table_proportion(count, nobs): table = np.column_stack((count, (nobs - count))) expected = (((table.sum(0) * table.sum(1)[:, None]) * 1.0) / table.sum()) n_rows = table.shape[0] return (table, expected, n_rows)
[ "def", "_table_proportion", "(", "count", ",", "nobs", ")", ":", "table", "=", "np", ".", "column_stack", "(", "(", "count", ",", "(", "nobs", "-", "count", ")", ")", ")", "expected", "=", "(", "(", "(", "table", ".", "sum", "(", "0", ")", "*", "table", ".", "sum", "(", "1", ")", "[", ":", ",", "None", "]", ")", "*", "1.0", ")", "/", "table", ".", "sum", "(", ")", ")", "n_rows", "=", "table", ".", "shape", "[", "0", "]", "return", "(", "table", ",", "expected", ",", "n_rows", ")" ]
create a k by 2 contingency table for proportions . helper function for proportions_chisquare . parameters : count : integer or array_like , the number of successes in nobs trials .
train
false
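A worked call, assuming the snippet and numpy are in scope; the counts are illustrative.

    import numpy as np

    count = np.array([18, 22, 25])   # successes in each of three samples
    nobs = np.array([30, 30, 30])    # trials per sample
    table, expected, n_rows = _table_proportion(count, nobs)
    # table rows are (successes, failures); expected assumes one common proportion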
53,068
def looks_like_a_tool_yaml(path): return is_a_yaml_with_class(path, ['GalaxyTool'])
[ "def", "looks_like_a_tool_yaml", "(", "path", ")", ":", "return", "is_a_yaml_with_class", "(", "path", ",", "[", "'GalaxyTool'", "]", ")" ]
quick check to see if a file looks like it may be a galaxy yaml tool file .
train
false
53,069
def _new_value_pb(entity_pb, name): return entity_pb.properties.get_or_create(name)
[ "def", "_new_value_pb", "(", "entity_pb", ",", "name", ")", ":", "return", "entity_pb", ".", "properties", ".", "get_or_create", "(", "name", ")" ]
add a new value protobuf to an entity protobuf .
train
false
53,070
def chown_log_dir(uid, gid): if (_log_dir is None): return False try: os.chown(_log_dir, uid, gid) for entry in os.listdir(_log_dir): os.chown(os.path.join(_log_dir, entry), uid, gid) return True except OSError as ex: print >>sys.stderr, ('Failed to chown log directory %s: %s' % (_log_dir, ex)) return False
[ "def", "chown_log_dir", "(", "uid", ",", "gid", ")", ":", "if", "(", "_log_dir", "is", "None", ")", ":", "return", "False", "try", ":", "os", ".", "chown", "(", "_log_dir", ",", "uid", ",", "gid", ")", "for", "entry", "in", "os", ".", "listdir", "(", "_log_dir", ")", ":", "os", ".", "chown", "(", "os", ".", "path", ".", "join", "(", "_log_dir", ",", "entry", ")", ",", "uid", ",", "gid", ")", "return", "True", "except", "OSError", "as", "ex", ":", "print", ">>", "sys", ".", "stderr", ",", "(", "'Failed to chown log directory %s: %s'", "%", "(", "_log_dir", ",", "ex", ")", ")", "return", "False" ]
chown all files in the log dir to this user and group .
train
false
53,072
def post_update(base_mapper, states, uowtransaction, post_update_cols): cached_connections = _cached_connection_dict(base_mapper) states_to_update = list(_organize_states_for_post_update(base_mapper, states, uowtransaction)) for (table, mapper) in base_mapper._sorted_tables.items(): if (table not in mapper._pks_by_table): continue update = ((state, state_dict, sub_mapper, connection) for (state, state_dict, sub_mapper, connection) in states_to_update if (table in sub_mapper._pks_by_table)) update = _collect_post_update_commands(base_mapper, uowtransaction, table, update, post_update_cols) _emit_post_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update)
[ "def", "post_update", "(", "base_mapper", ",", "states", ",", "uowtransaction", ",", "post_update_cols", ")", ":", "cached_connections", "=", "_cached_connection_dict", "(", "base_mapper", ")", "states_to_update", "=", "list", "(", "_organize_states_for_post_update", "(", "base_mapper", ",", "states", ",", "uowtransaction", ")", ")", "for", "(", "table", ",", "mapper", ")", "in", "base_mapper", ".", "_sorted_tables", ".", "items", "(", ")", ":", "if", "(", "table", "not", "in", "mapper", ".", "_pks_by_table", ")", ":", "continue", "update", "=", "(", "(", "state", ",", "state_dict", ",", "sub_mapper", ",", "connection", ")", "for", "(", "state", ",", "state_dict", ",", "sub_mapper", ",", "connection", ")", "in", "states_to_update", "if", "(", "table", "in", "sub_mapper", ".", "_pks_by_table", ")", ")", "update", "=", "_collect_post_update_commands", "(", "base_mapper", ",", "uowtransaction", ",", "table", ",", "update", ",", "post_update_cols", ")", "_emit_post_update_statements", "(", "base_mapper", ",", "uowtransaction", ",", "cached_connections", ",", "mapper", ",", "table", ",", "update", ")" ]
issue update statements on behalf of a relationship() which specifies post_update .
train
false
53,073
def find_xontrib(name): if name.startswith('.'): spec = importlib.util.find_spec(name, package='xontrib') else: spec = importlib.util.find_spec(('.' + name), package='xontrib') return (spec or importlib.util.find_spec(name))
[ "def", "find_xontrib", "(", "name", ")", ":", "if", "name", ".", "startswith", "(", "'.'", ")", ":", "spec", "=", "importlib", ".", "util", ".", "find_spec", "(", "name", ",", "package", "=", "'xontrib'", ")", "else", ":", "spec", "=", "importlib", ".", "util", ".", "find_spec", "(", "(", "'.'", "+", "name", ")", ",", "package", "=", "'xontrib'", ")", "return", "(", "spec", "or", "importlib", ".", "util", ".", "find_spec", "(", "name", ")", ")" ]
finds a xontribution from its name .
train
false
53,074
def meter_calls(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): meter(('%s_calls' % get_qualname(fn))).mark() return fn(*args, **kwargs) return wrapper
[ "def", "meter_calls", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "meter", "(", "(", "'%s_calls'", "%", "get_qualname", "(", "fn", ")", ")", ")", ".", "mark", "(", ")", "return", "fn", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
decorator to meter the rate at which a function is called .
train
false
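A usage sketch, assuming the module's meter and get_qualname helpers are in scope alongside the snippet.

    @meter_calls
    def handle_request():
        return 'ok'

    handle_request()   # each call marks a meter named '<qualname>_calls'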
53,075
def ConvertValuesWithMetadata(metadata_value_pairs, token=None, options=None): no_converter_found_error = None for (rdf_type, metadata_values_group) in utils.GroupBy(metadata_value_pairs, (lambda pair: pair[1].__class__.__name__)).iteritems(): _ = rdf_type (_, first_value) = metadata_values_group[0] converters_classes = ExportConverter.GetConvertersByValue(first_value) if (not converters_classes): no_converter_found_error = ('No converters found for value: %s' % str(first_value)) continue converters = [cls(options) for cls in converters_classes] for converter in converters: for result in converter.BatchConvert(metadata_values_group, token=token): (yield result) if (no_converter_found_error is not None): raise NoConverterFound(no_converter_found_error)
[ "def", "ConvertValuesWithMetadata", "(", "metadata_value_pairs", ",", "token", "=", "None", ",", "options", "=", "None", ")", ":", "no_converter_found_error", "=", "None", "for", "(", "rdf_type", ",", "metadata_values_group", ")", "in", "utils", ".", "GroupBy", "(", "metadata_value_pairs", ",", "(", "lambda", "pair", ":", "pair", "[", "1", "]", ".", "__class__", ".", "__name__", ")", ")", ".", "iteritems", "(", ")", ":", "_", "=", "rdf_type", "(", "_", ",", "first_value", ")", "=", "metadata_values_group", "[", "0", "]", "converters_classes", "=", "ExportConverter", ".", "GetConvertersByValue", "(", "first_value", ")", "if", "(", "not", "converters_classes", ")", ":", "no_converter_found_error", "=", "(", "'No converters found for value: %s'", "%", "str", "(", "first_value", ")", ")", "continue", "converters", "=", "[", "cls", "(", "options", ")", "for", "cls", "in", "converters_classes", "]", "for", "converter", "in", "converters", ":", "for", "result", "in", "converter", ".", "BatchConvert", "(", "metadata_values_group", ",", "token", "=", "token", ")", ":", "(", "yield", "result", ")", "if", "(", "no_converter_found_error", "is", "not", "None", ")", ":", "raise", "NoConverterFound", "(", "no_converter_found_error", ")" ]
converts a set of rdfvalues into a set of export-friendly rdfvalues .
train
false
53,076
def _format_decimal(num, format=None): lang = translation.get_language() if (not localedata.exists(lang)): lang = settings.LANGUAGE_CODE locale = Locale(translation.to_locale(lang)) return Format(locale).decimal(num, format)
[ "def", "_format_decimal", "(", "num", ",", "format", "=", "None", ")", ":", "lang", "=", "translation", ".", "get_language", "(", ")", "if", "(", "not", "localedata", ".", "exists", "(", "lang", ")", ")", ":", "lang", "=", "settings", ".", "LANGUAGE_CODE", "locale", "=", "Locale", "(", "translation", ".", "to_locale", "(", "lang", ")", ")", "return", "Format", "(", "locale", ")", ".", "decimal", "(", "num", ",", "format", ")" ]
returns the string of a number formatted for the current language .
train
false
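A hedged call sketch, assuming the snippet and its babel/django imports are in scope, babel is installed, and django settings are configured; the pattern follows babel's number-format syntax.

    from django.utils import translation

    translation.activate('de')
    s = _format_decimal(1234567.891, format='#,##0.##')   # e.g. '1.234.567,89'
    translation.deactivate()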
53,077
def FRAME_ATTR_NAMES_SET(): out = set() for frame_cls in frame_transform_graph.frame_set: for attr in frame_cls.get_frame_attr_names().keys(): out.add(attr) return out
[ "def", "FRAME_ATTR_NAMES_SET", "(", ")", ":", "out", "=", "set", "(", ")", "for", "frame_cls", "in", "frame_transform_graph", ".", "frame_set", ":", "for", "attr", "in", "frame_cls", ".", "get_frame_attr_names", "(", ")", ".", "keys", "(", ")", ":", "out", ".", "add", "(", "attr", ")", "return", "out" ]
set of all possible frame-specific attributes .
train
false
53,078
def quick_api(api_key, secret_key, port=8000): auth = LinkedInAuthentication(api_key, secret_key, 'http://localhost:8000/', PERMISSIONS.enums.values()) app = LinkedInApplication(authentication=auth) print auth.authorization_url _wait_for_user_to_enter_browser(app, port) return app
[ "def", "quick_api", "(", "api_key", ",", "secret_key", ",", "port", "=", "8000", ")", ":", "auth", "=", "LinkedInAuthentication", "(", "api_key", ",", "secret_key", ",", "'http://localhost:8000/'", ",", "PERMISSIONS", ".", "enums", ".", "values", "(", ")", ")", "app", "=", "LinkedInApplication", "(", "authentication", "=", "auth", ")", "print", "auth", ".", "authorization_url", "_wait_for_user_to_enter_browser", "(", "app", ",", "port", ")", "return", "app" ]
this method helps you get access to the linkedin api quickly when using it from the interpreter .
train
true
53,079
@register.inclusion_tag(u'generic/includes/disqus_sso.html', takes_context=True) def disqus_sso_script(context): settings = context[u'settings'] public_key = getattr(settings, u'COMMENTS_DISQUS_API_PUBLIC_KEY', u'') secret_key = getattr(settings, u'COMMENTS_DISQUS_API_SECRET_KEY', u'') user = context[u'request'].user if (public_key and secret_key and user.is_authenticated()): context[u'public_key'] = public_key context[u'sso_data'] = _get_disqus_sso(user, public_key, secret_key) return context
[ "@", "register", ".", "inclusion_tag", "(", "u'generic/includes/disqus_sso.html'", ",", "takes_context", "=", "True", ")", "def", "disqus_sso_script", "(", "context", ")", ":", "settings", "=", "context", "[", "u'settings'", "]", "public_key", "=", "getattr", "(", "settings", ",", "u'COMMENTS_DISQUS_API_PUBLIC_KEY'", ",", "u''", ")", "secret_key", "=", "getattr", "(", "settings", ",", "u'COMMENTS_DISQUS_API_SECRET_KEY'", ",", "u''", ")", "user", "=", "context", "[", "u'request'", "]", ".", "user", "if", "(", "public_key", "and", "secret_key", "and", "user", ".", "is_authenticated", "(", ")", ")", ":", "context", "[", "u'public_key'", "]", "=", "public_key", "context", "[", "u'sso_data'", "]", "=", "_get_disqus_sso", "(", "user", ",", "public_key", ",", "secret_key", ")", "return", "context" ]
provides a generic context variable which adds single-sign-on support to disqus if comments_disqus_api_public_key and comments_disqus_api_secret_key are specified .
train
false
53,081
def get_site_path(*joins): return os.path.join(local.site_path, *joins)
[ "def", "get_site_path", "(", "*", "joins", ")", ":", "return", "os", ".", "path", ".", "join", "(", "local", ".", "site_path", ",", "*", "joins", ")" ]
return path of current site .
train
false
53,082
def CastTo(ob, target): if hasattr(target, 'index'): if ('CLSID' not in ob.__class__.__dict__): ob = gencache.EnsureDispatch(ob) if ('CLSID' not in ob.__class__.__dict__): raise ValueError('Must be a makepy-able object for this to work') clsid = ob.CLSID mod = gencache.GetModuleForCLSID(clsid) mod = gencache.GetModuleForTypelib(mod.CLSID, mod.LCID, mod.MajorVersion, mod.MinorVersion) target_clsid = mod.NamesToIIDMap.get(target) if (target_clsid is None): raise ValueError(("The interface name '%s' does not appear in the same library as object '%r'" % (target, ob))) mod = gencache.GetModuleForCLSID(target_clsid) target_class = getattr(mod, target) target_class = getattr(target_class, 'default_interface', target_class) return target_class(ob) raise ValueError
[ "def", "CastTo", "(", "ob", ",", "target", ")", ":", "if", "hasattr", "(", "target", ",", "'index'", ")", ":", "if", "(", "'CLSID'", "not", "in", "ob", ".", "__class__", ".", "__dict__", ")", ":", "ob", "=", "gencache", ".", "EnsureDispatch", "(", "ob", ")", "if", "(", "'CLSID'", "not", "in", "ob", ".", "__class__", ".", "__dict__", ")", ":", "raise", "ValueError", "(", "'Must be a makepy-able object for this to work'", ")", "clsid", "=", "ob", ".", "CLSID", "mod", "=", "gencache", ".", "GetModuleForCLSID", "(", "clsid", ")", "mod", "=", "gencache", ".", "GetModuleForTypelib", "(", "mod", ".", "CLSID", ",", "mod", ".", "LCID", ",", "mod", ".", "MajorVersion", ",", "mod", ".", "MinorVersion", ")", "target_clsid", "=", "mod", ".", "NamesToIIDMap", ".", "get", "(", "target", ")", "if", "(", "target_clsid", "is", "None", ")", ":", "raise", "ValueError", "(", "(", "\"The interface name '%s' does not appear in the same library as object '%r'\"", "%", "(", "target", ",", "ob", ")", ")", ")", "mod", "=", "gencache", ".", "GetModuleForCLSID", "(", "target_clsid", ")", "target_class", "=", "getattr", "(", "mod", ",", "target", ")", "target_class", "=", "getattr", "(", "target_class", ",", "'default_interface'", ",", "target_class", ")", "return", "target_class", "(", "ob", ")", "raise", "ValueError" ]
cast a com object to another interface .
train
false
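A Windows-only sketch of the classic CastTo pattern from win32com.client; the COM server and interface name are placeholders that must exist in a makepy-generated type library.

    import win32com.client

    ie = win32com.client.Dispatch('InternetExplorer.Application')  # placeholder server
    web = win32com.client.CastTo(ie, 'IWebBrowser2')               # interface assumed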