id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
41,064
def fmin(fn, space, algo, max_evals, trials=None, rstate=None, allow_trials_fmin=True, pass_expr_memo_ctrl=None, catch_eval_exceptions=False, verbose=0, return_argmin=True):
    """Minimize ``fn`` over the hyperparameter ``space`` using ``algo``.

    Delegates to ``trials.fmin`` when the trials object provides its own
    implementation (e.g. MongoTrials); otherwise drives an FMinIter until
    ``max_evals`` evaluations have been exhausted.

    Returns ``trials.argmin`` when ``return_argmin`` is true, else None.
    """
    if rstate is None:
        # Allow reproducible runs to be forced through the environment.
        env_rseed = os.environ.get('HYPEROPT_FMIN_SEED', '')
        if env_rseed:
            rstate = np.random.RandomState(int(env_rseed))
        else:
            rstate = np.random.RandomState()
    if allow_trials_fmin and hasattr(trials, 'fmin'):
        # The trials object knows how to run the minimization itself.
        return trials.fmin(fn, space, algo=algo, max_evals=max_evals,
                           rstate=rstate,
                           pass_expr_memo_ctrl=pass_expr_memo_ctrl,
                           verbose=verbose,
                           catch_eval_exceptions=catch_eval_exceptions,
                           return_argmin=return_argmin)
    if trials is None:
        trials = base.Trials()
    domain = base.Domain(fn, space, pass_expr_memo_ctrl=pass_expr_memo_ctrl)
    rval = FMinIter(algo, domain, trials, max_evals=max_evals,
                    rstate=rstate, verbose=verbose)
    rval.catch_eval_exceptions = catch_eval_exceptions
    rval.exhaust()
    if return_argmin:
        return trials.argmin
[ "def", "fmin", "(", "fn", ",", "space", ",", "algo", ",", "max_evals", ",", "trials", "=", "None", ",", "rstate", "=", "None", ",", "allow_trials_fmin", "=", "True", ",", "pass_expr_memo_ctrl", "=", "None", ",", "catch_eval_exceptions", "=", "False", ",", "verbose", "=", "0", ",", "return_argmin", "=", "True", ")", ":", "if", "(", "rstate", "is", "None", ")", ":", "env_rseed", "=", "os", ".", "environ", ".", "get", "(", "'HYPEROPT_FMIN_SEED'", ",", "''", ")", "if", "env_rseed", ":", "rstate", "=", "np", ".", "random", ".", "RandomState", "(", "int", "(", "env_rseed", ")", ")", "else", ":", "rstate", "=", "np", ".", "random", ".", "RandomState", "(", ")", "if", "(", "allow_trials_fmin", "and", "hasattr", "(", "trials", ",", "'fmin'", ")", ")", ":", "return", "trials", ".", "fmin", "(", "fn", ",", "space", ",", "algo", "=", "algo", ",", "max_evals", "=", "max_evals", ",", "rstate", "=", "rstate", ",", "pass_expr_memo_ctrl", "=", "pass_expr_memo_ctrl", ",", "verbose", "=", "verbose", ",", "catch_eval_exceptions", "=", "catch_eval_exceptions", ",", "return_argmin", "=", "return_argmin", ")", "if", "(", "trials", "is", "None", ")", ":", "trials", "=", "base", ".", "Trials", "(", ")", "domain", "=", "base", ".", "Domain", "(", "fn", ",", "space", ",", "pass_expr_memo_ctrl", "=", "pass_expr_memo_ctrl", ")", "rval", "=", "FMinIter", "(", "algo", ",", "domain", ",", "trials", ",", "max_evals", "=", "max_evals", ",", "rstate", "=", "rstate", ",", "verbose", "=", "verbose", ")", "rval", ".", "catch_eval_exceptions", "=", "catch_eval_exceptions", "rval", ".", "exhaust", "(", ")", "if", "return_argmin", ":", "return", "trials", ".", "argmin" ]
minimize a function over a hyperparameter space using the given search algorithm .
train
false
41,065
def create_lock(name):
    """Create a lock file for ``name`` in the locks folder.

    Returns the result of ``touch_file`` when the lock did not already
    exist, or False when a lock is already held.
    """
    lock_path = get_lock_path(name)
    # Guard clause: an existing lock means we must not take it again.
    if check_lock(lock_path):
        return False
    return touch_file(lock_path)
[ "def", "create_lock", "(", "name", ")", ":", "lock_path", "=", "get_lock_path", "(", "name", ")", "if", "(", "not", "check_lock", "(", "lock_path", ")", ")", ":", "return", "touch_file", "(", "lock_path", ")", "else", ":", "return", "False" ]
creates a file in the /locks folder by the given name .
train
false
41,067
def revert_exploration(committer_id, exploration_id, current_version, revert_to_version):
    """Revert an exploration to ``revert_to_version``.

    Raises Exception when ``current_version`` does not match the stored
    model version (stale or corrupted client state). Validates the reverted
    exploration (strictly for non-private explorations) before committing,
    then invalidates the memcache entry and refreshes the summary.
    """
    exploration_model = exp_models.ExplorationModel.get(exploration_id, strict=False)
    if current_version > exploration_model.version:
        raise Exception(
            'Unexpected error: trying to update version %s of exploration '
            'from version %s. Please reload the page and try again.'
            % (exploration_model.version, current_version))
    elif current_version < exploration_model.version:
        raise Exception(
            'Trying to update version %s of exploration from version %s, '
            'which is too old. Please reload the page and try again.'
            % (exploration_model.version, current_version))
    exploration = get_exploration_by_id(exploration_id, version=revert_to_version)
    exploration_rights = rights_manager.get_exploration_rights(exploration.id)
    # Non-private explorations must pass the strict validation rules.
    if exploration_rights.status != rights_manager.ACTIVITY_STATUS_PRIVATE:
        exploration.validate(strict=True)
    else:
        exploration.validate()
    exp_models.ExplorationModel.revert(
        exploration_model, committer_id,
        'Reverted exploration to version %s' % revert_to_version,
        revert_to_version)
    memcache_services.delete(_get_exploration_memcache_key(exploration_id))
    update_exploration_summary(exploration_id, None)
[ "def", "revert_exploration", "(", "committer_id", ",", "exploration_id", ",", "current_version", ",", "revert_to_version", ")", ":", "exploration_model", "=", "exp_models", ".", "ExplorationModel", ".", "get", "(", "exploration_id", ",", "strict", "=", "False", ")", "if", "(", "current_version", ">", "exploration_model", ".", "version", ")", ":", "raise", "Exception", "(", "(", "'Unexpected error: trying to update version %s of exploration from version %s. Please reload the page and try again.'", "%", "(", "exploration_model", ".", "version", ",", "current_version", ")", ")", ")", "elif", "(", "current_version", "<", "exploration_model", ".", "version", ")", ":", "raise", "Exception", "(", "(", "'Trying to update version %s of exploration from version %s, which is too old. Please reload the page and try again.'", "%", "(", "exploration_model", ".", "version", ",", "current_version", ")", ")", ")", "exploration", "=", "get_exploration_by_id", "(", "exploration_id", ",", "version", "=", "revert_to_version", ")", "exploration_rights", "=", "rights_manager", ".", "get_exploration_rights", "(", "exploration", ".", "id", ")", "if", "(", "exploration_rights", ".", "status", "!=", "rights_manager", ".", "ACTIVITY_STATUS_PRIVATE", ")", ":", "exploration", ".", "validate", "(", "strict", "=", "True", ")", "else", ":", "exploration", ".", "validate", "(", ")", "exp_models", ".", "ExplorationModel", ".", "revert", "(", "exploration_model", ",", "committer_id", ",", "(", "'Reverted exploration to version %s'", "%", "revert_to_version", ")", ",", "revert_to_version", ")", "memcache_services", ".", "delete", "(", "_get_exploration_memcache_key", "(", "exploration_id", ")", ")", "update_exploration_summary", "(", "exploration_id", ",", "None", ")" ]
reverts an exploration to the given version number .
train
false
41,068
def inference_network(x):
    """Inference network parameterizing the variational posterior.

    Maps a batch of flattened 28x28 images ``x`` to the mean and standard
    deviation of a d-dimensional Gaussian. Relies on module-level ``M``
    (batch size) and ``d`` (latent dimension).
    """
    with slim.arg_scope([slim.conv2d, slim.fully_connected],
                        activation_fn=tf.nn.elu,
                        normalizer_fn=slim.batch_norm,
                        normalizer_params={'scale': True}):
        net = tf.reshape(x, [M, 28, 28, 1])
        net = slim.conv2d(net, 32, 5, stride=2)
        net = slim.conv2d(net, 64, 5, stride=2)
        net = slim.conv2d(net, 128, 5, padding='VALID')
        net = slim.dropout(net, 0.9)
        net = slim.flatten(net)
        # One dense layer emits both halves of the Gaussian parameters.
        params = slim.fully_connected(net, d * 2, activation_fn=None)
        mu = params[:, :d]
        # softplus keeps the standard deviation strictly positive.
        sigma = tf.nn.softplus(params[:, d:])
        return (mu, sigma)
[ "def", "inference_network", "(", "x", ")", ":", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", ",", "slim", ".", "fully_connected", "]", ",", "activation_fn", "=", "tf", ".", "nn", ".", "elu", ",", "normalizer_fn", "=", "slim", ".", "batch_norm", ",", "normalizer_params", "=", "{", "'scale'", ":", "True", "}", ")", ":", "net", "=", "tf", ".", "reshape", "(", "x", ",", "[", "M", ",", "28", ",", "28", ",", "1", "]", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "32", ",", "5", ",", "stride", "=", "2", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "64", ",", "5", ",", "stride", "=", "2", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "128", ",", "5", ",", "padding", "=", "'VALID'", ")", "net", "=", "slim", ".", "dropout", "(", "net", ",", "0.9", ")", "net", "=", "slim", ".", "flatten", "(", "net", ")", "params", "=", "slim", ".", "fully_connected", "(", "net", ",", "(", "d", "*", "2", ")", ",", "activation_fn", "=", "None", ")", "mu", "=", "params", "[", ":", ",", ":", "d", "]", "sigma", "=", "tf", ".", "nn", ".", "softplus", "(", "params", "[", ":", ",", "d", ":", "]", ")", "return", "(", "mu", ",", "sigma", ")" ]
inference network to parameterize variational model .
train
false
41,069
def setTexIfNoShaders(obj):
    """Flag ``obj`` for a texture update when it is not using shaders.

    Safe no-op when ``obj`` lacks either attribute.
    """
    if hasattr(obj, 'useShaders') and not obj.useShaders:
        if hasattr(obj, '_needTextureUpdate'):
            obj._needTextureUpdate = True
[ "def", "setTexIfNoShaders", "(", "obj", ")", ":", "if", "(", "hasattr", "(", "obj", ",", "'useShaders'", ")", "and", "(", "not", "obj", ".", "useShaders", ")", ")", ":", "if", "hasattr", "(", "obj", ",", "'_needTextureUpdate'", ")", ":", "obj", ".", "_needTextureUpdate", "=", "True" ]
useful decorator for classes that need to update texture after other properties .
train
false
41,072
def two_sum(a, b):
    """Error-free transformation (2Sum): add ``a`` and ``b`` exactly.

    Returns ``(x, err)`` where ``x`` is the rounded float sum and
    ``x + err`` equals ``a + b`` exactly in real arithmetic.
    """
    x = a + b
    # Recover the rounding error contributed by each addend.
    eb = x - a
    eb = b - eb
    ea = x - b
    ea = a - ea
    return (x, ea + eb)
[ "def", "two_sum", "(", "a", ",", "b", ")", ":", "x", "=", "(", "a", "+", "b", ")", "eb", "=", "(", "x", "-", "a", ")", "eb", "=", "(", "b", "-", "eb", ")", "ea", "=", "(", "x", "-", "b", ")", "ea", "=", "(", "a", "-", "ea", ")", "return", "(", "x", ",", "(", "ea", "+", "eb", ")", ")" ]
add a and b exactly .
train
false
41,073
def insert_enterprise_fields(request, form_desc):
    """Apply enterprise modifications to the logistration form.

    No-op unless the enterprise feature is enabled.
    """
    if not enterprise_enabled():
        return
    add_data_sharing_consent_field(request, form_desc)
[ "def", "insert_enterprise_fields", "(", "request", ",", "form_desc", ")", ":", "if", "(", "not", "enterprise_enabled", "(", ")", ")", ":", "return", "add_data_sharing_consent_field", "(", "request", ",", "form_desc", ")" ]
enterprise methods which modify the logistration form are called from this method .
train
false
41,074
def encryptData(key, data, mode=AESModeOfOperation.ModeOfOperation[u'CBC']):
    """Encrypt ``data`` with ``key``; returns the random IV prepended to the ciphertext."""
    key = bytearray(key)
    if mode == AESModeOfOperation.ModeOfOperation[u'CBC']:
        # CBC operates on whole blocks, so pad the plaintext first.
        data = append_PKCS7_padding(data)
    keysize = len(key)
    assert keysize in AES.KeySize.values(), u'invalid key size: {0}'.format(keysize)
    # Fresh random IV for every message.
    iv = bytearray(os.urandom(16))
    moo = AESModeOfOperation()
    (mode, length, ciph) = moo.encrypt(data, mode, key, keysize, iv)
    return bytes(iv) + bytes(ciph)
[ "def", "encryptData", "(", "key", ",", "data", ",", "mode", "=", "AESModeOfOperation", ".", "ModeOfOperation", "[", "u'CBC'", "]", ")", ":", "key", "=", "bytearray", "(", "key", ")", "if", "(", "mode", "==", "AESModeOfOperation", ".", "ModeOfOperation", "[", "u'CBC'", "]", ")", ":", "data", "=", "append_PKCS7_padding", "(", "data", ")", "keysize", "=", "len", "(", "key", ")", "assert", "(", "keysize", "in", "AES", ".", "KeySize", ".", "values", "(", ")", ")", ",", "u'invalid key size: {0}'", ".", "format", "(", "keysize", ")", "iv", "=", "bytearray", "(", "[", "i", "for", "i", "in", "os", ".", "urandom", "(", "16", ")", "]", ")", "moo", "=", "AESModeOfOperation", "(", ")", "(", "mode", ",", "length", ",", "ciph", ")", "=", "moo", ".", "encrypt", "(", "data", ",", "mode", ",", "key", ",", "keysize", ",", "iv", ")", "return", "(", "bytes", "(", "iv", ")", "+", "bytes", "(", "ciph", ")", ")" ]
module function to encrypt the given data with the given key .
train
false
41,075
def align_file_position(f, size):
    """Advance ``f`` to offset ``size - 1`` within its current ``size`` block.

    NOTE(review): despite the dataset description ("align to the next
    block"), this lands on the *last byte* of the current block, not on the
    next multiple of ``size`` — presumably the caller consumes one more
    byte afterwards; confirm against callers before changing.
    """
    align = (size - 1) - (f.tell() % size)
    f.seek(align, 1)
[ "def", "align_file_position", "(", "f", ",", "size", ")", ":", "align", "=", "(", "(", "size", "-", "1", ")", "-", "(", "f", ".", "tell", "(", ")", "%", "size", ")", ")", "f", ".", "seek", "(", "align", ",", "1", ")" ]
align the position in the file to the next block of specified size .
train
true
41,077
def MultiArgMax(x):
    """Yield every index at which array ``x`` attains its maximum.

    Note: despite the dataset description mentioning a tuple, this returns a
    generator of indices; wrap in ``tuple()`` or ``list()`` to materialize.
    """
    m = x.max()
    return (i for (i, v) in enumerate(x) if v == m)
[ "def", "MultiArgMax", "(", "x", ")", ":", "m", "=", "x", ".", "max", "(", ")", "return", "(", "i", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "x", ")", "if", "(", "v", "==", "m", ")", ")" ]
get tuple of indices where the max value of array x occurs .
train
true
41,078
@public
def half_gcdex(f, g, *gens, **args):
    """Half extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.
    Falls back to a ground-domain computation when the inputs cannot be
    turned into polynomials.
    """
    options.allowed_flags(args, ['auto', 'polys'])
    try:
        ((F, G), opt) = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Inputs are not polynomials; try the underlying domain directly.
        (domain, (a, b)) = construct_domain(exc.exprs)
        try:
            (s, h) = domain.half_gcdex(a, b)
        except NotImplementedError:
            raise ComputationFailed('half_gcdex', 2, exc)
        else:
            return (domain.to_sympy(s), domain.to_sympy(h))
    (s, h) = F.half_gcdex(G, auto=opt.auto)
    if not opt.polys:
        return (s.as_expr(), h.as_expr())
    else:
        return (s, h)
[ "@", "public", "def", "half_gcdex", "(", "f", ",", "g", ",", "*", "gens", ",", "**", "args", ")", ":", "options", ".", "allowed_flags", "(", "args", ",", "[", "'auto'", ",", "'polys'", "]", ")", "try", ":", "(", "(", "F", ",", "G", ")", ",", "opt", ")", "=", "parallel_poly_from_expr", "(", "(", "f", ",", "g", ")", ",", "*", "gens", ",", "**", "args", ")", "except", "PolificationFailed", "as", "exc", ":", "(", "domain", ",", "(", "a", ",", "b", ")", ")", "=", "construct_domain", "(", "exc", ".", "exprs", ")", "try", ":", "(", "s", ",", "h", ")", "=", "domain", ".", "half_gcdex", "(", "a", ",", "b", ")", "except", "NotImplementedError", ":", "raise", "ComputationFailed", "(", "'half_gcdex'", ",", "2", ",", "exc", ")", "else", ":", "return", "(", "domain", ".", "to_sympy", "(", "s", ")", ",", "domain", ".", "to_sympy", "(", "h", ")", ")", "(", "s", ",", "h", ")", "=", "F", ".", "half_gcdex", "(", "G", ",", "auto", "=", "opt", ".", "auto", ")", "if", "(", "not", "opt", ".", "polys", ")", ":", "return", "(", "s", ".", "as_expr", "(", ")", ",", "h", ".", "as_expr", "(", ")", ")", "else", ":", "return", "(", "s", ",", "h", ")" ]
half extended euclidean algorithm of f and g .
train
false
41,079
@receiver(post_save, sender=Job)
def purge_fastly_cache(sender, instance, **kwargs):
    """Purge Fastly-cached job pages when an approved Job is saved."""
    # Skip fixture loading ("raw" saves).
    if kwargs.get('raw', False):
        return
    if instance.status == Job.STATUS_APPROVED:
        purge_url(reverse('jobs:job_detail', kwargs={'pk': instance.pk}))
        purge_url(reverse('jobs:job_list'))
        purge_url(reverse('jobs:job_rss'))
[ "@", "receiver", "(", "post_save", ",", "sender", "=", "Job", ")", "def", "purge_fastly_cache", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "if", "kwargs", ".", "get", "(", "'raw'", ",", "False", ")", ":", "return", "if", "(", "instance", ".", "status", "==", "Job", ".", "STATUS_APPROVED", ")", ":", "purge_url", "(", "reverse", "(", "'jobs:job_detail'", ",", "kwargs", "=", "{", "'pk'", ":", "instance", ".", "pk", "}", ")", ")", "purge_url", "(", "reverse", "(", "'jobs:job_list'", ")", ")", "purge_url", "(", "reverse", "(", "'jobs:job_rss'", ")", ")" ]
purge fastly .
train
false
41,080
def human_resource_project():
    """REST controller that only serves OPTIONS requests in s3json format."""
    # Reject everything except s3json OPTIONS lookups.
    s3.prep = lambda r: r.method == 'options' and r.representation == 's3json'
    return s3_rest_controller()
[ "def", "human_resource_project", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "(", "r", ".", "method", "==", "'options'", ")", "and", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", ")", "return", "s3_rest_controller", "(", ")" ]
rest controller for options .
train
false
41,081
def is_valid_old_asn(asn):
    """Return True when ``asn`` is a two-octet (16-bit) AS number."""
    if not isinstance(asn, numbers.Integral):
        return False
    return 0 <= asn <= 65535
[ "def", "is_valid_old_asn", "(", "asn", ")", ":", "return", "(", "isinstance", "(", "asn", ",", "numbers", ".", "Integral", ")", "and", "(", "0", "<=", "asn", "<=", "65535", ")", ")" ]
returns true if the given as number is two octet .
train
false
41,083
def translation_from_matrix(matrix):
    """Return the translation vector of a 4x4 transformation matrix.

    The translation lives in the first three rows of the last column; a
    copy is returned so the caller cannot mutate the input.
    """
    m = numpy.array(matrix, copy=False)
    return m[:3, 3].copy()
[ "def", "translation_from_matrix", "(", "matrix", ")", ":", "return", "numpy", ".", "array", "(", "matrix", ",", "copy", "=", "False", ")", "[", ":", "3", ",", "3", "]", ".", "copy", "(", ")" ]
return translation vector from translation matrix .
train
false
41,085
def s3_request(*args, **kwargs):
    """Instantiate an S3Request, translating construction errors into HTTP errors.

    AttributeError/SyntaxError map to 400, KeyError to 404. When
    ``catch_errors`` is explicitly False the original exception is
    re-raised; otherwise HTML clients are redirected with a session error
    and other clients receive a JSON error body.
    """
    error = None
    try:
        r = S3Request(*args, **kwargs)
    except (AttributeError, SyntaxError):
        error = 400
    except KeyError:
        error = 404
    if error:
        if kwargs.get('catch_errors') is False:
            raise
        message = sys.exc_info()[1]
        if hasattr(message, 'message'):
            message = message.message
        if current.auth.permission.format == 'html':
            current.session.error = message
            redirect(URL(f='index'))
        else:
            headers = {'Content-Type': 'application/json'}
            current.log.error(message)
            raise HTTP(error,
                       body=current.xml.json_message(success=False,
                                                     statuscode=error,
                                                     message=message),
                       web2py_error=message,
                       **headers)
    return r
[ "def", "s3_request", "(", "*", "args", ",", "**", "kwargs", ")", ":", "error", "=", "None", "try", ":", "r", "=", "S3Request", "(", "*", "args", ",", "**", "kwargs", ")", "except", "(", "AttributeError", ",", "SyntaxError", ")", ":", "error", "=", "400", "except", "KeyError", ":", "error", "=", "404", "if", "error", ":", "if", "(", "kwargs", ".", "get", "(", "'catch_errors'", ")", "is", "False", ")", ":", "raise", "message", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "if", "hasattr", "(", "message", ",", "'message'", ")", ":", "message", "=", "message", ".", "message", "if", "(", "current", ".", "auth", ".", "permission", ".", "format", "==", "'html'", ")", ":", "current", ".", "session", ".", "error", "=", "message", "redirect", "(", "URL", "(", "f", "=", "'index'", ")", ")", "else", ":", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", "current", ".", "log", ".", "error", "(", "message", ")", "raise", "HTTP", "(", "error", ",", "body", "=", "current", ".", "xml", ".", "json_message", "(", "success", "=", "False", ",", "statuscode", "=", "error", ",", "message", "=", "message", ")", ",", "web2py_error", "=", "message", ",", "**", "headers", ")", "return", "r" ]
helper function to generate s3request instances .
train
false
41,086
def check_for_external_modification(fileName, old_mtime):
    """Return True if ``fileName`` was modified after ``old_mtime``."""
    return get_last_modification(fileName) > old_mtime
[ "def", "check_for_external_modification", "(", "fileName", ",", "old_mtime", ")", ":", "new_modification_time", "=", "get_last_modification", "(", "fileName", ")", "if", "(", "new_modification_time", ">", "old_mtime", ")", ":", "return", "True", "return", "False" ]
check if the file was modified outside ninja .
train
false
41,087
def can_access(func):
    """Decorator allowing a view only for staff or users with a role on the event.

    Raises ServerError when no ``event_id`` kwarg is supplied, 404s when the
    event does not exist, and PermissionDeniedError for unauthorized users.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        user = getattr(g, 'user', None)
        event_id = kwargs.get('event_id')
        if not event_id:
            raise ServerError()
        # 404 early when the event does not exist.
        get_object_or_404(EventModel, event_id)
        if user and (user.has_role(event_id) or user.is_staff):
            return func(*args, **kwargs)
        raise PermissionDeniedError()
    return wrapper
[ "def", "can_access", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "user", "=", "getattr", "(", "g", ",", "'user'", ",", "None", ")", "event_id", "=", "kwargs", ".", "get", "(", "'event_id'", ")", "if", "(", "not", "event_id", ")", ":", "raise", "ServerError", "(", ")", "get_object_or_404", "(", "EventModel", ",", "event_id", ")", "if", "(", "user", "and", "(", "user", ".", "has_role", "(", "event_id", ")", "or", "user", ".", "is_staff", ")", ")", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "raise", "PermissionDeniedError", "(", ")", "return", "wrapper" ]
check if user can read/update/delete an event .
train
false
41,088
def _extract_war_version(war): basename = os.path.basename(war) war_package = os.path.splitext(basename)[0] version = re.findall('-([\\d.-]+)$', war_package) return (version[0] if (version and (len(version) == 1)) else None)
[ "def", "_extract_war_version", "(", "war", ")", ":", "basename", "=", "os", ".", "path", ".", "basename", "(", "war", ")", "war_package", "=", "os", ".", "path", ".", "splitext", "(", "basename", ")", "[", "0", "]", "version", "=", "re", ".", "findall", "(", "'-([\\\\d.-]+)$'", ",", "war_package", ")", "return", "(", "version", "[", "0", "]", "if", "(", "version", "and", "(", "len", "(", "version", ")", "==", "1", ")", ")", "else", "None", ")" ]
extract the version from the war file name .
train
true
41,089
def _grid_out_property(field_name, docstring): def getter(self): self._ensure_file() if (field_name == 'length'): return self._file.get(field_name, 0) return self._file.get(field_name, None) docstring += '\n\nThis attribute is read-only.' return property(getter, doc=docstring)
[ "def", "_grid_out_property", "(", "field_name", ",", "docstring", ")", ":", "def", "getter", "(", "self", ")", ":", "self", ".", "_ensure_file", "(", ")", "if", "(", "field_name", "==", "'length'", ")", ":", "return", "self", ".", "_file", ".", "get", "(", "field_name", ",", "0", ")", "return", "self", ".", "_file", ".", "get", "(", "field_name", ",", "None", ")", "docstring", "+=", "'\\n\\nThis attribute is read-only.'", "return", "property", "(", "getter", ",", "doc", "=", "docstring", ")" ]
create a gridout property .
train
true
41,090
def json_loads(obj, keys=None):
    """Decode JSON-encoded string values of mapping ``obj`` in place.

    :param obj: mapping whose values may be JSON strings; falsy values
        (including an empty mapping) return None
    :param keys: keys to decode; defaults to all keys of ``obj``
    :returns: ``obj`` with each decodable value replaced by its parsed
        form; values that are not valid JSON are left untouched
    """
    if not obj:
        return None
    if not keys:
        keys = obj.keys()
    for key in keys:
        try:
            obj[key] = json.loads(obj[key])
        # Narrowed from a bare ``except``: only missing keys, non-string
        # values and malformed JSON are expected here; anything else
        # (e.g. KeyboardInterrupt) should propagate.
        except (KeyError, TypeError, ValueError):
            pass
    return obj
[ "def", "json_loads", "(", "obj", ",", "keys", "=", "None", ")", ":", "if", "(", "not", "obj", ")", ":", "return", "None", "if", "(", "not", "keys", ")", ":", "keys", "=", "obj", ".", "keys", "(", ")", "for", "key", "in", "keys", ":", "try", ":", "obj", "[", "key", "]", "=", "json", ".", "loads", "(", "obj", "[", "key", "]", ")", "except", ":", "pass", "return", "obj" ]
given an object , decode its json-encoded string values in place for the given keys .
train
false
41,091
def process_queue(queue, quantity=1, backend='sqlite'):
    """Pop items off ``queue`` and fire an event on the salt event bus.

    Returns False (after firing a 'progress' error event) when popping
    fails with SaltInvocationError.
    """
    event = salt.utils.event.get_event('master',
                                       __opts__['sock_dir'],
                                       __opts__['transport'],
                                       opts=__opts__,
                                       listen=False)
    try:
        items = pop(queue=queue, quantity=quantity, backend=backend)
    except SaltInvocationError as exc:
        # Surface the failure on the progress channel instead of raising.
        error_txt = '{0}'.format(exc)
        __jid_event__.fire_event({'errors': error_txt}, 'progress')
        return False
    data = {'items': items, 'backend': backend, 'queue': queue}
    event.fire_event(data, tagify([queue, 'process'], prefix='queue'))
[ "def", "process_queue", "(", "queue", ",", "quantity", "=", "1", ",", "backend", "=", "'sqlite'", ")", ":", "event", "=", "salt", ".", "utils", ".", "event", ".", "get_event", "(", "'master'", ",", "__opts__", "[", "'sock_dir'", "]", ",", "__opts__", "[", "'transport'", "]", ",", "opts", "=", "__opts__", ",", "listen", "=", "False", ")", "try", ":", "items", "=", "pop", "(", "queue", "=", "queue", ",", "quantity", "=", "quantity", ",", "backend", "=", "backend", ")", "except", "SaltInvocationError", "as", "exc", ":", "error_txt", "=", "'{0}'", ".", "format", "(", "exc", ")", "__jid_event__", ".", "fire_event", "(", "{", "'errors'", ":", "error_txt", "}", ",", "'progress'", ")", "return", "False", "data", "=", "{", "'items'", ":", "items", ",", "'backend'", ":", "backend", ",", "'queue'", ":", "queue", "}", "event", ".", "fire_event", "(", "data", ",", "tagify", "(", "[", "queue", ",", "'process'", "]", ",", "prefix", "=", "'queue'", ")", ")" ]
pop items off a queue and create an event on the salt event bus to be processed by a reactor .
train
true
41,092
@logic.validate(logic.schema.job_list_schema)
def job_list(context, data_dict):
    """List enqueued background jobs, optionally restricted to given queues."""
    _check_access(u'job_list', context, data_dict)
    queues = data_dict.get(u'queues')
    if queues:
        queues = [jobs.get_queue(q) for q in queues]
    else:
        queues = jobs.get_all_queues()
    # Flatten every queue's jobs into one dictized list.
    return [jobs.dictize_job(job) for queue in queues for job in queue.jobs]
[ "@", "logic", ".", "validate", "(", "logic", ".", "schema", ".", "job_list_schema", ")", "def", "job_list", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "u'job_list'", ",", "context", ",", "data_dict", ")", "dictized_jobs", "=", "[", "]", "queues", "=", "data_dict", ".", "get", "(", "u'queues'", ")", "if", "queues", ":", "queues", "=", "[", "jobs", ".", "get_queue", "(", "q", ")", "for", "q", "in", "queues", "]", "else", ":", "queues", "=", "jobs", ".", "get_all_queues", "(", ")", "for", "queue", "in", "queues", ":", "for", "job", "in", "queue", ".", "jobs", ":", "dictized_jobs", ".", "append", "(", "jobs", ".", "dictize_job", "(", "job", ")", ")", "return", "dictized_jobs" ]
list enqueued background jobs .
train
false
41,093
def get_base_alias(freqstr):
    """Return the base frequency alias of ``freqstr`` (the stride is discarded)."""
    parsed = _base_and_stride(freqstr)
    return parsed[0]
[ "def", "get_base_alias", "(", "freqstr", ")", ":", "return", "_base_and_stride", "(", "freqstr", ")", "[", "0", "]" ]
returns the base frequency alias .
train
false
41,094
def submit_facility_form(context):
    """Fill in the minimum required field and submit the facility form."""
    facility_form = find_id_with_wait(context, 'facility_form')
    name_field = find_id_with_wait(context, 'id_name')
    name_field.send_keys('The Fortress of Solitude')
    facility_form.submit()
[ "def", "submit_facility_form", "(", "context", ")", ":", "facility_form", "=", "find_id_with_wait", "(", "context", ",", "'facility_form'", ")", "name_field", "=", "find_id_with_wait", "(", "context", ",", "'id_name'", ")", "name_field", ".", "send_keys", "(", "'The Fortress of Solitude'", ")", "facility_form", ".", "submit", "(", ")" ]
just do the minimum to submit the facility form .
train
false
41,096
def get_scm():
    """Return the cached pants Scm, detecting a git worktree on first use.

    Returns None when no git worktree can be detected or loaded.
    """
    global _SCM
    if not _SCM:
        # Imported lazily to avoid a hard dependency when no SCM is needed.
        from pants.scm.git import Git
        worktree = Git.detect_worktree()
        if worktree and os.path.isdir(worktree):
            git = Git(worktree=worktree)
            try:
                logger.debug(u'Detected git repository at {} on branch {}'.format(worktree, git.branch_name))
                set_scm(git)
            except git.LocalException as e:
                logger.info(u'Failed to load git repository at {}: {}'.format(worktree, e))
    return _SCM
[ "def", "get_scm", "(", ")", ":", "global", "_SCM", "if", "(", "not", "_SCM", ")", ":", "from", "pants", ".", "scm", ".", "git", "import", "Git", "worktree", "=", "Git", ".", "detect_worktree", "(", ")", "if", "(", "worktree", "and", "os", ".", "path", ".", "isdir", "(", "worktree", ")", ")", ":", "git", "=", "Git", "(", "worktree", "=", "worktree", ")", "try", ":", "logger", ".", "debug", "(", "u'Detected git repository at {} on branch {}'", ".", "format", "(", "worktree", ",", "git", ".", "branch_name", ")", ")", "set_scm", "(", "git", ")", "except", "git", ".", "LocalException", "as", "e", ":", "logger", ".", "info", "(", "u'Failed to load git repository at {}: {}'", ".", "format", "(", "worktree", ",", "e", ")", ")", "return", "_SCM" ]
returns the pants scm if any .
train
true
41,097
def test_numeric_seed_watershed():
    """Passing just the number of seeds (instead of markers) to watershed works."""
    image = np.zeros((5, 6))
    image[:, 3:] = 1
    compact = watershed(image, 2, compactness=0.01)
    # Two seeds split the image along the intensity step at column 3/4.
    expected = np.array([[1, 1, 1, 1, 2, 2],
                         [1, 1, 1, 1, 2, 2],
                         [1, 1, 1, 1, 2, 2],
                         [1, 1, 1, 1, 2, 2],
                         [1, 1, 1, 1, 2, 2]], dtype=np.int32)
    np.testing.assert_equal(compact, expected)
[ "def", "test_numeric_seed_watershed", "(", ")", ":", "image", "=", "np", ".", "zeros", "(", "(", "5", ",", "6", ")", ")", "image", "[", ":", ",", "3", ":", "]", "=", "1", "compact", "=", "watershed", "(", "image", ",", "2", ",", "compactness", "=", "0.01", ")", "expected", "=", "np", ".", "array", "(", "[", "[", "1", ",", "1", ",", "1", ",", "1", ",", "2", ",", "2", "]", ",", "[", "1", ",", "1", ",", "1", ",", "1", ",", "2", ",", "2", "]", ",", "[", "1", ",", "1", ",", "1", ",", "1", ",", "2", ",", "2", "]", ",", "[", "1", ",", "1", ",", "1", ",", "1", ",", "2", ",", "2", "]", ",", "[", "1", ",", "1", ",", "1", ",", "1", ",", "2", ",", "2", "]", "]", ",", "dtype", "=", "np", ".", "int32", ")", "np", ".", "testing", ".", "assert_equal", "(", "compact", ",", "expected", ")" ]
test that passing just the number of seeds to watershed works .
train
false
41,099
def get_grade_book_page(request, course, course_key):
    """Return (student_info, page) for one gradebook page of enrolled students.

    ``student_info`` is a list of per-student dicts (username, id, email,
    grade_summary); ``page`` carries the pagination metadata.
    """
    current_offset = request.GET.get('offset', 0)
    enrolled_students = User.objects.filter(
        courseenrollment__course_id=course_key,
        courseenrollment__is_active=1,
    ).order_by('username').select_related('profile')
    total_students = enrolled_students.count()
    page = calculate_page_info(current_offset, total_students)
    offset = page['offset']
    total_pages = page['total_pages']
    if total_pages > 1:
        # Only slice the queryset when more than one page exists.
        enrolled_students = enrolled_students[offset:(offset + MAX_STUDENTS_PER_PAGE_GRADE_BOOK)]
    with modulestore().bulk_operations(course.location.course_key):
        student_info = [
            {
                'username': student.username,
                'id': student.id,
                'email': student.email,
                'grade_summary': CourseGradeFactory().create(student, course).summary,
            }
            for student in enrolled_students
        ]
    return (student_info, page)
[ "def", "get_grade_book_page", "(", "request", ",", "course", ",", "course_key", ")", ":", "current_offset", "=", "request", ".", "GET", ".", "get", "(", "'offset'", ",", "0", ")", "enrolled_students", "=", "User", ".", "objects", ".", "filter", "(", "courseenrollment__course_id", "=", "course_key", ",", "courseenrollment__is_active", "=", "1", ")", ".", "order_by", "(", "'username'", ")", ".", "select_related", "(", "'profile'", ")", "total_students", "=", "enrolled_students", ".", "count", "(", ")", "page", "=", "calculate_page_info", "(", "current_offset", ",", "total_students", ")", "offset", "=", "page", "[", "'offset'", "]", "total_pages", "=", "page", "[", "'total_pages'", "]", "if", "(", "total_pages", ">", "1", ")", ":", "enrolled_students", "=", "enrolled_students", "[", "offset", ":", "(", "offset", "+", "MAX_STUDENTS_PER_PAGE_GRADE_BOOK", ")", "]", "with", "modulestore", "(", ")", ".", "bulk_operations", "(", "course", ".", "location", ".", "course_key", ")", ":", "student_info", "=", "[", "{", "'username'", ":", "student", ".", "username", ",", "'id'", ":", "student", ".", "id", ",", "'email'", ":", "student", ".", "email", ",", "'grade_summary'", ":", "CourseGradeFactory", "(", ")", ".", "create", "(", "student", ",", "course", ")", ".", "summary", "}", "for", "student", "in", "enrolled_students", "]", "return", "(", "student_info", ",", "page", ")" ]
get student records per page along with page information i .
train
false
41,101
def QueryInfoKey(key):
    """Unicode-safe wrapper around the Windows RegQueryInfoKeyW API.

    Returns (num_sub_keys, num_values, last_modified) where last_modified
    is expressed in seconds since the Unix epoch.
    """
    regqueryinfokey = advapi32['RegQueryInfoKeyW']
    regqueryinfokey.restype = ctypes.c_long
    regqueryinfokey.argtypes = [ctypes.c_void_p, ctypes.c_wchar_p, LPDWORD,
                                LPDWORD, LPDWORD, LPDWORD, LPDWORD, LPDWORD,
                                LPDWORD, LPDWORD, LPDWORD,
                                ctypes.POINTER(FileTime)]
    null = LPDWORD()
    num_sub_keys = ctypes.wintypes.DWORD()
    num_values = ctypes.wintypes.DWORD()
    ft = FileTime()
    rc = regqueryinfokey(key.handle, ctypes.c_wchar_p(), null, null,
                         ctypes.byref(num_sub_keys), null, null,
                         ctypes.byref(num_values), null, null, null,
                         ctypes.byref(ft))
    if rc != ERROR_SUCCESS:
        # NOTE(review): raises a fixed error code (2) rather than ``rc`` —
        # confirm this is intentional before changing.
        raise ctypes.WinError(2)
    # FILETIME is 100ns intervals since 1601-01-01; convert to Unix seconds.
    last_modified = ft.dwLowDateTime | (ft.dwHighDateTime << 32)
    last_modified = (last_modified / 10000000) - WIN_UNIX_DIFF_MSECS
    return (num_sub_keys.value, num_values.value, last_modified)
[ "def", "QueryInfoKey", "(", "key", ")", ":", "regqueryinfokey", "=", "advapi32", "[", "'RegQueryInfoKeyW'", "]", "regqueryinfokey", ".", "restype", "=", "ctypes", ".", "c_long", "regqueryinfokey", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_wchar_p", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "LPDWORD", ",", "ctypes", ".", "POINTER", "(", "FileTime", ")", "]", "null", "=", "LPDWORD", "(", ")", "num_sub_keys", "=", "ctypes", ".", "wintypes", ".", "DWORD", "(", ")", "num_values", "=", "ctypes", ".", "wintypes", ".", "DWORD", "(", ")", "ft", "=", "FileTime", "(", ")", "rc", "=", "regqueryinfokey", "(", "key", ".", "handle", ",", "ctypes", ".", "c_wchar_p", "(", ")", ",", "null", ",", "null", ",", "ctypes", ".", "byref", "(", "num_sub_keys", ")", ",", "null", ",", "null", ",", "ctypes", ".", "byref", "(", "num_values", ")", ",", "null", ",", "null", ",", "null", ",", "ctypes", ".", "byref", "(", "ft", ")", ")", "if", "(", "rc", "!=", "ERROR_SUCCESS", ")", ":", "raise", "ctypes", ".", "WinError", "(", "2", ")", "last_modified", "=", "(", "ft", ".", "dwLowDateTime", "|", "(", "ft", ".", "dwHighDateTime", "<<", "32", ")", ")", "last_modified", "=", "(", "(", "last_modified", "/", "10000000", ")", "-", "WIN_UNIX_DIFF_MSECS", ")", "return", "(", "num_sub_keys", ".", "value", ",", "num_values", ".", "value", ",", "last_modified", ")" ]
this calls the windows regqueryinfokey function in a unicode safe way .
train
true
41,103
def get_index_dtype(arrays=(), maxval=None, check_contents=False): int32max = np.iinfo(np.int32).max dtype = np.intc if (maxval is not None): if (maxval > int32max): dtype = np.int64 if isinstance(arrays, np.ndarray): arrays = (arrays,) for arr in arrays: arr = np.asarray(arr) if (arr.dtype > np.int32): if check_contents: if (arr.size == 0): continue elif np.issubdtype(arr.dtype, np.integer): maxval = arr.max() minval = arr.min() if ((minval >= np.iinfo(np.int32).min) and (maxval <= np.iinfo(np.int32).max)): continue dtype = np.int64 break return dtype
[ "def", "get_index_dtype", "(", "arrays", "=", "(", ")", ",", "maxval", "=", "None", ",", "check_contents", "=", "False", ")", ":", "int32max", "=", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "max", "dtype", "=", "np", ".", "intc", "if", "(", "maxval", "is", "not", "None", ")", ":", "if", "(", "maxval", ">", "int32max", ")", ":", "dtype", "=", "np", ".", "int64", "if", "isinstance", "(", "arrays", ",", "np", ".", "ndarray", ")", ":", "arrays", "=", "(", "arrays", ",", ")", "for", "arr", "in", "arrays", ":", "arr", "=", "np", ".", "asarray", "(", "arr", ")", "if", "(", "arr", ".", "dtype", ">", "np", ".", "int32", ")", ":", "if", "check_contents", ":", "if", "(", "arr", ".", "size", "==", "0", ")", ":", "continue", "elif", "np", ".", "issubdtype", "(", "arr", ".", "dtype", ",", "np", ".", "integer", ")", ":", "maxval", "=", "arr", ".", "max", "(", ")", "minval", "=", "arr", ".", "min", "(", ")", "if", "(", "(", "minval", ">=", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "min", ")", "and", "(", "maxval", "<=", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "max", ")", ")", ":", "continue", "dtype", "=", "np", ".", "int64", "break", "return", "dtype" ]
based on input arrays a .
train
false
41,104
def mako_websafe(text=''): return conditional_websafe(text)
[ "def", "mako_websafe", "(", "text", "=", "''", ")", ":", "return", "conditional_websafe", "(", "text", ")" ]
wrapper for conditional_websafe so cached templates don't explode .
train
false
41,105
@contextfunction def projects_task_list(context, tasks, time_slots=None, nomass=False, in_progress=False, by_project=False, by_milestone=False, by_assigned=False, noheader=False): if (time_slots is None): time_slots = [] request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('projects/tags/task_list', {'tasks': tasks, 'nomass': nomass, 'by_project': by_project, 'by_milestone': by_milestone, 'by_assigned': by_assigned, 'in_progress': in_progress, 'noheader': noheader, 'time_slots': time_slots}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "projects_task_list", "(", "context", ",", "tasks", ",", "time_slots", "=", "None", ",", "nomass", "=", "False", ",", "in_progress", "=", "False", ",", "by_project", "=", "False", ",", "by_milestone", "=", "False", ",", "by_assigned", "=", "False", ",", "noheader", "=", "False", ")", ":", "if", "(", "time_slots", "is", "None", ")", ":", "time_slots", "=", "[", "]", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_format", "=", "context", "[", "'response_format'", "]", "return", "Markup", "(", "render_to_string", "(", "'projects/tags/task_list'", ",", "{", "'tasks'", ":", "tasks", ",", "'nomass'", ":", "nomass", ",", "'by_project'", ":", "by_project", ",", "'by_milestone'", ":", "by_milestone", ",", "'by_assigned'", ":", "by_assigned", ",", "'in_progress'", ":", "in_progress", ",", "'noheader'", ":", "noheader", ",", "'time_slots'", ":", "time_slots", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")", ")" ]
print a list of tasks .
train
false
41,106
def getinfo(func): assert (inspect.ismethod(func) or inspect.isfunction(func)) if (sys.version_info[0] >= 3): argspec = inspect.getfullargspec(func) else: argspec = inspect.getargspec(func) (regargs, varargs, varkwargs, defaults) = argspec[:4] argnames = list(regargs) if varargs: argnames.append(varargs) if varkwargs: argnames.append(varkwargs) signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults, formatvalue=(lambda value: ''))[1:(-1)] if hasattr(func, '__closure__'): _closure = func.__closure__ _globals = func.__globals__ else: _closure = func.func_closure _globals = func.func_globals return dict(name=func.__name__, argnames=argnames, signature=signature, defaults=func.__defaults__, doc=func.__doc__, module=func.__module__, dict=func.__dict__, globals=_globals, closure=_closure)
[ "def", "getinfo", "(", "func", ")", ":", "assert", "(", "inspect", ".", "ismethod", "(", "func", ")", "or", "inspect", ".", "isfunction", "(", "func", ")", ")", "if", "(", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ")", ":", "argspec", "=", "inspect", ".", "getfullargspec", "(", "func", ")", "else", ":", "argspec", "=", "inspect", ".", "getargspec", "(", "func", ")", "(", "regargs", ",", "varargs", ",", "varkwargs", ",", "defaults", ")", "=", "argspec", "[", ":", "4", "]", "argnames", "=", "list", "(", "regargs", ")", "if", "varargs", ":", "argnames", ".", "append", "(", "varargs", ")", "if", "varkwargs", ":", "argnames", ".", "append", "(", "varkwargs", ")", "signature", "=", "inspect", ".", "formatargspec", "(", "regargs", ",", "varargs", ",", "varkwargs", ",", "defaults", ",", "formatvalue", "=", "(", "lambda", "value", ":", "''", ")", ")", "[", "1", ":", "(", "-", "1", ")", "]", "if", "hasattr", "(", "func", ",", "'__closure__'", ")", ":", "_closure", "=", "func", ".", "__closure__", "_globals", "=", "func", ".", "__globals__", "else", ":", "_closure", "=", "func", ".", "func_closure", "_globals", "=", "func", ".", "func_globals", "return", "dict", "(", "name", "=", "func", ".", "__name__", ",", "argnames", "=", "argnames", ",", "signature", "=", "signature", ",", "defaults", "=", "func", ".", "__defaults__", ",", "doc", "=", "func", ".", "__doc__", ",", "module", "=", "func", ".", "__module__", ",", "dict", "=", "func", ".", "__dict__", ",", "globals", "=", "_globals", ",", "closure", "=", "_closure", ")" ]
returns an info dictionary containing: - name - argnames - defaults - signature - doc - module - dict .
train
false
41,107
def _ModifiedDecoder(wire_type, decode_value, modify_value): def InnerDecode(buffer, pos): (result, new_pos) = decode_value(buffer, pos) return (modify_value(result), new_pos) return _SimpleDecoder(wire_type, InnerDecode)
[ "def", "_ModifiedDecoder", "(", "wire_type", ",", "decode_value", ",", "modify_value", ")", ":", "def", "InnerDecode", "(", "buffer", ",", "pos", ")", ":", "(", "result", ",", "new_pos", ")", "=", "decode_value", "(", "buffer", ",", "pos", ")", "return", "(", "modify_value", "(", "result", ")", ",", "new_pos", ")", "return", "_SimpleDecoder", "(", "wire_type", ",", "InnerDecode", ")" ]
like simpledecoder but additionally invokes modify_value on every value before storing it .
train
true
41,108
@app.route('/public') def public_timeline(): return render_template('timeline.html', messages=query_db('\n select message.*, user.* from message, user\n where message.author_id = user.user_id\n order by message.pub_date desc limit ?', [PER_PAGE]))
[ "@", "app", ".", "route", "(", "'/public'", ")", "def", "public_timeline", "(", ")", ":", "return", "render_template", "(", "'timeline.html'", ",", "messages", "=", "query_db", "(", "'\\n select message.*, user.* from message, user\\n where message.author_id = user.user_id\\n order by message.pub_date desc limit ?'", ",", "[", "PER_PAGE", "]", ")", ")" ]
displays the latest messages of all users .
train
false
41,109
def fmtFloat(float_value, point=1): fmt = ('%' + ('0.%(b)df' % {'b': point})) return locale.format_string(fmt, float_value)
[ "def", "fmtFloat", "(", "float_value", ",", "point", "=", "1", ")", ":", "fmt", "=", "(", "'%'", "+", "(", "'0.%(b)df'", "%", "{", "'b'", ":", "point", "}", ")", ")", "return", "locale", ".", "format_string", "(", "fmt", ",", "float_value", ")" ]
return a string with decimal separator according to current locale .
train
false
41,111
def _replace_register(flow_params, register_number, register_value): try: reg_port = flow_params[register_value] del flow_params[register_value] flow_params['reg{:d}'.format(register_number)] = reg_port except KeyError: pass
[ "def", "_replace_register", "(", "flow_params", ",", "register_number", ",", "register_value", ")", ":", "try", ":", "reg_port", "=", "flow_params", "[", "register_value", "]", "del", "flow_params", "[", "register_value", "]", "flow_params", "[", "'reg{:d}'", ".", "format", "(", "register_number", ")", "]", "=", "reg_port", "except", "KeyError", ":", "pass" ]
replace value from flows to given register number register_value key in dictionary will be replaced by register number given by register_number .
train
false
41,113
def root_etree_to_dict(element, iterable=(list, list.append)): return {element.tag: iterable[0]([etree_to_dict(element, iterable)])}
[ "def", "root_etree_to_dict", "(", "element", ",", "iterable", "=", "(", "list", ",", "list", ".", "append", ")", ")", ":", "return", "{", "element", ".", "tag", ":", "iterable", "[", "0", "]", "(", "[", "etree_to_dict", "(", "element", ",", "iterable", ")", "]", ")", "}" ]
takes an xml root element and returns the corresponding dict .
train
false
41,114
def test_deepreload(): with TemporaryDirectory() as tmpdir: with prepended_to_syspath(tmpdir): with open(os.path.join(tmpdir, 'A.py'), 'w') as f: f.write('class Object(object):\n pass\n') with open(os.path.join(tmpdir, 'B.py'), 'w') as f: f.write('import A\n') import A import B obj = A.Object() dreload(B, exclude=['A']) nt.assert_true(isinstance(obj, A.Object)) obj = A.Object() dreload(B) nt.assert_false(isinstance(obj, A.Object))
[ "def", "test_deepreload", "(", ")", ":", "with", "TemporaryDirectory", "(", ")", "as", "tmpdir", ":", "with", "prepended_to_syspath", "(", "tmpdir", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'A.py'", ")", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'class Object(object):\\n pass\\n'", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'B.py'", ")", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'import A\\n'", ")", "import", "A", "import", "B", "obj", "=", "A", ".", "Object", "(", ")", "dreload", "(", "B", ",", "exclude", "=", "[", "'A'", "]", ")", "nt", ".", "assert_true", "(", "isinstance", "(", "obj", ",", "A", ".", "Object", ")", ")", "obj", "=", "A", ".", "Object", "(", ")", "dreload", "(", "B", ")", "nt", ".", "assert_false", "(", "isinstance", "(", "obj", ",", "A", ".", "Object", ")", ")" ]
test that dreload does deep reloads and skips excluded modules .
train
false
41,115
def TextBeforeCursor(): return ToUnicode(vim.current.line[:CurrentColumn()])
[ "def", "TextBeforeCursor", "(", ")", ":", "return", "ToUnicode", "(", "vim", ".", "current", ".", "line", "[", ":", "CurrentColumn", "(", ")", "]", ")" ]
returns the text before currentcolumn .
train
false
41,117
def CDLRISEFALL3METHODS(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLRISEFALL3METHODS)
[ "def", "CDLRISEFALL3METHODS", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLRISEFALL3METHODS", ")" ]
rising/falling three methods .
train
false
41,118
def _retcode_to_bool(retcode): if (retcode == 0): return True else: return False
[ "def", "_retcode_to_bool", "(", "retcode", ")", ":", "if", "(", "retcode", "==", "0", ")", ":", "return", "True", "else", ":", "return", "False" ]
evaluates open vswitch commands retcode value .
train
false
41,120
def _possibly_unimplemented(cls, require=True): if require: return cls else: def wrap(fc): def wrapper(*a, **kw): try: return fc(*a, **kw) except (NotImplementedError, TypeError, ValueError, IndexError, AttributeError): raise nose.SkipTest('feature not implemented') wrapper.__name__ = fc.__name__ return wrapper new_dict = dict(cls.__dict__) for (name, func) in cls.__dict__.items(): if name.startswith('test_'): new_dict[name] = wrap(func) return type((cls.__name__ + 'NotImplemented'), cls.__bases__, new_dict)
[ "def", "_possibly_unimplemented", "(", "cls", ",", "require", "=", "True", ")", ":", "if", "require", ":", "return", "cls", "else", ":", "def", "wrap", "(", "fc", ")", ":", "def", "wrapper", "(", "*", "a", ",", "**", "kw", ")", ":", "try", ":", "return", "fc", "(", "*", "a", ",", "**", "kw", ")", "except", "(", "NotImplementedError", ",", "TypeError", ",", "ValueError", ",", "IndexError", ",", "AttributeError", ")", ":", "raise", "nose", ".", "SkipTest", "(", "'feature not implemented'", ")", "wrapper", ".", "__name__", "=", "fc", ".", "__name__", "return", "wrapper", "new_dict", "=", "dict", "(", "cls", ".", "__dict__", ")", "for", "(", "name", ",", "func", ")", "in", "cls", ".", "__dict__", ".", "items", "(", ")", ":", "if", "name", ".", "startswith", "(", "'test_'", ")", ":", "new_dict", "[", "name", "]", "=", "wrap", "(", "func", ")", "return", "type", "(", "(", "cls", ".", "__name__", "+", "'NotImplemented'", ")", ",", "cls", ".", "__bases__", ",", "new_dict", ")" ]
construct a class that either runs tests as usual .
train
false
41,121
def makedelta(fp, sequence): previous = None for im in sequence: if (not previous): for s in (getheader(im) + getdata(im)): fp.write(s) else: delta = ImageChops.subtract_modulo(im, previous) bbox = delta.getbbox() if (not bbox): bbox = (0, 0, 1, 1) for s in getdata(im.crop(bbox), offset=bbox[:2]): fp.write(s) previous = im.copy() fp.write(';')
[ "def", "makedelta", "(", "fp", ",", "sequence", ")", ":", "previous", "=", "None", "for", "im", "in", "sequence", ":", "if", "(", "not", "previous", ")", ":", "for", "s", "in", "(", "getheader", "(", "im", ")", "+", "getdata", "(", "im", ")", ")", ":", "fp", ".", "write", "(", "s", ")", "else", ":", "delta", "=", "ImageChops", ".", "subtract_modulo", "(", "im", ",", "previous", ")", "bbox", "=", "delta", ".", "getbbox", "(", ")", "if", "(", "not", "bbox", ")", ":", "bbox", "=", "(", "0", ",", "0", ",", "1", ",", "1", ")", "for", "s", "in", "getdata", "(", "im", ".", "crop", "(", "bbox", ")", ",", "offset", "=", "bbox", "[", ":", "2", "]", ")", ":", "fp", ".", "write", "(", "s", ")", "previous", "=", "im", ".", "copy", "(", ")", "fp", ".", "write", "(", "';'", ")" ]
convert list of image frames to a gif animation file .
train
false
41,122
def is_valid_vni(vni): return (isinstance(vni, numbers.Integral) and (0 <= vni <= 16777215))
[ "def", "is_valid_vni", "(", "vni", ")", ":", "return", "(", "isinstance", "(", "vni", ",", "numbers", ".", "Integral", ")", "and", "(", "0", "<=", "vni", "<=", "16777215", ")", ")" ]
returns true if the given virtual network identifier for vxlan is valid .
train
false
41,123
def set_rng_state(new_state): default_generator.set_state(new_state)
[ "def", "set_rng_state", "(", "new_state", ")", ":", "default_generator", ".", "set_state", "(", "new_state", ")" ]
sets the random number generator state .
train
false
41,124
def _get_object_refs(obj_type): lst_objs = [] for key in _db_content[obj_type]: lst_objs.append(key) return lst_objs
[ "def", "_get_object_refs", "(", "obj_type", ")", ":", "lst_objs", "=", "[", "]", "for", "key", "in", "_db_content", "[", "obj_type", "]", ":", "lst_objs", ".", "append", "(", "key", ")", "return", "lst_objs" ]
get object references of the type .
train
false
41,125
def _dok_gen_triples(A): for ((r, c), v) in A.items(): (yield (r, c, v))
[ "def", "_dok_gen_triples", "(", "A", ")", ":", "for", "(", "(", "r", ",", "c", ")", ",", "v", ")", "in", "A", ".", "items", "(", ")", ":", "(", "yield", "(", "r", ",", "c", ",", "v", ")", ")" ]
converts a scipy sparse matrix in **dictionary of keys** format to an iterable of weighted edge triples .
train
false
41,127
def _create_instance_dict(**kwargs): inst = {} inst['image_ref'] = 'cedef40a-ed67-4d10-800e-17455edce175' inst['reservation_id'] = 'r-fakeres' inst['user_id'] = kwargs.get('user_id', 'admin') inst['project_id'] = kwargs.get('project_id', 'fake') inst['instance_type_id'] = '1' if ('host' in kwargs): inst['host'] = kwargs.get('host') inst['vcpus'] = kwargs.get('vcpus', 1) inst['memory_mb'] = kwargs.get('memory_mb', 20) inst['root_gb'] = kwargs.get('root_gb', 30) inst['ephemeral_gb'] = kwargs.get('ephemeral_gb', 30) inst['vm_state'] = kwargs.get('vm_state', vm_states.ACTIVE) inst['power_state'] = kwargs.get('power_state', power_state.RUNNING) inst['task_state'] = kwargs.get('task_state', None) inst['availability_zone'] = kwargs.get('availability_zone', None) inst['ami_launch_index'] = 0 inst['launched_on'] = kwargs.get('launched_on', 'dummy') return inst
[ "def", "_create_instance_dict", "(", "**", "kwargs", ")", ":", "inst", "=", "{", "}", "inst", "[", "'image_ref'", "]", "=", "'cedef40a-ed67-4d10-800e-17455edce175'", "inst", "[", "'reservation_id'", "]", "=", "'r-fakeres'", "inst", "[", "'user_id'", "]", "=", "kwargs", ".", "get", "(", "'user_id'", ",", "'admin'", ")", "inst", "[", "'project_id'", "]", "=", "kwargs", ".", "get", "(", "'project_id'", ",", "'fake'", ")", "inst", "[", "'instance_type_id'", "]", "=", "'1'", "if", "(", "'host'", "in", "kwargs", ")", ":", "inst", "[", "'host'", "]", "=", "kwargs", ".", "get", "(", "'host'", ")", "inst", "[", "'vcpus'", "]", "=", "kwargs", ".", "get", "(", "'vcpus'", ",", "1", ")", "inst", "[", "'memory_mb'", "]", "=", "kwargs", ".", "get", "(", "'memory_mb'", ",", "20", ")", "inst", "[", "'root_gb'", "]", "=", "kwargs", ".", "get", "(", "'root_gb'", ",", "30", ")", "inst", "[", "'ephemeral_gb'", "]", "=", "kwargs", ".", "get", "(", "'ephemeral_gb'", ",", "30", ")", "inst", "[", "'vm_state'", "]", "=", "kwargs", ".", "get", "(", "'vm_state'", ",", "vm_states", ".", "ACTIVE", ")", "inst", "[", "'power_state'", "]", "=", "kwargs", ".", "get", "(", "'power_state'", ",", "power_state", ".", "RUNNING", ")", "inst", "[", "'task_state'", "]", "=", "kwargs", ".", "get", "(", "'task_state'", ",", "None", ")", "inst", "[", "'availability_zone'", "]", "=", "kwargs", ".", "get", "(", "'availability_zone'", ",", "None", ")", "inst", "[", "'ami_launch_index'", "]", "=", "0", "inst", "[", "'launched_on'", "]", "=", "kwargs", ".", "get", "(", "'launched_on'", ",", "'dummy'", ")", "return", "inst" ]
create a dictionary for a test instance .
train
false
41,128
@flake8ext def check_assertisinstance(logical_line, filename): if ('neutron/tests/' in filename): if re.search('assertTrue\\(\\s*isinstance\\(\\s*[^,]*,\\s*[^,]*\\)\\)', logical_line): msg = 'N331: Use assertIsInstance(observed, type) instead of assertTrue(isinstance(observed, type))' (yield (0, msg))
[ "@", "flake8ext", "def", "check_assertisinstance", "(", "logical_line", ",", "filename", ")", ":", "if", "(", "'neutron/tests/'", "in", "filename", ")", ":", "if", "re", ".", "search", "(", "'assertTrue\\\\(\\\\s*isinstance\\\\(\\\\s*[^,]*,\\\\s*[^,]*\\\\)\\\\)'", ",", "logical_line", ")", ":", "msg", "=", "'N331: Use assertIsInstance(observed, type) instead of assertTrue(isinstance(observed, type))'", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
n331 - enforce using assertisinstance .
train
false
41,129
def conversation(): t = Twitter(auth=authen()) try: id = int(g['stuff'].split()[0]) except: printNicely(red("Sorry I can't understand.")) return tid = c['tweet_dict'][id] tweet = t.statuses.show(id=tid) limit = c['CONVERSATION_MAX'] thread_ref = [] thread_ref.append(tweet) prev_tid = tweet['in_reply_to_status_id'] while (prev_tid and limit): limit -= 1 tweet = t.statuses.show(id=prev_tid) prev_tid = tweet['in_reply_to_status_id'] thread_ref.append(tweet) for tweet in reversed(thread_ref): draw(t=tweet) printNicely('')
[ "def", "conversation", "(", ")", ":", "t", "=", "Twitter", "(", "auth", "=", "authen", "(", ")", ")", "try", ":", "id", "=", "int", "(", "g", "[", "'stuff'", "]", ".", "split", "(", ")", "[", "0", "]", ")", "except", ":", "printNicely", "(", "red", "(", "\"Sorry I can't understand.\"", ")", ")", "return", "tid", "=", "c", "[", "'tweet_dict'", "]", "[", "id", "]", "tweet", "=", "t", ".", "statuses", ".", "show", "(", "id", "=", "tid", ")", "limit", "=", "c", "[", "'CONVERSATION_MAX'", "]", "thread_ref", "=", "[", "]", "thread_ref", ".", "append", "(", "tweet", ")", "prev_tid", "=", "tweet", "[", "'in_reply_to_status_id'", "]", "while", "(", "prev_tid", "and", "limit", ")", ":", "limit", "-=", "1", "tweet", "=", "t", ".", "statuses", ".", "show", "(", "id", "=", "prev_tid", ")", "prev_tid", "=", "tweet", "[", "'in_reply_to_status_id'", "]", "thread_ref", ".", "append", "(", "tweet", ")", "for", "tweet", "in", "reversed", "(", "thread_ref", ")", ":", "draw", "(", "t", "=", "tweet", ")", "printNicely", "(", "''", ")" ]
conversation view .
train
false
41,131
def modifyPdpContextRequest(): a = TpPd(pd=8) b = MessageType(mesType=72) c = RadioPriorityAndSpareHalfOctets() d = LlcServiceAccessPointIdentifier() e = QualityOfService() packet = ((((a / b) / c) / d) / e) return packet
[ "def", "modifyPdpContextRequest", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "8", ")", "b", "=", "MessageType", "(", "mesType", "=", "72", ")", "c", "=", "RadioPriorityAndSpareHalfOctets", "(", ")", "d", "=", "LlcServiceAccessPointIdentifier", "(", ")", "e", "=", "QualityOfService", "(", ")", "packet", "=", "(", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "/", "e", ")", "return", "packet" ]
modify pdp context request section 9 .
train
true
41,132
def createUserObject(master, author, src=None): if (not src): log.msg('No vcs information found, unable to create User Object') return defer.succeed(None) if (src in srcs): usdict = dict(identifier=author, attr_type=src, attr_data=author) else: log.msg(('Unrecognized source argument: %s' % src)) return defer.succeed(None) return master.db.users.findUserByAttr(identifier=usdict['identifier'], attr_type=usdict['attr_type'], attr_data=usdict['attr_data'])
[ "def", "createUserObject", "(", "master", ",", "author", ",", "src", "=", "None", ")", ":", "if", "(", "not", "src", ")", ":", "log", ".", "msg", "(", "'No vcs information found, unable to create User Object'", ")", "return", "defer", ".", "succeed", "(", "None", ")", "if", "(", "src", "in", "srcs", ")", ":", "usdict", "=", "dict", "(", "identifier", "=", "author", ",", "attr_type", "=", "src", ",", "attr_data", "=", "author", ")", "else", ":", "log", ".", "msg", "(", "(", "'Unrecognized source argument: %s'", "%", "src", ")", ")", "return", "defer", ".", "succeed", "(", "None", ")", "return", "master", ".", "db", ".", "users", ".", "findUserByAttr", "(", "identifier", "=", "usdict", "[", "'identifier'", "]", ",", "attr_type", "=", "usdict", "[", "'attr_type'", "]", ",", "attr_data", "=", "usdict", "[", "'attr_data'", "]", ")" ]
take a change author and source and translate them into a user object .
train
true
41,133
def safe_mkdir_for(path): safe_mkdir(os.path.dirname(path), clean=False)
[ "def", "safe_mkdir_for", "(", "path", ")", ":", "safe_mkdir", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ",", "clean", "=", "False", ")" ]
ensure that the parent directory for a file is present .
train
true
41,134
def quotedata(data): return re.sub('(?m)^\\.', '..', re.sub('(?:\\r\\n|\\n|\\r(?!\\n))', CRLF, data))
[ "def", "quotedata", "(", "data", ")", ":", "return", "re", ".", "sub", "(", "'(?m)^\\\\.'", ",", "'..'", ",", "re", ".", "sub", "(", "'(?:\\\\r\\\\n|\\\\n|\\\\r(?!\\\\n))'", ",", "CRLF", ",", "data", ")", ")" ]
quote data for email .
train
false
41,135
def resolve_deps(header_deps): altered = 1 while altered: altered = 0 for (hdr, deps) in header_deps.items(): start = len(deps) for dep in deps.keys(): deps.update(header_deps.get(dep, {})) if (len(deps) != start): altered = 1
[ "def", "resolve_deps", "(", "header_deps", ")", ":", "altered", "=", "1", "while", "altered", ":", "altered", "=", "0", "for", "(", "hdr", ",", "deps", ")", "in", "header_deps", ".", "items", "(", ")", ":", "start", "=", "len", "(", "deps", ")", "for", "dep", "in", "deps", ".", "keys", "(", ")", ":", "deps", ".", "update", "(", "header_deps", ".", "get", "(", "dep", ",", "{", "}", ")", ")", "if", "(", "len", "(", "deps", ")", "!=", "start", ")", ":", "altered", "=", "1" ]
alter the provided dictionary to flatten includes-of-includes .
train
false
41,137
def addl_env_args(addl_env): if (addl_env is None): return [] return (['env'] + [('%s=%s' % pair) for pair in addl_env.items()])
[ "def", "addl_env_args", "(", "addl_env", ")", ":", "if", "(", "addl_env", "is", "None", ")", ":", "return", "[", "]", "return", "(", "[", "'env'", "]", "+", "[", "(", "'%s=%s'", "%", "pair", ")", "for", "pair", "in", "addl_env", ".", "items", "(", ")", "]", ")" ]
build arguments for adding additional environment vars with env .
train
false
41,138
def decode_unicode_string(string): if (string.startswith('[BASE64-DATA]') and string.endswith('[/BASE64-DATA]')): return base64.b64decode(string[len('[BASE64-DATA]'):(- len('[/BASE64-DATA]'))]) return string
[ "def", "decode_unicode_string", "(", "string", ")", ":", "if", "(", "string", ".", "startswith", "(", "'[BASE64-DATA]'", ")", "and", "string", ".", "endswith", "(", "'[/BASE64-DATA]'", ")", ")", ":", "return", "base64", ".", "b64decode", "(", "string", "[", "len", "(", "'[BASE64-DATA]'", ")", ":", "(", "-", "len", "(", "'[/BASE64-DATA]'", ")", ")", "]", ")", "return", "string" ]
decode string encoded by unicode_string .
train
true
41,139
def log_method(function): label = ('acceptance:' + function.__name__) def log_result(result, action): action.add_success_fields(result=_ensure_encodeable(result)) return result @wraps(function) def wrapper(self, *args, **kwargs): serializable_args = tuple((_ensure_encodeable(a) for a in args)) serializable_kwargs = {} for kwarg in kwargs: serializable_kwargs[kwarg] = _ensure_encodeable(kwargs[kwarg]) context = start_action(action_type=label, args=serializable_args, kwargs=serializable_kwargs) with context.context(): d = DeferredContext(function(self, *args, **kwargs)) d.addCallback(log_result, context) d.addActionFinish() return d.result return wrapper
[ "def", "log_method", "(", "function", ")", ":", "label", "=", "(", "'acceptance:'", "+", "function", ".", "__name__", ")", "def", "log_result", "(", "result", ",", "action", ")", ":", "action", ".", "add_success_fields", "(", "result", "=", "_ensure_encodeable", "(", "result", ")", ")", "return", "result", "@", "wraps", "(", "function", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "serializable_args", "=", "tuple", "(", "(", "_ensure_encodeable", "(", "a", ")", "for", "a", "in", "args", ")", ")", "serializable_kwargs", "=", "{", "}", "for", "kwarg", "in", "kwargs", ":", "serializable_kwargs", "[", "kwarg", "]", "=", "_ensure_encodeable", "(", "kwargs", "[", "kwarg", "]", ")", "context", "=", "start_action", "(", "action_type", "=", "label", ",", "args", "=", "serializable_args", ",", "kwargs", "=", "serializable_kwargs", ")", "with", "context", ".", "context", "(", ")", ":", "d", "=", "DeferredContext", "(", "function", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ")", "d", ".", "addCallback", "(", "log_result", ",", "context", ")", "d", ".", "addActionFinish", "(", ")", "return", "d", ".", "result", "return", "wrapper" ]
decorator that log calls to the given function .
train
false
41,140
def tearDownModule(): global ENGINE global SESSION ENGINE.dispose() ENGINE = None SESSION = None
[ "def", "tearDownModule", "(", ")", ":", "global", "ENGINE", "global", "SESSION", "ENGINE", ".", "dispose", "(", ")", "ENGINE", "=", "None", "SESSION", "=", "None" ]
close database .
train
false
41,141
def libvlc_video_set_aspect_ratio(p_mi, psz_aspect): f = (_Cfunctions.get('libvlc_video_set_aspect_ratio', None) or _Cfunction('libvlc_video_set_aspect_ratio', ((1,), (1,)), None, None, MediaPlayer, ctypes.c_char_p)) return f(p_mi, psz_aspect)
[ "def", "libvlc_video_set_aspect_ratio", "(", "p_mi", ",", "psz_aspect", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_aspect_ratio'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_aspect_ratio'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_char_p", ")", ")", "return", "f", "(", "p_mi", ",", "psz_aspect", ")" ]
set new video aspect ratio .
train
true
41,142
def market(language): a = regionalize(language) a = (((len(a) > 0) and a[0]) or None) return a
[ "def", "market", "(", "language", ")", ":", "a", "=", "regionalize", "(", "language", ")", "a", "=", "(", "(", "(", "len", "(", "a", ")", ">", "0", ")", "and", "a", "[", "0", "]", ")", "or", "None", ")", "return", "a" ]
returns the first item from regionalize .
train
false
41,143
def finger(match, hash_type=None): if (hash_type is None): hash_type = __opts__['hash_type'] skey = get_key(__opts__) return skey.finger(match, hash_type)
[ "def", "finger", "(", "match", ",", "hash_type", "=", "None", ")", ":", "if", "(", "hash_type", "is", "None", ")", ":", "hash_type", "=", "__opts__", "[", "'hash_type'", "]", "skey", "=", "get_key", "(", "__opts__", ")", "return", "skey", ".", "finger", "(", "match", ",", "hash_type", ")" ]
return the matching key fingerprints .
train
true
41,145
def no_tracing(func): if (not hasattr(sys, 'gettrace')): return func else: def wrapper(*args, **kwargs): original_trace = sys.gettrace() try: sys.settrace(None) return func(*args, **kwargs) finally: sys.settrace(original_trace) wrapper.__name__ = func.__name__ return wrapper
[ "def", "no_tracing", "(", "func", ")", ":", "if", "(", "not", "hasattr", "(", "sys", ",", "'gettrace'", ")", ")", ":", "return", "func", "else", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "original_trace", "=", "sys", ".", "gettrace", "(", ")", "try", ":", "sys", ".", "settrace", "(", "None", ")", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "sys", ".", "settrace", "(", "original_trace", ")", "wrapper", ".", "__name__", "=", "func", ".", "__name__", "return", "wrapper" ]
decorator to temporarily turn off tracing for the duration of a test .
train
false
41,146
@box(types.EnumMember) def box_enum(typ, val, c): valobj = c.box(typ.dtype, val) cls_obj = c.pyapi.unserialize(c.pyapi.serialize_object(typ.instance_class)) return c.pyapi.call_function_objargs(cls_obj, (valobj,))
[ "@", "box", "(", "types", ".", "EnumMember", ")", "def", "box_enum", "(", "typ", ",", "val", ",", "c", ")", ":", "valobj", "=", "c", ".", "box", "(", "typ", ".", "dtype", ",", "val", ")", "cls_obj", "=", "c", ".", "pyapi", ".", "unserialize", "(", "c", ".", "pyapi", ".", "serialize_object", "(", "typ", ".", "instance_class", ")", ")", "return", "c", ".", "pyapi", ".", "call_function_objargs", "(", "cls_obj", ",", "(", "valobj", ",", ")", ")" ]
fetch an enum member given its native value .
train
false
41,148
def _check_for_missing_and_disallowed_fields(document, entries, fields): (missing_fields, disallowed_fields) = ([], []) for (field, in_votes, in_consensus, mandatory) in fields: if (mandatory and ((document.is_consensus and in_consensus) or (document.is_vote and in_votes))): if (field not in entries.keys()): missing_fields.append(field) elif ((document.is_consensus and (not in_consensus)) or (document.is_vote and (not in_votes))): if (field in entries.keys()): disallowed_fields.append(field) if missing_fields: raise ValueError(('Network status document is missing mandatory field: %s' % ', '.join(missing_fields))) if disallowed_fields: raise ValueError(("Network status document has fields that shouldn't appear in this document type or version: %s" % ', '.join(disallowed_fields)))
[ "def", "_check_for_missing_and_disallowed_fields", "(", "document", ",", "entries", ",", "fields", ")", ":", "(", "missing_fields", ",", "disallowed_fields", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", "(", "field", ",", "in_votes", ",", "in_consensus", ",", "mandatory", ")", "in", "fields", ":", "if", "(", "mandatory", "and", "(", "(", "document", ".", "is_consensus", "and", "in_consensus", ")", "or", "(", "document", ".", "is_vote", "and", "in_votes", ")", ")", ")", ":", "if", "(", "field", "not", "in", "entries", ".", "keys", "(", ")", ")", ":", "missing_fields", ".", "append", "(", "field", ")", "elif", "(", "(", "document", ".", "is_consensus", "and", "(", "not", "in_consensus", ")", ")", "or", "(", "document", ".", "is_vote", "and", "(", "not", "in_votes", ")", ")", ")", ":", "if", "(", "field", "in", "entries", ".", "keys", "(", ")", ")", ":", "disallowed_fields", ".", "append", "(", "field", ")", "if", "missing_fields", ":", "raise", "ValueError", "(", "(", "'Network status document is missing mandatory field: %s'", "%", "', '", ".", "join", "(", "missing_fields", ")", ")", ")", "if", "disallowed_fields", ":", "raise", "ValueError", "(", "(", "\"Network status document has fields that shouldn't appear in this document type or version: %s\"", "%", "', '", ".", "join", "(", "disallowed_fields", ")", ")", ")" ]
checks that we have mandatory fields for our type .
train
false
41,149
def get_register_support_url(request=None, force_is_admin=False): siteconfig = SiteConfiguration.objects.get_current() if siteconfig.get(u'send_support_usage_stats'): support_data = serialize_support_data(request, force_is_admin) else: support_data = u'' return (settings.REGISTER_SUPPORT_URL % {u'support_data': support_data})
[ "def", "get_register_support_url", "(", "request", "=", "None", ",", "force_is_admin", "=", "False", ")", ":", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "if", "siteconfig", ".", "get", "(", "u'send_support_usage_stats'", ")", ":", "support_data", "=", "serialize_support_data", "(", "request", ",", "force_is_admin", ")", "else", ":", "support_data", "=", "u''", "return", "(", "settings", ".", "REGISTER_SUPPORT_URL", "%", "{", "u'support_data'", ":", "support_data", "}", ")" ]
return the url for registering the review board support page .
train
false
41,150
def is_hash_used(context, builder, h): deleted = ir.Constant(h.type, DELETED) return builder.icmp_unsigned('<', h, deleted)
[ "def", "is_hash_used", "(", "context", ",", "builder", ",", "h", ")", ":", "deleted", "=", "ir", ".", "Constant", "(", "h", ".", "type", ",", "DELETED", ")", "return", "builder", ".", "icmp_unsigned", "(", "'<'", ",", "h", ",", "deleted", ")" ]
whether the hash value denotes an active entry .
train
false
41,151
def group_followee_count(context, data_dict): return _followee_count(context, data_dict, context['model'].UserFollowingGroup)
[ "def", "group_followee_count", "(", "context", ",", "data_dict", ")", ":", "return", "_followee_count", "(", "context", ",", "data_dict", ",", "context", "[", "'model'", "]", ".", "UserFollowingGroup", ")" ]
return the number of groups that are followed by the given user .
train
false
41,153
def get_local_id(*args, **kargs): raise _stub_error
[ "def", "get_local_id", "(", "*", "args", ",", "**", "kargs", ")", ":", "raise", "_stub_error" ]
opencl get_local_id() .
train
false
41,154
def minkowski_distance_p(x, y, p=2): x = np.asarray(x) y = np.asarray(y) if (p == np.inf): return np.amax(np.abs((y - x)), axis=(-1)) elif (p == 1): return np.sum(np.abs((y - x)), axis=(-1)) else: return np.sum((np.abs((y - x)) ** p), axis=(-1))
[ "def", "minkowski_distance_p", "(", "x", ",", "y", ",", "p", "=", "2", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "y", "=", "np", ".", "asarray", "(", "y", ")", "if", "(", "p", "==", "np", ".", "inf", ")", ":", "return", "np", ".", "amax", "(", "np", ".", "abs", "(", "(", "y", "-", "x", ")", ")", ",", "axis", "=", "(", "-", "1", ")", ")", "elif", "(", "p", "==", "1", ")", ":", "return", "np", ".", "sum", "(", "np", ".", "abs", "(", "(", "y", "-", "x", ")", ")", ",", "axis", "=", "(", "-", "1", ")", ")", "else", ":", "return", "np", ".", "sum", "(", "(", "np", ".", "abs", "(", "(", "y", "-", "x", ")", ")", "**", "p", ")", ",", "axis", "=", "(", "-", "1", ")", ")" ]
compute the p-th power of the l**p distance between two arrays .
train
false
41,155
@pytest.mark.skipif('not HAS_BEAUTIFUL_SOUP') def test_identify_table(): soup = BeautifulSoup('<html><body></body></html>') assert (html.identify_table(soup, {}, 0) is False) assert (html.identify_table(None, {}, 0) is False) soup = BeautifulSoup('<table id="foo"><tr><th>A</th></tr><tr><td>B</td></tr></table>').table assert (html.identify_table(soup, {}, 2) is False) assert (html.identify_table(soup, {}, 1) is True) assert (html.identify_table(soup, {'table_id': 2}, 1) is False) assert (html.identify_table(soup, {'table_id': 1}, 1) is True) assert (html.identify_table(soup, {'table_id': 'bar'}, 1) is False) assert (html.identify_table(soup, {'table_id': 'foo'}, 1) is True)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "'not HAS_BEAUTIFUL_SOUP'", ")", "def", "test_identify_table", "(", ")", ":", "soup", "=", "BeautifulSoup", "(", "'<html><body></body></html>'", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "}", ",", "0", ")", "is", "False", ")", "assert", "(", "html", ".", "identify_table", "(", "None", ",", "{", "}", ",", "0", ")", "is", "False", ")", "soup", "=", "BeautifulSoup", "(", "'<table id=\"foo\"><tr><th>A</th></tr><tr><td>B</td></tr></table>'", ")", ".", "table", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "}", ",", "2", ")", "is", "False", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "}", ",", "1", ")", "is", "True", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "'table_id'", ":", "2", "}", ",", "1", ")", "is", "False", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "'table_id'", ":", "1", "}", ",", "1", ")", "is", "True", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "'table_id'", ":", "'bar'", "}", ",", "1", ")", "is", "False", ")", "assert", "(", "html", ".", "identify_table", "(", "soup", ",", "{", "'table_id'", ":", "'foo'", "}", ",", "1", ")", "is", "True", ")" ]
test to make sure that identify_table() returns whether the given beautifulsoup tag is the correct table to process .
train
false
41,156
def _result_type_many(*arrays_and_dtypes): try: return np.result_type(*arrays_and_dtypes) except ValueError: return reduce(np.result_type, arrays_and_dtypes)
[ "def", "_result_type_many", "(", "*", "arrays_and_dtypes", ")", ":", "try", ":", "return", "np", ".", "result_type", "(", "*", "arrays_and_dtypes", ")", "except", "ValueError", ":", "return", "reduce", "(", "np", ".", "result_type", ",", "arrays_and_dtypes", ")" ]
wrapper around numpy .
train
true
41,158
def test_user_flag(script, data, virtualenv): virtualenv.system_site_packages = True script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') script.pip('install', '-f', data.find_links, '--no-index', '--user', 'simple2==2.0') result = script.pip('list', '--user', '--format=legacy') assert ('simple (1.0)' not in result.stdout) assert ('simple2 (2.0)' in result.stdout)
[ "def", "test_user_flag", "(", "script", ",", "data", ",", "virtualenv", ")", ":", "virtualenv", ".", "system_site_packages", "=", "True", "script", ".", "pip", "(", "'install'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'--no-index'", ",", "'simple==1.0'", ")", "script", ".", "pip", "(", "'install'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'--no-index'", ",", "'--user'", ",", "'simple2==2.0'", ")", "result", "=", "script", ".", "pip", "(", "'list'", ",", "'--user'", ",", "'--format=legacy'", ")", "assert", "(", "'simple (1.0)'", "not", "in", "result", ".", "stdout", ")", "assert", "(", "'simple2 (2.0)'", "in", "result", ".", "stdout", ")" ]
test the behavior of --user flag in the list command .
train
false
41,159
def getTransformedOutlineByPath(elementNode, path, yAxisPointingUpward): aroundsFromPath = intercircle.getAroundsFromPath(path, getStrokeRadius(elementNode)) return getChainMatrixSVGIfNecessary(elementNode, yAxisPointingUpward).getTransformedPaths(aroundsFromPath)
[ "def", "getTransformedOutlineByPath", "(", "elementNode", ",", "path", ",", "yAxisPointingUpward", ")", ":", "aroundsFromPath", "=", "intercircle", ".", "getAroundsFromPath", "(", "path", ",", "getStrokeRadius", "(", "elementNode", ")", ")", "return", "getChainMatrixSVGIfNecessary", "(", "elementNode", ",", "yAxisPointingUpward", ")", ".", "getTransformedPaths", "(", "aroundsFromPath", ")" ]
get the outline from the path .
train
false
41,161
def xhtml_unescape(value): return re.sub('&(#?)(\\w+?);', _convert_entity, _unicode(value))
[ "def", "xhtml_unescape", "(", "value", ")", ":", "return", "re", ".", "sub", "(", "'&(#?)(\\\\w+?);'", ",", "_convert_entity", ",", "_unicode", "(", "value", ")", ")" ]
un-escapes an xml-escaped string .
train
false
41,163
def placebo_session(function): @functools.wraps(function) def wrapper(*args, **kwargs): session_kwargs = {'region_name': os.environ.get('AWS_DEFAULT_REGION', 'us-east-1')} profile_name = os.environ.get('PLACEBO_PROFILE', None) if profile_name: session_kwargs['profile_name'] = profile_name session = boto3.Session(**session_kwargs) self = args[0] prefix = ((self.__class__.__name__ + '.') + function.__name__) record_dir = os.path.join(PLACEBO_DIR, prefix) if (not os.path.exists(record_dir)): os.makedirs(record_dir) pill = placebo.attach(session, data_path=record_dir) if (os.environ.get('PLACEBO_MODE') == 'record'): pill.record() else: pill.playback() kwargs['session'] = session return function(*args, **kwargs) return wrapper
[ "def", "placebo_session", "(", "function", ")", ":", "@", "functools", ".", "wraps", "(", "function", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "session_kwargs", "=", "{", "'region_name'", ":", "os", ".", "environ", ".", "get", "(", "'AWS_DEFAULT_REGION'", ",", "'us-east-1'", ")", "}", "profile_name", "=", "os", ".", "environ", ".", "get", "(", "'PLACEBO_PROFILE'", ",", "None", ")", "if", "profile_name", ":", "session_kwargs", "[", "'profile_name'", "]", "=", "profile_name", "session", "=", "boto3", ".", "Session", "(", "**", "session_kwargs", ")", "self", "=", "args", "[", "0", "]", "prefix", "=", "(", "(", "self", ".", "__class__", ".", "__name__", "+", "'.'", ")", "+", "function", ".", "__name__", ")", "record_dir", "=", "os", ".", "path", ".", "join", "(", "PLACEBO_DIR", ",", "prefix", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "record_dir", ")", ")", ":", "os", ".", "makedirs", "(", "record_dir", ")", "pill", "=", "placebo", ".", "attach", "(", "session", ",", "data_path", "=", "record_dir", ")", "if", "(", "os", ".", "environ", ".", "get", "(", "'PLACEBO_MODE'", ")", "==", "'record'", ")", ":", "pill", ".", "record", "(", ")", "else", ":", "pill", ".", "playback", "(", ")", "kwargs", "[", "'session'", "]", "=", "session", "return", "function", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
decorator to help do testing with placebo .
train
true
41,164
def rebooted(name): return _virt_call(name, 'reboot', 'rebooted', 'Machine has been rebooted')
[ "def", "rebooted", "(", "name", ")", ":", "return", "_virt_call", "(", "name", ",", "'reboot'", ",", "'rebooted'", ",", "'Machine has been rebooted'", ")" ]
reboots vms .
train
false
41,165
def make_acl(scheme, credential, read=False, write=False, create=False, delete=False, admin=False, all=False): if all: permissions = Permissions.ALL else: permissions = 0 if read: permissions |= Permissions.READ if write: permissions |= Permissions.WRITE if create: permissions |= Permissions.CREATE if delete: permissions |= Permissions.DELETE if admin: permissions |= Permissions.ADMIN return ACL(permissions, Id(scheme, credential))
[ "def", "make_acl", "(", "scheme", ",", "credential", ",", "read", "=", "False", ",", "write", "=", "False", ",", "create", "=", "False", ",", "delete", "=", "False", ",", "admin", "=", "False", ",", "all", "=", "False", ")", ":", "if", "all", ":", "permissions", "=", "Permissions", ".", "ALL", "else", ":", "permissions", "=", "0", "if", "read", ":", "permissions", "|=", "Permissions", ".", "READ", "if", "write", ":", "permissions", "|=", "Permissions", ".", "WRITE", "if", "create", ":", "permissions", "|=", "Permissions", ".", "CREATE", "if", "delete", ":", "permissions", "|=", "Permissions", ".", "DELETE", "if", "admin", ":", "permissions", "|=", "Permissions", ".", "ADMIN", "return", "ACL", "(", "permissions", ",", "Id", "(", "scheme", ",", "credential", ")", ")" ]
given a scheme and credential .
train
false
41,166
def get_cache_subnet_group(name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: csg = conn.describe_cache_subnet_groups(name) csg = csg['DescribeCacheSubnetGroupsResponse'] csg = csg['DescribeCacheSubnetGroupsResult']['CacheSubnetGroups'][0] except boto.exception.BotoServerError as e: msg = 'Failed to get cache subnet group {0}.'.format(name) log.error(msg) log.debug(e) return False except (IndexError, TypeError, KeyError): msg = 'Failed to get cache subnet group {0} (2).'.format(name) log.error(msg) return False ret = {} for (key, val) in six.iteritems(csg): if (key == 'CacheSubnetGroupName'): ret['cache_subnet_group_name'] = val elif (key == 'CacheSubnetGroupDescription'): ret['cache_subnet_group_description'] = val elif (key == 'VpcId'): ret['vpc_id'] = val elif (key == 'Subnets'): ret['subnets'] = [] for subnet in val: _subnet = {} _subnet['subnet_id'] = subnet['SubnetIdentifier'] _az = subnet['SubnetAvailabilityZone']['Name'] _subnet['subnet_availability_zone'] = _az ret['subnets'].append(_subnet) else: ret[key] = val return ret
[ "def", "get_cache_subnet_group", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "csg", "=", "conn", ".", "describe_cache_subnet_groups", "(", "name", ")", "csg", "=", "csg", "[", "'DescribeCacheSubnetGroupsResponse'", "]", "csg", "=", "csg", "[", "'DescribeCacheSubnetGroupsResult'", "]", "[", "'CacheSubnetGroups'", "]", "[", "0", "]", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "msg", "=", "'Failed to get cache subnet group {0}.'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ")", "log", ".", "debug", "(", "e", ")", "return", "False", "except", "(", "IndexError", ",", "TypeError", ",", "KeyError", ")", ":", "msg", "=", "'Failed to get cache subnet group {0} (2).'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ")", "return", "False", "ret", "=", "{", "}", "for", "(", "key", ",", "val", ")", "in", "six", ".", "iteritems", "(", "csg", ")", ":", "if", "(", "key", "==", "'CacheSubnetGroupName'", ")", ":", "ret", "[", "'cache_subnet_group_name'", "]", "=", "val", "elif", "(", "key", "==", "'CacheSubnetGroupDescription'", ")", ":", "ret", "[", "'cache_subnet_group_description'", "]", "=", "val", "elif", "(", "key", "==", "'VpcId'", ")", ":", "ret", "[", "'vpc_id'", "]", "=", "val", "elif", "(", "key", "==", "'Subnets'", ")", ":", "ret", "[", "'subnets'", "]", "=", "[", "]", "for", "subnet", "in", "val", ":", "_subnet", "=", "{", "}", "_subnet", "[", "'subnet_id'", "]", "=", "subnet", "[", "'SubnetIdentifier'", "]", "_az", "=", "subnet", "[", "'SubnetAvailabilityZone'", "]", "[", "'Name'", "]", "_subnet", "[", "'subnet_availability_zone'", "]", "=", "_az", "ret", "[", "'subnets'", "]", ".", "append", "(", "_subnet", ")", "else", ":", "ret", "[", "key", "]", 
"=", "val", "return", "ret" ]
get information about a cache subnet group .
train
true
41,169
def _mask_tester(norm_instance, vals): masked_array = np.ma.array(vals) masked_array[0] = np.ma.masked assert_array_equal(masked_array.mask, norm_instance(masked_array).mask)
[ "def", "_mask_tester", "(", "norm_instance", ",", "vals", ")", ":", "masked_array", "=", "np", ".", "ma", ".", "array", "(", "vals", ")", "masked_array", "[", "0", "]", "=", "np", ".", "ma", ".", "masked", "assert_array_equal", "(", "masked_array", ".", "mask", ",", "norm_instance", "(", "masked_array", ")", ".", "mask", ")" ]
checks mask handling .
train
false
41,170
def image_field_data(request, include_empty_option=False): try: images = get_available_images(request, request.user.project_id) except Exception: exceptions.handle(request, _('Unable to retrieve images')) images.sort(key=(lambda c: c.name)) images_list = [('', _('Select Image'))] for image in images: image_label = u'{} ({})'.format(image.name, filesizeformat(image.size)) images_list.append((image.id, image_label)) if (not images): return [('', _('No images available'))] return images_list
[ "def", "image_field_data", "(", "request", ",", "include_empty_option", "=", "False", ")", ":", "try", ":", "images", "=", "get_available_images", "(", "request", ",", "request", ".", "user", ".", "project_id", ")", "except", "Exception", ":", "exceptions", ".", "handle", "(", "request", ",", "_", "(", "'Unable to retrieve images'", ")", ")", "images", ".", "sort", "(", "key", "=", "(", "lambda", "c", ":", "c", ".", "name", ")", ")", "images_list", "=", "[", "(", "''", ",", "_", "(", "'Select Image'", ")", ")", "]", "for", "image", "in", "images", ":", "image_label", "=", "u'{} ({})'", ".", "format", "(", "image", ".", "name", ",", "filesizeformat", "(", "image", ".", "size", ")", ")", "images_list", ".", "append", "(", "(", "image", ".", "id", ",", "image_label", ")", ")", "if", "(", "not", "images", ")", ":", "return", "[", "(", "''", ",", "_", "(", "'No images available'", ")", ")", "]", "return", "images_list" ]
returns a list of tuples of all images .
train
true
41,172
def getPointsRoundZAxis(planeAngle, points): planeArray = [] for point in points: planeArray.append((planeAngle * point)) return planeArray
[ "def", "getPointsRoundZAxis", "(", "planeAngle", ",", "points", ")", ":", "planeArray", "=", "[", "]", "for", "point", "in", "points", ":", "planeArray", ".", "append", "(", "(", "planeAngle", "*", "point", ")", ")", "return", "planeArray" ]
get points rotated by the plane angle .
train
false
41,173
def format_duration_in_millis(duration=0): (seconds, millis) = divmod(duration, 1000) (minutes, seconds) = divmod(seconds, 60) (hours, minutes) = divmod(minutes, 60) (days, hours) = divmod(hours, 24) output = [] written = False if days: written = True output.append(('%dd' % days)) if (written or hours): written = True output.append(('%dh' % hours)) if (written or minutes): output.append(('%dm' % minutes)) output.append(('%ds' % seconds)) return ':'.join(output)
[ "def", "format_duration_in_millis", "(", "duration", "=", "0", ")", ":", "(", "seconds", ",", "millis", ")", "=", "divmod", "(", "duration", ",", "1000", ")", "(", "minutes", ",", "seconds", ")", "=", "divmod", "(", "seconds", ",", "60", ")", "(", "hours", ",", "minutes", ")", "=", "divmod", "(", "minutes", ",", "60", ")", "(", "days", ",", "hours", ")", "=", "divmod", "(", "hours", ",", "24", ")", "output", "=", "[", "]", "written", "=", "False", "if", "days", ":", "written", "=", "True", "output", ".", "append", "(", "(", "'%dd'", "%", "days", ")", ")", "if", "(", "written", "or", "hours", ")", ":", "written", "=", "True", "output", ".", "append", "(", "(", "'%dh'", "%", "hours", ")", ")", "if", "(", "written", "or", "minutes", ")", ":", "output", ".", "append", "(", "(", "'%dm'", "%", "minutes", ")", ")", "output", ".", "append", "(", "(", "'%ds'", "%", "seconds", ")", ")", "return", "':'", ".", "join", "(", "output", ")" ]
formats the difference between two times in millis as xd:xh:xm:xs .
train
false
41,174
def test_indices(): n_seeds_test = [1, 3, 4] n_targets_test = [2, 3, 200] rng = np.random.RandomState(42) for n_seeds in n_seeds_test: for n_targets in n_targets_test: idx = rng.permutation(np.arange((n_seeds + n_targets))) seeds = idx[:n_seeds] targets = idx[n_seeds:] indices = seed_target_indices(seeds, targets) assert_true((len(indices) == 2)) assert_true((len(indices[0]) == len(indices[1]))) assert_true((len(indices[0]) == (n_seeds * n_targets))) for seed in seeds: assert_true((np.sum((indices[0] == seed)) == n_targets)) for target in targets: assert_true((np.sum((indices[1] == target)) == n_seeds))
[ "def", "test_indices", "(", ")", ":", "n_seeds_test", "=", "[", "1", ",", "3", ",", "4", "]", "n_targets_test", "=", "[", "2", ",", "3", ",", "200", "]", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "42", ")", "for", "n_seeds", "in", "n_seeds_test", ":", "for", "n_targets", "in", "n_targets_test", ":", "idx", "=", "rng", ".", "permutation", "(", "np", ".", "arange", "(", "(", "n_seeds", "+", "n_targets", ")", ")", ")", "seeds", "=", "idx", "[", ":", "n_seeds", "]", "targets", "=", "idx", "[", "n_seeds", ":", "]", "indices", "=", "seed_target_indices", "(", "seeds", ",", "targets", ")", "assert_true", "(", "(", "len", "(", "indices", ")", "==", "2", ")", ")", "assert_true", "(", "(", "len", "(", "indices", "[", "0", "]", ")", "==", "len", "(", "indices", "[", "1", "]", ")", ")", ")", "assert_true", "(", "(", "len", "(", "indices", "[", "0", "]", ")", "==", "(", "n_seeds", "*", "n_targets", ")", ")", ")", "for", "seed", "in", "seeds", ":", "assert_true", "(", "(", "np", ".", "sum", "(", "(", "indices", "[", "0", "]", "==", "seed", ")", ")", "==", "n_targets", ")", ")", "for", "target", "in", "targets", ":", "assert_true", "(", "(", "np", ".", "sum", "(", "(", "indices", "[", "1", "]", "==", "target", ")", ")", "==", "n_seeds", ")", ")" ]
test connectivity indexing methods .
train
false
41,175
def remove_compiled_app(): app = get_app() remove_compiled_application(apath(app, r=request)) session.flash = T('compiled application removed') redirect(URL('site'))
[ "def", "remove_compiled_app", "(", ")", ":", "app", "=", "get_app", "(", ")", "remove_compiled_application", "(", "apath", "(", "app", ",", "r", "=", "request", ")", ")", "session", ".", "flash", "=", "T", "(", "'compiled application removed'", ")", "redirect", "(", "URL", "(", "'site'", ")", ")" ]
remove the compiled application .
train
false
41,176
def remove_trailing_version_from_href(href): parsed_url = urlparse.urlsplit(href) url_parts = parsed_url.path.rsplit('/', 1) expression = re.compile('^v([0-9]+|[0-9]+\\.[0-9]+)(/.*|$)') if (not expression.match(url_parts.pop())): LOG.debug('href %s does not contain version', href) raise ValueError((_('href %s does not contain version') % href)) new_path = url_join(*url_parts) parsed_url = list(parsed_url) parsed_url[2] = new_path return urlparse.urlunsplit(parsed_url)
[ "def", "remove_trailing_version_from_href", "(", "href", ")", ":", "parsed_url", "=", "urlparse", ".", "urlsplit", "(", "href", ")", "url_parts", "=", "parsed_url", ".", "path", ".", "rsplit", "(", "'/'", ",", "1", ")", "expression", "=", "re", ".", "compile", "(", "'^v([0-9]+|[0-9]+\\\\.[0-9]+)(/.*|$)'", ")", "if", "(", "not", "expression", ".", "match", "(", "url_parts", ".", "pop", "(", ")", ")", ")", ":", "LOG", ".", "debug", "(", "'href %s does not contain version'", ",", "href", ")", "raise", "ValueError", "(", "(", "_", "(", "'href %s does not contain version'", ")", "%", "href", ")", ")", "new_path", "=", "url_join", "(", "*", "url_parts", ")", "parsed_url", "=", "list", "(", "parsed_url", ")", "parsed_url", "[", "2", "]", "=", "new_path", "return", "urlparse", ".", "urlunsplit", "(", "parsed_url", ")" ]
removes the api version from the href .
train
false
41,177
def merge_color_and_opacity(color, opacity): if (color is None): return None rgb_tup = hex_to_rgb(color) if (opacity is None): return 'rgb {}'.format(rgb_tup) rgba_tup = (rgb_tup + (opacity,)) return 'rgba {}'.format(rgba_tup)
[ "def", "merge_color_and_opacity", "(", "color", ",", "opacity", ")", ":", "if", "(", "color", "is", "None", ")", ":", "return", "None", "rgb_tup", "=", "hex_to_rgb", "(", "color", ")", "if", "(", "opacity", "is", "None", ")", ":", "return", "'rgb {}'", ".", "format", "(", "rgb_tup", ")", "rgba_tup", "=", "(", "rgb_tup", "+", "(", "opacity", ",", ")", ")", "return", "'rgba {}'", ".", "format", "(", "rgba_tup", ")" ]
merge hex color with an alpha to get an rgba tuple .
train
false
41,178
def signsimp(expr, evaluate=None): if (evaluate is None): evaluate = global_evaluate[0] expr = sympify(expr) if ((not isinstance(expr, Expr)) or expr.is_Atom): return expr e = sub_post(sub_pre(expr)) if ((not isinstance(e, Expr)) or e.is_Atom): return e if e.is_Add: return e.func(*[signsimp(a) for a in e.args]) if evaluate: e = e.xreplace({m: (- (- m)) for m in e.atoms(Mul) if ((- (- m)) != m)}) return e
[ "def", "signsimp", "(", "expr", ",", "evaluate", "=", "None", ")", ":", "if", "(", "evaluate", "is", "None", ")", ":", "evaluate", "=", "global_evaluate", "[", "0", "]", "expr", "=", "sympify", "(", "expr", ")", "if", "(", "(", "not", "isinstance", "(", "expr", ",", "Expr", ")", ")", "or", "expr", ".", "is_Atom", ")", ":", "return", "expr", "e", "=", "sub_post", "(", "sub_pre", "(", "expr", ")", ")", "if", "(", "(", "not", "isinstance", "(", "e", ",", "Expr", ")", ")", "or", "e", ".", "is_Atom", ")", ":", "return", "e", "if", "e", ".", "is_Add", ":", "return", "e", ".", "func", "(", "*", "[", "signsimp", "(", "a", ")", "for", "a", "in", "e", ".", "args", "]", ")", "if", "evaluate", ":", "e", "=", "e", ".", "xreplace", "(", "{", "m", ":", "(", "-", "(", "-", "m", ")", ")", "for", "m", "in", "e", ".", "atoms", "(", "Mul", ")", "if", "(", "(", "-", "(", "-", "m", ")", ")", "!=", "m", ")", "}", ")", "return", "e" ]
make all add sub-expressions canonical wrt sign .
train
false
41,179
def _sortChunk(records, key, chunkIndex, fields): title(additional=('(key=%s, chunkIndex=%d)' % (str(key), chunkIndex))) assert (len(records) > 0) records.sort(key=itemgetter(*key)) if (chunkIndex is not None): filename = ('chunk_%d.csv' % chunkIndex) with FileRecordStream(filename, write=True, fields=fields) as o: for r in records: o.appendRecord(r) assert (os.path.getsize(filename) > 0) return records
[ "def", "_sortChunk", "(", "records", ",", "key", ",", "chunkIndex", ",", "fields", ")", ":", "title", "(", "additional", "=", "(", "'(key=%s, chunkIndex=%d)'", "%", "(", "str", "(", "key", ")", ",", "chunkIndex", ")", ")", ")", "assert", "(", "len", "(", "records", ")", ">", "0", ")", "records", ".", "sort", "(", "key", "=", "itemgetter", "(", "*", "key", ")", ")", "if", "(", "chunkIndex", "is", "not", "None", ")", ":", "filename", "=", "(", "'chunk_%d.csv'", "%", "chunkIndex", ")", "with", "FileRecordStream", "(", "filename", ",", "write", "=", "True", ",", "fields", "=", "fields", ")", "as", "o", ":", "for", "r", "in", "records", ":", "o", ".", "appendRecord", "(", "r", ")", "assert", "(", "os", ".", "path", ".", "getsize", "(", "filename", ")", ">", "0", ")", "return", "records" ]
sort in memory chunk of records records - a list of records read from the original dataset key - a list of indices to sort the records by chunkindex - the index of the current chunk the records contain only the fields requested by the user .
train
true
41,181
def read_tag(fid, pos=None, shape=None, rlims=None): if (pos is not None): fid.seek(pos, 0) tag = _read_tag_header(fid) if (tag.size > 0): matrix_coding = (_is_matrix & tag.type) if (matrix_coding != 0): tag.data = _read_matrix(fid, tag, shape, rlims, matrix_coding) else: fun = _call_dict.get(tag.type) if (fun is not None): tag.data = fun(fid, tag, shape, rlims) else: raise Exception(('Unimplemented tag data type %s' % tag.type)) if (tag.next != FIFF.FIFFV_NEXT_SEQ): fid.seek(tag.next, 1) return tag
[ "def", "read_tag", "(", "fid", ",", "pos", "=", "None", ",", "shape", "=", "None", ",", "rlims", "=", "None", ")", ":", "if", "(", "pos", "is", "not", "None", ")", ":", "fid", ".", "seek", "(", "pos", ",", "0", ")", "tag", "=", "_read_tag_header", "(", "fid", ")", "if", "(", "tag", ".", "size", ">", "0", ")", ":", "matrix_coding", "=", "(", "_is_matrix", "&", "tag", ".", "type", ")", "if", "(", "matrix_coding", "!=", "0", ")", ":", "tag", ".", "data", "=", "_read_matrix", "(", "fid", ",", "tag", ",", "shape", ",", "rlims", ",", "matrix_coding", ")", "else", ":", "fun", "=", "_call_dict", ".", "get", "(", "tag", ".", "type", ")", "if", "(", "fun", "is", "not", "None", ")", ":", "tag", ".", "data", "=", "fun", "(", "fid", ",", "tag", ",", "shape", ",", "rlims", ")", "else", ":", "raise", "Exception", "(", "(", "'Unimplemented tag data type %s'", "%", "tag", ".", "type", ")", ")", "if", "(", "tag", ".", "next", "!=", "FIFF", ".", "FIFFV_NEXT_SEQ", ")", ":", "fid", ".", "seek", "(", "tag", ".", "next", ",", "1", ")", "return", "tag" ]
read a tag from a file at a given position .
train
false
41,182
def del_from_section(kwargs): section = kwargs.get('section', '') if (section in ('servers', 'rss', 'categories')): keyword = kwargs.get('keyword') if keyword: item = config.get_config(section, keyword) if item: item.delete() del item config.save_config() if (section == 'servers'): Downloader.do.update_server(keyword, None) return True else: return False
[ "def", "del_from_section", "(", "kwargs", ")", ":", "section", "=", "kwargs", ".", "get", "(", "'section'", ",", "''", ")", "if", "(", "section", "in", "(", "'servers'", ",", "'rss'", ",", "'categories'", ")", ")", ":", "keyword", "=", "kwargs", ".", "get", "(", "'keyword'", ")", "if", "keyword", ":", "item", "=", "config", ".", "get_config", "(", "section", ",", "keyword", ")", "if", "item", ":", "item", ".", "delete", "(", ")", "del", "item", "config", ".", "save_config", "(", ")", "if", "(", "section", "==", "'servers'", ")", ":", "Downloader", ".", "do", ".", "update_server", "(", "keyword", ",", "None", ")", "return", "True", "else", ":", "return", "False" ]
remove keyword in section .
train
false
41,183
def dirs_in_library(library, item): return [ancestor for ancestor in ancestry(item) if ancestor.startswith(library)][1:]
[ "def", "dirs_in_library", "(", "library", ",", "item", ")", ":", "return", "[", "ancestor", "for", "ancestor", "in", "ancestry", "(", "item", ")", "if", "ancestor", ".", "startswith", "(", "library", ")", "]", "[", "1", ":", "]" ]
creates a list of ancestor directories in the beets library path .
train
false
41,184
def secgroup_list(profile=None): conn = _auth(profile) return conn.secgroup_list()
[ "def", "secgroup_list", "(", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "secgroup_list", "(", ")" ]
return a list of available security groups cli example: .
train
false
41,185
def admin_translationinline(model, inline_class=admin.StackedInline, **kwargs):
    u"""Return a new inline admin class for editing *model* rows.

    model        -- the translation model class to edit inline
    inline_class -- inline base class (StackedInline by default)
    kwargs       -- extra class attributes for the generated inline

    One extra empty form is shown, capped at one form per configured
    language (``settings.LANGUAGES``).
    """
    kwargs[u'extra'] = 1
    kwargs[u'max_num'] = len(settings.LANGUAGES)
    kwargs[u'model'] = model
    # BUG FIX: *model* is itself a class, so ``model.__class__.__name__``
    # was the METACLASS name (e.g. 'ModelBase'), not the model's own name.
    # ``model.__name__`` gives the intended '<Model>Inline' class name.
    # str() keeps the name a native string on Python 2.
    return type(str(model.__name__ + u'Inline'), (inline_class,), kwargs)
[ "def", "admin_translationinline", "(", "model", ",", "inline_class", "=", "admin", ".", "StackedInline", ",", "**", "kwargs", ")", ":", "kwargs", "[", "u'extra'", "]", "=", "1", "kwargs", "[", "u'max_num'", "]", "=", "len", "(", "settings", ".", "LANGUAGES", ")", "kwargs", "[", "u'model'", "]", "=", "model", "return", "type", "(", "str", "(", "(", "model", ".", "__class__", ".", "__name__", "+", "u'Inline'", ")", ")", ",", "(", "inline_class", ",", ")", ",", "kwargs", ")" ]
returns a new inline type suitable for the django administration .
train
false
41,186
def _validate_username(username):
    u"""Validate a username, raising AccountUsernameInvalid on failure.

    Checks, in order: the value is a string, its length lies within the
    configured bounds, and it is a valid slug (A-Z, a-z, 0-9, -, _).
    """
    if not isinstance(username, basestring):
        raise AccountUsernameInvalid(u'Username must be a string')
    length = len(username)
    if length < USERNAME_MIN_LENGTH:
        raise AccountUsernameInvalid(
            u"Username '{username}' must be at least {min} characters long".format(
                username=username, min=USERNAME_MIN_LENGTH))
    if length > USERNAME_MAX_LENGTH:
        raise AccountUsernameInvalid(
            u"Username '{username}' must be at most {max} characters long".format(
                username=username, max=USERNAME_MAX_LENGTH))
    try:
        validate_slug(username)
    except ValidationError:
        raise AccountUsernameInvalid(
            u"Username '{username}' must contain only A-Z, a-z, 0-9, -, or _ characters")
[ "def", "_validate_username", "(", "username", ")", ":", "if", "(", "not", "isinstance", "(", "username", ",", "basestring", ")", ")", ":", "raise", "AccountUsernameInvalid", "(", "u'Username must be a string'", ")", "if", "(", "len", "(", "username", ")", "<", "USERNAME_MIN_LENGTH", ")", ":", "raise", "AccountUsernameInvalid", "(", "u\"Username '{username}' must be at least {min} characters long\"", ".", "format", "(", "username", "=", "username", ",", "min", "=", "USERNAME_MIN_LENGTH", ")", ")", "if", "(", "len", "(", "username", ")", ">", "USERNAME_MAX_LENGTH", ")", ":", "raise", "AccountUsernameInvalid", "(", "u\"Username '{username}' must be at most {max} characters long\"", ".", "format", "(", "username", "=", "username", ",", "max", "=", "USERNAME_MAX_LENGTH", ")", ")", "try", ":", "validate_slug", "(", "username", ")", "except", "ValidationError", ":", "raise", "AccountUsernameInvalid", "(", "u\"Username '{username}' must contain only A-Z, a-z, 0-9, -, or _ characters\"", ")" ]
validate the username .
train
false
41,187
def identity(x):
    """Return *x* unchanged (the identity function)."""
    return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
returns its argument unchanged (the identity function) .
train
false
41,188
def check_is_fitted(estimator, attributes, msg=None, all_or_any=all):
    """Perform is-fitted validation for *estimator*.

    Verifies that the estimator exposes the given attribute(s) — combined
    with *all_or_any* (``all`` or ``any``) — and raises NotFittedError
    with *msg* otherwise.  Raises TypeError if *estimator* has no ``fit``.
    """
    if msg is None:
        msg = "This %(name)s instance is not fitted yet. Call 'fit' with appropriate arguments before using this method."
    if not hasattr(estimator, 'fit'):
        raise TypeError('%s is not an estimator instance.' % estimator)
    # Normalize a single attribute name into a one-element list.
    if isinstance(attributes, (list, tuple)):
        checks = attributes
    else:
        checks = [attributes]
    present = [hasattr(estimator, attr) for attr in checks]
    if not all_or_any(present):
        raise NotFittedError(msg % {'name': type(estimator).__name__})
[ "def", "check_is_fitted", "(", "estimator", ",", "attributes", ",", "msg", "=", "None", ",", "all_or_any", "=", "all", ")", ":", "if", "(", "msg", "is", "None", ")", ":", "msg", "=", "\"This %(name)s instance is not fitted yet. Call 'fit' with appropriate arguments before using this method.\"", "if", "(", "not", "hasattr", "(", "estimator", ",", "'fit'", ")", ")", ":", "raise", "TypeError", "(", "(", "'%s is not an estimator instance.'", "%", "estimator", ")", ")", "if", "(", "not", "isinstance", "(", "attributes", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "attributes", "=", "[", "attributes", "]", "if", "(", "not", "all_or_any", "(", "[", "hasattr", "(", "estimator", ",", "attr", ")", "for", "attr", "in", "attributes", "]", ")", ")", ":", "raise", "NotFittedError", "(", "(", "msg", "%", "{", "'name'", ":", "type", "(", "estimator", ")", ".", "__name__", "}", ")", ")" ]
perform is_fitted validation for estimator .
train
true
41,191
def _saveModel(model, experimentDir, checkpointLabel, newSerialization=False):
    """Save *model* to a checkpoint directory under the experiment dir.

    newSerialization -- when True use model.writeToCheckpoint(); otherwise
    fall back to the legacy model.save() path.
    """
    target = _getModelCheckpointDir(experimentDir, checkpointLabel)
    if not newSerialization:
        model.save(saveModelDir=target)
    else:
        model.writeToCheckpoint(target)
[ "def", "_saveModel", "(", "model", ",", "experimentDir", ",", "checkpointLabel", ",", "newSerialization", "=", "False", ")", ":", "checkpointDir", "=", "_getModelCheckpointDir", "(", "experimentDir", ",", "checkpointLabel", ")", "if", "newSerialization", ":", "model", ".", "writeToCheckpoint", "(", "checkpointDir", ")", "else", ":", "model", ".", "save", "(", "saveModelDir", "=", "checkpointDir", ")" ]
save model .
train
true
41,192
def save_generator(generator):
    """Register *generator* for later use, initializing its
    removed-content list to empty."""
    _GENERATOR_DB[generator] = list()
[ "def", "save_generator", "(", "generator", ")", ":", "_GENERATOR_DB", "[", "generator", "]", "=", "[", "]" ]
save the generator for later use initialize the removed content list .
train
false