id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
8,358
def get_header(name, raiseError=True):
    """Look up the HTTPHeader instance registered under *name*.

    The lookup key is normalised before the lookup: stripped,
    lower-cased, and with underscores replaced by dashes.  When nothing
    is found and *raiseError* is true, AssertionError is raised;
    otherwise the (possibly falsy) lookup result is returned as-is.
    """
    key = str(name).strip().lower().replace('_', '-')
    header = _headers.get(key)
    if raiseError and not header:
        raise AssertionError("'%s' is an unknown header" % name)
    return header
[ "def", "get_header", "(", "name", ",", "raiseError", "=", "True", ")", ":", "retval", "=", "_headers", ".", "get", "(", "str", "(", "name", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "replace", "(", "'_'", ",", "'-'", ")", ")", "if", "(", "(", "not", "retval", ")", "and", "raiseError", ")", ":", "raise", "AssertionError", "(", "(", "\"'%s' is an unknown header\"", "%", "name", ")", ")", "return", "retval" ]
find the given httpheader instance this function finds the corresponding httpheader for the name provided .
train
false
8,361
def get_probit_endog(true_params, exog, noise_level):
    """Draw an endogenous response consistent with ``true_params``.

    For each row of *exog*, draws a category by inverse-cdf sampling
    against the normal cdf of ``-exog.dot(true_params)``.
    """
    N = exog.shape[0]
    Xdotparams = sp.dot(exog, true_params)
    # NOTE(review): `noise` is computed but never used below -- the draw
    # is based solely on the noiseless cdf.  Confirm whether the noise
    # term was meant to perturb Xdotparams (removing the line would also
    # change downstream random draws, since randn advances the RNG).
    noise = (noise_level * sp.randn(*Xdotparams.shape))
    cdf = stats.norm._cdf((- Xdotparams))
    endog = sp.zeros(N)
    for i in range(N):
        # place a uniform draw within this row's cdf values
        endog[i] = sp.searchsorted(cdf[i, :], sp.rand())
    return endog
[ "def", "get_probit_endog", "(", "true_params", ",", "exog", ",", "noise_level", ")", ":", "N", "=", "exog", ".", "shape", "[", "0", "]", "Xdotparams", "=", "sp", ".", "dot", "(", "exog", ",", "true_params", ")", "noise", "=", "(", "noise_level", "*", "sp", ".", "randn", "(", "*", "Xdotparams", ".", "shape", ")", ")", "cdf", "=", "stats", ".", "norm", ".", "_cdf", "(", "(", "-", "Xdotparams", ")", ")", "endog", "=", "sp", ".", "zeros", "(", "N", ")", "for", "i", "in", "range", "(", "N", ")", ":", "endog", "[", "i", "]", "=", "sp", ".", "searchsorted", "(", "cdf", "[", "i", ",", ":", "]", ",", "sp", ".", "rand", "(", ")", ")", "return", "endog" ]
gets an endogenous response that is consistent with the true_params .
train
false
8,362
def _parse_step_syslog_from_log4j_records(records):
    """Extract step information from parsed log4j records.

    Scans *records* (dicts with 'message', 'num_lines', 'start_line')
    and collects counters, the output directory, application/job ids,
    and a list of task-attempt errors into a single result dict.
    """
    result = {}
    simple_patterns = (
        (_OUTPUT_DIRECTORY_RE, 'output_dir'),
        (_SUBMITTED_APPLICATION_RE, 'application_id'),
        (_RUNNING_JOB_RE, 'job_id'),
    )
    for record in records:
        message = record['message']
        if _is_counter_log4j_record(record):
            result['counters'] = _parse_indented_counters(
                message.splitlines())
            continue
        matched = False
        for regex, key in simple_patterns:
            m = regex.match(message)
            if m:
                result[key] = m.group(key)
                matched = True
                break
        if matched:
            continue
        m = _TASK_ATTEMPT_FAILED_RE.match(message)
        if m:
            # everything after the first line is the error detail; fall
            # back to the whole message when that part is empty
            error_str = '\n'.join(message.splitlines()[1:]) or message
            error = dict(
                attempt_id=m.group('attempt_id'),
                hadoop_error=dict(
                    message=error_str,
                    num_lines=record['num_lines'],
                    start_line=record['start_line'],
                ),
            )
            result.setdefault('errors', []).append(error)
    return result
[ "def", "_parse_step_syslog_from_log4j_records", "(", "records", ")", ":", "result", "=", "{", "}", "for", "record", "in", "records", ":", "message", "=", "record", "[", "'message'", "]", "if", "_is_counter_log4j_record", "(", "record", ")", ":", "result", "[", "'counters'", "]", "=", "_parse_indented_counters", "(", "message", ".", "splitlines", "(", ")", ")", "continue", "m", "=", "_OUTPUT_DIRECTORY_RE", ".", "match", "(", "message", ")", "if", "m", ":", "result", "[", "'output_dir'", "]", "=", "m", ".", "group", "(", "'output_dir'", ")", "continue", "m", "=", "_SUBMITTED_APPLICATION_RE", ".", "match", "(", "message", ")", "if", "m", ":", "result", "[", "'application_id'", "]", "=", "m", ".", "group", "(", "'application_id'", ")", "continue", "m", "=", "_RUNNING_JOB_RE", ".", "match", "(", "message", ")", "if", "m", ":", "result", "[", "'job_id'", "]", "=", "m", ".", "group", "(", "'job_id'", ")", "continue", "m", "=", "_TASK_ATTEMPT_FAILED_RE", ".", "match", "(", "message", ")", "if", "m", ":", "error_str", "=", "'\\n'", ".", "join", "(", "message", ".", "splitlines", "(", ")", "[", "1", ":", "]", ")", "if", "(", "not", "error_str", ")", ":", "error_str", "=", "message", "error", "=", "dict", "(", "attempt_id", "=", "m", ".", "group", "(", "'attempt_id'", ")", ",", "hadoop_error", "=", "dict", "(", "message", "=", "error_str", ",", "num_lines", "=", "record", "[", "'num_lines'", "]", ",", "start_line", "=", "record", "[", "'start_line'", "]", ")", ")", "result", ".", "setdefault", "(", "'errors'", ",", "[", "]", ")", "result", "[", "'errors'", "]", ".", "append", "(", "error", ")", "return", "result" ]
pulls errors .
train
false
8,363
@hgcommand
def release_apply(ui, repo, clname, **opts):
    """Apply a CL to the release branch.

    Copies a previously committed change from the main branch onto the
    active release branch: switches the working copy to the release
    branch first (refusing if there are uncommitted local changes),
    then backports the CL.  On any failure during the backport, the
    working copy is switched back to the default branch.

    Raises:
        hg_util.Abort: if there is no active release branch, there are
            uncommitted local changes, or a clean/backport step fails.
    """
    c = repo[None]
    if not releaseBranch:
        raise hg_util.Abort('no active release branches')
    if c.branch() != releaseBranch:
        if c.modified() or c.added() or c.removed():
            raise hg_util.Abort('uncommitted local changes - cannot switch branches')
        err = hg_clean(repo, releaseBranch)
        if err:
            raise hg_util.Abort(err)
    try:
        err = clpatch_or_undo(ui, repo, clname, opts, mode='backport')
        if err:
            raise hg_util.Abort(err)
    except Exception:
        # restore the working copy before propagating; a bare `raise`
        # preserves the original traceback (the original `raise e`
        # re-raised from this frame, losing it)
        hg_clean(repo, 'default')
        raise
[ "@", "hgcommand", "def", "release_apply", "(", "ui", ",", "repo", ",", "clname", ",", "**", "opts", ")", ":", "c", "=", "repo", "[", "None", "]", "if", "(", "not", "releaseBranch", ")", ":", "raise", "hg_util", ".", "Abort", "(", "'no active release branches'", ")", "if", "(", "c", ".", "branch", "(", ")", "!=", "releaseBranch", ")", ":", "if", "(", "c", ".", "modified", "(", ")", "or", "c", ".", "added", "(", ")", "or", "c", ".", "removed", "(", ")", ")", ":", "raise", "hg_util", ".", "Abort", "(", "'uncommitted local changes - cannot switch branches'", ")", "err", "=", "hg_clean", "(", "repo", ",", "releaseBranch", ")", "if", "err", ":", "raise", "hg_util", ".", "Abort", "(", "err", ")", "try", ":", "err", "=", "clpatch_or_undo", "(", "ui", ",", "repo", ",", "clname", ",", "opts", ",", "mode", "=", "'backport'", ")", "if", "err", ":", "raise", "hg_util", ".", "Abort", "(", "err", ")", "except", "Exception", "as", "e", ":", "hg_clean", "(", "repo", ",", "'default'", ")", "raise", "e" ]
apply a cl to the release branch creates a new cl copying a previously committed change from the main branch to the release branch .
train
false
8,364
def _key_splitting(rect_dict, keys, values, key_subset, horizontal, gap):
    """Subdivide every rectangle whose name begins with *key_subset*.

    *rect_dict* maps name tuples to (x, y, w, h) rectangles.  Matching
    rectangles are split with ``_split_rect`` into one piece per key in
    *keys* (each stored under ``name + (key,)``); all other rectangles
    are copied through unchanged.
    """
    result = OrderedDict()
    prefix_len = len(key_subset)
    for name, (x, y, w, h) in iteritems(rect_dict):
        if name[:prefix_len] != key_subset:
            result[name] = (x, y, w, h)
            continue
        pieces = _split_rect(x, y, w, h, values, horizontal, gap)
        for key, rect in zip(keys, pieces):
            result[name + (key,)] = rect
    return result
[ "def", "_key_splitting", "(", "rect_dict", ",", "keys", ",", "values", ",", "key_subset", ",", "horizontal", ",", "gap", ")", ":", "result", "=", "OrderedDict", "(", ")", "L", "=", "len", "(", "key_subset", ")", "for", "(", "name", ",", "(", "x", ",", "y", ",", "w", ",", "h", ")", ")", "in", "iteritems", "(", "rect_dict", ")", ":", "if", "(", "key_subset", "==", "name", "[", ":", "L", "]", ")", ":", "divisions", "=", "_split_rect", "(", "x", ",", "y", ",", "w", ",", "h", ",", "values", ",", "horizontal", ",", "gap", ")", "for", "(", "key", ",", "rect", ")", "in", "zip", "(", "keys", ",", "divisions", ")", ":", "result", "[", "(", "name", "+", "(", "key", ",", ")", ")", "]", "=", "rect", "else", ":", "result", "[", "name", "]", "=", "(", "x", ",", "y", ",", "w", ",", "h", ")", "return", "result" ]
given a dictionary where each entry is a rectangle .
train
false
8,365
def timestamp_utc(value):
    """Template filter: render *value* (a unix timestamp) as UTC date/time.

    When *value* cannot be converted/formatted (ValueError/TypeError),
    it is returned unchanged.
    """
    try:
        formatted = dt_util.utc_from_timestamp(value).strftime(DATE_STR_FORMAT)
    except (ValueError, TypeError):
        # not a usable timestamp -- hand the input back untouched
        return value
    return formatted
[ "def", "timestamp_utc", "(", "value", ")", ":", "try", ":", "return", "dt_util", ".", "utc_from_timestamp", "(", "value", ")", ".", "strftime", "(", "DATE_STR_FORMAT", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "value" ]
filter to convert given timestamp to utc date/time .
train
false
8,367
def password_changed(password, user=None, password_validators=None):
    """Notify validators that the given user's password has changed.

    Each validator that implements a ``password_changed(password, user)``
    method is called; validators without the hook are skipped.

    :param password: the new raw password
    :param user: the user object, or None
    :param password_validators: validators to notify; defaults to the
        configured default validators
    """
    if password_validators is None:
        password_validators = get_default_password_validators()
    for validator in password_validators:
        # the hook is optional -- fall back to a no-op.  (Renamed the
        # local so it no longer shadows this function's own name.)
        notify = getattr(validator, 'password_changed', lambda *a: None)
        notify(password, user)
[ "def", "password_changed", "(", "password", ",", "user", "=", "None", ",", "password_validators", "=", "None", ")", ":", "if", "(", "password_validators", "is", "None", ")", ":", "password_validators", "=", "get_default_password_validators", "(", ")", "for", "validator", "in", "password_validators", ":", "password_changed", "=", "getattr", "(", "validator", ",", "'password_changed'", ",", "(", "lambda", "*", "a", ":", "None", ")", ")", "password_changed", "(", "password", ",", "user", ")" ]
inform all validators that have implemented a password_changed() method that the password has been changed .
train
false
8,368
@handle_response_format
@treeio_login_required
def task_time_slot_view(request, time_slot_id, response_format='html'):
    """Task time slot view page.

    Access is granted when the user has permission on either the time
    slot itself or its parent task; otherwise a denial page is returned.
    """
    task_time_slot = get_object_or_404(TaskTimeSlot, pk=time_slot_id)
    task = task_time_slot.task
    profile = request.user.profile
    allowed = (profile.has_permission(task_time_slot)
               or profile.has_permission(task))
    if not allowed:
        return user_denied(
            request,
            message="You don't have access to this Task Time Slot")
    context = _get_default_context(request)
    context.update({'task_time_slot': task_time_slot, 'task': task})
    return render_to_response('projects/task_time_view', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "task_time_slot_view", "(", "request", ",", "time_slot_id", ",", "response_format", "=", "'html'", ")", ":", "task_time_slot", "=", "get_object_or_404", "(", "TaskTimeSlot", ",", "pk", "=", "time_slot_id", ")", "task", "=", "task_time_slot", ".", "task", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "task_time_slot", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "task", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Task Time Slot\"", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'task_time_slot'", ":", "task_time_slot", ",", "'task'", ":", "task", "}", ")", "return", "render_to_response", "(", "'projects/task_time_view'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
task time slot view page .
train
false
8,369
def quad_potential(C, is_cov, as_cov):
    """Build a quadratic potential object from the matrix ``C``.

    Parameters
    ----------
    C : arraylike
        1-D vector, dense 2-D matrix, or sparse matrix.
    is_cov : bool
        Whether ``C`` is to be taken as a covariance (vs precision).
    as_cov : bool
        Whether the resulting potential treats its input as covariance.

    Returns
    -------
    A QuadPotential* instance matching the type/orientation of ``C``.

    Raises
    ------
    ImportError: sparse input without scikits.sparse available.
    ValueError: sparse precision matrices are not supported.
    """
    if issparse(C):
        if not chol_available:
            raise ImportError('Sparse mass matrices require scikits.sparse')
        if is_cov != as_cov:
            return QuadPotential_Sparse(C)
        # fixed typo in the message ("precission" -> "precision")
        raise ValueError('Sparse precision matrices are not supported')
    partial_check_positive_definite(C)
    if C.ndim == 1:
        if is_cov != as_cov:
            return ElemWiseQuadPotential(C)
        return ElemWiseQuadPotential(1.0 / C)
    if is_cov != as_cov:
        return QuadPotential(C)
    return QuadPotential_Inv(C)
[ "def", "quad_potential", "(", "C", ",", "is_cov", ",", "as_cov", ")", ":", "if", "issparse", "(", "C", ")", ":", "if", "(", "not", "chol_available", ")", ":", "raise", "ImportError", "(", "'Sparse mass matrices require scikits.sparse'", ")", "if", "(", "is_cov", "!=", "as_cov", ")", ":", "return", "QuadPotential_Sparse", "(", "C", ")", "else", ":", "raise", "ValueError", "(", "'Sparse precission matrices are not supported'", ")", "partial_check_positive_definite", "(", "C", ")", "if", "(", "C", ".", "ndim", "==", "1", ")", ":", "if", "(", "is_cov", "!=", "as_cov", ")", ":", "return", "ElemWiseQuadPotential", "(", "C", ")", "else", ":", "return", "ElemWiseQuadPotential", "(", "(", "1.0", "/", "C", ")", ")", "elif", "(", "is_cov", "!=", "as_cov", ")", ":", "return", "QuadPotential", "(", "C", ")", "else", ":", "return", "QuadPotential_Inv", "(", "C", ")" ]
parameters c : arraylike .
train
false
8,370
def decode_return_list(codec='ascii'):
    """Decorator factory: decode each item of a list-returning function.

    Every element of the wrapped function's returned list is decoded
    from bytes with *codec* (default ``'ascii'``).

    :param codec: codec name passed to ``bytes.decode``
    :returns: a decorator
    """
    import functools

    def outer(f):
        # functools.wraps preserves the wrapped function's name/docs,
        # which the original omitted
        @functools.wraps(f)
        def wrap(*args, **kwargs):
            return [res.decode(codec) for res in f(*args, **kwargs)]
        return wrap
    return outer
[ "def", "decode_return_list", "(", "codec", "=", "'ascii'", ")", ":", "def", "outer", "(", "f", ")", ":", "def", "wrap", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "[", "res", ".", "decode", "(", "codec", ")", "for", "res", "in", "f", "(", "*", "args", ",", "**", "kwargs", ")", "]", "return", "wrap", "return", "outer" ]
decodes a list return value .
train
true
8,371
def follow_user(context, data_dict):
    """Add the current (logged-in) user as a follower of another user.

    :param context: must contain 'model' and the acting 'user' name
    :param data_dict: must contain the 'id' of the user to follow
    :raises logic.NotAuthorized: if no valid user is logged in
    :raises ValidationError: on schema errors, on self-follow, or when
        the user already follows the target
    :returns: the new follower relationship, dictized
    """
    if ('user' not in context):
        raise logic.NotAuthorized(_('You must be logged in to follow users'))
    model = context['model']
    # NOTE(review): `session` is looked up but never used below
    session = context['session']
    userobj = model.User.get(context['user'])
    if (not userobj):
        raise logic.NotAuthorized(_('You must be logged in to follow users'))
    schema = (context.get('schema') or ckan.logic.schema.default_follow_user_schema())
    (validated_data_dict, errors) = _validate(data_dict, schema, context)
    if errors:
        model.Session.rollback()
        raise ValidationError(errors)
    # a user may not follow themselves
    if (userobj.id == validated_data_dict['id']):
        message = _('You cannot follow yourself')
        raise ValidationError({'message': message}, error_summary=message)
    # following twice is an error, reported with the target's display name
    if model.UserFollowingUser.is_following(userobj.id, validated_data_dict['id']):
        followeduserobj = model.User.get(validated_data_dict['id'])
        name = followeduserobj.display_name
        message = _('You are already following {0}').format(name)
        raise ValidationError({'message': message}, error_summary=message)
    follower = model_save.follower_dict_save(validated_data_dict, context, model.UserFollowingUser)
    if (not context.get('defer_commit')):
        model.repo.commit()
    log.debug(u'User {follower} started following user {object}'.format(follower=follower.follower_id, object=follower.object_id))
    return model_dictize.user_following_user_dictize(follower, context)
[ "def", "follow_user", "(", "context", ",", "data_dict", ")", ":", "if", "(", "'user'", "not", "in", "context", ")", ":", "raise", "logic", ".", "NotAuthorized", "(", "_", "(", "'You must be logged in to follow users'", ")", ")", "model", "=", "context", "[", "'model'", "]", "session", "=", "context", "[", "'session'", "]", "userobj", "=", "model", ".", "User", ".", "get", "(", "context", "[", "'user'", "]", ")", "if", "(", "not", "userobj", ")", ":", "raise", "logic", ".", "NotAuthorized", "(", "_", "(", "'You must be logged in to follow users'", ")", ")", "schema", "=", "(", "context", ".", "get", "(", "'schema'", ")", "or", "ckan", ".", "logic", ".", "schema", ".", "default_follow_user_schema", "(", ")", ")", "(", "validated_data_dict", ",", "errors", ")", "=", "_validate", "(", "data_dict", ",", "schema", ",", "context", ")", "if", "errors", ":", "model", ".", "Session", ".", "rollback", "(", ")", "raise", "ValidationError", "(", "errors", ")", "if", "(", "userobj", ".", "id", "==", "validated_data_dict", "[", "'id'", "]", ")", ":", "message", "=", "_", "(", "'You cannot follow yourself'", ")", "raise", "ValidationError", "(", "{", "'message'", ":", "message", "}", ",", "error_summary", "=", "message", ")", "if", "model", ".", "UserFollowingUser", ".", "is_following", "(", "userobj", ".", "id", ",", "validated_data_dict", "[", "'id'", "]", ")", ":", "followeduserobj", "=", "model", ".", "User", ".", "get", "(", "validated_data_dict", "[", "'id'", "]", ")", "name", "=", "followeduserobj", ".", "display_name", "message", "=", "_", "(", "'You are already following {0}'", ")", ".", "format", "(", "name", ")", "raise", "ValidationError", "(", "{", "'message'", ":", "message", "}", ",", "error_summary", "=", "message", ")", "follower", "=", "model_save", ".", "follower_dict_save", "(", "validated_data_dict", ",", "context", ",", "model", ".", "UserFollowingUser", ")", "if", "(", "not", "context", ".", "get", "(", "'defer_commit'", ")", ")", ":", "model", ".", 
"repo", ".", "commit", "(", ")", "log", ".", "debug", "(", "u'User {follower} started following user {object}'", ".", "format", "(", "follower", "=", "follower", ".", "follower_id", ",", "object", "=", "follower", ".", "object_id", ")", ")", "return", "model_dictize", ".", "user_following_user_dictize", "(", "follower", ",", "context", ")" ]
adds the current user as follower of the given user .
train
false
8,372
def flatten(class_dict):
    """Yield one (cls, fields) pair per entry in each class's list.

    *class_dict* maps a class to a list of field groups; each group is
    emitted as its own (cls, fields) tuple.
    """
    for klass, all_fields in class_dict.items():
        for field_group in all_fields:
            yield (klass, field_group)
[ "def", "flatten", "(", "class_dict", ")", ":", "for", "(", "cls", ",", "fields_list", ")", "in", "class_dict", ".", "items", "(", ")", ":", "for", "fields", "in", "fields_list", ":", "(", "yield", "(", "cls", ",", "fields", ")", ")" ]
flatten -> list returns a single .
train
false
8,373
def compare_parser_results(left, right):
    """Generate a textual report on the differences between two results.

    Each argument is rendered to a list of strings -- numbered lines
    for lists, a single ``i)``-prefixed line otherwise -- and the two
    renderings are compared with difflib.

    :returns: an iterator of Differ-style diff lines
    """
    def to_los(obj):
        'Generate a list of strings representation of object.'
        if type(obj) is list:
            # enumerate() replaces itertools.izip(itertools.count(), obj),
            # which does not exist on Python 3
            return [('%d) %s' % pair) for pair in enumerate(obj)]
        else:
            return [('i) %s' % obj)]
    return difflib.Differ().compare(to_los(left), to_los(right))
[ "def", "compare_parser_results", "(", "left", ",", "right", ")", ":", "def", "to_los", "(", "obj", ")", ":", "if", "(", "type", "(", "obj", ")", "is", "list", ")", ":", "return", "[", "(", "'%d) %s'", "%", "pair", ")", "for", "pair", "in", "itertools", ".", "izip", "(", "itertools", ".", "count", "(", ")", ",", "obj", ")", "]", "else", ":", "return", "[", "(", "'i) %s'", "%", "obj", ")", "]", "return", "difflib", ".", "Differ", "(", ")", ".", "compare", "(", "to_los", "(", "left", ")", ",", "to_los", "(", "right", ")", ")" ]
generates a textual report on the differences between .
train
false
8,374
def words_to_word_ids(data=None, word_to_id=None, unk_key='UNK'):
    """Map a sequence of words to their ids.

    Words missing from *word_to_id* (or explicitly mapped to None) are
    replaced by the id of *unk_key*.

    :param data: iterable of words (defaults to empty)
    :param word_to_id: mapping word -> id (defaults to empty)
    :param unk_key: key whose id stands in for unknown words
    :returns: list of word ids
    """
    # None sentinels replace the original mutable default arguments
    # ([] and {}), which were shared between calls
    if data is None:
        data = []
    if word_to_id is None:
        word_to_id = {}
    word_ids = []
    for word in data:
        word_id = word_to_id.get(word)
        if word_id is None:
            word_id = word_to_id[unk_key]
        word_ids.append(word_id)
    return word_ids
[ "def", "words_to_word_ids", "(", "data", "=", "[", "]", ",", "word_to_id", "=", "{", "}", ",", "unk_key", "=", "'UNK'", ")", ":", "word_ids", "=", "[", "]", "for", "word", "in", "data", ":", "if", "(", "word_to_id", ".", "get", "(", "word", ")", "is", "not", "None", ")", ":", "word_ids", ".", "append", "(", "word_to_id", "[", "word", "]", ")", "else", ":", "word_ids", ".", "append", "(", "word_to_id", "[", "unk_key", "]", ")", "return", "word_ids" ]
given a context in list format and the vocabulary .
train
true
8,375
def _mutuallyExclusiveArguments(argumentPairs):
    """Decorator factory: forbid passing both arguments of any pair.

    :param argumentPairs: iterable of (name, name) pairs; if a call to
        the decorated function supplies both names of any pair (as
        reported by the external ``_passed`` helper), TypeError is
        raised before the wrapped function runs.
    """
    def wrapper(wrappee):
        # NOTE(review): inspect.getargspec was removed in Python 3.11;
        # migrating to getfullargspec requires confirming what shape
        # `_passed` expects.
        argspec = inspect.getargspec(wrappee)
        @wraps(wrappee)
        def wrapped(*args, **kwargs):
            # names actually supplied in this call
            arguments = _passed(argspec, args, kwargs)
            for (this, that) in argumentPairs:
                if ((this in arguments) and (that in arguments)):
                    raise TypeError('nope')
            return wrappee(*args, **kwargs)
        return wrapped
    return wrapper
[ "def", "_mutuallyExclusiveArguments", "(", "argumentPairs", ")", ":", "def", "wrapper", "(", "wrappee", ")", ":", "argspec", "=", "inspect", ".", "getargspec", "(", "wrappee", ")", "@", "wraps", "(", "wrappee", ")", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "arguments", "=", "_passed", "(", "argspec", ",", "args", ",", "kwargs", ")", "for", "(", "this", ",", "that", ")", "in", "argumentPairs", ":", "if", "(", "(", "this", "in", "arguments", ")", "and", "(", "that", "in", "arguments", ")", ")", ":", "raise", "TypeError", "(", "'nope'", ")", "return", "wrappee", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped", "return", "wrapper" ]
decorator which causes its decoratee to raise a l{typeerror} if two of the given arguments are passed at the same time .
train
false
8,377
def _is_list_like(obj):
    """Return whether *obj* is iterable and not string-like."""
    not_stringy = not is_string_like(obj)
    return not_stringy and iterable(obj)
[ "def", "_is_list_like", "(", "obj", ")", ":", "return", "(", "(", "not", "is_string_like", "(", "obj", ")", ")", "and", "iterable", "(", "obj", ")", ")" ]
returns whether the obj is iterable and not a string .
train
false
8,378
def order_tables_from_mysql(db=None, user=None, password=None, dbname=None):
    """Return the database's tables sorted in constraint order.

    Thin convenience wrapper: builds an OrderTablesFromMySQL helper
    from the connection parameters and delegates to its order_tables().
    """
    tool = OrderTablesFromMySQL(db=db, user=user,
                                password=password, dbname=dbname)
    return tool.order_tables()
[ "def", "order_tables_from_mysql", "(", "db", "=", "None", ",", "user", "=", "None", ",", "password", "=", "None", ",", "dbname", "=", "None", ")", ":", "order_tool", "=", "OrderTablesFromMySQL", "(", "db", "=", "db", ",", "user", "=", "user", ",", "password", "=", "password", ",", "dbname", "=", "dbname", ")", "return", "order_tool", ".", "order_tables", "(", ")" ]
sorts tables in constraint order .
train
false
8,379
def get_promo_img_url(base_url_format, obj, size,
                      default_format='default-{size}.png'):
    """Build the promo-image URL for *obj*.

    The object's pk is split so that everything before its last (up to)
    three digits forms a shard prefix, 0 when there is nothing before
    them.  The URL is ``base_url_format % (shard, pk, size, suffix)``
    where the suffix is the promo image hash, or 'never' when unset.

    NOTE(review): *default_format* is accepted but never used here.
    """
    split_id = re.match('((\\d*?)\\d{1,3})$', str(obj.pk))
    shard = split_id.group(2) or 0
    suffix = obj.promo_img_hash or 'never'
    return base_url_format % (shard, obj.pk, size, suffix)
[ "def", "get_promo_img_url", "(", "base_url_format", ",", "obj", ",", "size", ",", "default_format", "=", "'default-{size}.png'", ")", ":", "split_id", "=", "re", ".", "match", "(", "'((\\\\d*?)\\\\d{1,3})$'", ",", "str", "(", "obj", ".", "pk", ")", ")", "suffix", "=", "(", "obj", ".", "promo_img_hash", "or", "'never'", ")", "return", "(", "base_url_format", "%", "(", "(", "split_id", ".", "group", "(", "2", ")", "or", "0", ")", ",", "obj", ".", "pk", ",", "size", ",", "suffix", ")", ")" ]
returns either the promo img url for a given .
train
false
8,380
def fixup_parse_tree(cls_node):
    """Normalize a classdef node so its body lives in a ``suite`` node.

    One-line classes don't get a suite in the parse tree, so this moves
    every child after the colon into a freshly created suite appended
    to the classdef.

    :raises ValueError: if the classdef has neither a suite nor a colon
    """
    for node in cls_node.children:
        if (node.type == syms.suite):
            # already has a suite -- nothing to normalize
            return
    for (i, node) in enumerate(cls_node.children):
        if (node.type == token.COLON):
            break
    else:
        raise ValueError("No class suite and no ':'!")
    # move everything after the colon into a new suite node
    suite = Node(syms.suite, [])
    while cls_node.children[(i + 1):]:
        move_node = cls_node.children[(i + 1)]
        suite.append_child(move_node.clone())
        move_node.remove()
    cls_node.append_child(suite)
    # NOTE(review): this final assignment has no visible effect here
    node = suite
[ "def", "fixup_parse_tree", "(", "cls_node", ")", ":", "for", "node", "in", "cls_node", ".", "children", ":", "if", "(", "node", ".", "type", "==", "syms", ".", "suite", ")", ":", "return", "for", "(", "i", ",", "node", ")", "in", "enumerate", "(", "cls_node", ".", "children", ")", ":", "if", "(", "node", ".", "type", "==", "token", ".", "COLON", ")", ":", "break", "else", ":", "raise", "ValueError", "(", "\"No class suite and no ':'!\"", ")", "suite", "=", "Node", "(", "syms", ".", "suite", ",", "[", "]", ")", "while", "cls_node", ".", "children", "[", "(", "i", "+", "1", ")", ":", "]", ":", "move_node", "=", "cls_node", ".", "children", "[", "(", "i", "+", "1", ")", "]", "suite", ".", "append_child", "(", "move_node", ".", "clone", "(", ")", ")", "move_node", ".", "remove", "(", ")", "cls_node", ".", "append_child", "(", "suite", ")", "node", "=", "suite" ]
one-line classes dont get a suite in the parse tree so we add one to normalize the tree .
train
true
8,383
def win_cmd(command, **kwargs):
    """Run *command* (shell=True) against a Windows box, streaming output.

    Recognized kwargs:
        logging_command: alternate string logged instead of *command*
            (presumably a sanitized form -- confirm with callers); the
            real command is logged when this is None
        display_ssh_output: stream stdout/stderr live (default True)

    :returns: the process return code, or 1 when execution failed
    """
    logging_command = kwargs.get('logging_command', None)
    try:
        proc = NonBlockingPopen(command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE, stream_stds=kwargs.get('display_ssh_output', True), logging_command=logging_command)
        if (logging_command is None):
            log.debug("Executing command(PID %s): '%s'", proc.pid, command)
        else:
            log.debug("Executing command(PID %s): '%s'", proc.pid, logging_command)
        proc.poll_and_read_until_finish()
        proc.communicate()
        return proc.returncode
    except Exception as err:
        # deliberately broad: any failure is logged (with traceback) and
        # reported to the caller as return code 1
        log.error("Failed to execute command '{0}': {1}\n".format(logging_command, err), exc_info=True)
        return 1
[ "def", "win_cmd", "(", "command", ",", "**", "kwargs", ")", ":", "logging_command", "=", "kwargs", ".", "get", "(", "'logging_command'", ",", "None", ")", "try", ":", "proc", "=", "NonBlockingPopen", "(", "command", ",", "shell", "=", "True", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stream_stds", "=", "kwargs", ".", "get", "(", "'display_ssh_output'", ",", "True", ")", ",", "logging_command", "=", "logging_command", ")", "if", "(", "logging_command", "is", "None", ")", ":", "log", ".", "debug", "(", "\"Executing command(PID %s): '%s'\"", ",", "proc", ".", "pid", ",", "command", ")", "else", ":", "log", ".", "debug", "(", "\"Executing command(PID %s): '%s'\"", ",", "proc", ".", "pid", ",", "logging_command", ")", "proc", ".", "poll_and_read_until_finish", "(", ")", "proc", ".", "communicate", "(", ")", "return", "proc", ".", "returncode", "except", "Exception", "as", "err", ":", "log", ".", "error", "(", "\"Failed to execute command '{0}': {1}\\n\"", ".", "format", "(", "logging_command", ",", "err", ")", ",", "exc_info", "=", "True", ")", "return", "1" ]
wrapper for commands to be run against windows boxes .
train
true
8,384
def _throttle(session, maxlim=None, timeout=None, storage=_LATEST_FAILED_LOGINS):
    """Track failed logins per address and report whether to throttle.

    Returns True when the address already has *maxlim* or more recorded
    failures and the latest one is younger than *timeout* seconds.
    Otherwise either the window has expired (the list is reset) or the
    current failure time is recorded, and False is returned.  When
    *maxlim* and *timeout* are not both set, nothing is recorded and
    None is returned.
    """
    address = session.address
    if isinstance(address, tuple):
        # (host, port) tuples reduce to the host part
        address = address[0]
    now = time.time()
    if not (maxlim and timeout):
        return
    latest_fails = storage[address]
    if latest_fails and len(latest_fails) >= maxlim:
        if now - latest_fails[-1] < timeout:
            return True
        # throttle window expired -- start counting afresh
        storage[address] = []
        return False
    storage[address].append(time.time())
    return False
[ "def", "_throttle", "(", "session", ",", "maxlim", "=", "None", ",", "timeout", "=", "None", ",", "storage", "=", "_LATEST_FAILED_LOGINS", ")", ":", "address", "=", "session", ".", "address", "if", "isinstance", "(", "address", ",", "tuple", ")", ":", "address", "=", "address", "[", "0", "]", "now", "=", "time", ".", "time", "(", ")", "if", "(", "maxlim", "and", "timeout", ")", ":", "latest_fails", "=", "storage", "[", "address", "]", "if", "(", "latest_fails", "and", "(", "len", "(", "latest_fails", ")", ">=", "maxlim", ")", ")", ":", "if", "(", "(", "now", "-", "latest_fails", "[", "(", "-", "1", ")", "]", ")", "<", "timeout", ")", ":", "return", "True", "else", ":", "storage", "[", "address", "]", "=", "[", "]", "return", "False", "else", ":", "storage", "[", "address", "]", ".", "append", "(", "time", ".", "time", "(", ")", ")", "return", "False" ]
this will check the sessions address against the _latest_logins dictionary to check they havent spammed too many fails recently .
train
false
8,385
def commit():
    """Perform the database commit itself and reset the dirty flag."""
    connection._commit()
    set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
does the commit itself and resets the dirty flag .
train
false
8,388
def configure_callback(conf):
    """collectd configuration callback.

    Reads Host/Port/Verbose from the plugin configuration (warning on
    unknown keys) and rebuilds the metrics URL from host and port.
    """
    global MARATHON_HOST, MARATHON_PORT, MARATHON_URL, VERBOSE_LOGGING
    for child in conf.children:
        key = child.key
        if key == 'Host':
            MARATHON_HOST = child.values[0]
        elif key == 'Port':
            MARATHON_PORT = int(child.values[0])
        elif key == 'Verbose':
            VERBOSE_LOGGING = bool(child.values[0])
        else:
            collectd.warning('marathon plugin: Unknown config key: %s.' % key)
    MARATHON_URL = 'http://' + MARATHON_HOST + ':' + str(MARATHON_PORT) + '/metrics'
    log_verbose('Configured with host=%s, port=%s, url=%s'
                % (MARATHON_HOST, MARATHON_PORT, MARATHON_URL))
[ "def", "configure_callback", "(", "conf", ")", ":", "global", "MARATHON_HOST", ",", "MARATHON_PORT", ",", "MARATHON_URL", ",", "VERBOSE_LOGGING", "for", "node", "in", "conf", ".", "children", ":", "if", "(", "node", ".", "key", "==", "'Host'", ")", ":", "MARATHON_HOST", "=", "node", ".", "values", "[", "0", "]", "elif", "(", "node", ".", "key", "==", "'Port'", ")", ":", "MARATHON_PORT", "=", "int", "(", "node", ".", "values", "[", "0", "]", ")", "elif", "(", "node", ".", "key", "==", "'Verbose'", ")", ":", "VERBOSE_LOGGING", "=", "bool", "(", "node", ".", "values", "[", "0", "]", ")", "else", ":", "collectd", ".", "warning", "(", "(", "'marathon plugin: Unknown config key: %s.'", "%", "node", ".", "key", ")", ")", "MARATHON_URL", "=", "(", "(", "(", "(", "'http://'", "+", "MARATHON_HOST", ")", "+", "':'", ")", "+", "str", "(", "MARATHON_PORT", ")", ")", "+", "'/metrics'", ")", "log_verbose", "(", "(", "'Configured with host=%s, port=%s, url=%s'", "%", "(", "MARATHON_HOST", ",", "MARATHON_PORT", ",", "MARATHON_URL", ")", ")", ")" ]
received configuration information .
train
false
8,389
def levelise(level):
    """Translate *level* into a (deep, sublevels) pair.

    Falsy values mean no deep merging; True means merge everywhere; an
    int N means merge with N-1 further levels allowed; any other value
    is treated as a sequence whose first element is the deep flag and
    whose remainder is the sublevel spec.

    :raises: whatever parsing the sequence form raises (logged first)
    """
    if not level:
        return (False, False)
    if level is True:
        return (True, True)
    if isinstance(level, int):
        return (True, level - 1)
    try:
        deep_flag = int(level[0])
        remainder = level[1:]
    except Exception as error:
        log.warning(error)
        raise
    return (bool(deep_flag), remainder)
[ "def", "levelise", "(", "level", ")", ":", "if", "(", "not", "level", ")", ":", "return", "(", "False", ",", "False", ")", "if", "(", "level", "is", "True", ")", ":", "return", "(", "True", ",", "True", ")", "if", "isinstance", "(", "level", ",", "int", ")", ":", "return", "(", "True", ",", "(", "level", "-", "1", ")", ")", "try", ":", "(", "deep", ",", "subs", ")", "=", "(", "int", "(", "level", "[", "0", "]", ")", ",", "level", "[", "1", ":", "]", ")", "return", "(", "bool", "(", "deep", ")", ",", "subs", ")", "except", "Exception", "as", "error", ":", "log", ".", "warning", "(", "error", ")", "raise" ]
describe which levels are allowed to do deep merging .
train
true
8,390
def consume_items(queue, callback, verbose=True):
    """Consume items from an AMQP queue, invoking *callback* per message.

    A lighter-weight version of handle_items built on basic_consume.
    Each message is acked after the callback returns; caches and the
    per-request write-db tracking are reset before every callback.
    Loops until interrupted (Ctrl-C) or no callbacks remain, then joins
    the worker and closes the channel.
    """
    from pylons import tmpl_context as c
    chan = connection_manager.get_channel()
    # prefetch a small batch rather than draining the whole queue
    chan.basic_qos(prefetch_size=0, prefetch_count=10, a_global=False)
    def _callback(msg):
        if verbose:
            count_str = ''
            if ('message_count' in msg.delivery_info):
                count_str = ('(%d remaining)' % msg.delivery_info['message_count'])
            print ('%s: 1 item %s' % (queue, count_str))
        # fresh caches / write-db state for each message
        cfg.reset_caches()
        c.use_write_db = {}
        ret = callback(msg)
        # ack only after the callback succeeded
        msg.channel.basic_ack(msg.delivery_tag)
        sys.stdout.flush()
        return ret
    chan.basic_consume(queue=queue, callback=_callback)
    try:
        while chan.callbacks:
            try:
                chan.wait()
            except KeyboardInterrupt:
                break
    finally:
        worker.join()
        if chan.is_open:
            chan.close()
[ "def", "consume_items", "(", "queue", ",", "callback", ",", "verbose", "=", "True", ")", ":", "from", "pylons", "import", "tmpl_context", "as", "c", "chan", "=", "connection_manager", ".", "get_channel", "(", ")", "chan", ".", "basic_qos", "(", "prefetch_size", "=", "0", ",", "prefetch_count", "=", "10", ",", "a_global", "=", "False", ")", "def", "_callback", "(", "msg", ")", ":", "if", "verbose", ":", "count_str", "=", "''", "if", "(", "'message_count'", "in", "msg", ".", "delivery_info", ")", ":", "count_str", "=", "(", "'(%d remaining)'", "%", "msg", ".", "delivery_info", "[", "'message_count'", "]", ")", "print", "(", "'%s: 1 item %s'", "%", "(", "queue", ",", "count_str", ")", ")", "cfg", ".", "reset_caches", "(", ")", "c", ".", "use_write_db", "=", "{", "}", "ret", "=", "callback", "(", "msg", ")", "msg", ".", "channel", ".", "basic_ack", "(", "msg", ".", "delivery_tag", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "return", "ret", "chan", ".", "basic_consume", "(", "queue", "=", "queue", ",", "callback", "=", "_callback", ")", "try", ":", "while", "chan", ".", "callbacks", ":", "try", ":", "chan", ".", "wait", "(", ")", "except", "KeyboardInterrupt", ":", "break", "finally", ":", "worker", ".", "join", "(", ")", "if", "chan", ".", "is_open", ":", "chan", ".", "close", "(", ")" ]
a lighter-weight version of handle_items that uses amqps basic .
train
false
8,392
def send_poetry(sock, poetry_file, num_bytes, delay): inputf = open(poetry_file) while True: bytes = inputf.read(num_bytes) if (not bytes): sock.close() inputf.close() return print ('Sending %d bytes' % len(bytes)) try: sock.sendall(bytes) except socket.error: sock.close() inputf.close() return time.sleep(delay)
[ "def", "send_poetry", "(", "sock", ",", "poetry_file", ",", "num_bytes", ",", "delay", ")", ":", "inputf", "=", "open", "(", "poetry_file", ")", "while", "True", ":", "bytes", "=", "inputf", ".", "read", "(", "num_bytes", ")", "if", "(", "not", "bytes", ")", ":", "sock", ".", "close", "(", ")", "inputf", ".", "close", "(", ")", "return", "print", "(", "'Sending %d bytes'", "%", "len", "(", "bytes", ")", ")", "try", ":", "sock", ".", "sendall", "(", "bytes", ")", "except", "socket", ".", "error", ":", "sock", ".", "close", "(", ")", "inputf", ".", "close", "(", ")", "return", "time", ".", "sleep", "(", "delay", ")" ]
send some poetry slowly down the socket .
train
false
8,393
def normalize_ws(text): return ' '.join(text.split())
[ "def", "normalize_ws", "(", "text", ")", ":", "return", "' '", ".", "join", "(", "text", ".", "split", "(", ")", ")" ]
do lws folding .
train
false
8,395
def _my_hilbert(x, n_fft=None, envelope=False): from scipy.signal import hilbert n_x = x.shape[(-1)] out = hilbert(x, N=n_fft)[:n_x] if (envelope is True): out = np.abs(out) return out
[ "def", "_my_hilbert", "(", "x", ",", "n_fft", "=", "None", ",", "envelope", "=", "False", ")", ":", "from", "scipy", ".", "signal", "import", "hilbert", "n_x", "=", "x", ".", "shape", "[", "(", "-", "1", ")", "]", "out", "=", "hilbert", "(", "x", ",", "N", "=", "n_fft", ")", "[", ":", "n_x", "]", "if", "(", "envelope", "is", "True", ")", ":", "out", "=", "np", ".", "abs", "(", "out", ")", "return", "out" ]
compute hilbert transform of signals w/ zero padding .
train
false
8,396
def test_image_size(): thisurl = 'http://www.google.fr/images/srpr/logo3w.png' img = display.Image(url=thisurl, width=200, height=200) nt.assert_equal((u'<img src="%s" width="200" height="200"/>' % thisurl), img._repr_html_()) img = display.Image(url=thisurl, width=200) nt.assert_equal((u'<img src="%s" width="200"/>' % thisurl), img._repr_html_()) img = display.Image(url=thisurl) nt.assert_equal((u'<img src="%s"/>' % thisurl), img._repr_html_()) img = display.Image(url=thisurl, unconfined=True) nt.assert_equal((u'<img src="%s" class="unconfined"/>' % thisurl), img._repr_html_())
[ "def", "test_image_size", "(", ")", ":", "thisurl", "=", "'http://www.google.fr/images/srpr/logo3w.png'", "img", "=", "display", ".", "Image", "(", "url", "=", "thisurl", ",", "width", "=", "200", ",", "height", "=", "200", ")", "nt", ".", "assert_equal", "(", "(", "u'<img src=\"%s\" width=\"200\" height=\"200\"/>'", "%", "thisurl", ")", ",", "img", ".", "_repr_html_", "(", ")", ")", "img", "=", "display", ".", "Image", "(", "url", "=", "thisurl", ",", "width", "=", "200", ")", "nt", ".", "assert_equal", "(", "(", "u'<img src=\"%s\" width=\"200\"/>'", "%", "thisurl", ")", ",", "img", ".", "_repr_html_", "(", ")", ")", "img", "=", "display", ".", "Image", "(", "url", "=", "thisurl", ")", "nt", ".", "assert_equal", "(", "(", "u'<img src=\"%s\"/>'", "%", "thisurl", ")", ",", "img", ".", "_repr_html_", "(", ")", ")", "img", "=", "display", ".", "Image", "(", "url", "=", "thisurl", ",", "unconfined", "=", "True", ")", "nt", ".", "assert_equal", "(", "(", "u'<img src=\"%s\" class=\"unconfined\"/>'", "%", "thisurl", ")", ",", "img", ".", "_repr_html_", "(", ")", ")" ]
simple test for display .
train
false
8,397
@profiler.trace def ipsecsiteconnection_create(request, **kwargs): body = {'ipsec_site_connection': {'name': kwargs['name'], 'description': kwargs['description'], 'dpd': kwargs['dpd'], 'ikepolicy_id': kwargs['ikepolicy_id'], 'initiator': kwargs['initiator'], 'ipsecpolicy_id': kwargs['ipsecpolicy_id'], 'mtu': kwargs['mtu'], 'peer_address': kwargs['peer_address'], 'peer_cidrs': kwargs['peer_cidrs'], 'peer_id': kwargs['peer_id'], 'psk': kwargs['psk'], 'vpnservice_id': kwargs['vpnservice_id'], 'admin_state_up': kwargs['admin_state_up']}} ipsecsiteconnection = neutronclient(request).create_ipsec_site_connection(body).get('ipsec_site_connection') return IPSecSiteConnection(ipsecsiteconnection)
[ "@", "profiler", ".", "trace", "def", "ipsecsiteconnection_create", "(", "request", ",", "**", "kwargs", ")", ":", "body", "=", "{", "'ipsec_site_connection'", ":", "{", "'name'", ":", "kwargs", "[", "'name'", "]", ",", "'description'", ":", "kwargs", "[", "'description'", "]", ",", "'dpd'", ":", "kwargs", "[", "'dpd'", "]", ",", "'ikepolicy_id'", ":", "kwargs", "[", "'ikepolicy_id'", "]", ",", "'initiator'", ":", "kwargs", "[", "'initiator'", "]", ",", "'ipsecpolicy_id'", ":", "kwargs", "[", "'ipsecpolicy_id'", "]", ",", "'mtu'", ":", "kwargs", "[", "'mtu'", "]", ",", "'peer_address'", ":", "kwargs", "[", "'peer_address'", "]", ",", "'peer_cidrs'", ":", "kwargs", "[", "'peer_cidrs'", "]", ",", "'peer_id'", ":", "kwargs", "[", "'peer_id'", "]", ",", "'psk'", ":", "kwargs", "[", "'psk'", "]", ",", "'vpnservice_id'", ":", "kwargs", "[", "'vpnservice_id'", "]", ",", "'admin_state_up'", ":", "kwargs", "[", "'admin_state_up'", "]", "}", "}", "ipsecsiteconnection", "=", "neutronclient", "(", "request", ")", ".", "create_ipsec_site_connection", "(", "body", ")", ".", "get", "(", "'ipsec_site_connection'", ")", "return", "IPSecSiteConnection", "(", "ipsecsiteconnection", ")" ]
create ipsecsiteconnection .
train
false
8,398
def warnOnException(func): def w(*args, **kwds): try: func(*args, **kwds) except: printExc('Ignored exception:') return w
[ "def", "warnOnException", "(", "func", ")", ":", "def", "w", "(", "*", "args", ",", "**", "kwds", ")", ":", "try", ":", "func", "(", "*", "args", ",", "**", "kwds", ")", "except", ":", "printExc", "(", "'Ignored exception:'", ")", "return", "w" ]
decorator that catches/ignores exceptions and prints a stack trace .
train
false
8,399
def function_named(fn, name): try: fn.__name__ = name except TypeError: fn = types.FunctionType(fn.__code__, fn.__globals__, name, fn.__defaults__, fn.__closure__) return fn
[ "def", "function_named", "(", "fn", ",", "name", ")", ":", "try", ":", "fn", ".", "__name__", "=", "name", "except", "TypeError", ":", "fn", "=", "types", ".", "FunctionType", "(", "fn", ".", "__code__", ",", "fn", ".", "__globals__", ",", "name", ",", "fn", ".", "__defaults__", ",", "fn", ".", "__closure__", ")", "return", "fn" ]
return a function with a given __name__ .
train
false
8,400
@preserve_value(sys, 'dont_write_bytecode') def _load_module_no_bytecode(filename, module_file, module_file_path, py_source_description): sys.dont_write_bytecode = 1 new_module = imp.load_module(os.path.splitext(filename)[0].replace('-', '_'), module_file, module_file_path, py_source_description) return new_module
[ "@", "preserve_value", "(", "sys", ",", "'dont_write_bytecode'", ")", "def", "_load_module_no_bytecode", "(", "filename", ",", "module_file", ",", "module_file_path", ",", "py_source_description", ")", ":", "sys", ".", "dont_write_bytecode", "=", "1", "new_module", "=", "imp", ".", "load_module", "(", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "0", "]", ".", "replace", "(", "'-'", ",", "'_'", ")", ",", "module_file", ",", "module_file_path", ",", "py_source_description", ")", "return", "new_module" ]
helper function to load a module while setting sys .
train
false
8,401
def to_node(node_state): return Node(uuid=node_state.uuid, hostname=node_state.hostname, applications=(node_state.applications or {}), manifestations=(node_state.manifestations or {}))
[ "def", "to_node", "(", "node_state", ")", ":", "return", "Node", "(", "uuid", "=", "node_state", ".", "uuid", ",", "hostname", "=", "node_state", ".", "hostname", ",", "applications", "=", "(", "node_state", ".", "applications", "or", "{", "}", ")", ",", "manifestations", "=", "(", "node_state", ".", "manifestations", "or", "{", "}", ")", ")" ]
convert a nodestate to a corresponding node .
train
false
8,402
def gather_filenames(arglist): l = [] for filename in arglist: if os.path.isdir(filename): thislist = [] for (dirpath, dirnames, filenames) in os.walk(filename): for f in filenames: if _is_valid_filename(f): f = os.path.join(dirpath, f) thislist.append(f) thislist.sort() l.extend(thislist) else: l.append(filename) return l
[ "def", "gather_filenames", "(", "arglist", ")", ":", "l", "=", "[", "]", "for", "filename", "in", "arglist", ":", "if", "os", ".", "path", ".", "isdir", "(", "filename", ")", ":", "thislist", "=", "[", "]", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "filename", ")", ":", "for", "f", "in", "filenames", ":", "if", "_is_valid_filename", "(", "f", ")", ":", "f", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "f", ")", "thislist", ".", "append", "(", "f", ")", "thislist", ".", "sort", "(", ")", "l", ".", "extend", "(", "thislist", ")", "else", ":", "l", ".", "append", "(", "filename", ")", "return", "l" ]
collect script files from within directories .
train
false
8,403
def prde_normal_denom(fa, fd, G, DE): (dn, ds) = splitfactor(fd, DE) (Gas, Gds) = list(zip(*G)) gd = reduce((lambda i, j: i.lcm(j)), Gds, Poly(1, DE.t)) (en, es) = splitfactor(gd, DE) p = dn.gcd(en) h = en.gcd(en.diff(DE.t)).quo(p.gcd(p.diff(DE.t))) a = (dn * h) c = (a * h) ba = ((a * fa) - ((dn * derivation(h, DE)) * fd)) (ba, bd) = ba.cancel(fd, include=True) G = [(c * A).cancel(D, include=True) for (A, D) in G] return (a, (ba, bd), G, h)
[ "def", "prde_normal_denom", "(", "fa", ",", "fd", ",", "G", ",", "DE", ")", ":", "(", "dn", ",", "ds", ")", "=", "splitfactor", "(", "fd", ",", "DE", ")", "(", "Gas", ",", "Gds", ")", "=", "list", "(", "zip", "(", "*", "G", ")", ")", "gd", "=", "reduce", "(", "(", "lambda", "i", ",", "j", ":", "i", ".", "lcm", "(", "j", ")", ")", ",", "Gds", ",", "Poly", "(", "1", ",", "DE", ".", "t", ")", ")", "(", "en", ",", "es", ")", "=", "splitfactor", "(", "gd", ",", "DE", ")", "p", "=", "dn", ".", "gcd", "(", "en", ")", "h", "=", "en", ".", "gcd", "(", "en", ".", "diff", "(", "DE", ".", "t", ")", ")", ".", "quo", "(", "p", ".", "gcd", "(", "p", ".", "diff", "(", "DE", ".", "t", ")", ")", ")", "a", "=", "(", "dn", "*", "h", ")", "c", "=", "(", "a", "*", "h", ")", "ba", "=", "(", "(", "a", "*", "fa", ")", "-", "(", "(", "dn", "*", "derivation", "(", "h", ",", "DE", ")", ")", "*", "fd", ")", ")", "(", "ba", ",", "bd", ")", "=", "ba", ".", "cancel", "(", "fd", ",", "include", "=", "True", ")", "G", "=", "[", "(", "c", "*", "A", ")", ".", "cancel", "(", "D", ",", "include", "=", "True", ")", "for", "(", "A", ",", "D", ")", "in", "G", "]", "return", "(", "a", ",", "(", "ba", ",", "bd", ")", ",", "G", ",", "h", ")" ]
parametric risch differential equation - normal part of the denominator .
train
false
8,404
def _api_osx_icon(name, output, kwargs): value = kwargs.get('value', '1').strip() cfg.osx_menu.set((value != '0')) return report(output)
[ "def", "_api_osx_icon", "(", "name", ",", "output", ",", "kwargs", ")", ":", "value", "=", "kwargs", ".", "get", "(", "'value'", ",", "'1'", ")", ".", "strip", "(", ")", "cfg", ".", "osx_menu", ".", "set", "(", "(", "value", "!=", "'0'", ")", ")", "return", "report", "(", "output", ")" ]
api: accepts output .
train
false
8,405
@not_implemented_for('undirected') def condensation(G, scc=None): if (scc is None): scc = nx.strongly_connected_components(G) mapping = {} members = {} C = nx.DiGraph() i = 0 for (i, component) in enumerate(scc): members[i] = component mapping.update(((n, i) for n in component)) number_of_components = (i + 1) C.add_nodes_from(range(number_of_components)) C.add_edges_from(((mapping[u], mapping[v]) for (u, v) in G.edges() if (mapping[u] != mapping[v]))) nx.set_node_attributes(C, 'members', members) C.graph['mapping'] = mapping return C
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "condensation", "(", "G", ",", "scc", "=", "None", ")", ":", "if", "(", "scc", "is", "None", ")", ":", "scc", "=", "nx", ".", "strongly_connected_components", "(", "G", ")", "mapping", "=", "{", "}", "members", "=", "{", "}", "C", "=", "nx", ".", "DiGraph", "(", ")", "i", "=", "0", "for", "(", "i", ",", "component", ")", "in", "enumerate", "(", "scc", ")", ":", "members", "[", "i", "]", "=", "component", "mapping", ".", "update", "(", "(", "(", "n", ",", "i", ")", "for", "n", "in", "component", ")", ")", "number_of_components", "=", "(", "i", "+", "1", ")", "C", ".", "add_nodes_from", "(", "range", "(", "number_of_components", ")", ")", "C", ".", "add_edges_from", "(", "(", "(", "mapping", "[", "u", "]", ",", "mapping", "[", "v", "]", ")", "for", "(", "u", ",", "v", ")", "in", "G", ".", "edges", "(", ")", "if", "(", "mapping", "[", "u", "]", "!=", "mapping", "[", "v", "]", ")", ")", ")", "nx", ".", "set_node_attributes", "(", "C", ",", "'members'", ",", "members", ")", "C", ".", "graph", "[", "'mapping'", "]", "=", "mapping", "return", "C" ]
returns the condensation of g .
train
false
8,406
def corner_kitchen_rosenfeld(image, mode='constant', cval=0): (imx, imy) = _compute_derivatives(image, mode=mode, cval=cval) (imxx, imxy) = _compute_derivatives(imx, mode=mode, cval=cval) (imyx, imyy) = _compute_derivatives(imy, mode=mode, cval=cval) numerator = (((imxx * (imy ** 2)) + (imyy * (imx ** 2))) - (((2 * imxy) * imx) * imy)) denominator = ((imx ** 2) + (imy ** 2)) response = np.zeros_like(image, dtype=np.double) mask = (denominator != 0) response[mask] = (numerator[mask] / denominator[mask]) return response
[ "def", "corner_kitchen_rosenfeld", "(", "image", ",", "mode", "=", "'constant'", ",", "cval", "=", "0", ")", ":", "(", "imx", ",", "imy", ")", "=", "_compute_derivatives", "(", "image", ",", "mode", "=", "mode", ",", "cval", "=", "cval", ")", "(", "imxx", ",", "imxy", ")", "=", "_compute_derivatives", "(", "imx", ",", "mode", "=", "mode", ",", "cval", "=", "cval", ")", "(", "imyx", ",", "imyy", ")", "=", "_compute_derivatives", "(", "imy", ",", "mode", "=", "mode", ",", "cval", "=", "cval", ")", "numerator", "=", "(", "(", "(", "imxx", "*", "(", "imy", "**", "2", ")", ")", "+", "(", "imyy", "*", "(", "imx", "**", "2", ")", ")", ")", "-", "(", "(", "(", "2", "*", "imxy", ")", "*", "imx", ")", "*", "imy", ")", ")", "denominator", "=", "(", "(", "imx", "**", "2", ")", "+", "(", "imy", "**", "2", ")", ")", "response", "=", "np", ".", "zeros_like", "(", "image", ",", "dtype", "=", "np", ".", "double", ")", "mask", "=", "(", "denominator", "!=", "0", ")", "response", "[", "mask", "]", "=", "(", "numerator", "[", "mask", "]", "/", "denominator", "[", "mask", "]", ")", "return", "response" ]
compute kitchen and rosenfeld corner measure response image .
train
false
8,407
def set_signal_winch(handler): global winch_handler old_handler = winch_handler winch_handler = handler return old_handler
[ "def", "set_signal_winch", "(", "handler", ")", ":", "global", "winch_handler", "old_handler", "=", "winch_handler", "winch_handler", "=", "handler", "return", "old_handler" ]
return the old signal handler .
train
false
8,408
@not_implemented_for('undirected') def is_attracting_component(G): ac = list(attracting_components(G)) if (len(ac[0]) == len(G)): attracting = True else: attracting = False return attracting
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "is_attracting_component", "(", "G", ")", ":", "ac", "=", "list", "(", "attracting_components", "(", "G", ")", ")", "if", "(", "len", "(", "ac", "[", "0", "]", ")", "==", "len", "(", "G", ")", ")", ":", "attracting", "=", "True", "else", ":", "attracting", "=", "False", "return", "attracting" ]
returns true if g consists of a single attracting component .
train
false
8,411
def create_consistencygroup(ctxt, host='test_host@fakedrv#fakepool', name='test_cg', description='this is a test cg', status=fields.ConsistencyGroupStatus.AVAILABLE, availability_zone='fake_az', volume_type_id=None, cgsnapshot_id=None, source_cgid=None, **kwargs): cg = objects.ConsistencyGroup(ctxt) cg.host = host cg.user_id = (ctxt.user_id or fake.USER_ID) cg.project_id = (ctxt.project_id or fake.PROJECT_ID) cg.status = status cg.name = name cg.description = description cg.availability_zone = availability_zone if volume_type_id: cg.volume_type_id = volume_type_id cg.cgsnapshot_id = cgsnapshot_id cg.source_cgid = source_cgid new_id = kwargs.pop('id', None) cg.update(kwargs) cg.create() if (new_id and (new_id != cg.id)): db.consistencygroup_update(ctxt, cg.id, {'id': new_id}) cg = objects.ConsistencyGroup.get_by_id(ctxt, new_id) return cg
[ "def", "create_consistencygroup", "(", "ctxt", ",", "host", "=", "'test_host@fakedrv#fakepool'", ",", "name", "=", "'test_cg'", ",", "description", "=", "'this is a test cg'", ",", "status", "=", "fields", ".", "ConsistencyGroupStatus", ".", "AVAILABLE", ",", "availability_zone", "=", "'fake_az'", ",", "volume_type_id", "=", "None", ",", "cgsnapshot_id", "=", "None", ",", "source_cgid", "=", "None", ",", "**", "kwargs", ")", ":", "cg", "=", "objects", ".", "ConsistencyGroup", "(", "ctxt", ")", "cg", ".", "host", "=", "host", "cg", ".", "user_id", "=", "(", "ctxt", ".", "user_id", "or", "fake", ".", "USER_ID", ")", "cg", ".", "project_id", "=", "(", "ctxt", ".", "project_id", "or", "fake", ".", "PROJECT_ID", ")", "cg", ".", "status", "=", "status", "cg", ".", "name", "=", "name", "cg", ".", "description", "=", "description", "cg", ".", "availability_zone", "=", "availability_zone", "if", "volume_type_id", ":", "cg", ".", "volume_type_id", "=", "volume_type_id", "cg", ".", "cgsnapshot_id", "=", "cgsnapshot_id", "cg", ".", "source_cgid", "=", "source_cgid", "new_id", "=", "kwargs", ".", "pop", "(", "'id'", ",", "None", ")", "cg", ".", "update", "(", "kwargs", ")", "cg", ".", "create", "(", ")", "if", "(", "new_id", "and", "(", "new_id", "!=", "cg", ".", "id", ")", ")", ":", "db", ".", "consistencygroup_update", "(", "ctxt", ",", "cg", ".", "id", ",", "{", "'id'", ":", "new_id", "}", ")", "cg", "=", "objects", ".", "ConsistencyGroup", ".", "get_by_id", "(", "ctxt", ",", "new_id", ")", "return", "cg" ]
create a consistencygroup object in the db .
train
false
8,413
def _CopyDocumentToProtocolBuffer(document, pb): pb.set_storage(document_pb.Document.DISK) if document.doc_id: pb.set_id(document.doc_id.encode('utf-8')) if document.language: pb.set_language(document.language.encode('utf-8')) for field in document.fields: field_pb = pb.add_field() _CopyFieldToProtocolBuffer(field, field_pb) pb.set_order_id(document.rank) return pb
[ "def", "_CopyDocumentToProtocolBuffer", "(", "document", ",", "pb", ")", ":", "pb", ".", "set_storage", "(", "document_pb", ".", "Document", ".", "DISK", ")", "if", "document", ".", "doc_id", ":", "pb", ".", "set_id", "(", "document", ".", "doc_id", ".", "encode", "(", "'utf-8'", ")", ")", "if", "document", ".", "language", ":", "pb", ".", "set_language", "(", "document", ".", "language", ".", "encode", "(", "'utf-8'", ")", ")", "for", "field", "in", "document", ".", "fields", ":", "field_pb", "=", "pb", ".", "add_field", "(", ")", "_CopyFieldToProtocolBuffer", "(", "field", ",", "field_pb", ")", "pb", ".", "set_order_id", "(", "document", ".", "rank", ")", "return", "pb" ]
copies document to a document_pb .
train
false
8,414
def dmp_ground_extract(f, g, u, K): fc = dmp_ground_content(f, u, K) gc = dmp_ground_content(g, u, K) gcd = K.gcd(fc, gc) if (not K.is_one(gcd)): f = dmp_quo_ground(f, gcd, u, K) g = dmp_quo_ground(g, gcd, u, K) return (gcd, f, g)
[ "def", "dmp_ground_extract", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "fc", "=", "dmp_ground_content", "(", "f", ",", "u", ",", "K", ")", "gc", "=", "dmp_ground_content", "(", "g", ",", "u", ",", "K", ")", "gcd", "=", "K", ".", "gcd", "(", "fc", ",", "gc", ")", "if", "(", "not", "K", ".", "is_one", "(", "gcd", ")", ")", ":", "f", "=", "dmp_quo_ground", "(", "f", ",", "gcd", ",", "u", ",", "K", ")", "g", "=", "dmp_quo_ground", "(", "g", ",", "gcd", ",", "u", ",", "K", ")", "return", "(", "gcd", ",", "f", ",", "g", ")" ]
extract common content from a pair of polynomials in k[x] .
train
false
8,415
def axes(*args, **kwargs): nargs = len(args) if (len(args) == 0): return subplot(111, **kwargs) if (nargs > 1): raise TypeError(u'Only one non keyword arg to axes allowed') arg = args[0] if isinstance(arg, Axes): a = gcf().sca(arg) else: rect = arg a = gcf().add_axes(rect, **kwargs) return a
[ "def", "axes", "(", "*", "args", ",", "**", "kwargs", ")", ":", "nargs", "=", "len", "(", "args", ")", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "return", "subplot", "(", "111", ",", "**", "kwargs", ")", "if", "(", "nargs", ">", "1", ")", ":", "raise", "TypeError", "(", "u'Only one non keyword arg to axes allowed'", ")", "arg", "=", "args", "[", "0", "]", "if", "isinstance", "(", "arg", ",", "Axes", ")", ":", "a", "=", "gcf", "(", ")", ".", "sca", "(", "arg", ")", "else", ":", "rect", "=", "arg", "a", "=", "gcf", "(", ")", ".", "add_axes", "(", "rect", ",", "**", "kwargs", ")", "return", "a" ]
add an axes at position rect specified by: - axes() by itself creates a default full subplot window axis .
train
false
8,417
@register_canonicalize('local_incsubtensor_of_allocs') @register_stabilize('local_incsubtensor_of_allocs') @gof.local_optimizer([IncSubtensor, AdvancedIncSubtensor, AdvancedIncSubtensor1]) def local_incsubtensor_of_zeros(node): if (isinstance(node.op, (IncSubtensor, AdvancedIncSubtensor, AdvancedIncSubtensor1)) and (not node.op.set_instead_of_inc)): x = node.inputs[0] y = node.inputs[1] try: if (get_scalar_constant_value(y, elemwise=False) == 0): return [x] except NotScalarConstantError: return
[ "@", "register_canonicalize", "(", "'local_incsubtensor_of_allocs'", ")", "@", "register_stabilize", "(", "'local_incsubtensor_of_allocs'", ")", "@", "gof", ".", "local_optimizer", "(", "[", "IncSubtensor", ",", "AdvancedIncSubtensor", ",", "AdvancedIncSubtensor1", "]", ")", "def", "local_incsubtensor_of_zeros", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "(", "IncSubtensor", ",", "AdvancedIncSubtensor", ",", "AdvancedIncSubtensor1", ")", ")", "and", "(", "not", "node", ".", "op", ".", "set_instead_of_inc", ")", ")", ":", "x", "=", "node", ".", "inputs", "[", "0", "]", "y", "=", "node", ".", "inputs", "[", "1", "]", "try", ":", "if", "(", "get_scalar_constant_value", "(", "y", ",", "elemwise", "=", "False", ")", "==", "0", ")", ":", "return", "[", "x", "]", "except", "NotScalarConstantError", ":", "return" ]
incsubtensor -> x .
train
false
8,418
def dereference_type(t): if (t.strip() in ['void *', 'char *']): return t.strip() try: return t[:t.rindex('*')].strip() except: return t.strip()
[ "def", "dereference_type", "(", "t", ")", ":", "if", "(", "t", ".", "strip", "(", ")", "in", "[", "'void *'", ",", "'char *'", "]", ")", ":", "return", "t", ".", "strip", "(", ")", "try", ":", "return", "t", "[", ":", "t", ".", "rindex", "(", "'*'", ")", "]", ".", "strip", "(", ")", "except", ":", "return", "t", ".", "strip", "(", ")" ]
removes everything after the last star character in a type string .
train
false
8,419
def _is_function_class_equation(func_class, f, symbol): if (f.is_Mul or f.is_Add): return all((_is_function_class_equation(func_class, arg, symbol) for arg in f.args)) if f.is_Pow: if (not f.exp.has(symbol)): return _is_function_class_equation(func_class, f.base, symbol) else: return False if (not f.has(symbol)): return True if isinstance(f, func_class): try: g = Poly(f.args[0], symbol) return (g.degree() <= 1) except PolynomialError: return False else: return False
[ "def", "_is_function_class_equation", "(", "func_class", ",", "f", ",", "symbol", ")", ":", "if", "(", "f", ".", "is_Mul", "or", "f", ".", "is_Add", ")", ":", "return", "all", "(", "(", "_is_function_class_equation", "(", "func_class", ",", "arg", ",", "symbol", ")", "for", "arg", "in", "f", ".", "args", ")", ")", "if", "f", ".", "is_Pow", ":", "if", "(", "not", "f", ".", "exp", ".", "has", "(", "symbol", ")", ")", ":", "return", "_is_function_class_equation", "(", "func_class", ",", "f", ".", "base", ",", "symbol", ")", "else", ":", "return", "False", "if", "(", "not", "f", ".", "has", "(", "symbol", ")", ")", ":", "return", "True", "if", "isinstance", "(", "f", ",", "func_class", ")", ":", "try", ":", "g", "=", "Poly", "(", "f", ".", "args", "[", "0", "]", ",", "symbol", ")", "return", "(", "g", ".", "degree", "(", ")", "<=", "1", ")", "except", "PolynomialError", ":", "return", "False", "else", ":", "return", "False" ]
tests whether the equation is an equation of the given function class .
train
false
8,420
def getAlterationFileLines(fileName): lines = getAlterationLines(fileName) if (len(lines) == 0): return [] return ([getAlterationFileLineBlindly(fileName)] + lines)
[ "def", "getAlterationFileLines", "(", "fileName", ")", ":", "lines", "=", "getAlterationLines", "(", "fileName", ")", "if", "(", "len", "(", "lines", ")", "==", "0", ")", ":", "return", "[", "]", "return", "(", "[", "getAlterationFileLineBlindly", "(", "fileName", ")", "]", "+", "lines", ")" ]
get the alteration file line and the text lines from the filename in the alterations directories .
train
false
8,421
def _renameColumn(cname): cname = cname.replace('ID', 'Id') return _renameTable(cname)
[ "def", "_renameColumn", "(", "cname", ")", ":", "cname", "=", "cname", ".", "replace", "(", "'ID'", ",", "'Id'", ")", "return", "_renameTable", "(", "cname", ")" ]
build the name of a column .
train
false
8,422
def undo(config='root', files=None, num_pre=None, num_post=None): (pre, post) = _get_num_interval(config, num_pre, num_post) changes = status(config, pre, post) changed = set(changes.keys()) requested = set((files or changed)) if (not requested.issubset(changed)): raise CommandExecutionError('Given file list contains files that are not presentin the changed filelist: {0}'.format((changed - requested))) cmdret = __salt__['cmd.run']('snapper -c {0} undochange {1}..{2} {3}'.format(config, pre, post, ' '.join(requested))) try: components = cmdret.split(' ') ret = {} for comp in components: (key, val) = comp.split(':') ret[key] = val return ret except ValueError as exc: raise CommandExecutionError('Error while processing Snapper response: {0}'.format(cmdret))
[ "def", "undo", "(", "config", "=", "'root'", ",", "files", "=", "None", ",", "num_pre", "=", "None", ",", "num_post", "=", "None", ")", ":", "(", "pre", ",", "post", ")", "=", "_get_num_interval", "(", "config", ",", "num_pre", ",", "num_post", ")", "changes", "=", "status", "(", "config", ",", "pre", ",", "post", ")", "changed", "=", "set", "(", "changes", ".", "keys", "(", ")", ")", "requested", "=", "set", "(", "(", "files", "or", "changed", ")", ")", "if", "(", "not", "requested", ".", "issubset", "(", "changed", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Given file list contains files that are not presentin the changed filelist: {0}'", ".", "format", "(", "(", "changed", "-", "requested", ")", ")", ")", "cmdret", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'snapper -c {0} undochange {1}..{2} {3}'", ".", "format", "(", "config", ",", "pre", ",", "post", ",", "' '", ".", "join", "(", "requested", ")", ")", ")", "try", ":", "components", "=", "cmdret", ".", "split", "(", "' '", ")", "ret", "=", "{", "}", "for", "comp", "in", "components", ":", "(", "key", ",", "val", ")", "=", "comp", ".", "split", "(", "':'", ")", "ret", "[", "key", "]", "=", "val", "return", "ret", "except", "ValueError", "as", "exc", ":", "raise", "CommandExecutionError", "(", "'Error while processing Snapper response: {0}'", ".", "format", "(", "cmdret", ")", ")" ]
undo the effect of a cl creates a new cl that undoes an earlier cl .
train
true
8,427
def _straight_line_vertices(adjacency_mat, node_coords, directed=False): if (not issparse(adjacency_mat)): adjacency_mat = np.asarray(adjacency_mat, float) if ((adjacency_mat.ndim != 2) or (adjacency_mat.shape[0] != adjacency_mat.shape[1])): raise ValueError('Adjacency matrix should be square.') arrow_vertices = np.array([]) edges = _get_edges(adjacency_mat) line_vertices = node_coords[edges.ravel()] if directed: arrows = np.array(list(_get_directed_edges(adjacency_mat))) arrow_vertices = node_coords[arrows.ravel()] arrow_vertices = arrow_vertices.reshape(((len(arrow_vertices) / 2), 4)) return (line_vertices, arrow_vertices)
[ "def", "_straight_line_vertices", "(", "adjacency_mat", ",", "node_coords", ",", "directed", "=", "False", ")", ":", "if", "(", "not", "issparse", "(", "adjacency_mat", ")", ")", ":", "adjacency_mat", "=", "np", ".", "asarray", "(", "adjacency_mat", ",", "float", ")", "if", "(", "(", "adjacency_mat", ".", "ndim", "!=", "2", ")", "or", "(", "adjacency_mat", ".", "shape", "[", "0", "]", "!=", "adjacency_mat", ".", "shape", "[", "1", "]", ")", ")", ":", "raise", "ValueError", "(", "'Adjacency matrix should be square.'", ")", "arrow_vertices", "=", "np", ".", "array", "(", "[", "]", ")", "edges", "=", "_get_edges", "(", "adjacency_mat", ")", "line_vertices", "=", "node_coords", "[", "edges", ".", "ravel", "(", ")", "]", "if", "directed", ":", "arrows", "=", "np", ".", "array", "(", "list", "(", "_get_directed_edges", "(", "adjacency_mat", ")", ")", ")", "arrow_vertices", "=", "node_coords", "[", "arrows", ".", "ravel", "(", ")", "]", "arrow_vertices", "=", "arrow_vertices", ".", "reshape", "(", "(", "(", "len", "(", "arrow_vertices", ")", "/", "2", ")", ",", "4", ")", ")", "return", "(", "line_vertices", ",", "arrow_vertices", ")" ]
generate the vertices for straight lines between nodes .
train
true
8,428
def test_get_set_vector(): rng = np.random.RandomState([2014, 5, 8]) class DummyModel(Model, ): "\n A Model that exercises this test by having a few different\n parameters with different shapes and dimensionalities.\n\n Don't instantiate more than one of these because the parameters\n are class-level attributes.\n " _params = [sharedX(rng.randn(5)), sharedX(rng.randn(5, 3)), sharedX(rng.randn(4, 4, 4))] model = DummyModel() vector = model.get_param_vector() model.set_param_vector((0.0 * vector)) assert np.allclose((0.0 * vector), model.get_param_vector()) model.set_param_vector(vector) assert np.allclose(model.get_param_vector(), vector)
[ "def", "test_get_set_vector", "(", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "[", "2014", ",", "5", ",", "8", "]", ")", "class", "DummyModel", "(", "Model", ",", ")", ":", "_params", "=", "[", "sharedX", "(", "rng", ".", "randn", "(", "5", ")", ")", ",", "sharedX", "(", "rng", ".", "randn", "(", "5", ",", "3", ")", ")", ",", "sharedX", "(", "rng", ".", "randn", "(", "4", ",", "4", ",", "4", ")", ")", "]", "model", "=", "DummyModel", "(", ")", "vector", "=", "model", ".", "get_param_vector", "(", ")", "model", ".", "set_param_vector", "(", "(", "0.0", "*", "vector", ")", ")", "assert", "np", ".", "allclose", "(", "(", "0.0", "*", "vector", ")", ",", "model", ".", "get_param_vector", "(", ")", ")", "model", ".", "set_param_vector", "(", "vector", ")", "assert", "np", ".", "allclose", "(", "model", ".", "get_param_vector", "(", ")", ",", "vector", ")" ]
tests that get_vector and set_vector use the same format .
train
false
8,429
def _ports_match(protocol, module_min, module_max, rule_min, rule_max): if (protocol == 'icmp'): if (module_min and (int(module_min) == (-1))): module_min = None if (module_max and (int(module_max) == (-1))): module_max = None if ((protocol in ['tcp', 'udp']) and (module_min is None) and (module_max is None)): if (rule_min and (int(rule_min) == 1) and rule_max and (int(rule_max) == 65535)): return True if module_min: module_min = int(module_min) if module_max: module_max = int(module_max) if rule_min: rule_min = int(rule_min) if rule_max: rule_max = int(rule_max) return ((module_min == rule_min) and (module_max == rule_max))
[ "def", "_ports_match", "(", "protocol", ",", "module_min", ",", "module_max", ",", "rule_min", ",", "rule_max", ")", ":", "if", "(", "protocol", "==", "'icmp'", ")", ":", "if", "(", "module_min", "and", "(", "int", "(", "module_min", ")", "==", "(", "-", "1", ")", ")", ")", ":", "module_min", "=", "None", "if", "(", "module_max", "and", "(", "int", "(", "module_max", ")", "==", "(", "-", "1", ")", ")", ")", ":", "module_max", "=", "None", "if", "(", "(", "protocol", "in", "[", "'tcp'", ",", "'udp'", "]", ")", "and", "(", "module_min", "is", "None", ")", "and", "(", "module_max", "is", "None", ")", ")", ":", "if", "(", "rule_min", "and", "(", "int", "(", "rule_min", ")", "==", "1", ")", "and", "rule_max", "and", "(", "int", "(", "rule_max", ")", "==", "65535", ")", ")", ":", "return", "True", "if", "module_min", ":", "module_min", "=", "int", "(", "module_min", ")", "if", "module_max", ":", "module_max", "=", "int", "(", "module_max", ")", "if", "rule_min", ":", "rule_min", "=", "int", "(", "rule_min", ")", "if", "rule_max", ":", "rule_max", "=", "int", "(", "rule_max", ")", "return", "(", "(", "module_min", "==", "rule_min", ")", "and", "(", "module_max", "==", "rule_max", ")", ")" ]
capture the complex port matching logic .
train
false
8,430
def _gather_pillar(pillarenv, pillar_override): pillar = salt.pillar.get_pillar(__opts__, __grains__, __opts__['id'], __opts__['environment'], pillar=pillar_override, pillarenv=pillarenv) ret = pillar.compile_pillar() if (pillar_override and isinstance(pillar_override, dict)): ret.update(pillar_override) return ret
[ "def", "_gather_pillar", "(", "pillarenv", ",", "pillar_override", ")", ":", "pillar", "=", "salt", ".", "pillar", ".", "get_pillar", "(", "__opts__", ",", "__grains__", ",", "__opts__", "[", "'id'", "]", ",", "__opts__", "[", "'environment'", "]", ",", "pillar", "=", "pillar_override", ",", "pillarenv", "=", "pillarenv", ")", "ret", "=", "pillar", ".", "compile_pillar", "(", ")", "if", "(", "pillar_override", "and", "isinstance", "(", "pillar_override", ",", "dict", ")", ")", ":", "ret", ".", "update", "(", "pillar_override", ")", "return", "ret" ]
gathers pillar with a custom set of grains .
train
true
8,431
def update_aoa_contributors_metric(day=None): if day: start = end = day else: latest_metric = _get_latest_metric(AOA_CONTRIBUTORS_METRIC_CODE) if (latest_metric is not None): start = (latest_metric.end + timedelta(days=1)) else: try: first_reply = Reply.objects.order_by('created')[0] start = (first_reply.created.date() + timedelta(days=30)) except IndexError: return end = (date.today() - timedelta(days=1)) day = start while (day <= end): thirty_days_back = (day - timedelta(days=30)) contributors = Reply.objects.filter(created__gte=thirty_days_back, created__lt=day).values_list('twitter_username').distinct() count = contributors.count() metric_kind = MetricKind.objects.get(code=AOA_CONTRIBUTORS_METRIC_CODE) Metric.objects.create(kind=metric_kind, start=thirty_days_back, end=day, value=count) day = (day + timedelta(days=1))
[ "def", "update_aoa_contributors_metric", "(", "day", "=", "None", ")", ":", "if", "day", ":", "start", "=", "end", "=", "day", "else", ":", "latest_metric", "=", "_get_latest_metric", "(", "AOA_CONTRIBUTORS_METRIC_CODE", ")", "if", "(", "latest_metric", "is", "not", "None", ")", ":", "start", "=", "(", "latest_metric", ".", "end", "+", "timedelta", "(", "days", "=", "1", ")", ")", "else", ":", "try", ":", "first_reply", "=", "Reply", ".", "objects", ".", "order_by", "(", "'created'", ")", "[", "0", "]", "start", "=", "(", "first_reply", ".", "created", ".", "date", "(", ")", "+", "timedelta", "(", "days", "=", "30", ")", ")", "except", "IndexError", ":", "return", "end", "=", "(", "date", ".", "today", "(", ")", "-", "timedelta", "(", "days", "=", "1", ")", ")", "day", "=", "start", "while", "(", "day", "<=", "end", ")", ":", "thirty_days_back", "=", "(", "day", "-", "timedelta", "(", "days", "=", "30", ")", ")", "contributors", "=", "Reply", ".", "objects", ".", "filter", "(", "created__gte", "=", "thirty_days_back", ",", "created__lt", "=", "day", ")", ".", "values_list", "(", "'twitter_username'", ")", ".", "distinct", "(", ")", "count", "=", "contributors", ".", "count", "(", ")", "metric_kind", "=", "MetricKind", ".", "objects", ".", "get", "(", "code", "=", "AOA_CONTRIBUTORS_METRIC_CODE", ")", "Metric", ".", "objects", ".", "create", "(", "kind", "=", "metric_kind", ",", "start", "=", "thirty_days_back", ",", "end", "=", "day", ",", "value", "=", "count", ")", "day", "=", "(", "day", "+", "timedelta", "(", "days", "=", "1", ")", ")" ]
calculate and save the aoa contributor counts .
train
false
8,432
def test_create_user_story_attachment_without_file(client): us = f.UserStoryFactory.create() f.MembershipFactory(project=us.project, user=us.owner, is_admin=True) attachment_data = {'description': 'test', 'attached_file': None, 'project': us.project_id} url = reverse('userstory-attachments-list') client.login(us.owner) response = client.post(url, attachment_data) assert (response.status_code == 400)
[ "def", "test_create_user_story_attachment_without_file", "(", "client", ")", ":", "us", "=", "f", ".", "UserStoryFactory", ".", "create", "(", ")", "f", ".", "MembershipFactory", "(", "project", "=", "us", ".", "project", ",", "user", "=", "us", ".", "owner", ",", "is_admin", "=", "True", ")", "attachment_data", "=", "{", "'description'", ":", "'test'", ",", "'attached_file'", ":", "None", ",", "'project'", ":", "us", ".", "project_id", "}", "url", "=", "reverse", "(", "'userstory-attachments-list'", ")", "client", ".", "login", "(", "us", ".", "owner", ")", "response", "=", "client", ".", "post", "(", "url", ",", "attachment_data", ")", "assert", "(", "response", ".", "status_code", "==", "400", ")" ]
bug test "dont create attachments without attached_file" .
train
false
8,433
def libvlc_audio_set_volume(p_mi, i_volume): f = (_Cfunctions.get('libvlc_audio_set_volume', None) or _Cfunction('libvlc_audio_set_volume', ((1,), (1,)), None, ctypes.c_int, MediaPlayer, ctypes.c_int)) return f(p_mi, i_volume)
[ "def", "libvlc_audio_set_volume", "(", "p_mi", ",", "i_volume", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_set_volume'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_set_volume'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaPlayer", ",", "ctypes", ".", "c_int", ")", ")", "return", "f", "(", "p_mi", ",", "i_volume", ")" ]
set current software audio volume .
train
true
8,434
def append_PKCS7_padding(b): numpads = (16 - (len(b) % 16)) return (b + (numpads * bytes(chr(numpads), encoding=u'ascii')))
[ "def", "append_PKCS7_padding", "(", "b", ")", ":", "numpads", "=", "(", "16", "-", "(", "len", "(", "b", ")", "%", "16", ")", ")", "return", "(", "b", "+", "(", "numpads", "*", "bytes", "(", "chr", "(", "numpads", ")", ",", "encoding", "=", "u'ascii'", ")", ")", ")" ]
function to pad the given data to a multiple of 16-bytes by pkcs7 padding .
train
false
8,435
@dsym.command(name='sdks', short_help='List SDKs') @click.option('--sdk', help='Only include the given SDK instead of all.') @click.option('--version', help='Optionally a version filter. For instance 9 returns all versions 9.*, 9.1 returns 9.1.* etc.') @configuration def sdks(sdk, version): from sentry.models import DSymSDK last_prefix = None click.secho((' %-8s %-10s %-12s %-8s %s' % ('SDK', 'Version', 'Build', 'CPU', 'Bundles')), fg='cyan') click.secho(('-' * click.get_terminal_size()[0]), fg='yellow') for sdk in DSymSDK.objects.enumerate_sdks(sdk=sdk, version=version): prefix = (' %-8s %-10s ' % (sdk['sdk_name'], sdk['version'])) if (prefix == last_prefix): prefix = (' ' * len(prefix)) else: last_prefix = prefix click.echo(('%s%-12s %-8s %d' % (prefix, sdk['build'], sdk['cpu_name'], sdk['bundle_count'])))
[ "@", "dsym", ".", "command", "(", "name", "=", "'sdks'", ",", "short_help", "=", "'List SDKs'", ")", "@", "click", ".", "option", "(", "'--sdk'", ",", "help", "=", "'Only include the given SDK instead of all.'", ")", "@", "click", ".", "option", "(", "'--version'", ",", "help", "=", "'Optionally a version filter. For instance 9 returns all versions 9.*, 9.1 returns 9.1.* etc.'", ")", "@", "configuration", "def", "sdks", "(", "sdk", ",", "version", ")", ":", "from", "sentry", ".", "models", "import", "DSymSDK", "last_prefix", "=", "None", "click", ".", "secho", "(", "(", "' %-8s %-10s %-12s %-8s %s'", "%", "(", "'SDK'", ",", "'Version'", ",", "'Build'", ",", "'CPU'", ",", "'Bundles'", ")", ")", ",", "fg", "=", "'cyan'", ")", "click", ".", "secho", "(", "(", "'-'", "*", "click", ".", "get_terminal_size", "(", ")", "[", "0", "]", ")", ",", "fg", "=", "'yellow'", ")", "for", "sdk", "in", "DSymSDK", ".", "objects", ".", "enumerate_sdks", "(", "sdk", "=", "sdk", ",", "version", "=", "version", ")", ":", "prefix", "=", "(", "' %-8s %-10s '", "%", "(", "sdk", "[", "'sdk_name'", "]", ",", "sdk", "[", "'version'", "]", ")", ")", "if", "(", "prefix", "==", "last_prefix", ")", ":", "prefix", "=", "(", "' '", "*", "len", "(", "prefix", ")", ")", "else", ":", "last_prefix", "=", "prefix", "click", ".", "echo", "(", "(", "'%s%-12s %-8s %d'", "%", "(", "prefix", ",", "sdk", "[", "'build'", "]", ",", "sdk", "[", "'cpu_name'", "]", ",", "sdk", "[", "'bundle_count'", "]", ")", ")", ")" ]
print a list of all installed sdks and a breakdown of the symbols contained within .
train
false
8,436
def getRush(): if importCtypesFailed: return None policy = getThreadPolicy(getDefault=False, flavour=THREAD_TIME_CONSTRAINT_POLICY) default = getThreadPolicy(getDefault=True, flavour=THREAD_TIME_CONSTRAINT_POLICY) return (policy.period != default.period)
[ "def", "getRush", "(", ")", ":", "if", "importCtypesFailed", ":", "return", "None", "policy", "=", "getThreadPolicy", "(", "getDefault", "=", "False", ",", "flavour", "=", "THREAD_TIME_CONSTRAINT_POLICY", ")", "default", "=", "getThreadPolicy", "(", "getDefault", "=", "True", ",", "flavour", "=", "THREAD_TIME_CONSTRAINT_POLICY", ")", "return", "(", "policy", ".", "period", "!=", "default", ".", "period", ")" ]
determine whether or not we are in rush mode .
train
false
8,437
def subplot_tool(targetfig=None): tbar = rcParams[u'toolbar'] rcParams[u'toolbar'] = u'None' if (targetfig is None): manager = get_current_fig_manager() targetfig = manager.canvas.figure else: for manager in _pylab_helpers.Gcf._activeQue: if (manager.canvas.figure == targetfig): break else: raise RuntimeError(u'Could not find manager for targetfig') toolfig = figure(figsize=(6, 3)) toolfig.subplots_adjust(top=0.9) ret = SubplotTool(targetfig, toolfig) rcParams[u'toolbar'] = tbar _pylab_helpers.Gcf.set_active(manager) return ret
[ "def", "subplot_tool", "(", "targetfig", "=", "None", ")", ":", "tbar", "=", "rcParams", "[", "u'toolbar'", "]", "rcParams", "[", "u'toolbar'", "]", "=", "u'None'", "if", "(", "targetfig", "is", "None", ")", ":", "manager", "=", "get_current_fig_manager", "(", ")", "targetfig", "=", "manager", ".", "canvas", ".", "figure", "else", ":", "for", "manager", "in", "_pylab_helpers", ".", "Gcf", ".", "_activeQue", ":", "if", "(", "manager", ".", "canvas", ".", "figure", "==", "targetfig", ")", ":", "break", "else", ":", "raise", "RuntimeError", "(", "u'Could not find manager for targetfig'", ")", "toolfig", "=", "figure", "(", "figsize", "=", "(", "6", ",", "3", ")", ")", "toolfig", ".", "subplots_adjust", "(", "top", "=", "0.9", ")", "ret", "=", "SubplotTool", "(", "targetfig", ",", "toolfig", ")", "rcParams", "[", "u'toolbar'", "]", "=", "tbar", "_pylab_helpers", ".", "Gcf", ".", "set_active", "(", "manager", ")", "return", "ret" ]
launch a subplot tool window for *targetfig* .
train
false
8,438
def FakeGetPlatform(): if (sys.platform == 'darwin'): return 'macosx-' else: return distutils.util.get_platform()
[ "def", "FakeGetPlatform", "(", ")", ":", "if", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "return", "'macosx-'", "else", ":", "return", "distutils", ".", "util", ".", "get_platform", "(", ")" ]
fake distutils .
train
false
8,440
def has_c(): return _USE_C
[ "def", "has_c", "(", ")", ":", "return", "_USE_C" ]
is the c extension installed? .
train
false
8,443
def renderSymbol(symbol, size, pen, brush, device=None): penPxWidth = max(np.ceil(pen.widthF()), 1) if (device is None): device = QtGui.QImage(int((size + penPxWidth)), int((size + penPxWidth)), QtGui.QImage.Format_ARGB32) device.fill(0) p = QtGui.QPainter(device) try: p.setRenderHint(p.Antialiasing) p.translate((device.width() * 0.5), (device.height() * 0.5)) drawSymbol(p, symbol, size, pen, brush) finally: p.end() return device
[ "def", "renderSymbol", "(", "symbol", ",", "size", ",", "pen", ",", "brush", ",", "device", "=", "None", ")", ":", "penPxWidth", "=", "max", "(", "np", ".", "ceil", "(", "pen", ".", "widthF", "(", ")", ")", ",", "1", ")", "if", "(", "device", "is", "None", ")", ":", "device", "=", "QtGui", ".", "QImage", "(", "int", "(", "(", "size", "+", "penPxWidth", ")", ")", ",", "int", "(", "(", "size", "+", "penPxWidth", ")", ")", ",", "QtGui", ".", "QImage", ".", "Format_ARGB32", ")", "device", ".", "fill", "(", "0", ")", "p", "=", "QtGui", ".", "QPainter", "(", "device", ")", "try", ":", "p", ".", "setRenderHint", "(", "p", ".", "Antialiasing", ")", "p", ".", "translate", "(", "(", "device", ".", "width", "(", ")", "*", "0.5", ")", ",", "(", "device", ".", "height", "(", ")", "*", "0.5", ")", ")", "drawSymbol", "(", "p", ",", "symbol", ",", "size", ",", "pen", ",", "brush", ")", "finally", ":", "p", ".", "end", "(", ")", "return", "device" ]
render a symbol specification to qimage .
train
false
8,444
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
8,445
def read_mesh(fname): fmt = op.splitext(fname)[1].lower() if (fmt == '.gz'): fmt = op.splitext(op.splitext(fname)[0])[1].lower() if (fmt in '.obj'): return WavefrontReader.read(fname) elif (not format): raise ValueError('read_mesh needs could not determine format.') else: raise ValueError(('read_mesh does not understand format %s.' % fmt))
[ "def", "read_mesh", "(", "fname", ")", ":", "fmt", "=", "op", ".", "splitext", "(", "fname", ")", "[", "1", "]", ".", "lower", "(", ")", "if", "(", "fmt", "==", "'.gz'", ")", ":", "fmt", "=", "op", ".", "splitext", "(", "op", ".", "splitext", "(", "fname", ")", "[", "0", "]", ")", "[", "1", "]", ".", "lower", "(", ")", "if", "(", "fmt", "in", "'.obj'", ")", ":", "return", "WavefrontReader", ".", "read", "(", "fname", ")", "elif", "(", "not", "format", ")", ":", "raise", "ValueError", "(", "'read_mesh needs could not determine format.'", ")", "else", ":", "raise", "ValueError", "(", "(", "'read_mesh does not understand format %s.'", "%", "fmt", ")", ")" ]
read mesh data from file .
train
true
8,446
def exchange_shared(a, b): raise NotImplementedError('TODO: implement the function')
[ "def", "exchange_shared", "(", "a", ",", "b", ")", ":", "raise", "NotImplementedError", "(", "'TODO: implement the function'", ")" ]
a: a theano shared variable b: a theano shared variable uses get_value and set_value to swap the values stored in a and b .
train
false
8,447
def _param_if_exists(if_exists): if if_exists: return '--if-exists ' else: return ''
[ "def", "_param_if_exists", "(", "if_exists", ")", ":", "if", "if_exists", ":", "return", "'--if-exists '", "else", ":", "return", "''" ]
returns --if-exist parameter for open vswitch command .
train
false
8,449
def iirdesign(wp, ws, gpass, gstop, analog=False, ftype='ellip', output='ba'): try: ordfunc = filter_dict[ftype][1] except KeyError: raise ValueError(('Invalid IIR filter type: %s' % ftype)) except IndexError: raise ValueError(('%s does not have order selection. Use iirfilter function.' % ftype)) wp = atleast_1d(wp) ws = atleast_1d(ws) band_type = (2 * (len(wp) - 1)) band_type += 1 if (wp[0] >= ws[0]): band_type += 1 btype = {1: 'lowpass', 2: 'highpass', 3: 'bandstop', 4: 'bandpass'}[band_type] (N, Wn) = ordfunc(wp, ws, gpass, gstop, analog=analog) return iirfilter(N, Wn, rp=gpass, rs=gstop, analog=analog, btype=btype, ftype=ftype, output=output)
[ "def", "iirdesign", "(", "wp", ",", "ws", ",", "gpass", ",", "gstop", ",", "analog", "=", "False", ",", "ftype", "=", "'ellip'", ",", "output", "=", "'ba'", ")", ":", "try", ":", "ordfunc", "=", "filter_dict", "[", "ftype", "]", "[", "1", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "'Invalid IIR filter type: %s'", "%", "ftype", ")", ")", "except", "IndexError", ":", "raise", "ValueError", "(", "(", "'%s does not have order selection. Use iirfilter function.'", "%", "ftype", ")", ")", "wp", "=", "atleast_1d", "(", "wp", ")", "ws", "=", "atleast_1d", "(", "ws", ")", "band_type", "=", "(", "2", "*", "(", "len", "(", "wp", ")", "-", "1", ")", ")", "band_type", "+=", "1", "if", "(", "wp", "[", "0", "]", ">=", "ws", "[", "0", "]", ")", ":", "band_type", "+=", "1", "btype", "=", "{", "1", ":", "'lowpass'", ",", "2", ":", "'highpass'", ",", "3", ":", "'bandstop'", ",", "4", ":", "'bandpass'", "}", "[", "band_type", "]", "(", "N", ",", "Wn", ")", "=", "ordfunc", "(", "wp", ",", "ws", ",", "gpass", ",", "gstop", ",", "analog", "=", "analog", ")", "return", "iirfilter", "(", "N", ",", "Wn", ",", "rp", "=", "gpass", ",", "rs", "=", "gstop", ",", "analog", "=", "analog", ",", "btype", "=", "btype", ",", "ftype", "=", "ftype", ",", "output", "=", "output", ")" ]
complete iir digital and analog filter design .
train
false
8,452
@handle_response_format @treeio_login_required @module_admin_required() def index_users(request, response_format='html'): users = User.objects.order_by('user__username') return render_to_response('core/administration/index_users', {'users': users}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "index_users", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "users", "=", "User", ".", "objects", ".", "order_by", "(", "'user__username'", ")", "return", "render_to_response", "(", "'core/administration/index_users'", ",", "{", "'users'", ":", "users", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
user list .
train
false
8,454
def create_location(org, course, run, block_type, block_id): return modulestore().make_course_key(org, course, run).make_usage_key(block_type, block_id)
[ "def", "create_location", "(", "org", ",", "course", ",", "run", ",", "block_type", ",", "block_id", ")", ":", "return", "modulestore", "(", ")", ".", "make_course_key", "(", "org", ",", "course", ",", "run", ")", ".", "make_usage_key", "(", "block_type", ",", "block_id", ")" ]
returns the usage key for the given key parameters using the default modulestore .
train
false
8,455
@cli.command() def colordemo(): for color in ('red', 'green', 'blue'): click.echo(click.style(('I am colored %s' % color), fg=color)) click.echo(click.style(('I am background colored %s' % color), bg=color))
[ "@", "cli", ".", "command", "(", ")", "def", "colordemo", "(", ")", ":", "for", "color", "in", "(", "'red'", ",", "'green'", ",", "'blue'", ")", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "(", "'I am colored %s'", "%", "color", ")", ",", "fg", "=", "color", ")", ")", "click", ".", "echo", "(", "click", ".", "style", "(", "(", "'I am background colored %s'", "%", "color", ")", ",", "bg", "=", "color", ")", ")" ]
demonstrates ansi color support .
train
false
8,456
def gen_all_identities(): def _build_identity_dict(mail, display_name, given_name, surname): ' Helper function to return a dict of test identity ' meta_dict = {'Shib-Identity-Provider': IDP, 'REMOTE_USER': REMOTE_USER} if (display_name is not None): meta_dict['displayName'] = display_name if (mail is not None): meta_dict['mail'] = mail if (given_name is not None): meta_dict['givenName'] = given_name if (surname is not None): meta_dict['sn'] = surname return meta_dict for mail in MAILS: for given_name in GIVENNAMES: for surname in SNS: for display_name in DISPLAYNAMES: (yield _build_identity_dict(mail, display_name, given_name, surname))
[ "def", "gen_all_identities", "(", ")", ":", "def", "_build_identity_dict", "(", "mail", ",", "display_name", ",", "given_name", ",", "surname", ")", ":", "meta_dict", "=", "{", "'Shib-Identity-Provider'", ":", "IDP", ",", "'REMOTE_USER'", ":", "REMOTE_USER", "}", "if", "(", "display_name", "is", "not", "None", ")", ":", "meta_dict", "[", "'displayName'", "]", "=", "display_name", "if", "(", "mail", "is", "not", "None", ")", ":", "meta_dict", "[", "'mail'", "]", "=", "mail", "if", "(", "given_name", "is", "not", "None", ")", ":", "meta_dict", "[", "'givenName'", "]", "=", "given_name", "if", "(", "surname", "is", "not", "None", ")", ":", "meta_dict", "[", "'sn'", "]", "=", "surname", "return", "meta_dict", "for", "mail", "in", "MAILS", ":", "for", "given_name", "in", "GIVENNAMES", ":", "for", "surname", "in", "SNS", ":", "for", "display_name", "in", "DISPLAYNAMES", ":", "(", "yield", "_build_identity_dict", "(", "mail", ",", "display_name", ",", "given_name", ",", "surname", ")", ")" ]
a generator for all combinations of test inputs .
train
false
8,458
def sigmoid_prime(z): return (sigmoid(z) * (1 - sigmoid(z)))
[ "def", "sigmoid_prime", "(", "z", ")", ":", "return", "(", "sigmoid", "(", "z", ")", "*", "(", "1", "-", "sigmoid", "(", "z", ")", ")", ")" ]
derivative of the sigmoid function .
train
false
8,460
def assert_element_text_matches(output, path, expression): text = xml_find_text(output, path) if (re.match(expression, text) is None): errmsg = ("Expected element with path '%s' to contain text matching '%s', instead text '%s' was found." % (path, expression, text)) raise AssertionError(errmsg)
[ "def", "assert_element_text_matches", "(", "output", ",", "path", ",", "expression", ")", ":", "text", "=", "xml_find_text", "(", "output", ",", "path", ")", "if", "(", "re", ".", "match", "(", "expression", ",", "text", ")", "is", "None", ")", ":", "errmsg", "=", "(", "\"Expected element with path '%s' to contain text matching '%s', instead text '%s' was found.\"", "%", "(", "path", ",", "expression", ",", "text", ")", ")", "raise", "AssertionError", "(", "errmsg", ")" ]
asserts the text of the first element matching the specified path matches the specified regular expression .
train
false
8,462
@requires_version('scipy', '0.11') def test_add_patch_info(): src = read_source_spaces(fname_small) src_new = read_source_spaces(fname_small) for s in src_new: s['nearest'] = None s['nearest_dist'] = None s['pinfo'] = None try: add_source_space_distances(src_new, dist_limit=1e-05) except RuntimeError: pass else: assert_true(all(((s['nearest'] is None) for s in src_new))) assert_true(all(((s['nearest_dist'] is None) for s in src_new))) assert_true(all(((s['pinfo'] is None) for s in src_new))) add_source_space_distances(src_new) for (s1, s2) in zip(src, src_new): assert_array_equal(s1['nearest'], s2['nearest']) assert_allclose(s1['nearest_dist'], s2['nearest_dist'], atol=1e-07) assert_equal(len(s1['pinfo']), len(s2['pinfo'])) for (p1, p2) in zip(s1['pinfo'], s2['pinfo']): assert_array_equal(p1, p2)
[ "@", "requires_version", "(", "'scipy'", ",", "'0.11'", ")", "def", "test_add_patch_info", "(", ")", ":", "src", "=", "read_source_spaces", "(", "fname_small", ")", "src_new", "=", "read_source_spaces", "(", "fname_small", ")", "for", "s", "in", "src_new", ":", "s", "[", "'nearest'", "]", "=", "None", "s", "[", "'nearest_dist'", "]", "=", "None", "s", "[", "'pinfo'", "]", "=", "None", "try", ":", "add_source_space_distances", "(", "src_new", ",", "dist_limit", "=", "1e-05", ")", "except", "RuntimeError", ":", "pass", "else", ":", "assert_true", "(", "all", "(", "(", "(", "s", "[", "'nearest'", "]", "is", "None", ")", "for", "s", "in", "src_new", ")", ")", ")", "assert_true", "(", "all", "(", "(", "(", "s", "[", "'nearest_dist'", "]", "is", "None", ")", "for", "s", "in", "src_new", ")", ")", ")", "assert_true", "(", "all", "(", "(", "(", "s", "[", "'pinfo'", "]", "is", "None", ")", "for", "s", "in", "src_new", ")", ")", ")", "add_source_space_distances", "(", "src_new", ")", "for", "(", "s1", ",", "s2", ")", "in", "zip", "(", "src", ",", "src_new", ")", ":", "assert_array_equal", "(", "s1", "[", "'nearest'", "]", ",", "s2", "[", "'nearest'", "]", ")", "assert_allclose", "(", "s1", "[", "'nearest_dist'", "]", ",", "s2", "[", "'nearest_dist'", "]", ",", "atol", "=", "1e-07", ")", "assert_equal", "(", "len", "(", "s1", "[", "'pinfo'", "]", ")", ",", "len", "(", "s2", "[", "'pinfo'", "]", ")", ")", "for", "(", "p1", ",", "p2", ")", "in", "zip", "(", "s1", "[", "'pinfo'", "]", ",", "s2", "[", "'pinfo'", "]", ")", ":", "assert_array_equal", "(", "p1", ",", "p2", ")" ]
test adding patch info to source space .
train
false
8,463
def case_appointment(): def prep(r): if (r.method == 'import'): ptable = s3db.pr_person ptable.pe_label.requires = None return True s3.prep = prep table = s3db.dvr_case_appointment return s3_rest_controller(csv_extra_fields=[{'label': 'Appointment Type', 'field': table.type_id}, {'label': 'Appointment Date', 'field': table.date}, {'label': 'Appointment Status', 'field': table.status}])
[ "def", "case_appointment", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "(", "r", ".", "method", "==", "'import'", ")", ":", "ptable", "=", "s3db", ".", "pr_person", "ptable", ".", "pe_label", ".", "requires", "=", "None", "return", "True", "s3", ".", "prep", "=", "prep", "table", "=", "s3db", ".", "dvr_case_appointment", "return", "s3_rest_controller", "(", "csv_extra_fields", "=", "[", "{", "'label'", ":", "'Appointment Type'", ",", "'field'", ":", "table", ".", "type_id", "}", ",", "{", "'label'", ":", "'Appointment Date'", ",", "'field'", ":", "table", ".", "date", "}", ",", "{", "'label'", ":", "'Appointment Status'", ",", "'field'", ":", "table", ".", "status", "}", "]", ")" ]
appointments: restful crud controller .
train
false
8,465
def load_windowstime(buf, pos): unix_epoch = 11644473600 (val1, pos) = load_le32(buf, pos) (val2, pos) = load_le32(buf, pos) (secs, n1secs) = divmod(((val2 << 32) | val1), 10000000) dt = datetime.fromtimestamp((secs - unix_epoch), UTC) dt = dt.replace(microsecond=(n1secs // 10)) return (dt, pos)
[ "def", "load_windowstime", "(", "buf", ",", "pos", ")", ":", "unix_epoch", "=", "11644473600", "(", "val1", ",", "pos", ")", "=", "load_le32", "(", "buf", ",", "pos", ")", "(", "val2", ",", "pos", ")", "=", "load_le32", "(", "buf", ",", "pos", ")", "(", "secs", ",", "n1secs", ")", "=", "divmod", "(", "(", "(", "val2", "<<", "32", ")", "|", "val1", ")", ",", "10000000", ")", "dt", "=", "datetime", ".", "fromtimestamp", "(", "(", "secs", "-", "unix_epoch", ")", ",", "UTC", ")", "dt", "=", "dt", ".", "replace", "(", "microsecond", "=", "(", "n1secs", "//", "10", ")", ")", "return", "(", "dt", ",", "pos", ")" ]
load le64 windows timestamp .
train
true
8,466
def libvlc_media_library_release(p_mlib): f = (_Cfunctions.get('libvlc_media_library_release', None) or _Cfunction('libvlc_media_library_release', ((1,),), None, None, MediaLibrary)) return f(p_mlib)
[ "def", "libvlc_media_library_release", "(", "p_mlib", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_library_release'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_library_release'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaLibrary", ")", ")", "return", "f", "(", "p_mlib", ")" ]
release media library object .
train
false
8,467
@pytest.fixture def admin_group(db): return Group.objects.create(name='Admins', rules='*:*')
[ "@", "pytest", ".", "fixture", "def", "admin_group", "(", "db", ")", ":", "return", "Group", ".", "objects", ".", "create", "(", "name", "=", "'Admins'", ",", "rules", "=", "'*:*'", ")" ]
create the admins group .
train
false
8,468
@register.filter def can_read(obj, user): return obj.can_read(user)
[ "@", "register", ".", "filter", "def", "can_read", "(", "obj", ",", "user", ")", ":", "return", "obj", ".", "can_read", "(", "user", ")" ]
takes article or related to article model .
train
false
8,469
def Ref(stack, fn_name, args): if (args in stack): RefClass = hot_funcs.GetResource else: RefClass = ParamRef return RefClass(stack, fn_name, args)
[ "def", "Ref", "(", "stack", ",", "fn_name", ",", "args", ")", ":", "if", "(", "args", "in", "stack", ")", ":", "RefClass", "=", "hot_funcs", ".", "GetResource", "else", ":", "RefClass", "=", "ParamRef", "return", "RefClass", "(", "stack", ",", "fn_name", ",", "args", ")" ]
a function for resolving parameters or resource references .
train
false
8,471
def _generate_unsampled_indices(random_state, n_samples): sample_indices = _generate_sample_indices(random_state, n_samples) sample_counts = bincount(sample_indices, minlength=n_samples) unsampled_mask = (sample_counts == 0) indices_range = np.arange(n_samples) unsampled_indices = indices_range[unsampled_mask] return unsampled_indices
[ "def", "_generate_unsampled_indices", "(", "random_state", ",", "n_samples", ")", ":", "sample_indices", "=", "_generate_sample_indices", "(", "random_state", ",", "n_samples", ")", "sample_counts", "=", "bincount", "(", "sample_indices", ",", "minlength", "=", "n_samples", ")", "unsampled_mask", "=", "(", "sample_counts", "==", "0", ")", "indices_range", "=", "np", ".", "arange", "(", "n_samples", ")", "unsampled_indices", "=", "indices_range", "[", "unsampled_mask", "]", "return", "unsampled_indices" ]
private function used to forest .
train
false
8,472
def findRequirements(): requirementsPath = os.path.join(REPO_DIR, 'requirements.txt') requirements = parse_file(requirementsPath) if nupicBindingsPrereleaseInstalled(): requirements = [req for req in requirements if ('nupic.bindings' not in req)] return requirements
[ "def", "findRequirements", "(", ")", ":", "requirementsPath", "=", "os", ".", "path", ".", "join", "(", "REPO_DIR", ",", "'requirements.txt'", ")", "requirements", "=", "parse_file", "(", "requirementsPath", ")", "if", "nupicBindingsPrereleaseInstalled", "(", ")", ":", "requirements", "=", "[", "req", "for", "req", "in", "requirements", "if", "(", "'nupic.bindings'", "not", "in", "req", ")", "]", "return", "requirements" ]
read the requirements .
train
true
8,473
def syncitall(list_accounts, config): threads = threadutil.accountThreads() for accountname in list_accounts: account = accounts.SyncableAccount(config, accountname) thread = threadutil.InstanceLimitedThread(ACCOUNT_LIMITED_THREAD_NAME, target=account.syncrunner, name=('Account sync %s' % accountname)) thread.setDaemon(True) thread.start() threads.add(thread) threads.wait()
[ "def", "syncitall", "(", "list_accounts", ",", "config", ")", ":", "threads", "=", "threadutil", ".", "accountThreads", "(", ")", "for", "accountname", "in", "list_accounts", ":", "account", "=", "accounts", ".", "SyncableAccount", "(", "config", ",", "accountname", ")", "thread", "=", "threadutil", ".", "InstanceLimitedThread", "(", "ACCOUNT_LIMITED_THREAD_NAME", ",", "target", "=", "account", ".", "syncrunner", ",", "name", "=", "(", "'Account sync %s'", "%", "accountname", ")", ")", "thread", ".", "setDaemon", "(", "True", ")", "thread", ".", "start", "(", ")", "threads", ".", "add", "(", "thread", ")", "threads", ".", "wait", "(", ")" ]
the target when in multithreading mode for running accounts threads .
train
false
8,474
def force_mapping(m): if isinstance(m, (LazyObject, LazySettings)): m = m._wrapped return (DictAttribute(m) if (not isinstance(m, Mapping)) else m)
[ "def", "force_mapping", "(", "m", ")", ":", "if", "isinstance", "(", "m", ",", "(", "LazyObject", ",", "LazySettings", ")", ")", ":", "m", "=", "m", ".", "_wrapped", "return", "(", "DictAttribute", "(", "m", ")", "if", "(", "not", "isinstance", "(", "m", ",", "Mapping", ")", ")", "else", "m", ")" ]
wrap object into supporting the mapping interface if necessary .
train
false
8,476
def form(): form = FORM(TABLE(TR('Your name:', INPUT(_type='text', _name='name', requires=IS_NOT_EMPTY())), TR('Your email:', INPUT(_type='text', _name='email', requires=IS_EMAIL())), TR('Admin', INPUT(_type='checkbox', _name='admin')), TR('Sure?', SELECT('yes', 'no', _name='sure', requires=IS_IN_SET(['yes', 'no']))), TR('Profile', TEXTAREA(_name='profile', value='write something here')), TR('', INPUT(_type='submit', _value='SUBMIT')))) if form.process().accepted: response.flash = 'form accepted' elif form.errors: response.flash = 'form is invalid' else: response.flash = 'please fill the form' return dict(form=form, vars=form.vars)
[ "def", "form", "(", ")", ":", "form", "=", "FORM", "(", "TABLE", "(", "TR", "(", "'Your name:'", ",", "INPUT", "(", "_type", "=", "'text'", ",", "_name", "=", "'name'", ",", "requires", "=", "IS_NOT_EMPTY", "(", ")", ")", ")", ",", "TR", "(", "'Your email:'", ",", "INPUT", "(", "_type", "=", "'text'", ",", "_name", "=", "'email'", ",", "requires", "=", "IS_EMAIL", "(", ")", ")", ")", ",", "TR", "(", "'Admin'", ",", "INPUT", "(", "_type", "=", "'checkbox'", ",", "_name", "=", "'admin'", ")", ")", ",", "TR", "(", "'Sure?'", ",", "SELECT", "(", "'yes'", ",", "'no'", ",", "_name", "=", "'sure'", ",", "requires", "=", "IS_IN_SET", "(", "[", "'yes'", ",", "'no'", "]", ")", ")", ")", ",", "TR", "(", "'Profile'", ",", "TEXTAREA", "(", "_name", "=", "'profile'", ",", "value", "=", "'write something here'", ")", ")", ",", "TR", "(", "''", ",", "INPUT", "(", "_type", "=", "'submit'", ",", "_value", "=", "'SUBMIT'", ")", ")", ")", ")", "if", "form", ".", "process", "(", ")", ".", "accepted", ":", "response", ".", "flash", "=", "'form accepted'", "elif", "form", ".", "errors", ":", "response", ".", "flash", "=", "'form is invalid'", "else", ":", "response", ".", "flash", "=", "'please fill the form'", "return", "dict", "(", "form", "=", "form", ",", "vars", "=", "form", ".", "vars", ")" ]
create a qformlayout with the specified sizes and items .
train
false
8,477
def train_regressor(orig_wordvecs, w2v_W, w2v_vocab): d = defaultdict((lambda : 0)) for w in w2v_vocab.keys(): d[w] = 1 shared = OrderedDict() count = 0 for w in list(orig_wordvecs.keys())[:(-2)]: if (d[w] > 0): shared[w] = count count += 1 w2v = np.zeros((len(shared), 300), dtype='float32') sg = np.zeros((len(shared), 620), dtype='float32') for w in shared.keys(): w2v[shared[w]] = w2v_W[w2v_vocab[w]] sg[shared[w]] = orig_wordvecs[w] train_set = ArrayIterator(X=w2v, y=sg, make_onehot=False) layers = [Linear(nout=620, init=Gaussian(loc=0.0, scale=0.1)), Bias(init=Constant(0.0))] clf = Model(layers=layers) cost = GeneralizedCost(costfunc=SumSquared()) opt = GradientDescentMomentum(0.1, 0.9, gradient_clip_value=5.0) callbacks = Callbacks(clf) clf.fit(train_set, num_epochs=20, optimizer=opt, cost=cost, callbacks=callbacks) return clf
[ "def", "train_regressor", "(", "orig_wordvecs", ",", "w2v_W", ",", "w2v_vocab", ")", ":", "d", "=", "defaultdict", "(", "(", "lambda", ":", "0", ")", ")", "for", "w", "in", "w2v_vocab", ".", "keys", "(", ")", ":", "d", "[", "w", "]", "=", "1", "shared", "=", "OrderedDict", "(", ")", "count", "=", "0", "for", "w", "in", "list", "(", "orig_wordvecs", ".", "keys", "(", ")", ")", "[", ":", "(", "-", "2", ")", "]", ":", "if", "(", "d", "[", "w", "]", ">", "0", ")", ":", "shared", "[", "w", "]", "=", "count", "count", "+=", "1", "w2v", "=", "np", ".", "zeros", "(", "(", "len", "(", "shared", ")", ",", "300", ")", ",", "dtype", "=", "'float32'", ")", "sg", "=", "np", ".", "zeros", "(", "(", "len", "(", "shared", ")", ",", "620", ")", ",", "dtype", "=", "'float32'", ")", "for", "w", "in", "shared", ".", "keys", "(", ")", ":", "w2v", "[", "shared", "[", "w", "]", "]", "=", "w2v_W", "[", "w2v_vocab", "[", "w", "]", "]", "sg", "[", "shared", "[", "w", "]", "]", "=", "orig_wordvecs", "[", "w", "]", "train_set", "=", "ArrayIterator", "(", "X", "=", "w2v", ",", "y", "=", "sg", ",", "make_onehot", "=", "False", ")", "layers", "=", "[", "Linear", "(", "nout", "=", "620", ",", "init", "=", "Gaussian", "(", "loc", "=", "0.0", ",", "scale", "=", "0.1", ")", ")", ",", "Bias", "(", "init", "=", "Constant", "(", "0.0", ")", ")", "]", "clf", "=", "Model", "(", "layers", "=", "layers", ")", "cost", "=", "GeneralizedCost", "(", "costfunc", "=", "SumSquared", "(", ")", ")", "opt", "=", "GradientDescentMomentum", "(", "0.1", ",", "0.9", ",", "gradient_clip_value", "=", "5.0", ")", "callbacks", "=", "Callbacks", "(", "clf", ")", "clf", ".", "fit", "(", "train_set", ",", "num_epochs", "=", "20", ",", "optimizer", "=", "opt", ",", "cost", "=", "cost", ",", "callbacks", "=", "callbacks", ")", "return", "clf" ]
return regressor to map word2vec to rnn word space .
train
false
8,478
def poisson(y_true, y_pred): return tf.reduce_sum((y_pred - (y_true * tf.log((y_pred + 1e-08)))))
[ "def", "poisson", "(", "y_true", ",", "y_pred", ")", ":", "return", "tf", ".", "reduce_sum", "(", "(", "y_pred", "-", "(", "y_true", "*", "tf", ".", "log", "(", "(", "y_pred", "+", "1e-08", ")", ")", ")", ")", ")" ]
draw samples from a poisson distribution .
train
false
8,480
def xen_mem(name): global conn global conn_info return (conn_info[1] * 1024)
[ "def", "xen_mem", "(", "name", ")", ":", "global", "conn", "global", "conn_info", "return", "(", "conn_info", "[", "1", "]", "*", "1024", ")" ]
return node memory .
train
false
8,481
@handle_response_format @treeio_login_required def agent_delete(request, agent_id, response_format='html'): view_agent = get_object_or_404(ServiceAgent, pk=agent_id) if (not request.user.profile.has_permission(view_agent, mode='w')): return user_denied(request, message="You don't have access to this Service Agent") if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): view_agent.trash = True view_agent.save() else: view_agent.delete() return HttpResponseRedirect(reverse('services_agent_index')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('services_agent_view', args=[view_agent.id])) context = _get_default_context(request) context.update({'view_agent': view_agent}) return render_to_response('services/agent_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "agent_delete", "(", "request", ",", "agent_id", ",", "response_format", "=", "'html'", ")", ":", "view_agent", "=", "get_object_or_404", "(", "ServiceAgent", ",", "pk", "=", "agent_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "view_agent", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Service Agent\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "view_agent", ".", "trash", "=", "True", "view_agent", ".", "save", "(", ")", "else", ":", "view_agent", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_agent_index'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_agent_view'", ",", "args", "=", "[", "view_agent", ".", "id", "]", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'view_agent'", ":", "view_agent", "}", ")", "return", "render_to_response", "(", "'services/agent_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
agent delete .
train
false
8,482
def to_md(journal): out = [] (year, month) = ((-1), (-1)) for e in journal.entries: if (not (e.date.year == year)): year = e.date.year out.append(str(year)) out.append(((u'=' * len(str(year))) + u'\n')) if (not (e.date.month == month)): month = e.date.month out.append(e.date.strftime(u'%B')) out.append(((u'-' * len(e.date.strftime(u'%B'))) + u'\n')) out.append(e.to_md()) result = u'\n'.join(out) return result
[ "def", "to_md", "(", "journal", ")", ":", "out", "=", "[", "]", "(", "year", ",", "month", ")", "=", "(", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", "for", "e", "in", "journal", ".", "entries", ":", "if", "(", "not", "(", "e", ".", "date", ".", "year", "==", "year", ")", ")", ":", "year", "=", "e", ".", "date", ".", "year", "out", ".", "append", "(", "str", "(", "year", ")", ")", "out", ".", "append", "(", "(", "(", "u'='", "*", "len", "(", "str", "(", "year", ")", ")", ")", "+", "u'\\n'", ")", ")", "if", "(", "not", "(", "e", ".", "date", ".", "month", "==", "month", ")", ")", ":", "month", "=", "e", ".", "date", ".", "month", "out", ".", "append", "(", "e", ".", "date", ".", "strftime", "(", "u'%B'", ")", ")", "out", ".", "append", "(", "(", "(", "u'-'", "*", "len", "(", "e", ".", "date", ".", "strftime", "(", "u'%B'", ")", ")", ")", "+", "u'\\n'", ")", ")", "out", ".", "append", "(", "e", ".", "to_md", "(", ")", ")", "result", "=", "u'\\n'", ".", "join", "(", "out", ")", "return", "result" ]
returns a markdown representation of the journal .
train
false
8,483
def do_score_for_object(parser, token): bits = token.contents.split() if (len(bits) != 4): raise template.TemplateSyntaxError(("'%s' tag takes exactly three arguments" % bits[0])) if (bits[2] != 'as'): raise template.TemplateSyntaxError(("second argument to '%s' tag must be 'as'" % bits[0])) return ScoreForObjectNode(bits[1], bits[3])
[ "def", "do_score_for_object", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "4", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"'%s' tag takes exactly three arguments\"", "%", "bits", "[", "0", "]", ")", ")", "if", "(", "bits", "[", "2", "]", "!=", "'as'", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"second argument to '%s' tag must be 'as'\"", "%", "bits", "[", "0", "]", ")", ")", "return", "ScoreForObjectNode", "(", "bits", "[", "1", "]", ",", "bits", "[", "3", "]", ")" ]
retrieves the total score for an object and the number of votes its received and stores them in a context variable which has score and num_votes properties .
train
false
8,484
def get_vmdk_adapter_type(adapter_type): if (adapter_type in [constants.ADAPTER_TYPE_LSILOGICSAS, constants.ADAPTER_TYPE_PARAVIRTUAL]): vmdk_adapter_type = constants.DEFAULT_ADAPTER_TYPE else: vmdk_adapter_type = adapter_type return vmdk_adapter_type
[ "def", "get_vmdk_adapter_type", "(", "adapter_type", ")", ":", "if", "(", "adapter_type", "in", "[", "constants", ".", "ADAPTER_TYPE_LSILOGICSAS", ",", "constants", ".", "ADAPTER_TYPE_PARAVIRTUAL", "]", ")", ":", "vmdk_adapter_type", "=", "constants", ".", "DEFAULT_ADAPTER_TYPE", "else", ":", "vmdk_adapter_type", "=", "adapter_type", "return", "vmdk_adapter_type" ]
return the adapter type to be used in vmdk descriptor .
train
false
8,486
def to_scipy_sparse(m, **options): dtype = options.get('dtype', 'complex') if isinstance(m, (Matrix, Expr)): return sympy_to_scipy_sparse(m, dtype=dtype) elif isinstance(m, numpy_ndarray): if (not sparse): raise ImportError return sparse.csr_matrix(m) elif isinstance(m, scipy_sparse_matrix): return m raise TypeError(('Expected sympy/numpy/scipy.sparse matrix, got: %r' % m))
[ "def", "to_scipy_sparse", "(", "m", ",", "**", "options", ")", ":", "dtype", "=", "options", ".", "get", "(", "'dtype'", ",", "'complex'", ")", "if", "isinstance", "(", "m", ",", "(", "Matrix", ",", "Expr", ")", ")", ":", "return", "sympy_to_scipy_sparse", "(", "m", ",", "dtype", "=", "dtype", ")", "elif", "isinstance", "(", "m", ",", "numpy_ndarray", ")", ":", "if", "(", "not", "sparse", ")", ":", "raise", "ImportError", "return", "sparse", ".", "csr_matrix", "(", "m", ")", "elif", "isinstance", "(", "m", ",", "scipy_sparse_matrix", ")", ":", "return", "m", "raise", "TypeError", "(", "(", "'Expected sympy/numpy/scipy.sparse matrix, got: %r'", "%", "m", ")", ")" ]
convert a sympy/numpy matrix to a scipy .
train
false
8,487
def xnormpath(path): return os.path.normpath(path).replace(os.sep, '/')
[ "def", "xnormpath", "(", "path", ")", ":", "return", "os", ".", "path", ".", "normpath", "(", "path", ")", ".", "replace", "(", "os", ".", "sep", ",", "'/'", ")" ]
cross-platform version of os .
train
false
8,488
def image_persistent(call=None, kwargs=None): if (call != 'function'): raise SaltCloudSystemExit('The image_persistent function must be called with -f or --function.') if (kwargs is None): kwargs = {} name = kwargs.get('name', None) persist = kwargs.get('persist', None) image_id = kwargs.get('image_id', None) if (persist is None): raise SaltCloudSystemExit("The image_persistent function requires 'persist' to be set to 'True' or 'False'.") if image_id: if name: log.warning("Both the 'image_id' and 'name' arguments were provided. 'image_id' will take precedence.") elif name: image_id = get_image_id(kwargs={'name': name}) else: raise SaltCloudSystemExit("The image_persistent function requires either a 'name' or an 'image_id' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) response = server.one.image.persistent(auth, int(image_id), salt.utils.is_true(persist)) data = {'action': 'image.persistent', 'response': response[0], 'image_id': response[1], 'error_code': response[2]} return data
[ "def", "image_persistent", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The image_persistent function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "name", "=", "kwargs", ".", "get", "(", "'name'", ",", "None", ")", "persist", "=", "kwargs", ".", "get", "(", "'persist'", ",", "None", ")", "image_id", "=", "kwargs", ".", "get", "(", "'image_id'", ",", "None", ")", "if", "(", "persist", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_persistent function requires 'persist' to be set to 'True' or 'False'.\"", ")", "if", "image_id", ":", "if", "name", ":", "log", ".", "warning", "(", "\"Both the 'image_id' and 'name' arguments were provided. 'image_id' will take precedence.\"", ")", "elif", "name", ":", "image_id", "=", "get_image_id", "(", "kwargs", "=", "{", "'name'", ":", "name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_persistent function requires either a 'name' or an 'image_id' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "image", ".", "persistent", "(", "auth", ",", "int", "(", "image_id", ")", ",", "salt", ".", "utils", ".", "is_true", "(", "persist", ")", ")", "data", "=", "{", "'action'", ":", "'image.persistent'", ",", "'response'", ":", "response", "[", "0", "]", ",", "'image_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "data" ]
sets the image as persistent or not persistent .
train
true
8,491
def deferred(timeout=None): (reactor, reactor_thread) = threaded_reactor() if (reactor is None): raise ImportError('twisted is not available or could not be imported') try: ((timeout is None) or (timeout + 0)) except TypeError: raise TypeError("'timeout' argument must be a number or None") def decorate(func): def wrapper(*args, **kargs): q = Queue() def callback(value): q.put(None) def errback(failure): try: failure.raiseException() except: q.put(sys.exc_info()) def g(): try: d = func(*args, **kargs) try: d.addCallbacks(callback, errback) except AttributeError: raise TypeError('you must return a twisted Deferred from your test case!') except: q.put(sys.exc_info()) reactor.callFromThread(g) try: error = q.get(timeout=timeout) except Empty: raise TimeExpired(('timeout expired before end of test (%f s.)' % timeout)) if (error is not None): (exc_type, exc_value, tb) = error raise exc_type, exc_value, tb wrapper = make_decorator(func)(wrapper) return wrapper return decorate
[ "def", "deferred", "(", "timeout", "=", "None", ")", ":", "(", "reactor", ",", "reactor_thread", ")", "=", "threaded_reactor", "(", ")", "if", "(", "reactor", "is", "None", ")", ":", "raise", "ImportError", "(", "'twisted is not available or could not be imported'", ")", "try", ":", "(", "(", "timeout", "is", "None", ")", "or", "(", "timeout", "+", "0", ")", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "\"'timeout' argument must be a number or None\"", ")", "def", "decorate", "(", "func", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kargs", ")", ":", "q", "=", "Queue", "(", ")", "def", "callback", "(", "value", ")", ":", "q", ".", "put", "(", "None", ")", "def", "errback", "(", "failure", ")", ":", "try", ":", "failure", ".", "raiseException", "(", ")", "except", ":", "q", ".", "put", "(", "sys", ".", "exc_info", "(", ")", ")", "def", "g", "(", ")", ":", "try", ":", "d", "=", "func", "(", "*", "args", ",", "**", "kargs", ")", "try", ":", "d", ".", "addCallbacks", "(", "callback", ",", "errback", ")", "except", "AttributeError", ":", "raise", "TypeError", "(", "'you must return a twisted Deferred from your test case!'", ")", "except", ":", "q", ".", "put", "(", "sys", ".", "exc_info", "(", ")", ")", "reactor", ".", "callFromThread", "(", "g", ")", "try", ":", "error", "=", "q", ".", "get", "(", "timeout", "=", "timeout", ")", "except", "Empty", ":", "raise", "TimeExpired", "(", "(", "'timeout expired before end of test (%f s.)'", "%", "timeout", ")", ")", "if", "(", "error", "is", "not", "None", ")", ":", "(", "exc_type", ",", "exc_value", ",", "tb", ")", "=", "error", "raise", "exc_type", ",", "exc_value", ",", "tb", "wrapper", "=", "make_decorator", "(", "func", ")", "(", "wrapper", ")", "return", "wrapper", "return", "decorate" ]
indicate a column-based mapped attribute that by default will not load unless accessed .
train
true
8,492
def has_unaccent(cr): cr.execute("SELECT proname FROM pg_proc WHERE proname='unaccent'") return (len(cr.fetchall()) > 0)
[ "def", "has_unaccent", "(", "cr", ")", ":", "cr", ".", "execute", "(", "\"SELECT proname FROM pg_proc WHERE proname='unaccent'\"", ")", "return", "(", "len", "(", "cr", ".", "fetchall", "(", ")", ")", ">", "0", ")" ]
test if the database has an unaccent function .
train
false