id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
46,674
@register.filter
def blockers(user):
    """Template filter returning the list of people blocking *user*.

    Falls back to an empty list when the lookup fails (e.g. an object
    without the relationship manager raises AttributeError).
    """
    try:
        blocking_users = Relationship.objects.get_blockers_for_user(user)
    except AttributeError:
        return []
    return blocking_users
[ "@", "register", ".", "filter", "def", "blockers", "(", "user", ")", ":", "try", ":", "return", "Relationship", ".", "objects", ".", "get_blockers_for_user", "(", "user", ")", "except", "AttributeError", ":", "return", "[", "]" ]
returns list of people blocking user .
train
false
46,676
def api_request(request, *args, **kwargs):
    """Create an API request object using the plugin registered for *request*.

    Raises Exception when no plugin (or no request class on it) is found.
    """
    plugin = pm.api_plugin_for_request(request)
    if not (plugin and plugin.request_class):
        raise Exception('Invalid request type')
    return plugin.request_class(*args, **kwargs)
[ "def", "api_request", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "plugin", "=", "pm", ".", "api_plugin_for_request", "(", "request", ")", "if", "(", "plugin", "and", "plugin", ".", "request_class", ")", ":", "req", "=", "plugin", ".", "request_class", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "raise", "Exception", "(", "'Invalid request type'", ")", "return", "req" ]
create an api request .
train
true
46,677
def onCellAppDataDel(key):
    """KBEngine callback: log the deletion of a cellapp-data entry *key*."""
    message = 'onCellAppDataDel: %s' % key
    DEBUG_MSG(message)
[ "def", "onCellAppDataDel", "(", "key", ")", ":", "DEBUG_MSG", "(", "(", "'onCellAppDataDel: %s'", "%", "key", ")", ")" ]
kbengine method .
train
false
46,678
def validators(*chained_validators):
    """Combine several validator callables into a single chained validator.

    The returned callable is True for *match* only when every validator in
    the chain accepts it (short-circuits on the first failure).
    """
    def validator_chain(match):
        return all(validate(match) for validate in chained_validators)
    return validator_chain
[ "def", "validators", "(", "*", "chained_validators", ")", ":", "def", "validator_chain", "(", "match", ")", ":", "for", "chained_validator", "in", "chained_validators", ":", "if", "(", "not", "chained_validator", "(", "match", ")", ")", ":", "return", "False", "return", "True", "return", "validator_chain" ]
creates a validator chain from several validator functions .
train
true
46,679
def make_touch_strip_light_message(states):
    """Build the sysex message setting the touch-strip LEDs.

    Each state is mapped to a brightness value, brightness values are
    packed two per payload byte, and the result is wrapped in message 25.
    """
    brightness_values = [TOUCHSTRIP_STATE_TO_BRIGHTNESS[s] for s in states]
    payload = tuple(_make_touch_strip_light(pair) for pair in chunks(brightness_values, 2))
    return make_message(25, payload)
[ "def", "make_touch_strip_light_message", "(", "states", ")", ":", "states", "=", "[", "TOUCHSTRIP_STATE_TO_BRIGHTNESS", "[", "state", "]", "for", "state", "in", "states", "]", "return", "make_message", "(", "25", ",", "tuple", "(", "[", "_make_touch_strip_light", "(", "state", ")", "for", "state", "in", "chunks", "(", "states", ",", "2", ")", "]", ")", ")" ]
the 31 touch strip leds are set with 3 bits per led .
train
false
46,680
def lucene_version(core_name=None):
    """Get the Lucene spec version that Solr reports.

    When *core_name* is None and cores are configured, every core listed in
    the ``solr.cores`` config option is queried and the results are folded
    into one salt-style return dict.  Otherwise a single core (or the
    default install) is queried; on a failed admin query for that single
    core the raw response is returned unchanged.
    """
    ret = _get_return_dict()
    # No explicit core requested and this install has cores: query each one.
    if ((_get_none_or_value(core_name) is None) and _check_for_cores()):
        success = True
        for name in __salt__['config.option']('solr.cores'):
            resp = _get_admin_info('system', core_name=name)
            if resp['success']:
                version_num = resp['data']['lucene']['lucene-spec-version']
                data = {name: {'lucene_version': version_num}}
            else:
                # Record the failure for this core but keep checking the rest.
                data = {name: {'lucene_version': None}}
                success = False
            ret = _update_return_dict(ret, success, data, resp['errors'])
        return ret
    else:
        resp = _get_admin_info('system', core_name=core_name)
        if resp['success']:
            version_num = resp['data']['lucene']['lucene-spec-version']
            return _get_return_dict(True, {'version': version_num}, resp['errors'])
        else:
            return resp
[ "def", "lucene_version", "(", "core_name", "=", "None", ")", ":", "ret", "=", "_get_return_dict", "(", ")", "if", "(", "(", "_get_none_or_value", "(", "core_name", ")", "is", "None", ")", "and", "_check_for_cores", "(", ")", ")", ":", "success", "=", "True", "for", "name", "in", "__salt__", "[", "'config.option'", "]", "(", "'solr.cores'", ")", ":", "resp", "=", "_get_admin_info", "(", "'system'", ",", "core_name", "=", "name", ")", "if", "resp", "[", "'success'", "]", ":", "version_num", "=", "resp", "[", "'data'", "]", "[", "'lucene'", "]", "[", "'lucene-spec-version'", "]", "data", "=", "{", "name", ":", "{", "'lucene_version'", ":", "version_num", "}", "}", "else", ":", "data", "=", "{", "name", ":", "{", "'lucene_version'", ":", "None", "}", "}", "success", "=", "False", "ret", "=", "_update_return_dict", "(", "ret", ",", "success", ",", "data", ",", "resp", "[", "'errors'", "]", ")", "return", "ret", "else", ":", "resp", "=", "_get_admin_info", "(", "'system'", ",", "core_name", "=", "core_name", ")", "if", "resp", "[", "'success'", "]", ":", "version_num", "=", "resp", "[", "'data'", "]", "[", "'lucene'", "]", "[", "'lucene-spec-version'", "]", "return", "_get_return_dict", "(", "True", ",", "{", "'version'", ":", "version_num", "}", ",", "resp", "[", "'errors'", "]", ")", "else", ":", "return", "resp" ]
gets the lucene version that solr is using .
train
true
46,682
def get_wrapped_function(function):
    """Get the method at the bottom of a stack of decorators.

    Walks the closure cells of *function* looking for the deepest callable
    that has no closure of its own.  A function with no closure (i.e. not
    decorated the usual way) is returned unchanged.

    Fix: the original read the Python-2-only ``func_closure`` attribute, so
    under Python 3 it always returned the outermost wrapper; this version
    also checks the Python 3 spelling ``__closure__``.
    """
    def _closure(func):
        # Python 3 spells the attribute __closure__; Python 2 used func_closure.
        return getattr(func, '__closure__', None) or getattr(func, 'func_closure', None)

    if not _closure(function):
        return function

    def _get_wrapped_function(function):
        if not _closure(function):
            return None
        for closure in _closure(function):
            func = closure.cell_contents
            deeper_func = _get_wrapped_function(func)
            if deeper_func:
                return deeper_func
            elif hasattr(closure.cell_contents, '__call__'):
                return closure.cell_contents
        # No callable found among the closure cells.
        return None

    return _get_wrapped_function(function)
[ "def", "get_wrapped_function", "(", "function", ")", ":", "if", "(", "(", "not", "hasattr", "(", "function", ",", "'func_closure'", ")", ")", "or", "(", "not", "function", ".", "func_closure", ")", ")", ":", "return", "function", "def", "_get_wrapped_function", "(", "function", ")", ":", "if", "(", "(", "not", "hasattr", "(", "function", ",", "'func_closure'", ")", ")", "or", "(", "not", "function", ".", "func_closure", ")", ")", ":", "return", "None", "for", "closure", "in", "function", ".", "func_closure", ":", "func", "=", "closure", ".", "cell_contents", "deeper_func", "=", "_get_wrapped_function", "(", "func", ")", "if", "deeper_func", ":", "return", "deeper_func", "elif", "hasattr", "(", "closure", ".", "cell_contents", ",", "'__call__'", ")", ":", "return", "closure", ".", "cell_contents", "return", "_get_wrapped_function", "(", "function", ")" ]
get the method at the bottom of a stack of decorators .
train
false
46,684
def make_bad_fd():
    """Create an invalid file descriptor.

    Opens TESTFN, captures its fd, then closes and unlinks the file so the
    returned descriptor is guaranteed to be stale.
    """
    handle = open(TESTFN, 'wb')
    try:
        return handle.fileno()
    finally:
        handle.close()
        unlink(TESTFN)
[ "def", "make_bad_fd", "(", ")", ":", "file", "=", "open", "(", "TESTFN", ",", "'wb'", ")", "try", ":", "return", "file", ".", "fileno", "(", ")", "finally", ":", "file", ".", "close", "(", ")", "unlink", "(", "TESTFN", ")" ]
create an invalid file descriptor by opening and closing a file and return its fd .
train
false
46,685
def addSlab(derivation, positives):
    """Add a slab: extrude a beveled rectangle through the derivation height
    and append the resulting solids to *positives*."""
    shallowCopy = derivation.elementNode.getCopyShallow()
    # Vertical extrusion path from the origin up to the slab height.
    shallowCopy.attributes['path'] = [Vector3(), Vector3(0.0, 0.0, derivation.height)]
    extrudeDerivation = extrude.ExtrudeDerivation(shallowCopy)
    rectangle = getBeveledRectangle(derivation.bevel, -derivation.topRight)
    outsidePath = euclidean.getVector3Path(rectangle)
    extrude.addPositives(extrudeDerivation, [outsidePath], positives)
[ "def", "addSlab", "(", "derivation", ",", "positives", ")", ":", "copyShallow", "=", "derivation", ".", "elementNode", ".", "getCopyShallow", "(", ")", "copyShallow", ".", "attributes", "[", "'path'", "]", "=", "[", "Vector3", "(", ")", ",", "Vector3", "(", "0.0", ",", "0.0", ",", "derivation", ".", "height", ")", "]", "extrudeDerivation", "=", "extrude", ".", "ExtrudeDerivation", "(", "copyShallow", ")", "beveledRectangle", "=", "getBeveledRectangle", "(", "derivation", ".", "bevel", ",", "(", "-", "derivation", ".", "topRight", ")", ")", "outsidePath", "=", "euclidean", ".", "getVector3Path", "(", "beveledRectangle", ")", "extrude", ".", "addPositives", "(", "extrudeDerivation", ",", "[", "outsidePath", "]", ",", "positives", ")" ]
add slab .
train
false
46,686
def reconstruct_graph(inputs, outputs, tag=None):
    """Clone a Theano graph over fresh input variables (a different
    interface to ``clone``).

    Each input is duplicated with ``safe_new`` (optionally renamed via
    *tag*), constants reachable from *outputs* are cloned as well so the
    new graph shares no nodes with the old one, and the outputs are rebuilt
    with the replacements applied.  Returns ``(new_inputs, new_outputs)``.
    """
    if (tag is None):
        tag = ''
    nw_inputs = [safe_new(x, tag) for x in inputs]
    givens = OrderedDict()
    for (nw_x, x) in izip(nw_inputs, inputs):
        givens[x] = nw_x
    allinputs = theano.gof.graph.inputs(outputs)
    for inp in allinputs:
        # Clone constants too; otherwise they would be shared between graphs.
        if isinstance(inp, theano.Constant):
            givens[inp] = inp.clone()
    nw_outputs = clone(outputs, replace=givens)
    return (nw_inputs, nw_outputs)
[ "def", "reconstruct_graph", "(", "inputs", ",", "outputs", ",", "tag", "=", "None", ")", ":", "if", "(", "tag", "is", "None", ")", ":", "tag", "=", "''", "nw_inputs", "=", "[", "safe_new", "(", "x", ",", "tag", ")", "for", "x", "in", "inputs", "]", "givens", "=", "OrderedDict", "(", ")", "for", "(", "nw_x", ",", "x", ")", "in", "izip", "(", "nw_inputs", ",", "inputs", ")", ":", "givens", "[", "x", "]", "=", "nw_x", "allinputs", "=", "theano", ".", "gof", ".", "graph", ".", "inputs", "(", "outputs", ")", "for", "inp", "in", "allinputs", ":", "if", "isinstance", "(", "inp", ",", "theano", ".", "Constant", ")", ":", "givens", "[", "inp", "]", "=", "inp", ".", "clone", "(", ")", "nw_outputs", "=", "clone", "(", "outputs", ",", "replace", "=", "givens", ")", "return", "(", "nw_inputs", ",", "nw_outputs", ")" ]
different interface to clone .
train
false
46,687
def collection_create(collection_name, options=None, **kwargs):
    """Create a Solr collection named *collection_name*.

    *options* must be a dict of collection-create options (validated and
    serialized by ``_validate_collection_options``); extra kwargs are
    forwarded to the query helper.
    """
    options = {} if options is None else options
    if not isinstance(options, dict):
        raise SaltInvocationError('options parameter must be a dictionary')
    options_string = _validate_collection_options(options)
    url = 'admin/collections?action=CREATE&wt=json&name=' + collection_name + options_string
    _query(url, **kwargs)
[ "def", "collection_create", "(", "collection_name", ",", "options", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "options", "is", "None", ")", ":", "options", "=", "{", "}", "if", "(", "not", "isinstance", "(", "options", ",", "dict", ")", ")", ":", "raise", "SaltInvocationError", "(", "'options parameter must be a dictionary'", ")", "options_string", "=", "_validate_collection_options", "(", "options", ")", "_query", "(", "(", "(", "'admin/collections?action=CREATE&wt=json&name='", "+", "collection_name", ")", "+", "options_string", ")", ",", "**", "kwargs", ")" ]
create a collection .
train
true
46,688
@receiver(pre_save, sender=UserPreference)
def pre_save_callback(sender, **kwargs):
    """Before a UserPreference is saved, stash its previous 'value' on the
    instance so post-save handlers can detect changes."""
    instance = kwargs['instance']
    changed_fields = get_changed_fields_dict(instance, sender)
    instance._old_value = changed_fields.get('value', None)
[ "@", "receiver", "(", "pre_save", ",", "sender", "=", "UserPreference", ")", "def", "pre_save_callback", "(", "sender", ",", "**", "kwargs", ")", ":", "user_preference", "=", "kwargs", "[", "'instance'", "]", "user_preference", ".", "_old_value", "=", "get_changed_fields_dict", "(", "user_preference", ",", "sender", ")", ".", "get", "(", "'value'", ",", "None", ")" ]
event changes to user preferences .
train
false
46,689
def upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, uploader):
    """Upload the local artifact referenced by *property_name* of a resource
    and return the S3 URL of the uploaded object.

    Values that are already S3 URLs pass through unchanged; a missing
    property means "upload the parent directory".  Raises
    InvalidLocalPathError when the resolved path is neither a folder nor a
    file.
    """
    local_path = resource_dict.get(property_name, None)
    if local_path is None:
        # Property absent: fall back to uploading the whole parent directory.
        local_path = parent_dir
    if is_s3_url(local_path):
        LOG.debug('Property {0} of {1} is already a S3 URL'.format(property_name, resource_id))
        return local_path
    local_path = make_abs_path(parent_dir, local_path)
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader)
    if is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)
    raise exceptions.InvalidLocalPathError(resource_id=resource_id, property_name=property_name, local_path=local_path)
[ "def", "upload_local_artifacts", "(", "resource_id", ",", "resource_dict", ",", "property_name", ",", "parent_dir", ",", "uploader", ")", ":", "local_path", "=", "resource_dict", ".", "get", "(", "property_name", ",", "None", ")", "if", "(", "local_path", "is", "None", ")", ":", "local_path", "=", "parent_dir", "if", "is_s3_url", "(", "local_path", ")", ":", "LOG", ".", "debug", "(", "'Property {0} of {1} is already a S3 URL'", ".", "format", "(", "property_name", ",", "resource_id", ")", ")", "return", "local_path", "local_path", "=", "make_abs_path", "(", "parent_dir", ",", "local_path", ")", "if", "is_local_folder", "(", "local_path", ")", ":", "return", "zip_and_upload", "(", "local_path", ",", "uploader", ")", "elif", "is_local_file", "(", "local_path", ")", ":", "return", "uploader", ".", "upload_with_dedup", "(", "local_path", ")", "raise", "exceptions", ".", "InvalidLocalPathError", "(", "resource_id", "=", "resource_id", ",", "property_name", "=", "property_name", ",", "local_path", "=", "local_path", ")" ]
upload local artifacts referenced by the property at given resource and return s3 url of the uploaded object .
train
false
46,690
def _norm_plot_pos(observations): (ppos, sorted_res) = stats.probplot(observations, fit=False) return stats.norm.cdf(ppos)
[ "def", "_norm_plot_pos", "(", "observations", ")", ":", "(", "ppos", ",", "sorted_res", ")", "=", "stats", ".", "probplot", "(", "observations", ",", "fit", "=", "False", ")", "return", "stats", ".", "norm", ".", "cdf", "(", "ppos", ")" ]
computes standard normal plotting positions using scipy .
train
false
46,691
def set_diff_opcode_generator_class(renderer):
    """Install *renderer* as the module-level opcode generator class used to
    generate opcodes."""
    assert renderer
    module_namespace = globals()
    module_namespace[u'_generator'] = renderer
[ "def", "set_diff_opcode_generator_class", "(", "renderer", ")", ":", "assert", "renderer", "globals", "(", ")", "[", "u'_generator'", "]", "=", "renderer" ]
sets the diffopcodegenerator class used for generating opcodes .
train
false
46,692
def old_def(item, default):
    """Get old ini setting *item* from the [misc] section, or *default* when absent."""
    try:
        return CFG['misc'][item]
    except KeyError:
        # Covers both a missing [misc] section and a missing key within it.
        return default
[ "def", "old_def", "(", "item", ",", "default", ")", ":", "try", ":", "return", "CFG", "[", "'misc'", "]", "[", "item", "]", "except", "KeyError", ":", "return", "default" ]
get old ini setting from [misc] .
train
false
46,693
def getNormalized(complexNumber):
    """Return *complexNumber* scaled to unit magnitude; zero is returned unchanged."""
    magnitude = abs(complexNumber)
    if magnitude <= 0.0:
        # Zero has no direction; dividing would raise, so return it as-is.
        return complexNumber
    return complexNumber / magnitude
[ "def", "getNormalized", "(", "complexNumber", ")", ":", "complexNumberLength", "=", "abs", "(", "complexNumber", ")", "if", "(", "complexNumberLength", ">", "0.0", ")", ":", "return", "(", "complexNumber", "/", "complexNumberLength", ")", "return", "complexNumber" ]
get the normalized complex .
train
false
46,694
def isEmptyOrCancelled(fileName, wasCancelled):
    """Determine whether the file name is empty (or the '()' sentinel) or the
    dialog was cancelled."""
    name = str(fileName)
    if name in ('', '()'):
        return True
    return wasCancelled
[ "def", "isEmptyOrCancelled", "(", "fileName", ",", "wasCancelled", ")", ":", "return", "(", "(", "str", "(", "fileName", ")", "==", "''", ")", "or", "(", "str", "(", "fileName", ")", "==", "'()'", ")", "or", "wasCancelled", ")" ]
determine if the filename is empty or the dialog was cancelled .
train
false
46,695
def is_color_transparent(c):
    """Return True iff *c* has a fourth (alpha) component and it equals 0."""
    return len(c) >= 4 and float(c[3]) == 0.0
[ "def", "is_color_transparent", "(", "c", ")", ":", "if", "(", "len", "(", "c", ")", "<", "4", ")", ":", "return", "False", "if", "(", "float", "(", "c", "[", "3", "]", ")", "==", "0.0", ")", ":", "return", "True", "return", "False" ]
return true if the alpha channel is 0 .
train
false
46,696
def _checkValueItemParent(policy_element, policy_name, policy_key, policy_valueName, xpath_object, policy_file_data, check_deleted=False, test_item=True):
    """Helper to process the children of a value-item parent element.

    For every child of every element matched by *xpath_object*, a registry
    search string is built via ``_processValueItem``.  When *test_item* is
    False the first search string is returned unmodified; otherwise each
    string is regex-searched in *policy_file_data* and True is returned on
    the first hit (False when nothing matches).
    """
    for element in xpath_object(policy_element):
        for value_item in element.getchildren():
            search_string = _processValueItem(value_item, policy_key, policy_valueName, policy_element, element, check_deleted=check_deleted)
            if (not test_item):
                # Caller only wants the search string, not a configured-check.
                return search_string
            if _regexSearchRegPolData(re.escape(search_string), policy_file_data):
                log.debug('found the search string in the pol file, {0} is configured'.format(policy_name))
                return True
    return False
[ "def", "_checkValueItemParent", "(", "policy_element", ",", "policy_name", ",", "policy_key", ",", "policy_valueName", ",", "xpath_object", ",", "policy_file_data", ",", "check_deleted", "=", "False", ",", "test_item", "=", "True", ")", ":", "for", "element", "in", "xpath_object", "(", "policy_element", ")", ":", "for", "value_item", "in", "element", ".", "getchildren", "(", ")", ":", "search_string", "=", "_processValueItem", "(", "value_item", ",", "policy_key", ",", "policy_valueName", ",", "policy_element", ",", "element", ",", "check_deleted", "=", "check_deleted", ")", "if", "(", "not", "test_item", ")", ":", "return", "search_string", "if", "_regexSearchRegPolData", "(", "re", ".", "escape", "(", "search_string", ")", ",", "policy_file_data", ")", ":", "log", ".", "debug", "(", "'found the search string in the pol file, {0} is configured'", ".", "format", "(", "policy_name", ")", ")", "return", "True", "return", "False" ]
helper function to process the parent of a value item object if test_item is true .
train
true
46,697
def get_hmac(password):
    """Return a base64-encoded HMAC-SHA512 of *password* keyed with the salt
    configured as SECURITY_PASSWORD_SALT.

    Raises RuntimeError when no salt is configured (the active password
    hash requires one).
    """
    salt = _security.password_salt
    if (salt is None):
        raise RuntimeError(('The configuration value `SECURITY_PASSWORD_SALT` must not be None when the value of `SECURITY_PASSWORD_HASH` is set to "%s"' % _security.password_hash))
    h = hmac.new(encode_string(salt), encode_string(password), hashlib.sha512)
    return base64.b64encode(h.digest())
[ "def", "get_hmac", "(", "password", ")", ":", "salt", "=", "_security", ".", "password_salt", "if", "(", "salt", "is", "None", ")", ":", "raise", "RuntimeError", "(", "(", "'The configuration value `SECURITY_PASSWORD_SALT` must not be None when the value of `SECURITY_PASSWORD_HASH` is set to \"%s\"'", "%", "_security", ".", "password_hash", ")", ")", "h", "=", "hmac", ".", "new", "(", "encode_string", "(", "salt", ")", ",", "encode_string", "(", "password", ")", ",", "hashlib", ".", "sha512", ")", "return", "base64", ".", "b64encode", "(", "h", ".", "digest", "(", ")", ")" ]
returns a base64 encoded hmac+sha512 of the password signed with the salt specified by security_password_salt .
train
true
46,699
def iou_coe(output, target, threshold=0.5, epsilon=1e-10):
    """Non-differentiable intersection-over-union between *output* and
    *target*, both binarized at *threshold*; *epsilon* guards division by zero."""
    predicted = tf.cast(output > threshold, dtype=tf.float32)
    ground_truth = tf.cast(target > threshold, dtype=tf.float32)
    intersection = tf.reduce_sum(predicted * ground_truth)
    union = tf.reduce_sum(tf.cast((predicted + ground_truth) > threshold, dtype=tf.float32))
    return tf.reduce_sum(intersection) / (tf.reduce_sum(union) + epsilon)
[ "def", "iou_coe", "(", "output", ",", "target", ",", "threshold", "=", "0.5", ",", "epsilon", "=", "1e-10", ")", ":", "pre", "=", "tf", ".", "cast", "(", "(", "output", ">", "threshold", ")", ",", "dtype", "=", "tf", ".", "float32", ")", "truth", "=", "tf", ".", "cast", "(", "(", "target", ">", "threshold", ")", ",", "dtype", "=", "tf", ".", "float32", ")", "intersection", "=", "tf", ".", "reduce_sum", "(", "(", "pre", "*", "truth", ")", ")", "union", "=", "tf", ".", "reduce_sum", "(", "tf", ".", "cast", "(", "(", "(", "pre", "+", "truth", ")", ">", "threshold", ")", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "return", "(", "tf", ".", "reduce_sum", "(", "intersection", ")", "/", "(", "tf", ".", "reduce_sum", "(", "union", ")", "+", "epsilon", ")", ")" ]
non-differentiable intersection over union .
train
false
46,700
def repr_events(h, events):
    """Return a readable description of the (fd, flags) pairs returned by poll."""
    descriptions = (
        u'{0}({1})->{2}'.format(_rcb(callback_for(h, fd, fl, u'(GONE)')), fd, repr_flag(fl))
        for (fd, fl) in events
    )
    return u', '.join(descriptions)
[ "def", "repr_events", "(", "h", ",", "events", ")", ":", "return", "u', '", ".", "join", "(", "(", "u'{0}({1})->{2}'", ".", "format", "(", "_rcb", "(", "callback_for", "(", "h", ",", "fd", ",", "fl", ",", "u'(GONE)'", ")", ")", ",", "fd", ",", "repr_flag", "(", "fl", ")", ")", "for", "(", "fd", ",", "fl", ")", "in", "events", ")", ")" ]
return description of events returned by poll .
train
false
46,701
def default_settings(params):
    """Decorator factory: seed any keys from *params* that are missing in
    settings before running the wrapped command."""
    def _default_settings(fn, command):
        for key, value in params.items():
            settings.setdefault(key, value)
        return fn(command)
    return decorator(_default_settings)
[ "def", "default_settings", "(", "params", ")", ":", "def", "_default_settings", "(", "fn", ",", "command", ")", ":", "for", "(", "k", ",", "w", ")", "in", "params", ".", "items", "(", ")", ":", "settings", ".", "setdefault", "(", "k", ",", "w", ")", "return", "fn", "(", "command", ")", "return", "decorator", "(", "_default_settings", ")" ]
adds default values to settings if it not presented .
train
true
46,704
def notify_about_volume_swap(context, instance, host, action, phase, old_volume_id, new_volume_id, exception=None):
    """Send a versioned notification about a volume-swap action on *instance*.

    Gathers the instance IPs and flavor payload, derives fault/priority
    from the optional *exception*, and emits an
    InstanceActionVolumeSwapNotification from the nova-compute publisher on
    *host* for the given action/phase.
    """
    ips = _get_instance_ips(instance)
    flavor = flavor_notification.FlavorPayload(instance.flavor)
    (fault, priority) = _get_fault_and_priority_from_exc(exception)
    payload = instance_notification.InstanceActionVolumeSwapPayload(instance=instance, fault=fault, ip_addresses=ips, flavor=flavor, old_volume_id=old_volume_id, new_volume_id=new_volume_id)
    instance_notification.InstanceActionVolumeSwapNotification(context=context, priority=priority, publisher=notification_base.NotificationPublisher(context=context, host=host, binary='nova-compute'), event_type=notification_base.EventType(object='instance', action=action, phase=phase), payload=payload).emit(context)
[ "def", "notify_about_volume_swap", "(", "context", ",", "instance", ",", "host", ",", "action", ",", "phase", ",", "old_volume_id", ",", "new_volume_id", ",", "exception", "=", "None", ")", ":", "ips", "=", "_get_instance_ips", "(", "instance", ")", "flavor", "=", "flavor_notification", ".", "FlavorPayload", "(", "instance", ".", "flavor", ")", "(", "fault", ",", "priority", ")", "=", "_get_fault_and_priority_from_exc", "(", "exception", ")", "payload", "=", "instance_notification", ".", "InstanceActionVolumeSwapPayload", "(", "instance", "=", "instance", ",", "fault", "=", "fault", ",", "ip_addresses", "=", "ips", ",", "flavor", "=", "flavor", ",", "old_volume_id", "=", "old_volume_id", ",", "new_volume_id", "=", "new_volume_id", ")", "instance_notification", ".", "InstanceActionVolumeSwapNotification", "(", "context", "=", "context", ",", "priority", "=", "priority", ",", "publisher", "=", "notification_base", ".", "NotificationPublisher", "(", "context", "=", "context", ",", "host", "=", "host", ",", "binary", "=", "'nova-compute'", ")", ",", "event_type", "=", "notification_base", ".", "EventType", "(", "object", "=", "'instance'", ",", "action", "=", "action", ",", "phase", "=", "phase", ")", ",", "payload", "=", "payload", ")", ".", "emit", "(", "context", ")" ]
send versioned notification about the volume swap action on the instance .
train
false
46,705
def get_rows_to_write(worksheet):
    """Return all rows of *worksheet* as sorted (row_idx, [(col, cell), ...])
    pairs; rows known only from row_dimensions come back with an empty list."""
    cells_by_row = {}
    for (row, col), cell in worksheet._cells.items():
        cells_by_row.setdefault(row, []).append((col, cell))
    for row_idx in worksheet.row_dimensions:
        if row_idx not in cells_by_row:
            # Row has dimension info but no cells; still must be emitted.
            cells_by_row[row_idx] = []
    return sorted(cells_by_row.items())
[ "def", "get_rows_to_write", "(", "worksheet", ")", ":", "rows", "=", "{", "}", "for", "(", "(", "row", ",", "col", ")", ",", "cell", ")", "in", "worksheet", ".", "_cells", ".", "items", "(", ")", ":", "rows", ".", "setdefault", "(", "row", ",", "[", "]", ")", ".", "append", "(", "(", "col", ",", "cell", ")", ")", "for", "row_idx", "in", "worksheet", ".", "row_dimensions", ":", "if", "(", "row_idx", "not", "in", "rows", ")", ":", "rows", "[", "row_idx", "]", "=", "[", "]", "return", "sorted", "(", "rows", ".", "items", "(", ")", ")" ]
return all rows .
train
false
46,709
def _split_gcd(*a): g = a[0] b1 = [g] b2 = [] for x in a[1:]: g1 = gcd(g, x) if (g1 == 1): b2.append(x) else: g = g1 b1.append(x) return (g, b1, b2)
[ "def", "_split_gcd", "(", "*", "a", ")", ":", "g", "=", "a", "[", "0", "]", "b1", "=", "[", "g", "]", "b2", "=", "[", "]", "for", "x", "in", "a", "[", "1", ":", "]", ":", "g1", "=", "gcd", "(", "g", ",", "x", ")", "if", "(", "g1", "==", "1", ")", ":", "b2", ".", "append", "(", "x", ")", "else", ":", "g", "=", "g1", "b1", ".", "append", "(", "x", ")", "return", "(", "g", ",", "b1", ",", "b2", ")" ]
split the list of integers a into a list of integers .
train
false
46,710
def unparseEndpoint(args, kwargs):
    """Re-assemble already-parsed args and kwargs back into colon-separated
    endpoint syntax (kwargs sorted, all values quoted)."""
    positional = [quoteStringArgument(str(arg)) for arg in args]
    keyword = sorted(
        '%s=%s' % (quoteStringArgument(str(key)), quoteStringArgument(str(value)))
        for (key, value) in iteritems(kwargs)
    )
    return ':'.join(positional + keyword)
[ "def", "unparseEndpoint", "(", "args", ",", "kwargs", ")", ":", "description", "=", "':'", ".", "join", "(", "(", "[", "quoteStringArgument", "(", "str", "(", "arg", ")", ")", "for", "arg", "in", "args", "]", "+", "sorted", "(", "[", "(", "'%s=%s'", "%", "(", "quoteStringArgument", "(", "str", "(", "key", ")", ")", ",", "quoteStringArgument", "(", "str", "(", "value", ")", ")", ")", ")", "for", "(", "key", ",", "value", ")", "in", "iteritems", "(", "kwargs", ")", "]", ")", ")", ")", "return", "description" ]
un-parse the already-parsed args and kwargs back into endpoint syntax .
train
false
46,711
def Application_Start():
    """Hook that runs on application startup; intentionally a no-op here."""
    pass
[ "def", "Application_Start", "(", ")", ":", "pass" ]
code that runs on application startup .
train
false
46,713
def commit():
    """Perform the commit on the shared connection and reset the dirty flag."""
    connection._commit()
    set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
does the commit itself and resets the dirty flag .
train
false
46,715
def test_deprecated():
    """Calling the deprecated function and class each emits exactly one warning."""
    for deprecated_callable in (deprecated_func, deprecated_class):
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always')
            deprecated_callable()
            assert_true(len(caught) == 1)
[ "def", "test_deprecated", "(", ")", ":", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "w", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "deprecated_func", "(", ")", "assert_true", "(", "(", "len", "(", "w", ")", "==", "1", ")", ")", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "w", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "deprecated_class", "(", ")", "assert_true", "(", "(", "len", "(", "w", ")", "==", "1", ")", ")" ]
test deprecated function .
train
false
46,716
@cache_permission
def can_commit_translation(user, project):
    """Check whether *user* can commit to the translation repository of *project*."""
    return check_permission(user, project, 'trans.commit_translation')
[ "@", "cache_permission", "def", "can_commit_translation", "(", "user", ",", "project", ")", ":", "return", "check_permission", "(", "user", ",", "project", ",", "'trans.commit_translation'", ")" ]
checks whether user can commit to translation repository .
train
false
46,717
def monomial_div(A, B):
    """Exact division of monomial exponent tuples: A / B as a tuple, or None
    when B does not divide A."""
    quotient = monomial_ldiv(A, B)
    if any(exponent < 0 for exponent in quotient):
        # A negative exponent means B does not divide A evenly.
        return None
    return tuple(quotient)
[ "def", "monomial_div", "(", "A", ",", "B", ")", ":", "C", "=", "monomial_ldiv", "(", "A", ",", "B", ")", "if", "all", "(", "(", "(", "c", ">=", "0", ")", "for", "c", "in", "C", ")", ")", ":", "return", "tuple", "(", "C", ")", "else", ":", "return", "None" ]
division of tuples representing monomials .
train
false
46,718
def _escape_split(sep, argstr): escaped_sep = ('\\%s' % sep) if (escaped_sep not in argstr): return argstr.split(sep) (before, _, after) = argstr.partition(escaped_sep) startlist = before.split(sep) unfinished = startlist[(-1)] startlist = startlist[:(-1)] endlist = _escape_split(sep, after) unfinished += (sep + endlist[0]) return ((startlist + [unfinished]) + endlist[1:])
[ "def", "_escape_split", "(", "sep", ",", "argstr", ")", ":", "escaped_sep", "=", "(", "'\\\\%s'", "%", "sep", ")", "if", "(", "escaped_sep", "not", "in", "argstr", ")", ":", "return", "argstr", ".", "split", "(", "sep", ")", "(", "before", ",", "_", ",", "after", ")", "=", "argstr", ".", "partition", "(", "escaped_sep", ")", "startlist", "=", "before", ".", "split", "(", "sep", ")", "unfinished", "=", "startlist", "[", "(", "-", "1", ")", "]", "startlist", "=", "startlist", "[", ":", "(", "-", "1", ")", "]", "endlist", "=", "_escape_split", "(", "sep", ",", "after", ")", "unfinished", "+=", "(", "sep", "+", "endlist", "[", "0", "]", ")", "return", "(", "(", "startlist", "+", "[", "unfinished", "]", ")", "+", "endlist", "[", "1", ":", "]", ")" ]
allows for escaping of the separator: e .
train
false
46,719
def spherical_kn(n, z, derivative=False):
    """Modified spherical Bessel function of the second kind k_n(z), or its
    derivative when *derivative* is True."""
    implementation = _spherical_kn_d if derivative else _spherical_kn
    return implementation(n, z)
[ "def", "spherical_kn", "(", "n", ",", "z", ",", "derivative", "=", "False", ")", ":", "if", "derivative", ":", "return", "_spherical_kn_d", "(", "n", ",", "z", ")", "else", ":", "return", "_spherical_kn", "(", "n", ",", "z", ")" ]
modified spherical bessel function of the second kind or its derivative .
train
false
46,721
def linear_search(sequence, target):
    """Scan *sequence* left to right; return the first index holding *target*,
    or None when it is absent (pure-Python linear search)."""
    return next((idx for idx, value in enumerate(sequence) if value == target), None)
[ "def", "linear_search", "(", "sequence", ",", "target", ")", ":", "for", "(", "index", ",", "item", ")", "in", "enumerate", "(", "sequence", ")", ":", "if", "(", "item", "==", "target", ")", ":", "return", "index", "return", "None" ]
pure implementation of linear search algorithm in python .
train
false
46,722
def get_all_orgs():
    """Return the set of orgs considered within a microsite (delegates to BACKEND)."""
    return BACKEND.get_all_orgs()
[ "def", "get_all_orgs", "(", ")", ":", "return", "BACKEND", ".", "get_all_orgs", "(", ")" ]
this returns a set of orgs that are considered within a microsite .
train
false
46,723
@not_implemented_for('undirected')
@not_implemented_for('multigraph')
def score_sequence(G):
    """Return the score sequence (sorted out-degrees) of tournament graph *G*."""
    out_degrees = [degree for (_node, degree) in G.out_degree()]
    out_degrees.sort()
    return out_degrees
[ "@", "not_implemented_for", "(", "'undirected'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "score_sequence", "(", "G", ")", ":", "return", "sorted", "(", "(", "d", "for", "(", "v", ",", "d", ")", "in", "G", ".", "out_degree", "(", ")", ")", ")" ]
returns the score sequence for the given tournament graph .
train
false
46,726
def fourier_gaussian(input, sigma, n=(-1), axis=(-1), output=None):
    """Multi-dimensional Gaussian Fourier filter.

    *input* is treated as an already-transformed array; *sigma* is the
    Gaussian sigma (scalar or one value per axis), *n*/*axis* describe the
    real-transform axis (per the scipy.ndimage convention; -1 means a
    complex transform), and *output* optionally receives the result.
    """
    input = numpy.asarray(input)
    (output, return_value) = _get_output_fourier(output, input)
    axis = _ni_support._check_axis(axis, input.ndim)
    sigmas = _ni_support._normalize_sequence(sigma, input.ndim)
    sigmas = numpy.asarray(sigmas, dtype=numpy.float64)
    if (not sigmas.flags.contiguous):
        # The C-level filter requires a contiguous sigma array.
        sigmas = sigmas.copy()
    _nd_image.fourier_filter(input, sigmas, n, axis, output, 0)
    return return_value
[ "def", "fourier_gaussian", "(", "input", ",", "sigma", ",", "n", "=", "(", "-", "1", ")", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ")", ":", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "(", "output", ",", "return_value", ")", "=", "_get_output_fourier", "(", "output", ",", "input", ")", "axis", "=", "_ni_support", ".", "_check_axis", "(", "axis", ",", "input", ".", "ndim", ")", "sigmas", "=", "_ni_support", ".", "_normalize_sequence", "(", "sigma", ",", "input", ".", "ndim", ")", "sigmas", "=", "numpy", ".", "asarray", "(", "sigmas", ",", "dtype", "=", "numpy", ".", "float64", ")", "if", "(", "not", "sigmas", ".", "flags", ".", "contiguous", ")", ":", "sigmas", "=", "sigmas", ".", "copy", "(", ")", "_nd_image", ".", "fourier_filter", "(", "input", ",", "sigmas", ",", "n", ",", "axis", ",", "output", ",", "0", ")", "return", "return_value" ]
multi-dimensional gaussian fourier filter .
train
false
46,727
def importAndRunFunction(path, moduleName, funcName, **keywords):
    """Import *funcName* from *moduleName* found under *path* and call it
    with **keywords, returning its result.

    *path* is temporarily prepended to sys.path for the duration of the
    import and restored afterwards whether or not the import succeeds.
    NOTE(review): mutating sys.path is process-global and not thread-safe.

    Fix: replaces the original's duplicated restore + bare ``except:``
    re-raise with a single try/finally.
    """
    import sys
    originalPath = sys.path
    sys.path = [path] + sys.path
    try:
        func = getattr(__import__(moduleName, fromlist=[funcName]), funcName)
    finally:
        # Restore on both success and failure.
        sys.path = originalPath
    return func(**keywords)
[ "def", "importAndRunFunction", "(", "path", ",", "moduleName", ",", "funcName", ",", "**", "keywords", ")", ":", "import", "sys", "originalPath", "=", "sys", ".", "path", "try", ":", "augmentedPath", "=", "(", "[", "path", "]", "+", "sys", ".", "path", ")", "sys", ".", "path", "=", "augmentedPath", "func", "=", "getattr", "(", "__import__", "(", "moduleName", ",", "fromlist", "=", "[", "funcName", "]", ")", ",", "funcName", ")", "sys", ".", "path", "=", "originalPath", "except", ":", "sys", ".", "path", "=", "originalPath", "raise", "return", "func", "(", "**", "keywords", ")" ]
run a named function specified by a filesystem path .
train
true
46,728
def master(options): if (not options.silence): print ('Master started on PID %s' % os.getpid()) if options.port: webdir = File('.') web = Site(webdir) web.log = (lambda _: None) reactor.listenTCP(options.port, web) factory = Factory() port = reactor.listenTCP(options.wsport, factory, backlog=options.backlog) port.stopReading() for i in range(options.workers): args = [executable, '-u', __file__, '--fd', str(port.fileno()), '--cpuid', str(i)] args.extend(sys.argv[1:]) reactor.spawnProcess(None, executable, args, childFDs={0: 0, 1: 1, 2: 2, port.fileno(): port.fileno()}, env=os.environ) reactor.run()
[ "def", "master", "(", "options", ")", ":", "if", "(", "not", "options", ".", "silence", ")", ":", "print", "(", "'Master started on PID %s'", "%", "os", ".", "getpid", "(", ")", ")", "if", "options", ".", "port", ":", "webdir", "=", "File", "(", "'.'", ")", "web", "=", "Site", "(", "webdir", ")", "web", ".", "log", "=", "(", "lambda", "_", ":", "None", ")", "reactor", ".", "listenTCP", "(", "options", ".", "port", ",", "web", ")", "factory", "=", "Factory", "(", ")", "port", "=", "reactor", ".", "listenTCP", "(", "options", ".", "wsport", ",", "factory", ",", "backlog", "=", "options", ".", "backlog", ")", "port", ".", "stopReading", "(", ")", "for", "i", "in", "range", "(", "options", ".", "workers", ")", ":", "args", "=", "[", "executable", ",", "'-u'", ",", "__file__", ",", "'--fd'", ",", "str", "(", "port", ".", "fileno", "(", ")", ")", ",", "'--cpuid'", ",", "str", "(", "i", ")", "]", "args", ".", "extend", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "reactor", ".", "spawnProcess", "(", "None", ",", "executable", ",", "args", ",", "childFDs", "=", "{", "0", ":", "0", ",", "1", ":", "1", ",", "2", ":", "2", ",", "port", ".", "fileno", "(", ")", ":", "port", ".", "fileno", "(", ")", "}", ",", "env", "=", "os", ".", "environ", ")", "reactor", ".", "run", "(", ")" ]
start of the master process .
train
false
46,729
def catch_status_code_error(func): func._catch_status_code_error = True return func
[ "def", "catch_status_code_error", "(", "func", ")", ":", "func", ".", "_catch_status_code_error", "=", "True", "return", "func" ]
non-200 response will been regarded as fetch failed and will not pass to callback .
train
false
46,731
def ignore_static(f): @wraps(f) def decorated_function(*args, **kwargs): if request.path.startswith('/static'): return return f(*args, **kwargs) return decorated_function
[ "def", "ignore_static", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorated_function", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "request", ".", "path", ".", "startswith", "(", "'/static'", ")", ":", "return", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "decorated_function" ]
only executes the wrapped function if were not loading a static resource .
train
false
46,732
def _log(msg, facility, loglevel): logger = logging.getLogger(facility) logger.log(loglevel, msg)
[ "def", "_log", "(", "msg", ",", "facility", ",", "loglevel", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "facility", ")", "logger", ".", "log", "(", "loglevel", ",", "msg", ")" ]
log into the internal werkzeug logger .
train
false
46,733
def expand_ipaddress_pattern(string, family): if (family not in [4, 6]): raise Exception('Invalid IP address family: {}'.format(family)) if (family == 4): regex = IP4_EXPANSION_PATTERN base = 10 else: regex = IP6_EXPANSION_PATTERN base = 16 (lead, pattern, remnant) = re.split(regex, string, maxsplit=1) (x, y) = pattern.split('-') for i in range(int(x, base), (int(y, base) + 1)): if re.search(regex, remnant): for string in expand_ipaddress_pattern(remnant, family): (yield ''.join([lead, format(i, ('x' if (family == 6) else 'd')), string])) else: (yield ''.join([lead, format(i, ('x' if (family == 6) else 'd')), remnant]))
[ "def", "expand_ipaddress_pattern", "(", "string", ",", "family", ")", ":", "if", "(", "family", "not", "in", "[", "4", ",", "6", "]", ")", ":", "raise", "Exception", "(", "'Invalid IP address family: {}'", ".", "format", "(", "family", ")", ")", "if", "(", "family", "==", "4", ")", ":", "regex", "=", "IP4_EXPANSION_PATTERN", "base", "=", "10", "else", ":", "regex", "=", "IP6_EXPANSION_PATTERN", "base", "=", "16", "(", "lead", ",", "pattern", ",", "remnant", ")", "=", "re", ".", "split", "(", "regex", ",", "string", ",", "maxsplit", "=", "1", ")", "(", "x", ",", "y", ")", "=", "pattern", ".", "split", "(", "'-'", ")", "for", "i", "in", "range", "(", "int", "(", "x", ",", "base", ")", ",", "(", "int", "(", "y", ",", "base", ")", "+", "1", ")", ")", ":", "if", "re", ".", "search", "(", "regex", ",", "remnant", ")", ":", "for", "string", "in", "expand_ipaddress_pattern", "(", "remnant", ",", "family", ")", ":", "(", "yield", "''", ".", "join", "(", "[", "lead", ",", "format", "(", "i", ",", "(", "'x'", "if", "(", "family", "==", "6", ")", "else", "'d'", ")", ")", ",", "string", "]", ")", ")", "else", ":", "(", "yield", "''", ".", "join", "(", "[", "lead", ",", "format", "(", "i", ",", "(", "'x'", "if", "(", "family", "==", "6", ")", "else", "'d'", ")", ")", ",", "remnant", "]", ")", ")" ]
expand an ip address pattern into a list of strings .
train
false
46,735
def reorder_missing_vector(vector, missing, inplace=False, prefix=None): if (prefix is None): prefix = find_best_blas_type((vector,))[0] reorder = prefix_reorder_missing_vector_map[prefix] if (not inplace): vector = np.copy(vector, order='F') reorder(vector, np.asfortranarray(missing)) return vector
[ "def", "reorder_missing_vector", "(", "vector", ",", "missing", ",", "inplace", "=", "False", ",", "prefix", "=", "None", ")", ":", "if", "(", "prefix", "is", "None", ")", ":", "prefix", "=", "find_best_blas_type", "(", "(", "vector", ",", ")", ")", "[", "0", "]", "reorder", "=", "prefix_reorder_missing_vector_map", "[", "prefix", "]", "if", "(", "not", "inplace", ")", ":", "vector", "=", "np", ".", "copy", "(", "vector", ",", "order", "=", "'F'", ")", "reorder", "(", "vector", ",", "np", ".", "asfortranarray", "(", "missing", ")", ")", "return", "vector" ]
reorder the elements of a time-varying vector where all non-missing values are in the first elements of the vector .
train
false
46,736
def get_cohort_names(course): return {cohort.id: cohort.name for cohort in get_course_cohorts(course)}
[ "def", "get_cohort_names", "(", "course", ")", ":", "return", "{", "cohort", ".", "id", ":", "cohort", ".", "name", "for", "cohort", "in", "get_course_cohorts", "(", "course", ")", "}" ]
return a dict that maps cohort ids to names for the given course .
train
false
46,739
def _new_hda(app, sa_session, ext, designation, visible, dbkey, permissions=UNSET): primary_data = app.model.HistoryDatasetAssociation(extension=ext, designation=designation, visible=visible, dbkey=dbkey, create_dataset=True, flush=False, sa_session=sa_session) if (permissions is not UNSET): app.security_agent.set_all_dataset_permissions(primary_data.dataset, permissions, new=True, flush=False) sa_session.add(primary_data) return primary_data
[ "def", "_new_hda", "(", "app", ",", "sa_session", ",", "ext", ",", "designation", ",", "visible", ",", "dbkey", ",", "permissions", "=", "UNSET", ")", ":", "primary_data", "=", "app", ".", "model", ".", "HistoryDatasetAssociation", "(", "extension", "=", "ext", ",", "designation", "=", "designation", ",", "visible", "=", "visible", ",", "dbkey", "=", "dbkey", ",", "create_dataset", "=", "True", ",", "flush", "=", "False", ",", "sa_session", "=", "sa_session", ")", "if", "(", "permissions", "is", "not", "UNSET", ")", ":", "app", ".", "security_agent", ".", "set_all_dataset_permissions", "(", "primary_data", ".", "dataset", ",", "permissions", ",", "new", "=", "True", ",", "flush", "=", "False", ")", "sa_session", ".", "add", "(", "primary_data", ")", "return", "primary_data" ]
return a new unflushed hda with dataset and permissions setup .
train
false
46,740
def filter_whitespace(mode, text): if (mode == 'all'): return text elif (mode == 'single'): text = re.sub('([\\t ]+)', ' ', text) text = re.sub('(\\s*\\n\\s*)', '\n', text) return text elif (mode == 'oneline'): return re.sub('(\\s+)', ' ', text) else: raise Exception(('invalid whitespace mode %s' % mode))
[ "def", "filter_whitespace", "(", "mode", ",", "text", ")", ":", "if", "(", "mode", "==", "'all'", ")", ":", "return", "text", "elif", "(", "mode", "==", "'single'", ")", ":", "text", "=", "re", ".", "sub", "(", "'([\\\\t ]+)'", ",", "' '", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "'(\\\\s*\\\\n\\\\s*)'", ",", "'\\n'", ",", "text", ")", "return", "text", "elif", "(", "mode", "==", "'oneline'", ")", ":", "return", "re", ".", "sub", "(", "'(\\\\s+)'", ",", "' '", ",", "text", ")", "else", ":", "raise", "Exception", "(", "(", "'invalid whitespace mode %s'", "%", "mode", ")", ")" ]
transform whitespace in text according to mode .
train
false
46,741
def vni_to_bin(vni): return type_desc.Int3.from_user(vni)
[ "def", "vni_to_bin", "(", "vni", ")", ":", "return", "type_desc", ".", "Int3", ".", "from_user", "(", "vni", ")" ]
converts integer vni to binary representation .
train
false
46,742
def generate_random_mac(old_mac): random.seed() new_mac = old_mac[:8].lower().replace('-', ':') for i in xrange(0, 6): if ((i % 2) == 0): new_mac += ':' new_mac += '0123456789abcdef'[random.randint(0, 15)] if (new_mac == old_mac): new_mac = generate_random_mac(old_mac) return new_mac
[ "def", "generate_random_mac", "(", "old_mac", ")", ":", "random", ".", "seed", "(", ")", "new_mac", "=", "old_mac", "[", ":", "8", "]", ".", "lower", "(", ")", ".", "replace", "(", "'-'", ",", "':'", ")", "for", "i", "in", "xrange", "(", "0", ",", "6", ")", ":", "if", "(", "(", "i", "%", "2", ")", "==", "0", ")", ":", "new_mac", "+=", "':'", "new_mac", "+=", "'0123456789abcdef'", "[", "random", ".", "randint", "(", "0", ",", "15", ")", "]", "if", "(", "new_mac", "==", "old_mac", ")", ":", "new_mac", "=", "generate_random_mac", "(", "old_mac", ")", "return", "new_mac" ]
generates a random mac address .
train
false
46,743
def sort_js_files(js_files): modules = [f for f in js_files if f.endswith(MODULE_EXT)] mocks = [f for f in js_files if f.endswith(MOCK_EXT)] specs = [f for f in js_files if f.endswith(SPEC_EXT)] other_sources = [f for f in js_files if ((not f.endswith(MODULE_EXT)) and (not f.endswith(MOCK_EXT)) and (not f.endswith(SPEC_EXT)))] sources = (modules + other_sources) return (sources, mocks, specs)
[ "def", "sort_js_files", "(", "js_files", ")", ":", "modules", "=", "[", "f", "for", "f", "in", "js_files", "if", "f", ".", "endswith", "(", "MODULE_EXT", ")", "]", "mocks", "=", "[", "f", "for", "f", "in", "js_files", "if", "f", ".", "endswith", "(", "MOCK_EXT", ")", "]", "specs", "=", "[", "f", "for", "f", "in", "js_files", "if", "f", ".", "endswith", "(", "SPEC_EXT", ")", "]", "other_sources", "=", "[", "f", "for", "f", "in", "js_files", "if", "(", "(", "not", "f", ".", "endswith", "(", "MODULE_EXT", ")", ")", "and", "(", "not", "f", ".", "endswith", "(", "MOCK_EXT", ")", ")", "and", "(", "not", "f", ".", "endswith", "(", "SPEC_EXT", ")", ")", ")", "]", "sources", "=", "(", "modules", "+", "other_sources", ")", "return", "(", "sources", ",", "mocks", ",", "specs", ")" ]
sorts javascript files in js_files into source files .
train
true
46,744
def setup_plugins(): if os.path.isdir(PLUGINS_DIR): load_plugins([PLUGINS_DIR]) if args.plugin_dirs: load_plugins(args.plugin_dirs)
[ "def", "setup_plugins", "(", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "PLUGINS_DIR", ")", ":", "load_plugins", "(", "[", "PLUGINS_DIR", "]", ")", "if", "args", ".", "plugin_dirs", ":", "load_plugins", "(", "args", ".", "plugin_dirs", ")" ]
loads any additional plugins .
train
false
46,745
def test_meshdata(): square_vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], dtype=np.float) square_faces = np.array([[0, 1, 2], [0, 2, 3]], dtype=np.uint) square_normals = np.array([[0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 0, 1]], dtype=np.float) square_edges = np.array([[0, 1], [0, 2], [0, 3], [1, 2], [2, 3]], dtype=np.uint) mesh = MeshData(vertices=square_vertices, faces=square_faces) assert_array_equal(square_vertices, mesh.get_vertices()) assert_array_equal(square_faces, mesh.get_faces()) assert_array_equal(square_normals, mesh.get_vertex_normals()) assert_array_equal(square_edges, mesh.get_edges())
[ "def", "test_meshdata", "(", ")", ":", "square_vertices", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "0", "]", ",", "[", "1", ",", "0", ",", "0", "]", ",", "[", "1", ",", "1", ",", "0", "]", ",", "[", "0", ",", "1", ",", "0", "]", "]", ",", "dtype", "=", "np", ".", "float", ")", "square_faces", "=", "np", ".", "array", "(", "[", "[", "0", ",", "1", ",", "2", "]", ",", "[", "0", ",", "2", ",", "3", "]", "]", ",", "dtype", "=", "np", ".", "uint", ")", "square_normals", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "1", "]", "]", ",", "dtype", "=", "np", ".", "float", ")", "square_edges", "=", "np", ".", "array", "(", "[", "[", "0", ",", "1", "]", ",", "[", "0", ",", "2", "]", ",", "[", "0", ",", "3", "]", ",", "[", "1", ",", "2", "]", ",", "[", "2", ",", "3", "]", "]", ",", "dtype", "=", "np", ".", "uint", ")", "mesh", "=", "MeshData", "(", "vertices", "=", "square_vertices", ",", "faces", "=", "square_faces", ")", "assert_array_equal", "(", "square_vertices", ",", "mesh", ".", "get_vertices", "(", ")", ")", "assert_array_equal", "(", "square_faces", ",", "mesh", ".", "get_faces", "(", ")", ")", "assert_array_equal", "(", "square_normals", ",", "mesh", ".", "get_vertex_normals", "(", ")", ")", "assert_array_equal", "(", "square_edges", ",", "mesh", ".", "get_edges", "(", ")", ")" ]
test meshdata class its a unit square cut in two triangular element .
train
false
46,746
def test_help_completion(qtmodeltester, monkeypatch, stubs, key_config_stub): module = 'qutebrowser.completion.models.miscmodels' key_config_stub.set_bindings_for('normal', {'s': 'stop', 'rr': 'roll'}) _patch_cmdutils(monkeypatch, stubs, (module + '.cmdutils')) _patch_configdata(monkeypatch, stubs, (module + '.configdata.DATA')) model = miscmodels.HelpCompletionModel() qtmodeltester.data_display_may_return_none = True qtmodeltester.check(model) _check_completions(model, {'Commands': [(':stop', 'stop qutebrowser', 's'), (':drop', 'drop all user data', ''), (':roll', 'never gonna give you up', 'rr'), (':hide', '', '')], 'Settings': [('general->time', 'Is an illusion.', ''), ('general->volume', 'Goes to 11', ''), ('ui->gesture', 'Waggle your hands to control qutebrowser', ''), ('ui->mind', 'Enable mind-control ui (experimental)', ''), ('ui->voice', 'Whether to respond to voice commands', ''), ('searchengines->DEFAULT', '', '')]})
[ "def", "test_help_completion", "(", "qtmodeltester", ",", "monkeypatch", ",", "stubs", ",", "key_config_stub", ")", ":", "module", "=", "'qutebrowser.completion.models.miscmodels'", "key_config_stub", ".", "set_bindings_for", "(", "'normal'", ",", "{", "'s'", ":", "'stop'", ",", "'rr'", ":", "'roll'", "}", ")", "_patch_cmdutils", "(", "monkeypatch", ",", "stubs", ",", "(", "module", "+", "'.cmdutils'", ")", ")", "_patch_configdata", "(", "monkeypatch", ",", "stubs", ",", "(", "module", "+", "'.configdata.DATA'", ")", ")", "model", "=", "miscmodels", ".", "HelpCompletionModel", "(", ")", "qtmodeltester", ".", "data_display_may_return_none", "=", "True", "qtmodeltester", ".", "check", "(", "model", ")", "_check_completions", "(", "model", ",", "{", "'Commands'", ":", "[", "(", "':stop'", ",", "'stop qutebrowser'", ",", "'s'", ")", ",", "(", "':drop'", ",", "'drop all user data'", ",", "''", ")", ",", "(", "':roll'", ",", "'never gonna give you up'", ",", "'rr'", ")", ",", "(", "':hide'", ",", "''", ",", "''", ")", "]", ",", "'Settings'", ":", "[", "(", "'general->time'", ",", "'Is an illusion.'", ",", "''", ")", ",", "(", "'general->volume'", ",", "'Goes to 11'", ",", "''", ")", ",", "(", "'ui->gesture'", ",", "'Waggle your hands to control qutebrowser'", ",", "''", ")", ",", "(", "'ui->mind'", ",", "'Enable mind-control ui (experimental)'", ",", "''", ")", ",", "(", "'ui->voice'", ",", "'Whether to respond to voice commands'", ",", "''", ")", ",", "(", "'searchengines->DEFAULT'", ",", "''", ",", "''", ")", "]", "}", ")" ]
test the results of command completion .
train
false
46,748
def _FindRuleTriggerFiles(rule, sources): return rule.get('rule_sources', [])
[ "def", "_FindRuleTriggerFiles", "(", "rule", ",", "sources", ")", ":", "return", "rule", ".", "get", "(", "'rule_sources'", ",", "[", "]", ")" ]
find the list of files which a particular rule applies to .
train
false
46,749
def check_write_to_datafiles(con, warning, critical, perf_data): warning = (warning or 20) critical = (critical or 40) try: data = get_server_status(con) writes = data['dur']['writeToDataFilesMB'] message = ('Write to data files : %.2f MB' % writes) message += performance_data(perf_data, [(('%.2f' % writes), 'write_to_data_files', warning, critical)]) return check_levels(writes, warning, critical, message) except Exception as e: return exit_with_general_critical(e)
[ "def", "check_write_to_datafiles", "(", "con", ",", "warning", ",", "critical", ",", "perf_data", ")", ":", "warning", "=", "(", "warning", "or", "20", ")", "critical", "=", "(", "critical", "or", "40", ")", "try", ":", "data", "=", "get_server_status", "(", "con", ")", "writes", "=", "data", "[", "'dur'", "]", "[", "'writeToDataFilesMB'", "]", "message", "=", "(", "'Write to data files : %.2f MB'", "%", "writes", ")", "message", "+=", "performance_data", "(", "perf_data", ",", "[", "(", "(", "'%.2f'", "%", "writes", ")", ",", "'write_to_data_files'", ",", "warning", ",", "critical", ")", "]", ")", "return", "check_levels", "(", "writes", ",", "warning", ",", "critical", ",", "message", ")", "except", "Exception", "as", "e", ":", "return", "exit_with_general_critical", "(", "e", ")" ]
checking the average amount of data in megabytes written to the databases datafiles in the last four seconds .
train
false
46,750
def _assert_matching_drivers(): if (CONF.database.slave_connection == ''): return normal = sqlalchemy.engine.url.make_url(CONF.database.connection) slave = sqlalchemy.engine.url.make_url(CONF.database.slave_connection) assert (normal.drivername == slave.drivername)
[ "def", "_assert_matching_drivers", "(", ")", ":", "if", "(", "CONF", ".", "database", ".", "slave_connection", "==", "''", ")", ":", "return", "normal", "=", "sqlalchemy", ".", "engine", ".", "url", ".", "make_url", "(", "CONF", ".", "database", ".", "connection", ")", "slave", "=", "sqlalchemy", ".", "engine", ".", "url", ".", "make_url", "(", "CONF", ".", "database", ".", "slave_connection", ")", "assert", "(", "normal", ".", "drivername", "==", "slave", ".", "drivername", ")" ]
make sure slave handle and normal handle have the same driver .
train
false
46,751
def _trunc(f, minpoly, p): ring = f.ring minpoly = minpoly.set_ring(ring) p_ = ring.ground_new(p) return f.trunc_ground(p).rem([minpoly, p_]).trunc_ground(p)
[ "def", "_trunc", "(", "f", ",", "minpoly", ",", "p", ")", ":", "ring", "=", "f", ".", "ring", "minpoly", "=", "minpoly", ".", "set_ring", "(", "ring", ")", "p_", "=", "ring", ".", "ground_new", "(", "p", ")", "return", "f", ".", "trunc_ground", "(", "p", ")", ".", "rem", "(", "[", "minpoly", ",", "p_", "]", ")", ".", "trunc_ground", "(", "p", ")" ]
compute the reduced representation of a polynomial f in mathbb z_p[z] / (check m_{alpha}(z))[x] parameters f : polyelement polynomial in mathbb z[x .
train
false
46,752
def _s3_cleanup(glob_path, time_old, dry_run=False, **runner_kwargs): runner = EMRJobRunner(**runner_kwargs) log.info(('Deleting all files in %s that are older than %s' % (glob_path, time_old))) for path in runner.fs.ls(glob_path): (bucket_name, key_name) = parse_s3_uri(path) bucket = runner.fs.get_bucket(bucket_name) for key in bucket.list(key_name): last_modified = iso8601_to_datetime(key.last_modified) age = (datetime.utcnow() - last_modified) if (age > time_old): log.info(('Deleting %s; is %s old' % (key.name, age))) if (not dry_run): key.delete()
[ "def", "_s3_cleanup", "(", "glob_path", ",", "time_old", ",", "dry_run", "=", "False", ",", "**", "runner_kwargs", ")", ":", "runner", "=", "EMRJobRunner", "(", "**", "runner_kwargs", ")", "log", ".", "info", "(", "(", "'Deleting all files in %s that are older than %s'", "%", "(", "glob_path", ",", "time_old", ")", ")", ")", "for", "path", "in", "runner", ".", "fs", ".", "ls", "(", "glob_path", ")", ":", "(", "bucket_name", ",", "key_name", ")", "=", "parse_s3_uri", "(", "path", ")", "bucket", "=", "runner", ".", "fs", ".", "get_bucket", "(", "bucket_name", ")", "for", "key", "in", "bucket", ".", "list", "(", "key_name", ")", ":", "last_modified", "=", "iso8601_to_datetime", "(", "key", ".", "last_modified", ")", "age", "=", "(", "datetime", ".", "utcnow", "(", ")", "-", "last_modified", ")", "if", "(", "age", ">", "time_old", ")", ":", "log", ".", "info", "(", "(", "'Deleting %s; is %s old'", "%", "(", "key", ".", "name", ",", "age", ")", ")", ")", "if", "(", "not", "dry_run", ")", ":", "key", ".", "delete", "(", ")" ]
delete all files older than *time_old* in *path* .
train
false
46,754
def _decode_pa_dict(d): retval = cdict() for (k, v) in d.items(): if isinstance(k, tuple): for subk in k: retval[subk] = v for (k, v) in d.items(): if (not isinstance(k, tuple)): retval[k] = v return retval
[ "def", "_decode_pa_dict", "(", "d", ")", ":", "retval", "=", "cdict", "(", ")", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "isinstance", "(", "k", ",", "tuple", ")", ":", "for", "subk", "in", "k", ":", "retval", "[", "subk", "]", "=", "v", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "(", "not", "isinstance", "(", "k", ",", "tuple", ")", ")", ":", "retval", "[", "k", "]", "=", "v", "return", "retval" ]
decodes dict passed to prot_attrs .
train
false
46,755
def sendBundle(bundle, ipAddr='127.0.0.1', port=9000): with oscLock: outSocket.sendto(bundle.message, (ipAddr, port))
[ "def", "sendBundle", "(", "bundle", ",", "ipAddr", "=", "'127.0.0.1'", ",", "port", "=", "9000", ")", ":", "with", "oscLock", ":", "outSocket", ".", "sendto", "(", "bundle", ".", "message", ",", "(", "ipAddr", ",", "port", ")", ")" ]
convert bundle to a binary and send it .
train
false
46,756
def police_rheader(r, tabs=[]): rheader = None if (r.representation == 'html'): if (r.name == 'station'): station = r.record if station: tabs = [(T('Station Details'), None), (T('Staff'), 'human_resource'), (T('Beats'), 'location')] rheader_tabs = s3_rheader_tabs(r, tabs) rheader = DIV(rheader_tabs) return rheader
[ "def", "police_rheader", "(", "r", ",", "tabs", "=", "[", "]", ")", ":", "rheader", "=", "None", "if", "(", "r", ".", "representation", "==", "'html'", ")", ":", "if", "(", "r", ".", "name", "==", "'station'", ")", ":", "station", "=", "r", ".", "record", "if", "station", ":", "tabs", "=", "[", "(", "T", "(", "'Station Details'", ")", ",", "None", ")", ",", "(", "T", "(", "'Staff'", ")", ",", "'human_resource'", ")", ",", "(", "T", "(", "'Beats'", ")", ",", "'location'", ")", "]", "rheader_tabs", "=", "s3_rheader_tabs", "(", "r", ",", "tabs", ")", "rheader", "=", "DIV", "(", "rheader_tabs", ")", "return", "rheader" ]
resource headers for component views .
train
false
46,757
def group_by_fields(table, names): col_indices = map(table[0].index, names) result = defaultdict(list) for row in table[1:]: header = row[0] states = tuple([row[i] for i in col_indices]) result[states].append(header) return result
[ "def", "group_by_fields", "(", "table", ",", "names", ")", ":", "col_indices", "=", "map", "(", "table", "[", "0", "]", ".", "index", ",", "names", ")", "result", "=", "defaultdict", "(", "list", ")", "for", "row", "in", "table", "[", "1", ":", "]", ":", "header", "=", "row", "[", "0", "]", "states", "=", "tuple", "(", "[", "row", "[", "i", "]", "for", "i", "in", "col_indices", "]", ")", "result", "[", "states", "]", ".", "append", "(", "header", ")", "return", "result" ]
returns dict of :[row_headers] from table .
train
false
46,760
def toggle_actions(actions, enable): if (actions is not None): for action in actions: if (action is not None): action.setEnabled(enable)
[ "def", "toggle_actions", "(", "actions", ",", "enable", ")", ":", "if", "(", "actions", "is", "not", "None", ")", ":", "for", "action", "in", "actions", ":", "if", "(", "action", "is", "not", "None", ")", ":", "action", ".", "setEnabled", "(", "enable", ")" ]
enable/disable actions .
train
true
46,761
def _fprop_slice_np(h, stride, H, roi_offset): hstart = int(np.floor((float(h) * stride))) hend = int(np.ceil((float((h + 1)) * stride))) hstart = min(max((hstart + roi_offset), 0), H) hend = min(max((hend + roi_offset), 0), H) return (slice(hstart, hend), (hend - hstart))
[ "def", "_fprop_slice_np", "(", "h", ",", "stride", ",", "H", ",", "roi_offset", ")", ":", "hstart", "=", "int", "(", "np", ".", "floor", "(", "(", "float", "(", "h", ")", "*", "stride", ")", ")", ")", "hend", "=", "int", "(", "np", ".", "ceil", "(", "(", "float", "(", "(", "h", "+", "1", ")", ")", "*", "stride", ")", ")", ")", "hstart", "=", "min", "(", "max", "(", "(", "hstart", "+", "roi_offset", ")", ",", "0", ")", ",", "H", ")", "hend", "=", "min", "(", "max", "(", "(", "hend", "+", "roi_offset", ")", ",", "0", ")", ",", "H", ")", "return", "(", "slice", "(", "hstart", ",", "hend", ")", ",", "(", "hend", "-", "hstart", ")", ")" ]
slicing in this 1 dimension h: is the index on the pooled map stride: h: the max of the input map roi_offset: how far hstart is from 0 .
train
false
46,762
@task @use_master def update_supported_locales_single(id, latest=False, **kw): from mkt.webapps.models import Webapp try: app = Webapp.objects.get(pk=id) except Webapp.DoesNotExist: log.info((u'[Webapp:%s] Did not find webapp to update supported locales.' % id)) return try: if app.update_supported_locales(latest=latest): log.info((u'[Webapp:%s] Updated supported locales.' % app.id)) except Exception: log.info((u'[Webapp%s] Updating supported locales failed.' % app.id), exc_info=True)
[ "@", "task", "@", "use_master", "def", "update_supported_locales_single", "(", "id", ",", "latest", "=", "False", ",", "**", "kw", ")", ":", "from", "mkt", ".", "webapps", ".", "models", "import", "Webapp", "try", ":", "app", "=", "Webapp", ".", "objects", ".", "get", "(", "pk", "=", "id", ")", "except", "Webapp", ".", "DoesNotExist", ":", "log", ".", "info", "(", "(", "u'[Webapp:%s] Did not find webapp to update supported locales.'", "%", "id", ")", ")", "return", "try", ":", "if", "app", ".", "update_supported_locales", "(", "latest", "=", "latest", ")", ":", "log", ".", "info", "(", "(", "u'[Webapp:%s] Updated supported locales.'", "%", "app", ".", "id", ")", ")", "except", "Exception", ":", "log", ".", "info", "(", "(", "u'[Webapp%s] Updating supported locales failed.'", "%", "app", ".", "id", ")", ",", "exc_info", "=", "True", ")" ]
update supported_locales for an individual app .
train
false
46,764
@contextmanager def temp_style(style_name, settings=None): if (not settings): settings = DUMMY_SETTINGS temp_file = (u'%s.%s' % (style_name, STYLE_EXTENSION)) tempdir = tempfile.mkdtemp() with open(os.path.join(tempdir, temp_file), u'w') as f: for (k, v) in six.iteritems(settings): f.write((u'%s: %s' % (k, v))) USER_LIBRARY_PATHS.append(tempdir) style.reload_library() try: (yield) finally: shutil.rmtree(tempdir) style.reload_library()
[ "@", "contextmanager", "def", "temp_style", "(", "style_name", ",", "settings", "=", "None", ")", ":", "if", "(", "not", "settings", ")", ":", "settings", "=", "DUMMY_SETTINGS", "temp_file", "=", "(", "u'%s.%s'", "%", "(", "style_name", ",", "STYLE_EXTENSION", ")", ")", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "tempdir", ",", "temp_file", ")", ",", "u'w'", ")", "as", "f", ":", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "settings", ")", ":", "f", ".", "write", "(", "(", "u'%s: %s'", "%", "(", "k", ",", "v", ")", ")", ")", "USER_LIBRARY_PATHS", ".", "append", "(", "tempdir", ")", "style", ".", "reload_library", "(", ")", "try", ":", "(", "yield", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tempdir", ")", "style", ".", "reload_library", "(", ")" ]
context manager to create a style sheet in a temporary directory .
train
false
46,766
@csrf_exempt def ssl_login(request): if (not settings.FEATURES['AUTH_USE_CERTIFICATES']): return HttpResponseForbidden() cert = ssl_get_cert_from_request(request) if (not cert): return student.views.index(request) (_user, email, fullname) = _ssl_dn_extract_info(cert) redirect_to = get_next_url_for_login_page(request) retfun = functools.partial(redirect, redirect_to) return _external_login_or_signup(request, external_id=email, external_domain='ssl:MIT', credentials=cert, email=email, fullname=fullname, retfun=retfun)
[ "@", "csrf_exempt", "def", "ssl_login", "(", "request", ")", ":", "if", "(", "not", "settings", ".", "FEATURES", "[", "'AUTH_USE_CERTIFICATES'", "]", ")", ":", "return", "HttpResponseForbidden", "(", ")", "cert", "=", "ssl_get_cert_from_request", "(", "request", ")", "if", "(", "not", "cert", ")", ":", "return", "student", ".", "views", ".", "index", "(", "request", ")", "(", "_user", ",", "email", ",", "fullname", ")", "=", "_ssl_dn_extract_info", "(", "cert", ")", "redirect_to", "=", "get_next_url_for_login_page", "(", "request", ")", "retfun", "=", "functools", ".", "partial", "(", "redirect", ",", "redirect_to", ")", "return", "_external_login_or_signup", "(", "request", ",", "external_id", "=", "email", ",", "external_domain", "=", "'ssl:MIT'", ",", "credentials", "=", "cert", ",", "email", "=", "email", ",", "fullname", "=", "fullname", ",", "retfun", "=", "retfun", ")" ]
this is called by branding .
train
false
46,767
def notdefault(item): return (bool(item) and (str(item).lower() not in ('default', '*', '', str(DEFAULT_PRIORITY))))
[ "def", "notdefault", "(", "item", ")", ":", "return", "(", "bool", "(", "item", ")", "and", "(", "str", "(", "item", ")", ".", "lower", "(", ")", "not", "in", "(", "'default'", ",", "'*'", ",", "''", ",", "str", "(", "DEFAULT_PRIORITY", ")", ")", ")", ")" ]
return true if not default||* .
train
false
46,768
def _to_serializables(obj): if isinstance(obj, PRecord): result = dict(obj) result[_CLASS_MARKER] = obj.__class__.__name__ return result elif isinstance(obj, PClass): result = obj._to_dict() result[_CLASS_MARKER] = obj.__class__.__name__ return result elif isinstance(obj, PMap): return {_CLASS_MARKER: u'PMap', u'values': dict(obj).items()} elif isinstance(obj, (PSet, PVector, set)): return list(obj) elif isinstance(obj, FilePath): return {_CLASS_MARKER: u'FilePath', u'path': obj.path.decode('utf-8')} elif isinstance(obj, UUID): return {_CLASS_MARKER: u'UUID', 'hex': unicode(obj)} elif isinstance(obj, datetime): if (obj.tzinfo is None): raise ValueError('Datetime without a timezone: {}'.format(obj)) return {_CLASS_MARKER: u'datetime', 'seconds': timegm(obj.utctimetuple())} return obj
[ "def", "_to_serializables", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "PRecord", ")", ":", "result", "=", "dict", "(", "obj", ")", "result", "[", "_CLASS_MARKER", "]", "=", "obj", ".", "__class__", ".", "__name__", "return", "result", "elif", "isinstance", "(", "obj", ",", "PClass", ")", ":", "result", "=", "obj", ".", "_to_dict", "(", ")", "result", "[", "_CLASS_MARKER", "]", "=", "obj", ".", "__class__", ".", "__name__", "return", "result", "elif", "isinstance", "(", "obj", ",", "PMap", ")", ":", "return", "{", "_CLASS_MARKER", ":", "u'PMap'", ",", "u'values'", ":", "dict", "(", "obj", ")", ".", "items", "(", ")", "}", "elif", "isinstance", "(", "obj", ",", "(", "PSet", ",", "PVector", ",", "set", ")", ")", ":", "return", "list", "(", "obj", ")", "elif", "isinstance", "(", "obj", ",", "FilePath", ")", ":", "return", "{", "_CLASS_MARKER", ":", "u'FilePath'", ",", "u'path'", ":", "obj", ".", "path", ".", "decode", "(", "'utf-8'", ")", "}", "elif", "isinstance", "(", "obj", ",", "UUID", ")", ":", "return", "{", "_CLASS_MARKER", ":", "u'UUID'", ",", "'hex'", ":", "unicode", "(", "obj", ")", "}", "elif", "isinstance", "(", "obj", ",", "datetime", ")", ":", "if", "(", "obj", ".", "tzinfo", "is", "None", ")", ":", "raise", "ValueError", "(", "'Datetime without a timezone: {}'", ".", "format", "(", "obj", ")", ")", "return", "{", "_CLASS_MARKER", ":", "u'datetime'", ",", "'seconds'", ":", "timegm", "(", "obj", ".", "utctimetuple", "(", ")", ")", "}", "return", "obj" ]
this function turns assorted types into serializable objects .
train
false
46,769
def _ScrubShareNew(op_args): _ScrubForClass(Viewpoint, op_args['viewpoint'])
[ "def", "_ScrubShareNew", "(", "op_args", ")", ":", "_ScrubForClass", "(", "Viewpoint", ",", "op_args", "[", "'viewpoint'", "]", ")" ]
scrub the viewpoint title from the logs .
train
false
46,770
def write_biom_table(biom_table, biom_table_fp, compress=True, write_hdf5=HAVE_H5PY, table_type='OTU table'): if biom_table.is_empty(): raise EmptyBIOMTableError("Attempting to write an empty BIOM table to disk. QIIME doesn't support writing empty BIOM output files.") generated_by = get_generated_by_for_biom_tables() biom_table.type = table_type if write_hdf5: with biom_open(biom_table_fp, 'w') as biom_file: biom_table.to_hdf5(biom_file, generated_by, compress) else: with open(biom_table_fp, 'w') as biom_file: biom_table.to_json(generated_by, biom_file)
[ "def", "write_biom_table", "(", "biom_table", ",", "biom_table_fp", ",", "compress", "=", "True", ",", "write_hdf5", "=", "HAVE_H5PY", ",", "table_type", "=", "'OTU table'", ")", ":", "if", "biom_table", ".", "is_empty", "(", ")", ":", "raise", "EmptyBIOMTableError", "(", "\"Attempting to write an empty BIOM table to disk. QIIME doesn't support writing empty BIOM output files.\"", ")", "generated_by", "=", "get_generated_by_for_biom_tables", "(", ")", "biom_table", ".", "type", "=", "table_type", "if", "write_hdf5", ":", "with", "biom_open", "(", "biom_table_fp", ",", "'w'", ")", "as", "biom_file", ":", "biom_table", ".", "to_hdf5", "(", "biom_file", ",", "generated_by", ",", "compress", ")", "else", ":", "with", "open", "(", "biom_table_fp", ",", "'w'", ")", "as", "biom_file", ":", "biom_table", ".", "to_json", "(", "generated_by", ",", "biom_file", ")" ]
writes a biom table to the specified filepath parameters biom_table : biom .
train
false
46,771
def is_current_user_admin(): return (os.environ.get('USER_IS_ADMIN', '0') == '1')
[ "def", "is_current_user_admin", "(", ")", ":", "return", "(", "os", ".", "environ", ".", "get", "(", "'USER_IS_ADMIN'", ",", "'0'", ")", "==", "'1'", ")" ]
return true if the user making this request is an admin for this application .
train
false
46,772
def list_attached_user_policies(user_name, path_prefix=None, entity_filter=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) params = {'UserName': user_name} if (path_prefix is not None): params['PathPrefix'] = path_prefix policies = [] try: for ret in __utils__['boto.paged_call'](conn.get_response, 'ListAttachedUserPolicies', params, list_marker='AttachedPolicies'): policies.extend(ret.get('list_attached_user_policies_response', {}).get('list_attached_user_policies_result', {}).get('attached_policies', [])) return policies except boto.exception.BotoServerError as e: log.debug(e) msg = 'Failed to list user {0} attached policies.' log.error(msg.format(user_name)) return []
[ "def", "list_attached_user_policies", "(", "user_name", ",", "path_prefix", "=", "None", ",", "entity_filter", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "params", "=", "{", "'UserName'", ":", "user_name", "}", "if", "(", "path_prefix", "is", "not", "None", ")", ":", "params", "[", "'PathPrefix'", "]", "=", "path_prefix", "policies", "=", "[", "]", "try", ":", "for", "ret", "in", "__utils__", "[", "'boto.paged_call'", "]", "(", "conn", ".", "get_response", ",", "'ListAttachedUserPolicies'", ",", "params", ",", "list_marker", "=", "'AttachedPolicies'", ")", ":", "policies", ".", "extend", "(", "ret", ".", "get", "(", "'list_attached_user_policies_response'", ",", "{", "}", ")", ".", "get", "(", "'list_attached_user_policies_result'", ",", "{", "}", ")", ".", "get", "(", "'attached_policies'", ",", "[", "]", ")", ")", "return", "policies", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "msg", "=", "'Failed to list user {0} attached policies.'", "log", ".", "error", "(", "msg", ".", "format", "(", "user_name", ")", ")", "return", "[", "]" ]
list entities attached to the given user .
train
true
46,773
def file_dict(*packages): errors = [] ret = {} pkgs = {} cmd = 'dpkg -l {0}'.format(' '.join(packages)) out = __salt__['cmd.run_all'](cmd, python_shell=False) if (out['retcode'] != 0): msg = ('Error: ' + out['stderr']) log.error(msg) return msg out = out['stdout'] for line in out.splitlines(): if line.startswith('ii '): comps = line.split() pkgs[comps[1]] = {'version': comps[2], 'description': ' '.join(comps[3:])} if ('No packages found' in line): errors.append(line) for pkg in pkgs: files = [] cmd = 'dpkg -L {0}'.format(pkg) for line in __salt__['cmd.run'](cmd, python_shell=False).splitlines(): files.append(line) ret[pkg] = files return {'errors': errors, 'packages': ret}
[ "def", "file_dict", "(", "*", "packages", ")", ":", "errors", "=", "[", "]", "ret", "=", "{", "}", "pkgs", "=", "{", "}", "cmd", "=", "'dpkg -l {0}'", ".", "format", "(", "' '", ".", "join", "(", "packages", ")", ")", "out", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "(", "out", "[", "'retcode'", "]", "!=", "0", ")", ":", "msg", "=", "(", "'Error: '", "+", "out", "[", "'stderr'", "]", ")", "log", ".", "error", "(", "msg", ")", "return", "msg", "out", "=", "out", "[", "'stdout'", "]", "for", "line", "in", "out", ".", "splitlines", "(", ")", ":", "if", "line", ".", "startswith", "(", "'ii '", ")", ":", "comps", "=", "line", ".", "split", "(", ")", "pkgs", "[", "comps", "[", "1", "]", "]", "=", "{", "'version'", ":", "comps", "[", "2", "]", ",", "'description'", ":", "' '", ".", "join", "(", "comps", "[", "3", ":", "]", ")", "}", "if", "(", "'No packages found'", "in", "line", ")", ":", "errors", ".", "append", "(", "line", ")", "for", "pkg", "in", "pkgs", ":", "files", "=", "[", "]", "cmd", "=", "'dpkg -L {0}'", ".", "format", "(", "pkg", ")", "for", "line", "in", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", ":", "files", ".", "append", "(", "line", ")", "ret", "[", "pkg", "]", "=", "files", "return", "{", "'errors'", ":", "errors", ",", "'packages'", ":", "ret", "}" ]
list the files that belong to a package .
train
true
46,775
def parseXRDS(text): try: element = ElementTree.XML(text) except XMLError as why: exc = XRDSError('Error parsing document as XML') exc.reason = why raise exc else: tree = ElementTree.ElementTree(element) if (not isXRDS(tree)): raise XRDSError('Not an XRDS document') return tree
[ "def", "parseXRDS", "(", "text", ")", ":", "try", ":", "element", "=", "ElementTree", ".", "XML", "(", "text", ")", "except", "XMLError", "as", "why", ":", "exc", "=", "XRDSError", "(", "'Error parsing document as XML'", ")", "exc", ".", "reason", "=", "why", "raise", "exc", "else", ":", "tree", "=", "ElementTree", ".", "ElementTree", "(", "element", ")", "if", "(", "not", "isXRDS", "(", "tree", ")", ")", ":", "raise", "XRDSError", "(", "'Not an XRDS document'", ")", "return", "tree" ]
parse the given text as an xrds document .
train
true
46,777
def findCertainShow(showList, indexerid): if ((indexerid is None) or (showList is None) or (len(showList) == 0)): return None indexer_ids = ([indexerid] if (not isinstance(indexerid, list)) else indexerid) results = [show for show in showList if (show.indexerid in indexer_ids)] if (not results): return None if (len(results) == 1): return results[0] raise MultipleShowObjectsException()
[ "def", "findCertainShow", "(", "showList", ",", "indexerid", ")", ":", "if", "(", "(", "indexerid", "is", "None", ")", "or", "(", "showList", "is", "None", ")", "or", "(", "len", "(", "showList", ")", "==", "0", ")", ")", ":", "return", "None", "indexer_ids", "=", "(", "[", "indexerid", "]", "if", "(", "not", "isinstance", "(", "indexerid", ",", "list", ")", ")", "else", "indexerid", ")", "results", "=", "[", "show", "for", "show", "in", "showList", "if", "(", "show", ".", "indexerid", "in", "indexer_ids", ")", "]", "if", "(", "not", "results", ")", ":", "return", "None", "if", "(", "len", "(", "results", ")", "==", "1", ")", ":", "return", "results", "[", "0", "]", "raise", "MultipleShowObjectsException", "(", ")" ]
find a show by indexer id in the show list .
train
false
46,779
def validate_permissions_for_doctype(doctype, for_remove=False): doctype = frappe.get_doc(u'DocType', doctype) if (frappe.conf.developer_mode and (not frappe.flags.in_test)): doctype.save() else: validate_permissions(doctype, for_remove) for perm in doctype.get(u'permissions'): perm.db_update()
[ "def", "validate_permissions_for_doctype", "(", "doctype", ",", "for_remove", "=", "False", ")", ":", "doctype", "=", "frappe", ".", "get_doc", "(", "u'DocType'", ",", "doctype", ")", "if", "(", "frappe", ".", "conf", ".", "developer_mode", "and", "(", "not", "frappe", ".", "flags", ".", "in_test", ")", ")", ":", "doctype", ".", "save", "(", ")", "else", ":", "validate_permissions", "(", "doctype", ",", "for_remove", ")", "for", "perm", "in", "doctype", ".", "get", "(", "u'permissions'", ")", ":", "perm", ".", "db_update", "(", ")" ]
validates if permissions are set correctly .
train
false
46,781
def greedyWrap(inString, width=80): outLines = [] if (inString.find('\n\n') >= 0): paragraphs = string.split(inString, '\n\n') for para in paragraphs: outLines.extend((greedyWrap(para) + [''])) return outLines inWords = string.split(inString) column = 0 ptr_line = 0 while inWords: column = (column + len(inWords[ptr_line])) ptr_line = (ptr_line + 1) if (column > width): if (ptr_line == 1): pass else: ptr_line = (ptr_line - 1) (l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:]) outLines.append(string.join(l, ' ')) ptr_line = 0 column = 0 elif (not (len(inWords) > ptr_line)): outLines.append(string.join(inWords, ' ')) del inWords[:] else: column = (column + 1) return outLines
[ "def", "greedyWrap", "(", "inString", ",", "width", "=", "80", ")", ":", "outLines", "=", "[", "]", "if", "(", "inString", ".", "find", "(", "'\\n\\n'", ")", ">=", "0", ")", ":", "paragraphs", "=", "string", ".", "split", "(", "inString", ",", "'\\n\\n'", ")", "for", "para", "in", "paragraphs", ":", "outLines", ".", "extend", "(", "(", "greedyWrap", "(", "para", ")", "+", "[", "''", "]", ")", ")", "return", "outLines", "inWords", "=", "string", ".", "split", "(", "inString", ")", "column", "=", "0", "ptr_line", "=", "0", "while", "inWords", ":", "column", "=", "(", "column", "+", "len", "(", "inWords", "[", "ptr_line", "]", ")", ")", "ptr_line", "=", "(", "ptr_line", "+", "1", ")", "if", "(", "column", ">", "width", ")", ":", "if", "(", "ptr_line", "==", "1", ")", ":", "pass", "else", ":", "ptr_line", "=", "(", "ptr_line", "-", "1", ")", "(", "l", ",", "inWords", ")", "=", "(", "inWords", "[", "0", ":", "ptr_line", "]", ",", "inWords", "[", "ptr_line", ":", "]", ")", "outLines", ".", "append", "(", "string", ".", "join", "(", "l", ",", "' '", ")", ")", "ptr_line", "=", "0", "column", "=", "0", "elif", "(", "not", "(", "len", "(", "inWords", ")", ">", "ptr_line", ")", ")", ":", "outLines", ".", "append", "(", "string", ".", "join", "(", "inWords", ",", "' '", ")", ")", "del", "inWords", "[", ":", "]", "else", ":", "column", "=", "(", "column", "+", "1", ")", "return", "outLines" ]
given a string and a column width .
train
false
46,782
def new_state(trans, tool, invalid=False): state = galaxy.tools.DefaultToolState() state.inputs = {} if invalid: return state try: return tool.new_state(trans) except Exception as e: log.debug('Failed to build tool state for tool "%s" using standard method, will try to fall back on custom method: %s', tool.id, e) inputs = tool.inputs_by_page[0] context = ExpressionContext(state.inputs, parent=None) for input in inputs.values(): try: state.inputs[input.name] = input.get_initial_value(trans, context) except: state.inputs[input.name] = [] return state
[ "def", "new_state", "(", "trans", ",", "tool", ",", "invalid", "=", "False", ")", ":", "state", "=", "galaxy", ".", "tools", ".", "DefaultToolState", "(", ")", "state", ".", "inputs", "=", "{", "}", "if", "invalid", ":", "return", "state", "try", ":", "return", "tool", ".", "new_state", "(", "trans", ")", "except", "Exception", "as", "e", ":", "log", ".", "debug", "(", "'Failed to build tool state for tool \"%s\" using standard method, will try to fall back on custom method: %s'", ",", "tool", ".", "id", ",", "e", ")", "inputs", "=", "tool", ".", "inputs_by_page", "[", "0", "]", "context", "=", "ExpressionContext", "(", "state", ".", "inputs", ",", "parent", "=", "None", ")", "for", "input", "in", "inputs", ".", "values", "(", ")", ":", "try", ":", "state", ".", "inputs", "[", "input", ".", "name", "]", "=", "input", ".", "get_initial_value", "(", "trans", ",", "context", ")", "except", ":", "state", ".", "inputs", "[", "input", ".", "name", "]", "=", "[", "]", "return", "state" ]
create a new defaulttoolstate for the received tool .
train
false
46,783
def generateNegotiateMessage(): s = struct.pack('<8sII8s8s8s', 'NTLMSSP\x00', 1, NTLM_FLAGS, ('\x00' * 8), ('\x00' * 8), '\x06\x00r\x17\x00\x00\x00\x0f') return s
[ "def", "generateNegotiateMessage", "(", ")", ":", "s", "=", "struct", ".", "pack", "(", "'<8sII8s8s8s'", ",", "'NTLMSSP\\x00'", ",", "1", ",", "NTLM_FLAGS", ",", "(", "'\\x00'", "*", "8", ")", ",", "(", "'\\x00'", "*", "8", ")", ",", "'\\x06\\x00r\\x17\\x00\\x00\\x00\\x0f'", ")", "return", "s" ]
references: - [ms-nlmp]: 2 .
train
false
46,784
def typed(ruletypes): return switch(type, ruletypes)
[ "def", "typed", "(", "ruletypes", ")", ":", "return", "switch", "(", "type", ",", "ruletypes", ")" ]
apply rules based on the expression type inputs: ruletypes -- a dict mapping {type: rule} .
train
false
46,785
def fake_get_vim_object(arg): return fake.FakeVim()
[ "def", "fake_get_vim_object", "(", "arg", ")", ":", "return", "fake", ".", "FakeVim", "(", ")" ]
stubs out the vmwareapisessions get_vim_object method .
train
false
46,786
def send(text, connections, **kwargs): if (not isinstance(connections, collections.Iterable)): connections = [connections] router = get_router() message = router.new_outgoing_message(text=text, connections=connections, **kwargs) router.send_outgoing(message) return message
[ "def", "send", "(", "text", ",", "connections", ",", "**", "kwargs", ")", ":", "if", "(", "not", "isinstance", "(", "connections", ",", "collections", ".", "Iterable", ")", ")", ":", "connections", "=", "[", "connections", "]", "router", "=", "get_router", "(", ")", "message", "=", "router", ".", "new_outgoing_message", "(", "text", "=", "text", ",", "connections", "=", "connections", ",", "**", "kwargs", ")", "router", ".", "send_outgoing", "(", "message", ")", "return", "message" ]
send a message to the socket for the given session id .
train
false
46,787
def get_pkg_data_fileobj(data_name, package=None, encoding=None, cache=True): datafn = _find_pkg_data_path(data_name, package=package) if os.path.isdir(datafn): raise IOError(u"Tried to access a data file that's actually a package data directory") elif os.path.isfile(datafn): return get_readable_fileobj(datafn, encoding=encoding) else: return get_readable_fileobj((conf.dataurl + datafn), encoding=encoding, cache=cache)
[ "def", "get_pkg_data_fileobj", "(", "data_name", ",", "package", "=", "None", ",", "encoding", "=", "None", ",", "cache", "=", "True", ")", ":", "datafn", "=", "_find_pkg_data_path", "(", "data_name", ",", "package", "=", "package", ")", "if", "os", ".", "path", ".", "isdir", "(", "datafn", ")", ":", "raise", "IOError", "(", "u\"Tried to access a data file that's actually a package data directory\"", ")", "elif", "os", ".", "path", ".", "isfile", "(", "datafn", ")", ":", "return", "get_readable_fileobj", "(", "datafn", ",", "encoding", "=", "encoding", ")", "else", ":", "return", "get_readable_fileobj", "(", "(", "conf", ".", "dataurl", "+", "datafn", ")", ",", "encoding", "=", "encoding", ",", "cache", "=", "cache", ")" ]
retrieves a data file from the standard locations for the package and provides the file as a file-like object that reads bytes .
train
false
46,788
def lead_text(top_elem, num_words=10): pat = re.compile(u'\\s+', flags=re.UNICODE) words = [] def get_text(x, attr=u'text'): ans = getattr(x, attr) if ans: words.extend(filter(None, pat.split(ans))) stack = [(top_elem, u'text')] while (stack and (len(words) < num_words)): (elem, attr) = stack.pop() get_text(elem, attr) if (attr == u'text'): if (elem is not top_elem): stack.append((elem, u'tail')) stack.extend(reversed(list(((c, u'text') for c in elem.iterchildren(u'*'))))) return u' '.join(words[:num_words])
[ "def", "lead_text", "(", "top_elem", ",", "num_words", "=", "10", ")", ":", "pat", "=", "re", ".", "compile", "(", "u'\\\\s+'", ",", "flags", "=", "re", ".", "UNICODE", ")", "words", "=", "[", "]", "def", "get_text", "(", "x", ",", "attr", "=", "u'text'", ")", ":", "ans", "=", "getattr", "(", "x", ",", "attr", ")", "if", "ans", ":", "words", ".", "extend", "(", "filter", "(", "None", ",", "pat", ".", "split", "(", "ans", ")", ")", ")", "stack", "=", "[", "(", "top_elem", ",", "u'text'", ")", "]", "while", "(", "stack", "and", "(", "len", "(", "words", ")", "<", "num_words", ")", ")", ":", "(", "elem", ",", "attr", ")", "=", "stack", ".", "pop", "(", ")", "get_text", "(", "elem", ",", "attr", ")", "if", "(", "attr", "==", "u'text'", ")", ":", "if", "(", "elem", "is", "not", "top_elem", ")", ":", "stack", ".", "append", "(", "(", "elem", ",", "u'tail'", ")", ")", "stack", ".", "extend", "(", "reversed", "(", "list", "(", "(", "(", "c", ",", "u'text'", ")", "for", "c", "in", "elem", ".", "iterchildren", "(", "u'*'", ")", ")", ")", ")", ")", "return", "u' '", ".", "join", "(", "words", "[", ":", "num_words", "]", ")" ]
return the leading text contained in top_elem up to a maximum of num_words words .
train
false
46,790
def pooling_shape(pool_shape, image_shape, stride): (n_images, n_channels, height, width) = image_shape height = (((height - pool_shape[0]) / float(stride[0])) + 1) width = (((width - pool_shape[1]) / float(stride[1])) + 1) assert ((height % 1) == 0) assert ((width % 1) == 0) return (int(height), int(width))
[ "def", "pooling_shape", "(", "pool_shape", ",", "image_shape", ",", "stride", ")", ":", "(", "n_images", ",", "n_channels", ",", "height", ",", "width", ")", "=", "image_shape", "height", "=", "(", "(", "(", "height", "-", "pool_shape", "[", "0", "]", ")", "/", "float", "(", "stride", "[", "0", "]", ")", ")", "+", "1", ")", "width", "=", "(", "(", "(", "width", "-", "pool_shape", "[", "1", "]", ")", "/", "float", "(", "stride", "[", "1", "]", ")", ")", "+", "1", ")", "assert", "(", "(", "height", "%", "1", ")", "==", "0", ")", "assert", "(", "(", "width", "%", "1", ")", "==", "0", ")", "return", "(", "int", "(", "height", ")", ",", "int", "(", "width", ")", ")" ]
calculate output shape for pooling layer .
train
false
46,791
def EnqueueBackgroundThread(request_id, target, args, kwargs): return _pending_background_threads.EnqueueBackgroundThread(request_id, target, args, kwargs)
[ "def", "EnqueueBackgroundThread", "(", "request_id", ",", "target", ",", "args", ",", "kwargs", ")", ":", "return", "_pending_background_threads", ".", "EnqueueBackgroundThread", "(", "request_id", ",", "target", ",", "args", ",", "kwargs", ")" ]
enqueues a new background thread request for a certain request id .
train
false
46,793
def get_quotas_tenant(profile=None): conn = _auth(profile) return conn.get_quotas_tenant()
[ "def", "get_quotas_tenant", "(", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "get_quotas_tenant", "(", ")" ]
fetches tenant info in servers context for following quota operation cli example: .
train
false
46,794
def set_shutdown_hook(hook): if ((hook is not None) and (not callable(hook))): raise TypeError(('hook must be callable, got %s' % hook.__class__)) global __shutdown_hook with __shutdown_mutex: old_hook = __shutdown_hook __shutdown_hook = hook return old_hook
[ "def", "set_shutdown_hook", "(", "hook", ")", ":", "if", "(", "(", "hook", "is", "not", "None", ")", "and", "(", "not", "callable", "(", "hook", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'hook must be callable, got %s'", "%", "hook", ".", "__class__", ")", ")", "global", "__shutdown_hook", "with", "__shutdown_mutex", ":", "old_hook", "=", "__shutdown_hook", "__shutdown_hook", "=", "hook", "return", "old_hook" ]
registers a function to be called when the server is shutting down .
train
false
46,796
def determine_version(api): discovery = make_service('discovery', 'v1') response = discovery.apis().list(name=api, preferred=True).execute() if (not response.get('items')): raise ValueError('Unknown API "{0}".'.format(api)) return response['items'][0]['version']
[ "def", "determine_version", "(", "api", ")", ":", "discovery", "=", "make_service", "(", "'discovery'", ",", "'v1'", ")", "response", "=", "discovery", ".", "apis", "(", ")", ".", "list", "(", "name", "=", "api", ",", "preferred", "=", "True", ")", ".", "execute", "(", ")", "if", "(", "not", "response", ".", "get", "(", "'items'", ")", ")", ":", "raise", "ValueError", "(", "'Unknown API \"{0}\".'", ".", "format", "(", "api", ")", ")", "return", "response", "[", "'items'", "]", "[", "0", "]", "[", "'version'", "]" ]
determine current version of google api args: api: [string] google api name .
train
false
46,798
def zeta(x, q=None, out=None): if (q is None): q = 1 return _zeta(x, q, out)
[ "def", "zeta", "(", "x", ",", "q", "=", "None", ",", "out", "=", "None", ")", ":", "if", "(", "q", "is", "None", ")", ":", "q", "=", "1", "return", "_zeta", "(", "x", ",", "q", ",", "out", ")" ]
riemann zeta function .
train
false
46,800
def p_seen_union(p): val = _make_empty_struct(p[2]) setattr(thrift_stack[(-1)], p[2], val) p[0] = val
[ "def", "p_seen_union", "(", "p", ")", ":", "val", "=", "_make_empty_struct", "(", "p", "[", "2", "]", ")", "setattr", "(", "thrift_stack", "[", "(", "-", "1", ")", "]", ",", "p", "[", "2", "]", ",", "val", ")", "p", "[", "0", "]", "=", "val" ]
seen_union : union identifier .
train
false
46,801
def send_email_for_event_role_invite(email, role, event, link): message_settings = MessageSettings.query.filter_by(action=EVENT_ROLE).first() if ((not message_settings) or (message_settings.mail_status == 1)): subject = MAILS[EVENT_ROLE]['subject'].format(role=role, event=event) message = MAILS[EVENT_ROLE]['message'].format(email=email, role=role, event=event, link=link) send_email(to=email, action=EVENT_ROLE, subject=subject, html=message)
[ "def", "send_email_for_event_role_invite", "(", "email", ",", "role", ",", "event", ",", "link", ")", ":", "message_settings", "=", "MessageSettings", ".", "query", ".", "filter_by", "(", "action", "=", "EVENT_ROLE", ")", ".", "first", "(", ")", "if", "(", "(", "not", "message_settings", ")", "or", "(", "message_settings", ".", "mail_status", "==", "1", ")", ")", ":", "subject", "=", "MAILS", "[", "EVENT_ROLE", "]", "[", "'subject'", "]", ".", "format", "(", "role", "=", "role", ",", "event", "=", "event", ")", "message", "=", "MAILS", "[", "EVENT_ROLE", "]", "[", "'message'", "]", ".", "format", "(", "email", "=", "email", ",", "role", "=", "role", ",", "event", "=", "event", ",", "link", "=", "link", ")", "send_email", "(", "to", "=", "email", ",", "action", "=", "EVENT_ROLE", ",", "subject", "=", "subject", ",", "html", "=", "message", ")" ]
send email to users for event role invites .
train
false
46,802
def cache_nodes_ip(opts, base=None): if (base is None): base = opts['cachedir'] minions = list_cache_nodes_full(opts, base=base)
[ "def", "cache_nodes_ip", "(", "opts", ",", "base", "=", "None", ")", ":", "if", "(", "base", "is", "None", ")", ":", "base", "=", "opts", "[", "'cachedir'", "]", "minions", "=", "list_cache_nodes_full", "(", "opts", ",", "base", "=", "base", ")" ]
retrieve a list of all nodes from salt cloud cache .
train
false
46,803
def parse_if_range_header(value): if (not value): return IfRange() date = parse_date(value) if (date is not None): return IfRange(date=date) return IfRange(unquote_etag(value)[0])
[ "def", "parse_if_range_header", "(", "value", ")", ":", "if", "(", "not", "value", ")", ":", "return", "IfRange", "(", ")", "date", "=", "parse_date", "(", "value", ")", "if", "(", "date", "is", "not", "None", ")", ":", "return", "IfRange", "(", "date", "=", "date", ")", "return", "IfRange", "(", "unquote_etag", "(", "value", ")", "[", "0", "]", ")" ]
parses an if-range header which can be an etag or a date .
train
true
46,804
@pytest.fixture def dict_loader(): return loaders.DictLoader({'justdict.html': 'FOO'})
[ "@", "pytest", ".", "fixture", "def", "dict_loader", "(", ")", ":", "return", "loaders", ".", "DictLoader", "(", "{", "'justdict.html'", ":", "'FOO'", "}", ")" ]
returns dictloader .
train
false
46,805
def remove_embargo_countries(apps, schema_editor): country_model = apps.get_model(u'embargo', u'Country') country_model.objects.all().delete()
[ "def", "remove_embargo_countries", "(", "apps", ",", "schema_editor", ")", ":", "country_model", "=", "apps", ".", "get_model", "(", "u'embargo'", ",", "u'Country'", ")", "country_model", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")" ]
clear all available countries .
train
false
46,806
def test_latex_no_trailing_backslash(): lines = '\n\\begin{table}\n\\begin{tabular}{ccc}\na & b & c \\\\\n1 & 1.0 & c \\\\ % comment\n3\\% & 3.0 & e % comment\n\\end{tabular}\n\\end{table}\n' dat = ascii.read(lines, format='latex') assert (dat.colnames == ['a', 'b', 'c']) assert np.all((dat['a'] == ['1', '3\\%'])) assert np.all((dat['c'] == ['c', 'e']))
[ "def", "test_latex_no_trailing_backslash", "(", ")", ":", "lines", "=", "'\\n\\\\begin{table}\\n\\\\begin{tabular}{ccc}\\na & b & c \\\\\\\\\\n1 & 1.0 & c \\\\\\\\ % comment\\n3\\\\% & 3.0 & e % comment\\n\\\\end{tabular}\\n\\\\end{table}\\n'", "dat", "=", "ascii", ".", "read", "(", "lines", ",", "format", "=", "'latex'", ")", "assert", "(", "dat", ".", "colnames", "==", "[", "'a'", ",", "'b'", ",", "'c'", "]", ")", "assert", "np", ".", "all", "(", "(", "dat", "[", "'a'", "]", "==", "[", "'1'", ",", "'3\\\\%'", "]", ")", ")", "assert", "np", ".", "all", "(", "(", "dat", "[", "'c'", "]", "==", "[", "'c'", ",", "'e'", "]", ")", ")" ]
test that latex/aastex file with no trailing backslash can be read .
train
false