id_within_dataset: int64 (1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: list (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 class)
is_duplicated: bool (2 classes)
9,604
def is_localized(node):
    if isinstance(node.parent, compiler.ast.CallFunc):
        if isinstance(node.parent.node, compiler.ast.Name):
            if (node.parent.node.name == '_'):
                return True
    return False
[ "def", "is_localized", "(", "node", ")", ":", "if", "isinstance", "(", "node", ".", "parent", ",", "compiler", ".", "ast", ".", "CallFunc", ")", ":", "if", "isinstance", "(", "node", ".", "parent", ".", "node", ",", "compiler", ".", "ast", ".", "Name", ")", ":", "if", "(", "node", ".", "parent", ".", "node", ".", "name", "==", "'_'", ")", ":", "return", "True", "return", "False" ]
check message wrapped by _() .
train
false
9,605
def metadef_property_get(context, namespace_name, property_name, session=None):
    session = (session or get_session())
    return metadef_property_api.get(context, namespace_name, property_name, session)
[ "def", "metadef_property_get", "(", "context", ",", "namespace_name", ",", "property_name", ",", "session", "=", "None", ")", ":", "session", "=", "(", "session", "or", "get_session", "(", ")", ")", "return", "metadef_property_api", ".", "get", "(", "context", ",", "namespace_name", ",", "property_name", ",", "session", ")" ]
get a metadef property or raise if it does not exist .
train
false
9,606
def _passphrase_callback(passphrase):
    def f(*args):
        return passphrase
    return f
[ "def", "_passphrase_callback", "(", "passphrase", ")", ":", "def", "f", "(", "*", "args", ")", ":", "return", "passphrase", "return", "f" ]
returns a callback function used to supply a passphrase for private keys .
train
false
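As a quick illustration (a minimal sketch, not part of the dataset record), the closure returned above ignores its arguments and always yields the captured passphrase:

callback = _passphrase_callback('my secret')
callback()           # -> 'my secret'
callback(0, True)    # arguments are ignored -> 'my secret'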
9,607
def test_replace_update_column_via_setitem_warnings_always():
    t = table.Table([[1, 2, 3], [4, 5, 6]], names=['a', 'b'])
    with catch_warnings() as w:
        with table.conf.set_temp('replace_warnings', ['always']):
            t['a'] = 0
            assert (len(w) == 0)
            from inspect import currentframe, getframeinfo
            frameinfo = getframeinfo(currentframe())
            t['a'] = [10, 20, 30]
            assert (len(w) == 1)
            assert ("replaced column 'a'" == str(w[0].message))
            assert (w[0].lineno == (frameinfo.lineno + 1))
            assert (w[0].category is table.TableReplaceWarning)
            assert ('test_table' in w[0].filename)
[ "def", "test_replace_update_column_via_setitem_warnings_always", "(", ")", ":", "t", "=", "table", ".", "Table", "(", "[", "[", "1", ",", "2", ",", "3", "]", ",", "[", "4", ",", "5", ",", "6", "]", "]", ",", "names", "=", "[", "'a'", ",", "'b'", "]", ")", "with", "catch_warnings", "(", ")", "as", "w", ":", "with", "table", ".", "conf", ".", "set_temp", "(", "'replace_warnings'", ",", "[", "'always'", "]", ")", ":", "t", "[", "'a'", "]", "=", "0", "assert", "(", "len", "(", "w", ")", "==", "0", ")", "from", "inspect", "import", "currentframe", ",", "getframeinfo", "frameinfo", "=", "getframeinfo", "(", "currentframe", "(", ")", ")", "t", "[", "'a'", "]", "=", "[", "10", ",", "20", ",", "30", "]", "assert", "(", "len", "(", "w", ")", "==", "1", ")", "assert", "(", "\"replaced column 'a'\"", "==", "str", "(", "w", "[", "0", "]", ".", "message", ")", ")", "assert", "(", "w", "[", "0", "]", ".", "lineno", "==", "(", "frameinfo", ".", "lineno", "+", "1", ")", ")", "assert", "(", "w", "[", "0", "]", ".", "category", "is", "table", ".", "TableReplaceWarning", ")", "assert", "(", "'test_table'", "in", "w", "[", "0", "]", ".", "filename", ")" ]
test warnings related to table replace change in #5556: test always setting that raises warning for any replace .
train
false
9,608
def _document_lock_clear(document_id, user_name):
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        locked_by = redis.get(key)
        if (locked_by == user_name):
            return redis.delete(key)
        else:
            return False
    except RedisError as e:
        statsd.incr('redis.errror')
        log.error(('Redis error: %s' % e))
        return False
[ "def", "_document_lock_clear", "(", "document_id", ",", "user_name", ")", ":", "try", ":", "redis", "=", "redis_client", "(", "name", "=", "'default'", ")", "key", "=", "_document_lock_key", ".", "format", "(", "id", "=", "document_id", ")", "locked_by", "=", "redis", ".", "get", "(", "key", ")", "if", "(", "locked_by", "==", "user_name", ")", ":", "return", "redis", ".", "delete", "(", "key", ")", "else", ":", "return", "False", "except", "RedisError", "as", "e", ":", "statsd", ".", "incr", "(", "'redis.errror'", ")", "log", ".", "error", "(", "(", "'Redis error: %s'", "%", "e", ")", ")", "return", "False" ]
remove a lock from a document .
train
false
9,609
def disable_beacon(name, **kwargs):
    ret = {'comment': [], 'result': True}
    if (not name):
        ret['comment'] = 'Beacon name is required.'
        ret['result'] = False
        return ret
    if (('test' in kwargs) and kwargs['test']):
        ret['comment'] = 'Beacons would be enabled.'
    else:
        _beacons = list_(return_yaml=False)
        if (name not in _beacons):
            ret['comment'] = 'Beacon {0} is not currently configured.'.format(name)
            ret['result'] = False
            return ret
        try:
            eventer = salt.utils.event.get_event('minion', opts=__opts__)
            res = __salt__['event.fire']({'func': 'disable_beacon', 'name': name}, 'manage_beacons')
            if res:
                event_ret = eventer.get_event(tag='/salt/minion/minion_beacon_disabled_complete', wait=30)
                if (event_ret and event_ret['complete']):
                    beacons = event_ret['beacons']
                    beacon_config_dict = _get_beacon_config_dict(beacons[name])
                    if (('enabled' in beacon_config_dict) and (not beacon_config_dict['enabled'])):
                        ret['result'] = True
                        ret['comment'] = 'Disabled beacon {0} on minion.'.format(name)
                    else:
                        ret['result'] = False
                        ret['comment'] = 'Failed to disable beacon on minion.'
            return ret
        except KeyError:
            ret['comment'] = 'Event module not available. Beacon disable job failed.'
    return ret
[ "def", "disable_beacon", "(", "name", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "'comment'", ":", "[", "]", ",", "'result'", ":", "True", "}", "if", "(", "not", "name", ")", ":", "ret", "[", "'comment'", "]", "=", "'Beacon name is required.'", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "if", "(", "(", "'test'", "in", "kwargs", ")", "and", "kwargs", "[", "'test'", "]", ")", ":", "ret", "[", "'comment'", "]", "=", "'Beacons would be enabled.'", "else", ":", "_beacons", "=", "list_", "(", "return_yaml", "=", "False", ")", "if", "(", "name", "not", "in", "_beacons", ")", ":", "ret", "[", "'comment'", "]", "=", "'Beacon {0} is not currently configured.'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "try", ":", "eventer", "=", "salt", ".", "utils", ".", "event", ".", "get_event", "(", "'minion'", ",", "opts", "=", "__opts__", ")", "res", "=", "__salt__", "[", "'event.fire'", "]", "(", "{", "'func'", ":", "'disable_beacon'", ",", "'name'", ":", "name", "}", ",", "'manage_beacons'", ")", "if", "res", ":", "event_ret", "=", "eventer", ".", "get_event", "(", "tag", "=", "'/salt/minion/minion_beacon_disabled_complete'", ",", "wait", "=", "30", ")", "if", "(", "event_ret", "and", "event_ret", "[", "'complete'", "]", ")", ":", "beacons", "=", "event_ret", "[", "'beacons'", "]", "beacon_config_dict", "=", "_get_beacon_config_dict", "(", "beacons", "[", "name", "]", ")", "if", "(", "(", "'enabled'", "in", "beacon_config_dict", ")", "and", "(", "not", "beacon_config_dict", "[", "'enabled'", "]", ")", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Disabled beacon {0} on minion.'", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to disable beacon on minion.'", "return", "ret", "except", "KeyError", ":", "ret", "[", "'comment'", "]", "=", "'Event module not available. Beacon disable job failed.'", "return", "ret" ]
disable beacon on the minion :name: name of the beacon to disable .
train
true
9,610
def make_secgroup_list(security_groups):
    secgroups = objects.SecurityGroupList()
    secgroups.objects = []
    for sg in security_groups:
        secgroup = objects.SecurityGroup()
        if uuidutils.is_uuid_like(sg):
            secgroup.uuid = sg
        else:
            secgroup.name = sg
        secgroups.objects.append(secgroup)
    return secgroups
[ "def", "make_secgroup_list", "(", "security_groups", ")", ":", "secgroups", "=", "objects", ".", "SecurityGroupList", "(", ")", "secgroups", ".", "objects", "=", "[", "]", "for", "sg", "in", "security_groups", ":", "secgroup", "=", "objects", ".", "SecurityGroup", "(", ")", "if", "uuidutils", ".", "is_uuid_like", "(", "sg", ")", ":", "secgroup", ".", "uuid", "=", "sg", "else", ":", "secgroup", ".", "name", "=", "sg", "secgroups", ".", "objects", ".", "append", "(", "secgroup", ")", "return", "secgroups" ]
a helper to make security group objects from a list of names or uuids .
train
false
9,612
@register.tag
def get_unread_message_count_between(parser, token):
    try:
        (tag_name, arg) = token.contents.split(None, 1)
    except ValueError:
        raise template.TemplateSyntaxError(('%s tag requires arguments' % token.contents.split()[0]))
    m = re.search('(.*?) and (.*?) as (\\w+)', arg)
    if (not m):
        raise template.TemplateSyntaxError(('%s tag had invalid arguments' % tag_name))
    (um_from_user, um_to_user, var_name) = m.groups()
    return MessageCount(um_from_user, var_name, um_to_user)
[ "@", "register", ".", "tag", "def", "get_unread_message_count_between", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "tag_name", ",", "arg", ")", "=", "token", ".", "contents", ".", "split", "(", "None", ",", "1", ")", "except", "ValueError", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'%s tag requires arguments'", "%", "token", ".", "contents", ".", "split", "(", ")", "[", "0", "]", ")", ")", "m", "=", "re", ".", "search", "(", "'(.*?) and (.*?) as (\\\\w+)'", ",", "arg", ")", "if", "(", "not", "m", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'%s tag had invalid arguments'", "%", "tag_name", ")", ")", "(", "um_from_user", ",", "um_to_user", ",", "var_name", ")", "=", "m", ".", "groups", "(", ")", "return", "MessageCount", "(", "um_from_user", ",", "var_name", ",", "um_to_user", ")" ]
returns the unread message count between two users .
train
true
9,613
def random_integers(random_state, size=None, low=0, high=1, ndim=None, dtype='int64'):
    low = tensor.as_tensor_variable(low)
    high = tensor.as_tensor_variable(high)
    (ndim, size, bcast) = _infer_ndim_bcast(ndim, size, low, high)
    op = RandomFunction(random_integers_helper, tensor.TensorType(dtype=dtype, broadcastable=bcast))
    return op(random_state, size, low, high)
[ "def", "random_integers", "(", "random_state", ",", "size", "=", "None", ",", "low", "=", "0", ",", "high", "=", "1", ",", "ndim", "=", "None", ",", "dtype", "=", "'int64'", ")", ":", "low", "=", "tensor", ".", "as_tensor_variable", "(", "low", ")", "high", "=", "tensor", ".", "as_tensor_variable", "(", "high", ")", "(", "ndim", ",", "size", ",", "bcast", ")", "=", "_infer_ndim_bcast", "(", "ndim", ",", "size", ",", "low", ",", "high", ")", "op", "=", "RandomFunction", "(", "random_integers_helper", ",", "tensor", ".", "TensorType", "(", "dtype", "=", "dtype", ",", "broadcastable", "=", "bcast", ")", ")", "return", "op", "(", "random_state", ",", "size", ",", "low", ",", "high", ")" ]
return a scalar or an array of integer values over [low .
train
false
9,614
def cr_uid_id(method):
    method._api = 'cr_uid_id'
    return method
[ "def", "cr_uid_id", "(", "method", ")", ":", "method", ".", "_api", "=", "'cr_uid_id'", "return", "method" ]
decorate a traditional-style method that takes cr .
train
false
9,615
def get_file_dict(filename_list, log_printer):
    file_dict = {}
    for filename in filename_list:
        try:
            with open(filename, 'r', encoding='utf-8') as _file:
                file_dict[filename] = tuple(_file.readlines())
        except UnicodeDecodeError:
            log_printer.warn("Failed to read file '{}'. It seems to contain non-unicode characters. Leaving it out.".format(filename))
        except OSError as exception:
            log_printer.log_exception("Failed to read file '{}' because of an unknown error. Leaving it out.".format(filename), exception, log_level=LOG_LEVEL.WARNING)
    log_printer.debug(('Files that will be checked:\n' + '\n'.join(file_dict.keys())))
    return file_dict
[ "def", "get_file_dict", "(", "filename_list", ",", "log_printer", ")", ":", "file_dict", "=", "{", "}", "for", "filename", "in", "filename_list", ":", "try", ":", "with", "open", "(", "filename", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "_file", ":", "file_dict", "[", "filename", "]", "=", "tuple", "(", "_file", ".", "readlines", "(", ")", ")", "except", "UnicodeDecodeError", ":", "log_printer", ".", "warn", "(", "\"Failed to read file '{}'. It seems to contain non-unicode characters. Leaving it out.\"", ".", "format", "(", "filename", ")", ")", "except", "OSError", "as", "exception", ":", "log_printer", ".", "log_exception", "(", "\"Failed to read file '{}' because of an unknown error. Leaving it out.\"", ".", "format", "(", "filename", ")", ",", "exception", ",", "log_level", "=", "LOG_LEVEL", ".", "WARNING", ")", "log_printer", ".", "debug", "(", "(", "'Files that will be checked:\\n'", "+", "'\\n'", ".", "join", "(", "file_dict", ".", "keys", "(", ")", ")", ")", ")", "return", "file_dict" ]
reads all files into a dictionary .
train
false
9,617
def action_description(text):
    def _decorator(func):
        func.description = text
        return func
    return _decorator
[ "def", "action_description", "(", "text", ")", ":", "def", "_decorator", "(", "func", ")", ":", "func", ".", "description", "=", "text", "return", "func", "return", "_decorator" ]
decorator for adding a description to command action .
train
false
9,620
def enable_availability_zones(name, availability_zones, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if isinstance(availability_zones, string_types):
        availability_zones = json.loads(availability_zones)
    try:
        conn.enable_availability_zones(name, availability_zones)
        msg = 'Enabled availability_zones on ELB {0}'.format(name)
        log.info(msg)
        return True
    except boto.exception.BotoServerError as error:
        log.debug(error)
        msg = 'Failed to enable availability_zones on ELB {0}: {1}'.format(name, error)
        log.error(msg)
        return False
[ "def", "enable_availability_zones", "(", "name", ",", "availability_zones", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "isinstance", "(", "availability_zones", ",", "string_types", ")", ":", "availability_zones", "=", "json", ".", "loads", "(", "availability_zones", ")", "try", ":", "conn", ".", "enable_availability_zones", "(", "name", ",", "availability_zones", ")", "msg", "=", "'Enabled availability_zones on ELB {0}'", ".", "format", "(", "name", ")", "log", ".", "info", "(", "msg", ")", "return", "True", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "error", ":", "log", ".", "debug", "(", "error", ")", "msg", "=", "'Failed to enable availability_zones on ELB {0}: {1}'", ".", "format", "(", "name", ",", "error", ")", "log", ".", "error", "(", "msg", ")", "return", "False" ]
enable availability zones for elb .
train
false
9,622
def _gpa10iterator(handle):
    for inline in handle:
        if (inline[0] == '!'):
            continue
        inrec = inline.rstrip('\n').split(' DCTB ')
        if (len(inrec) == 1):
            continue
        inrec[2] = inrec[2].split('|')
        inrec[4] = inrec[4].split('|')
        inrec[6] = inrec[6].split('|')
        inrec[10] = inrec[10].split('|')
        (yield dict(zip(GPA10FIELDS, inrec)))
[ "def", "_gpa10iterator", "(", "handle", ")", ":", "for", "inline", "in", "handle", ":", "if", "(", "inline", "[", "0", "]", "==", "'!'", ")", ":", "continue", "inrec", "=", "inline", ".", "rstrip", "(", "'\\n'", ")", ".", "split", "(", "' DCTB '", ")", "if", "(", "len", "(", "inrec", ")", "==", "1", ")", ":", "continue", "inrec", "[", "2", "]", "=", "inrec", "[", "2", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "4", "]", "=", "inrec", "[", "4", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "6", "]", "=", "inrec", "[", "6", "]", ".", "split", "(", "'|'", ")", "inrec", "[", "10", "]", "=", "inrec", "[", "10", "]", ".", "split", "(", "'|'", ")", "(", "yield", "dict", "(", "zip", "(", "GPA10FIELDS", ",", "inrec", ")", ")", ")" ]
read gpa 1 .
train
false
9,623
def he_normal(shape, name=None, dim_ordering='th'):
    (fan_in, fan_out) = get_fans(shape, dim_ordering=dim_ordering)
    s = np.sqrt((2.0 / fan_in))
    return normal(shape, s, name=name)
[ "def", "he_normal", "(", "shape", ",", "name", "=", "None", ",", "dim_ordering", "=", "'th'", ")", ":", "(", "fan_in", ",", "fan_out", ")", "=", "get_fans", "(", "shape", ",", "dim_ordering", "=", "dim_ordering", ")", "s", "=", "np", ".", "sqrt", "(", "(", "2.0", "/", "fan_in", ")", ")", "return", "normal", "(", "shape", ",", "s", ",", "name", "=", "name", ")" ]
he normal variance scaling initializer .
train
false
9,624
def replace_slices(index):
    if isinstance(index, hashable_list):
        return list(index)
    elif isinstance(index, _slice):
        return index.as_slice()
    elif isinstance(index, tuple):
        return tuple(map(replace_slices, index))
    return index
[ "def", "replace_slices", "(", "index", ")", ":", "if", "isinstance", "(", "index", ",", "hashable_list", ")", ":", "return", "list", "(", "index", ")", "elif", "isinstance", "(", "index", ",", "_slice", ")", ":", "return", "index", ".", "as_slice", "(", ")", "elif", "isinstance", "(", "index", ",", "tuple", ")", ":", "return", "tuple", "(", "map", "(", "replace_slices", ",", "index", ")", ")", "return", "index" ]
takes input from slice expression and returns either a list .
train
false
9,625
def get_osc_stats(directory, fs, ost):
    out = []
    for fspath in os.listdir(directory):
        if ((fs in fspath) and (ost in fspath)):
            logging.debug(((((' opening file ' + str(directory)) + '/') + str(fspath)) + '/stats'))
            try:
                osc_statsfile = open(('%s/%s/stats' % (directory, fspath)))
            except IOError:
                osc_statsfile = []
            for line in osc_statsfile:
                item = re.split('\\s+', line.rstrip())
                out.append(item)
    return out
[ "def", "get_osc_stats", "(", "directory", ",", "fs", ",", "ost", ")", ":", "out", "=", "[", "]", "for", "fspath", "in", "os", ".", "listdir", "(", "directory", ")", ":", "if", "(", "(", "fs", "in", "fspath", ")", "and", "(", "ost", "in", "fspath", ")", ")", ":", "logging", ".", "debug", "(", "(", "(", "(", "(", "' opening file '", "+", "str", "(", "directory", ")", ")", "+", "'/'", ")", "+", "str", "(", "fspath", ")", ")", "+", "'/stats'", ")", ")", "try", ":", "osc_statsfile", "=", "open", "(", "(", "'%s/%s/stats'", "%", "(", "directory", ",", "fspath", ")", ")", ")", "except", "IOError", ":", "osc_statsfile", "=", "[", "]", "for", "line", "in", "osc_statsfile", ":", "item", "=", "re", ".", "split", "(", "'\\\\s+'", ",", "line", ".", "rstrip", "(", ")", ")", "out", ".", "append", "(", "item", ")", "return", "out" ]
read osc stats file .
train
false
9,626
def _get_patterns(installed_only=None):
    patterns = {}
    for element in __zypper__.nolock.xml.call('se', '-t', 'pattern').getElementsByTagName('solvable'):
        installed = (element.getAttribute('status') == 'installed')
        if ((installed_only and installed) or (not installed_only)):
            patterns[element.getAttribute('name')] = {'installed': installed, 'summary': element.getAttribute('summary')}
    return patterns
[ "def", "_get_patterns", "(", "installed_only", "=", "None", ")", ":", "patterns", "=", "{", "}", "for", "element", "in", "__zypper__", ".", "nolock", ".", "xml", ".", "call", "(", "'se'", ",", "'-t'", ",", "'pattern'", ")", ".", "getElementsByTagName", "(", "'solvable'", ")", ":", "installed", "=", "(", "element", ".", "getAttribute", "(", "'status'", ")", "==", "'installed'", ")", "if", "(", "(", "installed_only", "and", "installed", ")", "or", "(", "not", "installed_only", ")", ")", ":", "patterns", "[", "element", ".", "getAttribute", "(", "'name'", ")", "]", "=", "{", "'installed'", ":", "installed", ",", "'summary'", ":", "element", ".", "getAttribute", "(", "'summary'", ")", "}", "return", "patterns" ]
list all known patterns in repos .
train
true
9,628
@db.api_context_manager.writer
def _delete_request_spec(context, instance_uuid):
    context.session.query(api_models.RequestSpec).filter_by(instance_uuid=instance_uuid).delete()
[ "@", "db", ".", "api_context_manager", ".", "writer", "def", "_delete_request_spec", "(", "context", ",", "instance_uuid", ")", ":", "context", ".", "session", ".", "query", "(", "api_models", ".", "RequestSpec", ")", ".", "filter_by", "(", "instance_uuid", "=", "instance_uuid", ")", ".", "delete", "(", ")" ]
deletes a requestspec by the instance_uuid .
train
false
9,629
@pytest.mark.parametrize('text, deleted, rest', [
    ('test delete|foobar', 'delete', 'test |foobar'),
    ('test delete |foobar', 'delete ', 'test |foobar'),
    ('open -t github.com/foo/bar |', 'bar ', 'open -t github.com/foo/|'),
    ('open -t |github.com/foo/bar', 't ', 'open -|github.com/foo/bar'),
    fixme(('test del<ete>foobar', 'delete', 'test |foobar')),
    ('test del<ete >foobar', 'del', 'test |ete foobar'),
    ('open foo/bar.baz|', 'baz', 'open foo/bar.|')])
def test_rl_backward_kill_word(lineedit, bridge, text, deleted, rest):
    _validate_deletion(lineedit, bridge, bridge.rl_backward_kill_word, text, deleted, rest)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'text, deleted, rest'", ",", "[", "(", "'test delete|foobar'", ",", "'delete'", ",", "'test |foobar'", ")", ",", "(", "'test delete |foobar'", ",", "'delete '", ",", "'test |foobar'", ")", ",", "(", "'open -t github.com/foo/bar |'", ",", "'bar '", ",", "'open -t github.com/foo/|'", ")", ",", "(", "'open -t |github.com/foo/bar'", ",", "'t '", ",", "'open -|github.com/foo/bar'", ")", ",", "fixme", "(", "(", "'test del<ete>foobar'", ",", "'delete'", ",", "'test |foobar'", ")", ")", ",", "(", "'test del<ete >foobar'", ",", "'del'", ",", "'test |ete foobar'", ")", ",", "(", "'open foo/bar.baz|'", ",", "'baz'", ",", "'open foo/bar.|'", ")", "]", ")", "def", "test_rl_backward_kill_word", "(", "lineedit", ",", "bridge", ",", "text", ",", "deleted", ",", "rest", ")", ":", "_validate_deletion", "(", "lineedit", ",", "bridge", ",", "bridge", ".", "rl_backward_kill_word", ",", "text", ",", "deleted", ",", "rest", ")" ]
delete to word beginning and see if it comes back with yank .
train
false
9,631
def randprime(a, b):
    if (a >= b):
        return
    (a, b) = map(int, (a, b))
    n = random.randint((a - 1), b)
    p = nextprime(n)
    if (p >= b):
        p = prevprime(b)
    if (p < a):
        raise ValueError('no primes exist in the specified range')
    return p
[ "def", "randprime", "(", "a", ",", "b", ")", ":", "if", "(", "a", ">=", "b", ")", ":", "return", "(", "a", ",", "b", ")", "=", "map", "(", "int", ",", "(", "a", ",", "b", ")", ")", "n", "=", "random", ".", "randint", "(", "(", "a", "-", "1", ")", ",", "b", ")", "p", "=", "nextprime", "(", "n", ")", "if", "(", "p", ">=", "b", ")", ":", "p", "=", "prevprime", "(", "b", ")", "if", "(", "p", "<", "a", ")", ":", "raise", "ValueError", "(", "'no primes exist in the specified range'", ")", "return", "p" ]
return a random prime number in the range [a .
train
false
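A usage sketch, assuming the snippet is sympy's randprime (so nextprime and prevprime come from sympy.ntheory):

# randint(a - 1, b) plus the nextprime/prevprime clamping keeps the
# result inside [a, b)
p = randprime(10, 20)
assert p in (11, 13, 17, 19)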
9,632
def _dtype_to_stata_type(dtype, column):
    if (dtype.type == np.string_):
        return chr(dtype.itemsize)
    elif (dtype.type == np.object_):
        itemsize = max_len_string_array(_ensure_object(column.values))
        return chr(max(itemsize, 1))
    elif (dtype == np.float64):
        return chr(255)
    elif (dtype == np.float32):
        return chr(254)
    elif (dtype == np.int32):
        return chr(253)
    elif (dtype == np.int16):
        return chr(252)
    elif (dtype == np.int8):
        return chr(251)
    else:
        raise NotImplementedError(('Data type %s not supported.' % dtype))
[ "def", "_dtype_to_stata_type", "(", "dtype", ",", "column", ")", ":", "if", "(", "dtype", ".", "type", "==", "np", ".", "string_", ")", ":", "return", "chr", "(", "dtype", ".", "itemsize", ")", "elif", "(", "dtype", ".", "type", "==", "np", ".", "object_", ")", ":", "itemsize", "=", "max_len_string_array", "(", "_ensure_object", "(", "column", ".", "values", ")", ")", "return", "chr", "(", "max", "(", "itemsize", ",", "1", ")", ")", "elif", "(", "dtype", "==", "np", ".", "float64", ")", ":", "return", "chr", "(", "255", ")", "elif", "(", "dtype", "==", "np", ".", "float32", ")", ":", "return", "chr", "(", "254", ")", "elif", "(", "dtype", "==", "np", ".", "int32", ")", ":", "return", "chr", "(", "253", ")", "elif", "(", "dtype", "==", "np", ".", "int16", ")", ":", "return", "chr", "(", "252", ")", "elif", "(", "dtype", "==", "np", ".", "int8", ")", ":", "return", "chr", "(", "251", ")", "else", ":", "raise", "NotImplementedError", "(", "(", "'Data type %s not supported.'", "%", "dtype", ")", ")" ]
converts dtype types to stata types .
train
false
9,633
def intFromBytes(data, byteorder, signed=False):
    assert (byteorder == 'big')
    assert (not signed)
    if ((len(data) % 4) != 0):
        data = (('\x00' * (4 - (len(data) % 4))) + data)
    result = 0
    while (len(data) > 0):
        (digit,) = struct.unpack('>I', data[:4])
        result = ((result << 32) + digit)
        data = data[4:]
    return result
[ "def", "intFromBytes", "(", "data", ",", "byteorder", ",", "signed", "=", "False", ")", ":", "assert", "(", "byteorder", "==", "'big'", ")", "assert", "(", "not", "signed", ")", "if", "(", "(", "len", "(", "data", ")", "%", "4", ")", "!=", "0", ")", ":", "data", "=", "(", "(", "'\\x00'", "*", "(", "4", "-", "(", "len", "(", "data", ")", "%", "4", ")", ")", ")", "+", "data", ")", "result", "=", "0", "while", "(", "len", "(", "data", ")", ">", "0", ")", ":", "(", "digit", ",", ")", "=", "struct", ".", "unpack", "(", "'>I'", ",", "data", "[", ":", "4", "]", ")", "result", "=", "(", "(", "result", "<<", "32", ")", "+", "digit", ")", "data", "=", "data", "[", "4", ":", "]", "return", "result" ]
convert an integer in packed form to a python l{int} .
train
false
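A worked example of the padding-and-shift loop above (Python 2 byte strings, matching the era of the snippet):

# '\x01\x00' is left-padded to '\x00\x00\x01\x00' and unpacked as one
# big-endian 32-bit word, giving 256
assert intFromBytes('\x01\x00', 'big') == 256
# 8 bytes are consumed as two words: (1 << 32) + 2
assert intFromBytes('\x00\x00\x00\x01\x00\x00\x00\x02', 'big') == 4294967298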
9,635
def track_forum_event(request, event_name, course, obj, data, id_map=None):
    user = request.user
    data['id'] = obj.id
    commentable_id = data['commentable_id']
    team = get_team(commentable_id)
    if (team is not None):
        data.update(team_id=team.team_id)
    if (id_map is None):
        id_map = get_cached_discussion_id_map(course, [commentable_id], user)
    if (commentable_id in id_map):
        data['category_name'] = id_map[commentable_id]['title']
        data['category_id'] = commentable_id
    data['url'] = request.META.get('HTTP_REFERER', '')
    data['user_forums_roles'] = [role.name for role in user.roles.filter(course_id=course.id)]
    data['user_course_roles'] = [role.role for role in user.courseaccessrole_set.filter(course_id=course.id)]
    tracker.emit(event_name, data)
[ "def", "track_forum_event", "(", "request", ",", "event_name", ",", "course", ",", "obj", ",", "data", ",", "id_map", "=", "None", ")", ":", "user", "=", "request", ".", "user", "data", "[", "'id'", "]", "=", "obj", ".", "id", "commentable_id", "=", "data", "[", "'commentable_id'", "]", "team", "=", "get_team", "(", "commentable_id", ")", "if", "(", "team", "is", "not", "None", ")", ":", "data", ".", "update", "(", "team_id", "=", "team", ".", "team_id", ")", "if", "(", "id_map", "is", "None", ")", ":", "id_map", "=", "get_cached_discussion_id_map", "(", "course", ",", "[", "commentable_id", "]", ",", "user", ")", "if", "(", "commentable_id", "in", "id_map", ")", ":", "data", "[", "'category_name'", "]", "=", "id_map", "[", "commentable_id", "]", "[", "'title'", "]", "data", "[", "'category_id'", "]", "=", "commentable_id", "data", "[", "'url'", "]", "=", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ",", "''", ")", "data", "[", "'user_forums_roles'", "]", "=", "[", "role", ".", "name", "for", "role", "in", "user", ".", "roles", ".", "filter", "(", "course_id", "=", "course", ".", "id", ")", "]", "data", "[", "'user_course_roles'", "]", "=", "[", "role", ".", "role", "for", "role", "in", "user", ".", "courseaccessrole_set", ".", "filter", "(", "course_id", "=", "course", ".", "id", ")", "]", "tracker", ".", "emit", "(", "event_name", ",", "data", ")" ]
send out an analytics event when a forum event happens .
train
false
9,636
def _flatten_suite(test):
    if isinstance(test, (unittest.TestSuite, list, tuple)):
        tests = []
        for x in test:
            tests.extend(_flatten_suite(x))
        return tests
    else:
        return [test]
[ "def", "_flatten_suite", "(", "test", ")", ":", "if", "isinstance", "(", "test", ",", "(", "unittest", ".", "TestSuite", ",", "list", ",", "tuple", ")", ")", ":", "tests", "=", "[", "]", "for", "x", "in", "test", ":", "tests", ".", "extend", "(", "_flatten_suite", "(", "x", ")", ")", "return", "tests", "else", ":", "return", "[", "test", "]" ]
expand nested suite into list of test cases .
train
false
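A short sketch of the recursion on a nested unittest suite (class and test names here are illustrative only):

import unittest

class T(unittest.TestCase):
    def test_a(self): pass
    def test_b(self): pass

inner = unittest.TestSuite([T('test_a')])
outer = unittest.TestSuite([inner, T('test_b')])
# nested suites collapse into a flat list of two test cases
assert len(_flatten_suite(outer)) == 2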
9,637
def swappable_dependency(value):
    return SwappableTuple((value.split('.', 1)[0], '__first__'), value)
[ "def", "swappable_dependency", "(", "value", ")", ":", "return", "SwappableTuple", "(", "(", "value", ".", "split", "(", "'.'", ",", "1", ")", "[", "0", "]", ",", "'__first__'", ")", ",", "value", ")" ]
turns a setting value into a dependency .
train
false
9,638
def is_valid_id(id):
    parts = id.split(':')
    if (len(parts) == 3):
        (group, artifact, version) = parts
        if (group and artifact and version):
            return True
    return False
[ "def", "is_valid_id", "(", "id", ")", ":", "parts", "=", "id", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "parts", ")", "==", "3", ")", ":", "(", "group", ",", "artifact", ",", "version", ")", "=", "parts", "if", "(", "group", "and", "artifact", "and", "version", ")", ":", "return", "True", "return", "False" ]
check if id is valid .
train
false
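The id format checked above is Maven-style group:artifact:version; a sketch:

assert is_valid_id('org.apache:commons-lang:2.6') is True
assert is_valid_id('org.apache:commons-lang') is False   # only two parts
assert is_valid_id('org.apache::2.6') is False           # empty artifact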
9,639
def template_render(template, context=None, request=None):
    if isinstance(template, Template):
        if request:
            context = RequestContext(request, context)
        else:
            context = Context(context)
        return template.render(context)
    else:
        return template.render(context, request=request)
[ "def", "template_render", "(", "template", ",", "context", "=", "None", ",", "request", "=", "None", ")", ":", "if", "isinstance", "(", "template", ",", "Template", ")", ":", "if", "request", ":", "context", "=", "RequestContext", "(", "request", ",", "context", ")", "else", ":", "context", "=", "Context", "(", "context", ")", "return", "template", ".", "render", "(", "context", ")", "else", ":", "return", "template", ".", "render", "(", "context", ",", "request", "=", "request", ")" ]
passing context or requestcontext to template .
train
true
9,640
def create_cloudformation_template_main(argv, basepath, toplevel):
    parser = create_cloudformation_template_options()
    options = parser.parse_args(argv)
    print flocker_docker_template(cluster_size=options.cluster_size, client_ami_map=options.client_ami_map, node_ami_map=options.node_ami_map)
[ "def", "create_cloudformation_template_main", "(", "argv", ",", "basepath", ",", "toplevel", ")", ":", "parser", "=", "create_cloudformation_template_options", "(", ")", "options", "=", "parser", ".", "parse_args", "(", "argv", ")", "print", "flocker_docker_template", "(", "cluster_size", "=", "options", ".", "cluster_size", ",", "client_ami_map", "=", "options", ".", "client_ami_map", ",", "node_ami_map", "=", "options", ".", "node_ami_map", ")" ]
the entry point for admin/create-cloudformation-template .
train
false
9,641
def SimpleFastaParser(handle):
    while True:
        line = handle.readline()
        if (line == ''):
            return
        if (line[0] == '>'):
            break
    while True:
        if (line[0] != '>'):
            raise ValueError("Records in Fasta files should start with '>' character")
        title = line[1:].rstrip()
        lines = []
        line = handle.readline()
        while True:
            if (not line):
                break
            if (line[0] == '>'):
                break
            lines.append(line.rstrip())
            line = handle.readline()
        (yield (title, ''.join(lines).replace(' ', '').replace('\r', '')))
        if (not line):
            return
    assert False, 'Should not reach this line'
[ "def", "SimpleFastaParser", "(", "handle", ")", ":", "while", "True", ":", "line", "=", "handle", ".", "readline", "(", ")", "if", "(", "line", "==", "''", ")", ":", "return", "if", "(", "line", "[", "0", "]", "==", "'>'", ")", ":", "break", "while", "True", ":", "if", "(", "line", "[", "0", "]", "!=", "'>'", ")", ":", "raise", "ValueError", "(", "\"Records in Fasta files should start with '>' character\"", ")", "title", "=", "line", "[", "1", ":", "]", ".", "rstrip", "(", ")", "lines", "=", "[", "]", "line", "=", "handle", ".", "readline", "(", ")", "while", "True", ":", "if", "(", "not", "line", ")", ":", "break", "if", "(", "line", "[", "0", "]", "==", "'>'", ")", ":", "break", "lines", ".", "append", "(", "line", ".", "rstrip", "(", ")", ")", "line", "=", "handle", ".", "readline", "(", ")", "(", "yield", "(", "title", ",", "''", ".", "join", "(", "lines", ")", ".", "replace", "(", "' '", ",", "''", ")", ".", "replace", "(", "'\\r'", ",", "''", ")", ")", ")", "if", "(", "not", "line", ")", ":", "return", "assert", "False", ",", "'Should not reach this line'" ]
generator function to iterate over fasta records .
train
false
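A usage sketch with an in-memory handle (StringIO matches the Python 2 era of the snippet; use io.StringIO on Python 3):

from StringIO import StringIO

handle = StringIO('>seq1 first record\nACGT\nacgt\n>seq2\nTTTT\n')
for title, sequence in SimpleFastaParser(handle):
    print title, sequence
# seq1 first record ACGTacgt
# seq2 TTTT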
9,644
def get_session_credentials():
    return copy.deepcopy(_session['credentials'])
[ "def", "get_session_credentials", "(", ")", ":", "return", "copy", ".", "deepcopy", "(", "_session", "[", "'credentials'", "]", ")" ]
returns the credentials that will be sent to plotly .
train
false
9,645
def ElementWithReading(tag, text, reading=False):
    if (text is None):
        readingText = ''
    elif isinstance(text, basestring):
        readingText = text
    else:
        readingText = text[1]
        text = text[0]
    if (not reading):
        readingText = ''
    return ElementWithText(tag, text, reading=readingText)
[ "def", "ElementWithReading", "(", "tag", ",", "text", ",", "reading", "=", "False", ")", ":", "if", "(", "text", "is", "None", ")", ":", "readingText", "=", "''", "elif", "isinstance", "(", "text", ",", "basestring", ")", ":", "readingText", "=", "text", "else", ":", "readingText", "=", "text", "[", "1", "]", "text", "=", "text", "[", "0", "]", "if", "(", "not", "reading", ")", ":", "readingText", "=", "''", "return", "ElementWithText", "(", "tag", ",", "text", ",", "reading", "=", "readingText", ")" ]
a helper function that creates reading attributes .
train
false
9,646
def test_hermite(Chart, datas):
    chart = Chart(interpolate='hermite')
    chart = make_data(chart, datas)
    assert chart.render()
[ "def", "test_hermite", "(", "Chart", ",", "datas", ")", ":", "chart", "=", "Chart", "(", "interpolate", "=", "'hermite'", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "assert", "chart", ".", "render", "(", ")" ]
test hermite interpolation .
train
false
9,648
def covent(px, py, pxpy, logbase=2):
    if ((not _isproperdist(px)) or (not _isproperdist(py))):
        raise ValueError('px or py is not a proper probability distribution')
    if ((pxpy != None) and (not _isproperdist(pxpy))):
        raise ValueError('pxpy is not a proper joint distribtion')
    if (pxpy == None):
        pxpy = np.outer(py, px)
    return (condent(px, py, pxpy, logbase=logbase) + condent(py, px, pxpy, logbase=logbase))
[ "def", "covent", "(", "px", ",", "py", ",", "pxpy", ",", "logbase", "=", "2", ")", ":", "if", "(", "(", "not", "_isproperdist", "(", "px", ")", ")", "or", "(", "not", "_isproperdist", "(", "py", ")", ")", ")", ":", "raise", "ValueError", "(", "'px or py is not a proper probability distribution'", ")", "if", "(", "(", "pxpy", "!=", "None", ")", "and", "(", "not", "_isproperdist", "(", "pxpy", ")", ")", ")", ":", "raise", "ValueError", "(", "'pxpy is not a proper joint distribtion'", ")", "if", "(", "pxpy", "==", "None", ")", ":", "pxpy", "=", "np", ".", "outer", "(", "py", ",", "px", ")", "return", "(", "condent", "(", "px", ",", "py", ",", "pxpy", ",", "logbase", "=", "logbase", ")", "+", "condent", "(", "py", ",", "px", ",", "pxpy", ",", "logbase", "=", "logbase", ")", ")" ]
an information theoretic covariance measure .
train
false
9,649
def _expected_device(requested_device):
    prefix = '/dev/sd'
    if requested_device.startswith(prefix):
        return FilePath('/dev').child(('xvd' + requested_device[len(prefix):]))
    raise ValueError('Unsupported requested device {!r}'.format(requested_device))
[ "def", "_expected_device", "(", "requested_device", ")", ":", "prefix", "=", "'/dev/sd'", "if", "requested_device", ".", "startswith", "(", "prefix", ")", ":", "return", "FilePath", "(", "'/dev'", ")", ".", "child", "(", "(", "'xvd'", "+", "requested_device", "[", "len", "(", "prefix", ")", ":", "]", ")", ")", "raise", "ValueError", "(", "'Unsupported requested device {!r}'", ".", "format", "(", "requested_device", ")", ")" ]
given a device we requested from aws ebs .
train
false
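A sketch of the sdX-to-xvdX mapping, assuming FilePath is twisted.python.filepath.FilePath (as in flocker):

from twisted.python.filepath import FilePath

assert _expected_device('/dev/sdf') == FilePath('/dev/xvdf')
# anything outside /dev/sd* is rejected:
# _expected_device('/dev/hda')  -> ValueError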
9,650
def atomic_rename(oldpath, newpath):
    if iswindows:
        import win32file
        for i in xrange(10):
            try:
                win32file.MoveFileEx(oldpath, newpath, (win32file.MOVEFILE_REPLACE_EXISTING | win32file.MOVEFILE_WRITE_THROUGH))
                break
            except Exception:
                if (i > 8):
                    raise
                time.sleep(1)
    else:
        os.rename(oldpath, newpath)
[ "def", "atomic_rename", "(", "oldpath", ",", "newpath", ")", ":", "if", "iswindows", ":", "import", "win32file", "for", "i", "in", "xrange", "(", "10", ")", ":", "try", ":", "win32file", ".", "MoveFileEx", "(", "oldpath", ",", "newpath", ",", "(", "win32file", ".", "MOVEFILE_REPLACE_EXISTING", "|", "win32file", ".", "MOVEFILE_WRITE_THROUGH", ")", ")", "break", "except", "Exception", ":", "if", "(", "i", ">", "8", ")", ":", "raise", "time", ".", "sleep", "(", "1", ")", "else", ":", "os", ".", "rename", "(", "oldpath", ",", "newpath", ")" ]
replace the file newpath with the file oldpath .
train
false
9,651
def test_hash_vs_eq():
    a = (1 + S.Pi)
    ha = hash(a)
    a.is_positive
    assert (a.is_positive is True)
    assert (ha == hash(a))
    b = a.expand(trig=True)
    hb = hash(b)
    assert (a == b)
    assert (ha == hb)
[ "def", "test_hash_vs_eq", "(", ")", ":", "a", "=", "(", "1", "+", "S", ".", "Pi", ")", "ha", "=", "hash", "(", "a", ")", "a", ".", "is_positive", "assert", "(", "a", ".", "is_positive", "is", "True", ")", "assert", "(", "ha", "==", "hash", "(", "a", ")", ")", "b", "=", "a", ".", "expand", "(", "trig", "=", "True", ")", "hb", "=", "hash", "(", "b", ")", "assert", "(", "a", "==", "b", ")", "assert", "(", "ha", "==", "hb", ")" ]
catch: different hash for equal objects .
train
false
9,652
def fetch(resources=None, languages=None):
    locale_dirs = _get_locale_dirs(resources)
    errors = []
    for (name, dir_) in locale_dirs:
        if (languages is None):
            call('tx pull -r {res} -f --minimum-perc=5'.format(res=_tx_resource_for_name(name)), shell=True)
            languages = sorted([d for d in os.listdir(dir_) if ((not d.startswith('_')) and os.path.isdir(os.path.join(dir_, d)) and (d != 'en'))])
        else:
            for lang in languages:
                call('tx pull -r {res} -f -l {lang}'.format(res=_tx_resource_for_name(name), lang=lang), shell=True)
        for lang in languages:
            po_path = '{path}/{lang}/LC_MESSAGES/django.po'.format(path=dir_, lang=lang)
            if (not os.path.exists(po_path)):
                print 'No {lang} translation for resource {res}'.format(lang=lang, res=name)
                continue
            call('msgcat -o {0} {0}'.format(po_path), shell=True)
            res = call('msgfmt -c -o {0}.mo {1}'.format(po_path[:(-3)], po_path), shell=True)
            if (res != 0):
                errors.append((name, lang))
    if errors:
        print '\nWARNING: Errors have occurred in following cases:'
        for (resource, lang) in errors:
            print ' DCTB Resource {res} for language {lang}'.format(res=resource, lang=lang)
        exit(1)
[ "def", "fetch", "(", "resources", "=", "None", ",", "languages", "=", "None", ")", ":", "locale_dirs", "=", "_get_locale_dirs", "(", "resources", ")", "errors", "=", "[", "]", "for", "(", "name", ",", "dir_", ")", "in", "locale_dirs", ":", "if", "(", "languages", "is", "None", ")", ":", "call", "(", "'tx pull -r {res} -f --minimum-perc=5'", ".", "format", "(", "res", "=", "_tx_resource_for_name", "(", "name", ")", ")", ",", "shell", "=", "True", ")", "languages", "=", "sorted", "(", "[", "d", "for", "d", "in", "os", ".", "listdir", "(", "dir_", ")", "if", "(", "(", "not", "d", ".", "startswith", "(", "'_'", ")", ")", "and", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "dir_", ",", "d", ")", ")", "and", "(", "d", "!=", "'en'", ")", ")", "]", ")", "else", ":", "for", "lang", "in", "languages", ":", "call", "(", "'tx pull -r {res} -f -l {lang}'", ".", "format", "(", "res", "=", "_tx_resource_for_name", "(", "name", ")", ",", "lang", "=", "lang", ")", ",", "shell", "=", "True", ")", "for", "lang", "in", "languages", ":", "po_path", "=", "'{path}/{lang}/LC_MESSAGES/django.po'", ".", "format", "(", "path", "=", "dir_", ",", "lang", "=", "lang", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "po_path", ")", ")", ":", "print", "'No {lang} translation for resource {res}'", ".", "format", "(", "lang", "=", "lang", ",", "res", "=", "name", ")", "continue", "call", "(", "'msgcat -o {0} {0}'", ".", "format", "(", "po_path", ")", ",", "shell", "=", "True", ")", "res", "=", "call", "(", "'msgfmt -c -o {0}.mo {1}'", ".", "format", "(", "po_path", "[", ":", "(", "-", "3", ")", "]", ",", "po_path", ")", ",", "shell", "=", "True", ")", "if", "(", "res", "!=", "0", ")", ":", "errors", ".", "append", "(", "(", "name", ",", "lang", ")", ")", "if", "errors", ":", "print", "'\\nWARNING: Errors have occurred in following cases:'", "for", "(", "resource", ",", "lang", ")", "in", "errors", ":", "print", "' DCTB Resource {res} for language {lang}'", ".", "format", "(", "res", "=", "resource", ",", "lang", "=", "lang", ")", "exit", "(", "1", ")" ]
fetches remote packages cli example: .
train
false
9,653
def set_default_koji_tag(tag):
    global DEFAULT_KOJI_TAG
    DEFAULT_KOJI_TAG = tag
[ "def", "set_default_koji_tag", "(", "tag", ")", ":", "global", "DEFAULT_KOJI_TAG", "DEFAULT_KOJI_TAG", "=", "tag" ]
sets the default tag that will be used .
train
false
9,654
def get_master_api_key():
    for key in ['GALAXY_CONFIG_MASTER_API_KEY', 'GALAXY_CONFIG_OVERRIDE_MASTER_API_KEY']:
        value = os.environ.get(key, None)
        if value:
            return value
    return DEFAULT_GALAXY_MASTER_API_KEY
[ "def", "get_master_api_key", "(", ")", ":", "for", "key", "in", "[", "'GALAXY_CONFIG_MASTER_API_KEY'", ",", "'GALAXY_CONFIG_OVERRIDE_MASTER_API_KEY'", "]", ":", "value", "=", "os", ".", "environ", ".", "get", "(", "key", ",", "None", ")", "if", "value", ":", "return", "value", "return", "DEFAULT_GALAXY_MASTER_API_KEY" ]
test master api key to use for functional test .
train
false
9,655
def seq_concat_item(seq, item):
    return ((seq + (item,)) if isinstance(seq, tuple) else (seq + [item]))
[ "def", "seq_concat_item", "(", "seq", ",", "item", ")", ":", "return", "(", "(", "seq", "+", "(", "item", ",", ")", ")", "if", "isinstance", "(", "seq", ",", "tuple", ")", "else", "(", "seq", "+", "[", "item", "]", ")", ")" ]
return copy of sequence seq with item added .
train
false
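The conditional above preserves the input's type; for instance:

assert seq_concat_item((1, 2), 3) == (1, 2, 3)   # tuples stay tuples
assert seq_concat_item([1, 2], 3) == [1, 2, 3]   # lists stay lists (a new list is returned)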
9,656
def unescape(text):
    rv = ''
    i = 0
    while (i < len(text)):
        if (((i + 1) < len(text)) and (text[i] == '\\')):
            rv += text[(i + 1)]
            i += 1
        else:
            rv += text[i]
        i += 1
    return rv
[ "def", "unescape", "(", "text", ")", ":", "rv", "=", "''", "i", "=", "0", "while", "(", "i", "<", "len", "(", "text", ")", ")", ":", "if", "(", "(", "(", "i", "+", "1", ")", "<", "len", "(", "text", ")", ")", "and", "(", "text", "[", "i", "]", "==", "'\\\\'", ")", ")", ":", "rv", "+=", "text", "[", "(", "i", "+", "1", ")", "]", "i", "+=", "1", "else", ":", "rv", "+=", "text", "[", "i", "]", "i", "+=", "1", "return", "rv" ]
removes html or xml character references and entities from a text string .
train
false
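A worked example of the escape-stripping loop above:

assert unescape('hello\\, world') == 'hello, world'   # backslash removed, next char kept
assert unescape('a\\\\b') == 'a\\b'                   # doubled backslash collapses to one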
9,658
def queue_dispatcher(force=False):
    __lock_.acquire()
    try:
        QUEUE.clear()
    finally:
        __lock_.release()
[ "def", "queue_dispatcher", "(", "force", "=", "False", ")", ":", "__lock_", ".", "acquire", "(", ")", "try", ":", "QUEUE", ".", "clear", "(", ")", "finally", ":", "__lock_", ".", "release", "(", ")" ]
default implementation of queue dispatcher .
train
false
9,659
def getLogger(name):
    return logging.getLogger(name)
[ "def", "getLogger", "(", "name", ")", ":", "return", "logging", ".", "getLogger", "(", "name", ")" ]
return a logger with the specified name .
train
false
9,662
def readlines(filename, encoding='utf-8'):
    (text, encoding) = read(filename, encoding)
    return (text.split(os.linesep), encoding)
[ "def", "readlines", "(", "filename", ",", "encoding", "=", "'utf-8'", ")", ":", "(", "text", ",", "encoding", ")", "=", "read", "(", "filename", ",", "encoding", ")", "return", "(", "text", ".", "split", "(", "os", ".", "linesep", ")", ",", "encoding", ")" ]
record the current file being tested .
train
true
9,663
@api_wrapper
def update_export(module, export, filesystem, system):
    changed = False
    name = module.params['name']
    client_list = module.params['client_list']
    if (export is None):
        if (not module.check_mode):
            export = system.exports.create(export_path=name, filesystem=filesystem)
            if client_list:
                export.update_permissions(client_list)
        changed = True
    elif client_list:
        if (set(map(transform, unmunchify(export.get_permissions()))) != set(map(transform, client_list))):
            if (not module.check_mode):
                export.update_permissions(client_list)
            changed = True
    module.exit_json(changed=changed)
[ "@", "api_wrapper", "def", "update_export", "(", "module", ",", "export", ",", "filesystem", ",", "system", ")", ":", "changed", "=", "False", "name", "=", "module", ".", "params", "[", "'name'", "]", "client_list", "=", "module", ".", "params", "[", "'client_list'", "]", "if", "(", "export", "is", "None", ")", ":", "if", "(", "not", "module", ".", "check_mode", ")", ":", "export", "=", "system", ".", "exports", ".", "create", "(", "export_path", "=", "name", ",", "filesystem", "=", "filesystem", ")", "if", "client_list", ":", "export", ".", "update_permissions", "(", "client_list", ")", "changed", "=", "True", "elif", "client_list", ":", "if", "(", "set", "(", "map", "(", "transform", ",", "unmunchify", "(", "export", ".", "get_permissions", "(", ")", ")", ")", ")", "!=", "set", "(", "map", "(", "transform", ",", "client_list", ")", ")", ")", ":", "if", "(", "not", "module", ".", "check_mode", ")", ":", "export", ".", "update_permissions", "(", "client_list", ")", "changed", "=", "True", "module", ".", "exit_json", "(", "changed", "=", "changed", ")" ]
create new filesystem or update existing one .
train
false
9,664
def inplace_swap_row(X, m, n):
    if isinstance(X, sp.csc_matrix):
        return inplace_swap_row_csc(X, m, n)
    elif isinstance(X, sp.csr_matrix):
        return inplace_swap_row_csr(X, m, n)
    else:
        _raise_typeerror(X)
[ "def", "inplace_swap_row", "(", "X", ",", "m", ",", "n", ")", ":", "if", "isinstance", "(", "X", ",", "sp", ".", "csc_matrix", ")", ":", "return", "inplace_swap_row_csc", "(", "X", ",", "m", ",", "n", ")", "elif", "isinstance", "(", "X", ",", "sp", ".", "csr_matrix", ")", ":", "return", "inplace_swap_row_csr", "(", "X", ",", "m", ",", "n", ")", "else", ":", "_raise_typeerror", "(", "X", ")" ]
swaps two rows of a csc/csr matrix in-place .
train
false
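A usage sketch; this dispatcher appears to match scikit-learn's sklearn.utils.sparsefuncs.inplace_swap_row, assuming that import:

import numpy as np
import scipy.sparse as sp
from sklearn.utils.sparsefuncs import inplace_swap_row

X = sp.csr_matrix(np.array([[1, 0], [0, 2]]))
inplace_swap_row(X, 0, 1)   # mutates X in place; no copy is made
assert (X.toarray() == np.array([[0, 2], [1, 0]])).all()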
9,666
def destroy_sink(sink):
    global sinks
    del sinks[sink.sink_id]
    del sink
[ "def", "destroy_sink", "(", "sink", ")", ":", "global", "sinks", "del", "sinks", "[", "sink", ".", "sink_id", "]", "del", "sink" ]
free a sink object by eliminating the reference from the global map .
train
false
9,668
def populate_filter_properties(filter_properties, host_state):
    if isinstance(host_state, dict):
        host = host_state['host']
        nodename = host_state['nodename']
        limits = host_state['limits']
    else:
        host = host_state.host
        nodename = host_state.nodename
        limits = host_state.limits
    _add_retry_host(filter_properties, host, nodename)
    if (not filter_properties.get('force_hosts')):
        filter_properties['limits'] = limits
[ "def", "populate_filter_properties", "(", "filter_properties", ",", "host_state", ")", ":", "if", "isinstance", "(", "host_state", ",", "dict", ")", ":", "host", "=", "host_state", "[", "'host'", "]", "nodename", "=", "host_state", "[", "'nodename'", "]", "limits", "=", "host_state", "[", "'limits'", "]", "else", ":", "host", "=", "host_state", ".", "host", "nodename", "=", "host_state", ".", "nodename", "limits", "=", "host_state", ".", "limits", "_add_retry_host", "(", "filter_properties", ",", "host", ",", "nodename", ")", "if", "(", "not", "filter_properties", ".", "get", "(", "'force_hosts'", ")", ")", ":", "filter_properties", "[", "'limits'", "]", "=", "limits" ]
add additional information to the filter properties after a node has been selected by the scheduling process .
train
false
9,671
@task
def update_mdn(ctx, tag):
    pre_update(tag)
    update()
[ "@", "task", "def", "update_mdn", "(", "ctx", ",", "tag", ")", ":", "pre_update", "(", "tag", ")", "update", "(", ")" ]
do typical mdn update .
train
false
9,672
def group_versions(versions):
    return list((list(releases) for (_, releases) in itertools.groupby(versions, operator.attrgetter(u'major_minor'))))
[ "def", "group_versions", "(", "versions", ")", ":", "return", "list", "(", "(", "list", "(", "releases", ")", "for", "(", "_", ",", "releases", ")", "in", "itertools", ".", "groupby", "(", "versions", ",", "operator", ".", "attrgetter", "(", "u'major_minor'", ")", ")", ")", ")" ]
group versions by major .
train
false
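itertools.groupby only merges adjacent items, so the input must already be ordered by major_minor; a sketch with a hypothetical stand-in for the real version type:

import itertools, operator
from collections import namedtuple

Version = namedtuple('Version', ['major_minor', 'micro'])  # illustrative only
vs = [Version(u'1.0', 0), Version(u'1.0', 1), Version(u'1.1', 0)]
# consecutive 1.0 releases group together; 1.1 starts a new group
assert group_versions(vs) == [[vs[0], vs[1]], [vs[2]]]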
9,673
def save_workflow(trans, workflow, workflow_dict=None):
    stored = trans.model.StoredWorkflow()
    stored.name = workflow.name
    workflow.stored_workflow = stored
    stored.latest_workflow = workflow
    stored.user = trans.user
    if (workflow_dict and workflow_dict.get('annotation', '')):
        annotation = sanitize_html(workflow_dict['annotation'], 'utf-8', 'text/html')
        new_annotation = trans.model.StoredWorkflowAnnotationAssociation()
        new_annotation.annotation = annotation
        new_annotation.user = trans.user
        stored.annotations.append(new_annotation)
    trans.sa_session.add(stored)
    trans.sa_session.flush()
    if (trans.user.stored_workflow_menu_entries is None):
        trans.user.stored_workflow_menu_entries = []
    menuEntry = trans.model.StoredWorkflowMenuEntry()
    menuEntry.stored_workflow = stored
    trans.user.stored_workflow_menu_entries.append(menuEntry)
    trans.sa_session.flush()
    return stored
[ "def", "save_workflow", "(", "trans", ",", "workflow", ",", "workflow_dict", "=", "None", ")", ":", "stored", "=", "trans", ".", "model", ".", "StoredWorkflow", "(", ")", "stored", ".", "name", "=", "workflow", ".", "name", "workflow", ".", "stored_workflow", "=", "stored", "stored", ".", "latest_workflow", "=", "workflow", "stored", ".", "user", "=", "trans", ".", "user", "if", "(", "workflow_dict", "and", "workflow_dict", ".", "get", "(", "'annotation'", ",", "''", ")", ")", ":", "annotation", "=", "sanitize_html", "(", "workflow_dict", "[", "'annotation'", "]", ",", "'utf-8'", ",", "'text/html'", ")", "new_annotation", "=", "trans", ".", "model", ".", "StoredWorkflowAnnotationAssociation", "(", ")", "new_annotation", ".", "annotation", "=", "annotation", "new_annotation", ".", "user", "=", "trans", ".", "user", "stored", ".", "annotations", ".", "append", "(", "new_annotation", ")", "trans", ".", "sa_session", ".", "add", "(", "stored", ")", "trans", ".", "sa_session", ".", "flush", "(", ")", "if", "(", "trans", ".", "user", ".", "stored_workflow_menu_entries", "is", "None", ")", ":", "trans", ".", "user", ".", "stored_workflow_menu_entries", "=", "[", "]", "menuEntry", "=", "trans", ".", "model", ".", "StoredWorkflowMenuEntry", "(", ")", "menuEntry", ".", "stored_workflow", "=", "stored", "trans", ".", "user", ".", "stored_workflow_menu_entries", ".", "append", "(", "menuEntry", ")", "trans", ".", "sa_session", ".", "flush", "(", ")", "return", "stored" ]
use the received in-memory workflow object for saving to the galaxy database .
train
false
9,674
def convert_from_bcd(bcd):
    (place, decimal) = (1, 0)
    while (bcd > 0):
        nibble = (bcd & 15)
        decimal += (nibble * place)
        bcd >>= 4
        place *= 10
    return decimal
[ "def", "convert_from_bcd", "(", "bcd", ")", ":", "(", "place", ",", "decimal", ")", "=", "(", "1", ",", "0", ")", "while", "(", "bcd", ">", "0", ")", ":", "nibble", "=", "(", "bcd", "&", "15", ")", "decimal", "+=", "(", "nibble", "*", "place", ")", "bcd", ">>=", "4", "place", "*=", "10", "return", "decimal" ]
converts a bcd value to a decimal value .
train
false
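A worked example: each hex nibble of a BCD value is one decimal digit, so 0x42 decodes to 42:

assert convert_from_bcd(0x42) == 42      # nibbles 4 and 2
assert convert_from_bcd(0x1234) == 1234  # four nibbles, four digits
assert convert_from_bcd(0) == 0          # loop body never runs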
9,675
def get_credential_sets():
    global _credential_set2
    global _credential_set1
    if (_credential_set1 is None):
        _credential_set1 = CredentialSet.create()
        _credential_set2 = CredentialSet.create()
    return (_credential_set1, _credential_set2)
[ "def", "get_credential_sets", "(", ")", ":", "global", "_credential_set2", "global", "_credential_set1", "if", "(", "_credential_set1", "is", "None", ")", ":", "_credential_set1", "=", "CredentialSet", ".", "create", "(", ")", "_credential_set2", "=", "CredentialSet", ".", "create", "(", ")", "return", "(", "_credential_set1", ",", "_credential_set2", ")" ]
get a pair of cached sets of ca-specific credentials .
train
false
9,676
def is_metatype(rdtype):
    if (((rdtype >= TKEY) and (rdtype <= ANY)) or _metatypes.has_key(rdtype)):
        return True
    return False
[ "def", "is_metatype", "(", "rdtype", ")", ":", "if", "(", "(", "(", "rdtype", ">=", "TKEY", ")", "and", "(", "rdtype", "<=", "ANY", ")", ")", "or", "_metatypes", ".", "has_key", "(", "rdtype", ")", ")", ":", "return", "True", "return", "False" ]
true if the type is a metatype .
train
true
9,677
def hdepth(tag):
    if (not _heading_re.search(tag.name)):
        raise TaskError(u"Can't compute heading depth of non-heading {0}".format(tag))
    depth = int(tag.name[1], 10)
    cursor = tag
    while cursor:
        if (cursor.name == u'section'):
            depth += 1
        cursor = cursor.parent
    return depth
[ "def", "hdepth", "(", "tag", ")", ":", "if", "(", "not", "_heading_re", ".", "search", "(", "tag", ".", "name", ")", ")", ":", "raise", "TaskError", "(", "u\"Can't compute heading depth of non-heading {0}\"", ".", "format", "(", "tag", ")", ")", "depth", "=", "int", "(", "tag", ".", "name", "[", "1", "]", ",", "10", ")", "cursor", "=", "tag", "while", "cursor", ":", "if", "(", "cursor", ".", "name", "==", "u'section'", ")", ":", "depth", "+=", "1", "cursor", "=", "cursor", ".", "parent", "return", "depth" ]
compute an h tags "outline depth" .
train
true
9,678
def make_auth_cookie(app, global_conf, cookie_name='PASTE_AUTH_COOKIE', scanlist=('REMOTE_USER', 'REMOTE_SESSION'), secret=None, timeout=30, maxlen=4096):
    if isinstance(scanlist, six.string_types):
        scanlist = scanlist.split()
    if ((secret is None) and global_conf.get('secret')):
        secret = global_conf['secret']
    try:
        timeout = int(timeout)
    except ValueError:
        raise ValueError(('Bad value for timeout (must be int): %r' % timeout))
    try:
        maxlen = int(maxlen)
    except ValueError:
        raise ValueError(('Bad value for maxlen (must be int): %r' % maxlen))
    return AuthCookieHandler(app, cookie_name=cookie_name, scanlist=scanlist, secret=secret, timeout=timeout, maxlen=maxlen)
[ "def", "make_auth_cookie", "(", "app", ",", "global_conf", ",", "cookie_name", "=", "'PASTE_AUTH_COOKIE'", ",", "scanlist", "=", "(", "'REMOTE_USER'", ",", "'REMOTE_SESSION'", ")", ",", "secret", "=", "None", ",", "timeout", "=", "30", ",", "maxlen", "=", "4096", ")", ":", "if", "isinstance", "(", "scanlist", ",", "six", ".", "string_types", ")", ":", "scanlist", "=", "scanlist", ".", "split", "(", ")", "if", "(", "(", "secret", "is", "None", ")", "and", "global_conf", ".", "get", "(", "'secret'", ")", ")", ":", "secret", "=", "global_conf", "[", "'secret'", "]", "try", ":", "timeout", "=", "int", "(", "timeout", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "(", "'Bad value for timeout (must be int): %r'", "%", "timeout", ")", ")", "try", ":", "maxlen", "=", "int", "(", "maxlen", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "(", "'Bad value for maxlen (must be int): %r'", "%", "maxlen", ")", ")", "return", "AuthCookieHandler", "(", "app", ",", "cookie_name", "=", "cookie_name", ",", "scanlist", "=", "scanlist", ",", "secret", "=", "secret", ",", "timeout", "=", "timeout", ",", "maxlen", "=", "maxlen", ")" ]
this middleware uses cookies to stash-away a previously authenticated user so that re-authentication is not needed .
train
false
9,679
@snippet
def dataset_exists(client, to_delete):
    DATASET_NAME = ('dataset_exists_%d' % (_millis(),))
    dataset = client.dataset(DATASET_NAME)
    to_delete.append(dataset)
    assert (not dataset.exists())
    dataset.create()
    assert dataset.exists()
[ "@", "snippet", "def", "dataset_exists", "(", "client", ",", "to_delete", ")", ":", "DATASET_NAME", "=", "(", "'dataset_exists_%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "dataset", "=", "client", ".", "dataset", "(", "DATASET_NAME", ")", "to_delete", ".", "append", "(", "dataset", ")", "assert", "(", "not", "dataset", ".", "exists", "(", ")", ")", "dataset", ".", "create", "(", ")", "assert", "dataset", ".", "exists", "(", ")" ]
test existence of a dataset .
train
false
9,681
def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False, exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    hosts = rpc_utils.get_host_query(multiple_labels, exclude_only_if_needed_labels, exclude_atomic_group_hosts, valid_only, filter_data)
    return hosts.count()
[ "def", "get_num_hosts", "(", "multiple_labels", "=", "(", ")", ",", "exclude_only_if_needed_labels", "=", "False", ",", "exclude_atomic_group_hosts", "=", "False", ",", "valid_only", "=", "True", ",", "**", "filter_data", ")", ":", "hosts", "=", "rpc_utils", ".", "get_host_query", "(", "multiple_labels", ",", "exclude_only_if_needed_labels", ",", "exclude_atomic_group_hosts", ",", "valid_only", ",", "filter_data", ")", "return", "hosts", ".", "count", "(", ")" ]
get the number of hosts .
train
false
9,682
def check_conversion(condition, message): if (not condition): raise InvalidConversionError(message)
[ "def", "check_conversion", "(", "condition", ",", "message", ")", ":", "if", "(", "not", "condition", ")", ":", "raise", "InvalidConversionError", "(", "message", ")" ]
asserts a conversion condition and raises an error if it's not met .
train
false
9,684
def dumpNodeConnections(nodes): def dumpConnections(node): 'Helper function: dump connections to node' for intf in node.intfList(): output((' %s:' % intf)) if intf.link: intfs = [intf.link.intf1, intf.link.intf2] intfs.remove(intf) output(intfs[0]) else: output(' ') for node in nodes: output(node.name) dumpConnections(node) output('\n')
[ "def", "dumpNodeConnections", "(", "nodes", ")", ":", "def", "dumpConnections", "(", "node", ")", ":", "for", "intf", "in", "node", ".", "intfList", "(", ")", ":", "output", "(", "(", "' %s:'", "%", "intf", ")", ")", "if", "intf", ".", "link", ":", "intfs", "=", "[", "intf", ".", "link", ".", "intf1", ",", "intf", ".", "link", ".", "intf2", "]", "intfs", ".", "remove", "(", "intf", ")", "output", "(", "intfs", "[", "0", "]", ")", "else", ":", "output", "(", "' '", ")", "for", "node", "in", "nodes", ":", "output", "(", "node", ".", "name", ")", "dumpConnections", "(", "node", ")", "output", "(", "'\\n'", ")" ]
dump connections to/from nodes .
train
false
9,685
@Profiler.profile def test_bulk_save_return_pks(n): session = Session(bind=engine) session.bulk_save_objects([Customer(name=('customer name %d' % i), description=('customer description %d' % i)) for i in range(n)], return_defaults=True) session.commit()
[ "@", "Profiler", ".", "profile", "def", "test_bulk_save_return_pks", "(", "n", ")", ":", "session", "=", "Session", "(", "bind", "=", "engine", ")", "session", ".", "bulk_save_objects", "(", "[", "Customer", "(", "name", "=", "(", "'customer name %d'", "%", "i", ")", ",", "description", "=", "(", "'customer description %d'", "%", "i", ")", ")", "for", "i", "in", "range", "(", "n", ")", "]", ",", "return_defaults", "=", "True", ")", "session", ".", "commit", "(", ")" ]
individual insert statements in "bulk" , with return_defaults=True so generated primary keys are fetched .
train
false
9,686
def _ConstructFullUrlBase(uri, host=None, ssl=False): if uri.startswith('http'): return uri if ssl: return ('https://%s%s' % (host, uri)) else: return ('http://%s%s' % (host, uri))
[ "def", "_ConstructFullUrlBase", "(", "uri", ",", "host", "=", "None", ",", "ssl", "=", "False", ")", ":", "if", "uri", ".", "startswith", "(", "'http'", ")", ":", "return", "uri", "if", "ssl", ":", "return", "(", "'https://%s%s'", "%", "(", "host", ",", "uri", ")", ")", "else", ":", "return", "(", "'http://%s%s'", "%", "(", "host", ",", "uri", ")", ")" ]
puts url components into the form http(s)://host/uri .
train
false
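A quick usage sketch with the _ConstructFullUrlBase snippet above in scope; the expected outputs follow directly from its string formatting.

print(_ConstructFullUrlBase('/feeds/posts', host='example.com'))            # http://example.com/feeds/posts
print(_ConstructFullUrlBase('/feeds/posts', host='example.com', ssl=True))  # https://example.com/feeds/posts
print(_ConstructFullUrlBase('http://other.example.com/feeds'))              # already absolute, returned as-is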
9,687
def test_lex_mangling_hyphen(): entry = tokenize('foo-bar') assert (entry == [HySymbol('foo_bar')]) entry = tokenize('-') assert (entry == [HySymbol('-')])
[ "def", "test_lex_mangling_hyphen", "(", ")", ":", "entry", "=", "tokenize", "(", "'foo-bar'", ")", "assert", "(", "entry", "==", "[", "HySymbol", "(", "'foo_bar'", ")", "]", ")", "entry", "=", "tokenize", "(", "'-'", ")", "assert", "(", "entry", "==", "[", "HySymbol", "(", "'-'", ")", "]", ")" ]
ensure that hyphens get translated to underscores during mangling .
train
false
9,688
@supports_proxies('esxi') def get_service_instance_via_proxy(service_instance=None): connection_details = _get_proxy_connection_details() return salt.utils.vmware.get_service_instance(*connection_details)
[ "@", "supports_proxies", "(", "'esxi'", ")", "def", "get_service_instance_via_proxy", "(", "service_instance", "=", "None", ")", ":", "connection_details", "=", "_get_proxy_connection_details", "(", ")", "return", "salt", ".", "utils", ".", "vmware", ".", "get_service_instance", "(", "*", "connection_details", ")" ]
returns a service instance to the proxied endpoint .
train
false
9,693
def console_pool_get_by_host_type(context, compute_host, proxy_host, console_type): return IMPL.console_pool_get_by_host_type(context, compute_host, proxy_host, console_type)
[ "def", "console_pool_get_by_host_type", "(", "context", ",", "compute_host", ",", "proxy_host", ",", "console_type", ")", ":", "return", "IMPL", ".", "console_pool_get_by_host_type", "(", "context", ",", "compute_host", ",", "proxy_host", ",", "console_type", ")" ]
fetch a console pool for a given proxy host .
train
false
9,694
@handle_response_format @treeio_login_required def status_delete(request, status_id, response_format='html'): status = get_object_or_404(TicketStatus, pk=status_id) if (not request.user.profile.has_permission(status, mode='w')): return user_denied(request, "You don't have access to this Ticket Status", response_format) if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): status.trash = True status.save() else: status.delete() return HttpResponseRedirect(reverse('services_settings_view')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('services_status_view', args=[status.id])) context = _get_default_context(request) context.update({'status': status}) return render_to_response('services/status_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "status_delete", "(", "request", ",", "status_id", ",", "response_format", "=", "'html'", ")", ":", "status", "=", "get_object_or_404", "(", "TicketStatus", ",", "pk", "=", "status_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "status", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Ticket Status\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "status", ".", "trash", "=", "True", "status", ".", "save", "(", ")", "else", ":", "status", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_settings_view'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_status_view'", ",", "args", "=", "[", "status", ".", "id", "]", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'status'", ":", "status", "}", ")", "return", "render_to_response", "(", "'services/status_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
delete a ticket status, moving it to trash if requested .
train
false
9,695
def p_statement_newline(p): p[0] = None
[ "def", "p_statement_newline", "(", "p", ")", ":", "p", "[", "0", "]", "=", "None" ]
statement : newline .
train
false
9,696
def FilterFixture(fixture=None, regex='.'): result = [] regex = re.compile(regex) if (fixture is None): fixture = client_fixture.VFS for (path, attributes) in fixture: if regex.match(path): result.append((path, attributes)) return result
[ "def", "FilterFixture", "(", "fixture", "=", "None", ",", "regex", "=", "'.'", ")", ":", "result", "=", "[", "]", "regex", "=", "re", ".", "compile", "(", "regex", ")", "if", "(", "fixture", "is", "None", ")", ":", "fixture", "=", "client_fixture", ".", "VFS", "for", "(", "path", ",", "attributes", ")", "in", "fixture", ":", "if", "regex", ".", "match", "(", "path", ")", ":", "result", ".", "append", "(", "(", "path", ",", "attributes", ")", ")", "return", "result" ]
returns a sub fixture by only returning objects which match the regex .
train
false
9,697
def _generate_noise(info, cov, iir_filter, random_state, n_samples, zi=None): from scipy.signal import lfilter noise_cov = pick_channels_cov(cov, include=info['ch_names'], exclude=[]) if (set(info['ch_names']) != set(noise_cov.ch_names)): raise ValueError(('Evoked and covariance channel names are not identical. Cannot generate the noise matrix. Channels missing in covariance %s.' % np.setdiff1d(info['ch_names'], noise_cov.ch_names))) rng = check_random_state(random_state) c = (np.diag(noise_cov.data) if noise_cov['diag'] else noise_cov.data) mu_channels = np.zeros(len(c)) with warnings.catch_warnings(record=True): noise = rng.multivariate_normal(mu_channels, c, n_samples).T if (iir_filter is not None): if (zi is None): zi = np.zeros((len(c), (len(iir_filter) - 1))) (noise, zf) = lfilter([1], iir_filter, noise, axis=(-1), zi=zi) else: zf = None return (noise, zf)
[ "def", "_generate_noise", "(", "info", ",", "cov", ",", "iir_filter", ",", "random_state", ",", "n_samples", ",", "zi", "=", "None", ")", ":", "from", "scipy", ".", "signal", "import", "lfilter", "noise_cov", "=", "pick_channels_cov", "(", "cov", ",", "include", "=", "info", "[", "'ch_names'", "]", ",", "exclude", "=", "[", "]", ")", "if", "(", "set", "(", "info", "[", "'ch_names'", "]", ")", "!=", "set", "(", "noise_cov", ".", "ch_names", ")", ")", ":", "raise", "ValueError", "(", "(", "'Evoked and covariance channel names are not identical. Cannot generate the noise matrix. Channels missing in covariance %s.'", "%", "np", ".", "setdiff1d", "(", "info", "[", "'ch_names'", "]", ",", "noise_cov", ".", "ch_names", ")", ")", ")", "rng", "=", "check_random_state", "(", "random_state", ")", "c", "=", "(", "np", ".", "diag", "(", "noise_cov", ".", "data", ")", "if", "noise_cov", "[", "'diag'", "]", "else", "noise_cov", ".", "data", ")", "mu_channels", "=", "np", ".", "zeros", "(", "len", "(", "c", ")", ")", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", ":", "noise", "=", "rng", ".", "multivariate_normal", "(", "mu_channels", ",", "c", ",", "n_samples", ")", ".", "T", "if", "(", "iir_filter", "is", "not", "None", ")", ":", "if", "(", "zi", "is", "None", ")", ":", "zi", "=", "np", ".", "zeros", "(", "(", "len", "(", "c", ")", ",", "(", "len", "(", "iir_filter", ")", "-", "1", ")", ")", ")", "(", "noise", ",", "zf", ")", "=", "lfilter", "(", "[", "1", "]", ",", "iir_filter", ",", "noise", ",", "axis", "=", "(", "-", "1", ")", ",", "zi", "=", "zi", ")", "else", ":", "zf", "=", "None", "return", "(", "noise", ",", "zf", ")" ]
create spatially colored and temporally iir-filtered noise .
train
false
9,698
def get_auth_provider(credentials, pre_auth=False, scope='project'): msg = 'tempest.manager.get_auth_provider is not a stable interface and as such it should not imported directly. It will be removed as the client manager becomes available in tempest.lib.' LOG.warning(msg) return tempest_clients.get_auth_provider(credentials=credentials, pre_auth=pre_auth, scope=scope)
[ "def", "get_auth_provider", "(", "credentials", ",", "pre_auth", "=", "False", ",", "scope", "=", "'project'", ")", ":", "msg", "=", "'tempest.manager.get_auth_provider is not a stable interface and as such it should not imported directly. It will be removed as the client manager becomes available in tempest.lib.'", "LOG", ".", "warning", "(", "msg", ")", "return", "tempest_clients", ".", "get_auth_provider", "(", "credentials", "=", "credentials", ",", "pre_auth", "=", "pre_auth", ",", "scope", "=", "scope", ")" ]
shim to get_auth_provider in clients .
train
false
9,700
def _has_option(option, family='ipv4'): cmd = '{0} --help'.format(_iptables_cmd(family)) if (option in __salt__['cmd.run'](cmd, output_loglevel='quiet')): return True return False
[ "def", "_has_option", "(", "option", ",", "family", "=", "'ipv4'", ")", ":", "cmd", "=", "'{0} --help'", ".", "format", "(", "_iptables_cmd", "(", "family", ")", ")", "if", "(", "option", "in", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "output_loglevel", "=", "'quiet'", ")", ")", ":", "return", "True", "return", "False" ]
return whether the iptables command supports the given option .
train
true
9,701
def mb_parser_null(resp): return resp
[ "def", "mb_parser_null", "(", "resp", ")", ":", "return", "resp" ]
return the raw response .
train
false
9,702
def test_member_completion(): superConsole.SendKeys('outputRedirectStart{(}True{)}{ENTER}') testRegex = '' superConsole.SendKeys('print c.{TAB}{ENTER}') testRegex += '(Cdoc|pretest)' superConsole.SendKeys('try:{ENTER}') superConsole.SendKeys('print f.{TAB}x{ENTER}') superConsole.SendKeys('{BACKSPACE}except:{ENTER}') superConsole.SendKeys('print "EXC"{ENTER}{ENTER}{ENTER}') testRegex += 'EXC' superConsole.SendKeys('outputRedirectStop{(}{)}{ENTER}') verifyResults(getTestOutput()[0], testRegex)
[ "def", "test_member_completion", "(", ")", ":", "superConsole", ".", "SendKeys", "(", "'outputRedirectStart{(}True{)}{ENTER}'", ")", "testRegex", "=", "''", "superConsole", ".", "SendKeys", "(", "'print c.{TAB}{ENTER}'", ")", "testRegex", "+=", "'(Cdoc|pretest)'", "superConsole", ".", "SendKeys", "(", "'try:{ENTER}'", ")", "superConsole", ".", "SendKeys", "(", "'print f.{TAB}x{ENTER}'", ")", "superConsole", ".", "SendKeys", "(", "'{BACKSPACE}except:{ENTER}'", ")", "superConsole", ".", "SendKeys", "(", "'print \"EXC\"{ENTER}{ENTER}{ENTER}'", ")", "testRegex", "+=", "'EXC'", "superConsole", ".", "SendKeys", "(", "'outputRedirectStop{(}{)}{ENTER}'", ")", "verifyResults", "(", "getTestOutput", "(", ")", "[", "0", "]", ",", "testRegex", ")" ]
ensure that tabbing after an identifier and dot completes its members .
train
false
9,703
def p_command_return(p): p[0] = ('RETURN',)
[ "def", "p_command_return", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "'RETURN'", ",", ")" ]
command : return .
train
false
9,705
def encode_root(v): return v
[ "def", "encode_root", "(", "v", ")", ":", "return", "v" ]
encodes a trie root for serialization .
train
false
9,707
def p_expression_group(t): t[0] = t[2]
[ "def", "p_expression_group", "(", "t", ")", ":", "t", "[", "0", "]", "=", "t", "[", "2", "]" ]
expression : lparen expression rparen .
train
false
9,708
def _did_you_mean(name, choices): if (not choices): return None name = name.lower() candidates = [(_levenshtein(name, c), c) for c in choices] candidates.sort() if (candidates[0][0] <= 3): return candidates[0][1] return None
[ "def", "_did_you_mean", "(", "name", ",", "choices", ")", ":", "if", "(", "not", "choices", ")", ":", "return", "None", "name", "=", "name", ".", "lower", "(", ")", "candidates", "=", "[", "(", "_levenshtein", "(", "name", ",", "c", ")", ",", "c", ")", "for", "c", "in", "choices", "]", "candidates", ".", "sort", "(", ")", "if", "(", "candidates", "[", "0", "]", "[", "0", "]", "<=", "3", ")", ":", "return", "candidates", "[", "0", "]", "[", "1", "]", "return", "None" ]
suggest the most likely setting based on levenshtein distance .
train
false
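A hedged sketch pairing the _did_you_mean snippet above with a minimal edit-distance helper, since _levenshtein is not included in the record; this implementation is an assumption, not the original.

def _levenshtein(a, b):
    # classic dynamic-programming edit distance
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        cur = [i]
        for j, cb in enumerate(b, 1):
            cur.append(min(prev[j] + 1, cur[j - 1] + 1, prev[j - 1] + (ca != cb)))
        prev = cur
    return prev[-1]

print(_did_you_mean('colr', ['color', 'size', 'weight']))  # distance 1 -> 'color'
print(_did_you_mean('zzzzzz', ['color', 'size']))          # nothing within distance 3 -> None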
9,709
def rgb2yuv(rgb): return _convert(yuv_from_rgb, rgb)
[ "def", "rgb2yuv", "(", "rgb", ")", ":", "return", "_convert", "(", "yuv_from_rgb", ",", "rgb", ")" ]
rgb to yuv color space conversion .
train
false
9,711
def base64_decodestring(instr): if six.PY3: b = salt.utils.to_bytes(instr) data = base64.decodebytes(b) try: return salt.utils.to_str(data) except UnicodeDecodeError: return data return base64.decodestring(instr)
[ "def", "base64_decodestring", "(", "instr", ")", ":", "if", "six", ".", "PY3", ":", "b", "=", "salt", ".", "utils", ".", "to_bytes", "(", "instr", ")", "data", "=", "base64", ".", "decodebytes", "(", "b", ")", "try", ":", "return", "salt", ".", "utils", ".", "to_str", "(", "data", ")", "except", "UnicodeDecodeError", ":", "return", "data", "return", "base64", ".", "decodestring", "(", "instr", ")" ]
decode a base64-encoded string using the "legacy" python interface .
train
false
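A hedged usage sketch: the wrapper needs Salt's utils importable, so the runnable lines below use the stdlib calls it delegates to on Python 3, with the expected round-trip shown in a comment.

import base64
encoded = base64.encodebytes(b'salt')  # b'c2FsdA==\n'
print(base64.decodebytes(encoded))     # b'salt'
# with salt on the path: base64_decodestring('c2FsdA==\n') -> 'salt'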
9,712
def trailing_slash(missing=True, extra=False, status=None, debug=False): request = cherrypy.serving.request pi = request.path_info if debug: cherrypy.log(('is_index: %r, missing: %r, extra: %r, path_info: %r' % (request.is_index, missing, extra, pi)), 'TOOLS.TRAILING_SLASH') if (request.is_index is True): if missing: if (not pi.endswith('/')): new_url = cherrypy.url((pi + '/'), request.query_string) raise cherrypy.HTTPRedirect(new_url, status=(status or 301)) elif (request.is_index is False): if extra: if (pi.endswith('/') and (pi != '/')): new_url = cherrypy.url(pi[:(-1)], request.query_string) raise cherrypy.HTTPRedirect(new_url, status=(status or 301))
[ "def", "trailing_slash", "(", "missing", "=", "True", ",", "extra", "=", "False", ",", "status", "=", "None", ",", "debug", "=", "False", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "pi", "=", "request", ".", "path_info", "if", "debug", ":", "cherrypy", ".", "log", "(", "(", "'is_index: %r, missing: %r, extra: %r, path_info: %r'", "%", "(", "request", ".", "is_index", ",", "missing", ",", "extra", ",", "pi", ")", ")", ",", "'TOOLS.TRAILING_SLASH'", ")", "if", "(", "request", ".", "is_index", "is", "True", ")", ":", "if", "missing", ":", "if", "(", "not", "pi", ".", "endswith", "(", "'/'", ")", ")", ":", "new_url", "=", "cherrypy", ".", "url", "(", "(", "pi", "+", "'/'", ")", ",", "request", ".", "query_string", ")", "raise", "cherrypy", ".", "HTTPRedirect", "(", "new_url", ",", "status", "=", "(", "status", "or", "301", ")", ")", "elif", "(", "request", ".", "is_index", "is", "False", ")", ":", "if", "extra", ":", "if", "(", "pi", ".", "endswith", "(", "'/'", ")", "and", "(", "pi", "!=", "'/'", ")", ")", ":", "new_url", "=", "cherrypy", ".", "url", "(", "pi", "[", ":", "(", "-", "1", ")", "]", ",", "request", ".", "query_string", ")", "raise", "cherrypy", ".", "HTTPRedirect", "(", "new_url", ",", "status", "=", "(", "status", "or", "301", ")", ")" ]
redirect to add a missing trailing slash on index urls , or to strip an extra one .
train
false
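A hedged configuration sketch: trailing_slash is exposed as a built-in CherryPy tool, typically toggled per path like this; the config key names mirror the keyword arguments of the snippet above.

config = {
    '/': {
        'tools.trailing_slash.on': True,
        'tools.trailing_slash.missing': True,  # redirect /dir to /dir/ for index handlers
        'tools.trailing_slash.extra': False,   # set True to strip /page/ -> /page on non-index handlers
    },
}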
9,714
def test_delimited_list(): assert (hug.types.delimited_list(',')('value1,value2') == ['value1', 'value2']) assert (hug.types.delimited_list(',')(['value1', 'value2']) == ['value1', 'value2']) assert (hug.types.delimited_list('|-|')('value1|-|value2|-|value3,value4') == ['value1', 'value2', 'value3,value4']) assert (',' in hug.types.delimited_list(',').__doc__)
[ "def", "test_delimited_list", "(", ")", ":", "assert", "(", "hug", ".", "types", ".", "delimited_list", "(", "','", ")", "(", "'value1,value2'", ")", "==", "[", "'value1'", ",", "'value2'", "]", ")", "assert", "(", "hug", ".", "types", ".", "delimited_list", "(", "','", ")", "(", "[", "'value1'", ",", "'value2'", "]", ")", "==", "[", "'value1'", ",", "'value2'", "]", ")", "assert", "(", "hug", ".", "types", ".", "delimited_list", "(", "'|-|'", ")", "(", "'value1|-|value2|-|value3,value4'", ")", "==", "[", "'value1'", ",", "'value2'", ",", "'value3,value4'", "]", ")", "assert", "(", "','", "in", "hug", ".", "types", ".", "delimited_list", "(", "','", ")", ".", "__doc__", ")" ]
test to ensure hug's custom delimited list type function works as expected .
train
false
9,716
def time_since(d, now=None, count=2, accuracy=6, simple=False): if (isinstance(d, int) or isinstance(d, float)): d = datetime.datetime.fromtimestamp(d) if (isinstance(now, int) or isinstance(now, float)): now = datetime.datetime.fromtimestamp(now) if (not isinstance(d, datetime.datetime)): d = datetime.datetime(d.year, d.month, d.day) if (now and (not isinstance(now, datetime.datetime))): now = datetime.datetime(now.year, now.month, now.day) if (not now): now = datetime.datetime.now() delta = (now - (d - datetime.timedelta(0, 0, d.microsecond))) since = ((((delta.days * 24) * 60) * 60) + delta.seconds) if (since <= 0): return ('0 ' + 'minutes') return format_time(since, count, accuracy, simple)
[ "def", "time_since", "(", "d", ",", "now", "=", "None", ",", "count", "=", "2", ",", "accuracy", "=", "6", ",", "simple", "=", "False", ")", ":", "if", "(", "isinstance", "(", "d", ",", "int", ")", "or", "isinstance", "(", "d", ",", "float", ")", ")", ":", "d", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "d", ")", "if", "(", "isinstance", "(", "now", ",", "int", ")", "or", "isinstance", "(", "now", ",", "float", ")", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "now", ")", "if", "(", "not", "isinstance", "(", "d", ",", "datetime", ".", "datetime", ")", ")", ":", "d", "=", "datetime", ".", "datetime", "(", "d", ".", "year", ",", "d", ".", "month", ",", "d", ".", "day", ")", "if", "(", "now", "and", "(", "not", "isinstance", "(", "now", ",", "datetime", ".", "datetime", ")", ")", ")", ":", "now", "=", "datetime", ".", "datetime", "(", "now", ".", "year", ",", "now", ".", "month", ",", "now", ".", "day", ")", "if", "(", "not", "now", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "delta", "=", "(", "now", "-", "(", "d", "-", "datetime", ".", "timedelta", "(", "0", ",", "0", ",", "d", ".", "microsecond", ")", ")", ")", "since", "=", "(", "(", "(", "(", "delta", ".", "days", "*", "24", ")", "*", "60", ")", "*", "60", ")", "+", "delta", ".", "seconds", ")", "if", "(", "since", "<=", "0", ")", ":", "return", "(", "'0 '", "+", "'minutes'", ")", "return", "format_time", "(", "since", ",", "count", ",", "accuracy", ",", "simple", ")" ]
returns a human-readable string representing the time elapsed since d .
train
false
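A hedged usage sketch for time_since; format_time is not shown in the record, so the exact output format is an assumption.

import datetime
import time
then = datetime.datetime.now() - datetime.timedelta(hours=5, minutes=3)
print(time_since(then))              # something like '5 hours, 3 minutes' (depends on format_time)
print(time_since(time.time() - 90))  # numeric timestamps are converted via fromtimestamp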
9,717
def next_monday_or_tuesday(dt): dow = dt.weekday() if ((dow == 5) or (dow == 6)): return (dt + timedelta(2)) elif (dow == 0): return (dt + timedelta(1)) return dt
[ "def", "next_monday_or_tuesday", "(", "dt", ")", ":", "dow", "=", "dt", ".", "weekday", "(", ")", "if", "(", "(", "dow", "==", "5", ")", "or", "(", "dow", "==", "6", ")", ")", ":", "return", "(", "dt", "+", "timedelta", "(", "2", ")", ")", "elif", "(", "dow", "==", "0", ")", ":", "return", "(", "dt", "+", "timedelta", "(", "1", ")", ")", "return", "dt" ]
for the second holiday of two adjacent ones: if it falls on saturday, use the following monday; if on sunday or monday, use the following tuesday .
train
true
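A pure-function demo of next_monday_or_tuesday, assuming the snippet's timedelta import (from datetime); the results follow from the weekday arithmetic.

from datetime import datetime, timedelta
print(next_monday_or_tuesday(datetime(2016, 7, 2)))  # Saturday -> Monday 2016-07-04
print(next_monday_or_tuesday(datetime(2016, 7, 3)))  # Sunday   -> Tuesday 2016-07-05
print(next_monday_or_tuesday(datetime(2016, 7, 4)))  # Monday   -> Tuesday 2016-07-05
print(next_monday_or_tuesday(datetime(2016, 7, 5)))  # Tuesday  -> unchanged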
9,719
def libvlc_event_detach(p_event_manager, i_event_type, f_callback, p_user_data): f = (_Cfunctions.get('libvlc_event_detach', None) or _Cfunction('libvlc_event_detach', ((1,), (1,), (1,), (1,)), None, None, EventManager, ctypes.c_uint, Callback, ctypes.c_void_p)) return f(p_event_manager, i_event_type, f_callback, p_user_data)
[ "def", "libvlc_event_detach", "(", "p_event_manager", ",", "i_event_type", ",", "f_callback", ",", "p_user_data", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_event_detach'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_event_detach'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "EventManager", ",", "ctypes", ".", "c_uint", ",", "Callback", ",", "ctypes", ".", "c_void_p", ")", ")", "return", "f", "(", "p_event_manager", ",", "i_event_type", ",", "f_callback", ",", "p_user_data", ")" ]
unregister an event notification .
train
true
9,721
def assert_match(pattern, string, msg=None): assert_not_none(re.search(pattern, string), msg)
[ "def", "assert_match", "(", "pattern", ",", "string", ",", "msg", "=", "None", ")", ":", "assert_not_none", "(", "re", ".", "search", "(", "pattern", ",", "string", ")", ",", "msg", ")" ]
verify that the pattern is found in the string .
train
false
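A hedged usage sketch for assert_match; assert_not_none is assumed to be a sibling helper that raises when re.search returns None.

import re  # the snippet relies on re.search
assert_match(r'\d{3}-\d{4}', 'call 555-1234')  # passes: the pattern is found
# assert_match(r'^\d+$', 'abc', msg='digits expected')  # would raise via assert_not_none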
9,722
def default_select(identifier, all_entry_points): if (len(all_entry_points) == 0): raise PluginMissingError(identifier) elif (len(all_entry_points) == 1): return all_entry_points[0] elif (len(all_entry_points) > 1): raise AmbiguousPluginError(all_entry_points)
[ "def", "default_select", "(", "identifier", ",", "all_entry_points", ")", ":", "if", "(", "len", "(", "all_entry_points", ")", "==", "0", ")", ":", "raise", "PluginMissingError", "(", "identifier", ")", "elif", "(", "len", "(", "all_entry_points", ")", "==", "1", ")", ":", "return", "all_entry_points", "[", "0", "]", "elif", "(", "len", "(", "all_entry_points", ")", ">", "1", ")", ":", "raise", "AmbiguousPluginError", "(", "all_entry_points", ")" ]
return the single matching entry point, raising an exception when entry points are missing or ambiguous .
train
true
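A hedged demo of default_select's contract; PluginMissingError and AmbiguousPluginError are assumed to be defined alongside the snippet.

print(default_select('block', ['ep1']))  # exactly one candidate -> 'ep1'
try:
    default_select('block', [])
except PluginMissingError:
    print('no entry point registered for "block"')
try:
    default_select('block', ['ep1', 'ep2'])
except AmbiguousPluginError:
    print('more than one entry point matched')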
9,723
def _mk_fileclient(): if ('cp.fileclient' not in __context__): __context__['cp.fileclient'] = salt.fileclient.get_file_client(__opts__)
[ "def", "_mk_fileclient", "(", ")", ":", "if", "(", "'cp.fileclient'", "not", "in", "__context__", ")", ":", "__context__", "[", "'cp.fileclient'", "]", "=", "salt", ".", "fileclient", ".", "get_file_client", "(", "__opts__", ")" ]
create a file client and add it to the context .
train
false
9,724
def libvlc_media_list_count(p_ml): f = (_Cfunctions.get('libvlc_media_list_count', None) or _Cfunction('libvlc_media_list_count', ((1,),), None, ctypes.c_int, MediaList)) return f(p_ml)
[ "def", "libvlc_media_list_count", "(", "p_ml", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_count'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_count'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaList", ")", ")", "return", "f", "(", "p_ml", ")" ]
get count on media list items ; the l{libvlc_media_list_lock} should be held upon entering this function .
train
true
9,725
def CanMergeMultipleLines(lines, last_was_merged=False): indent_amt = (lines[0].depth * style.Get('INDENT_WIDTH')) if ((len(lines) == 1) or (indent_amt > style.Get('COLUMN_LIMIT'))): return False if ((len(lines) >= 3) and (lines[2].depth >= lines[1].depth) and (lines[0].depth != lines[2].depth)): return False if (lines[0].first.value in _CLASS_OR_FUNC): return False limit = (style.Get('COLUMN_LIMIT') - indent_amt) if (lines[0].last.total_length < limit): limit -= lines[0].last.total_length if (lines[0].first.value == 'if'): return _CanMergeLineIntoIfStatement(lines, limit) if (last_was_merged and (lines[0].first.value in {'elif', 'else'})): return _CanMergeLineIntoIfStatement(lines, limit) return False
[ "def", "CanMergeMultipleLines", "(", "lines", ",", "last_was_merged", "=", "False", ")", ":", "indent_amt", "=", "(", "lines", "[", "0", "]", ".", "depth", "*", "style", ".", "Get", "(", "'INDENT_WIDTH'", ")", ")", "if", "(", "(", "len", "(", "lines", ")", "==", "1", ")", "or", "(", "indent_amt", ">", "style", ".", "Get", "(", "'COLUMN_LIMIT'", ")", ")", ")", ":", "return", "False", "if", "(", "(", "len", "(", "lines", ")", ">=", "3", ")", "and", "(", "lines", "[", "2", "]", ".", "depth", ">=", "lines", "[", "1", "]", ".", "depth", ")", "and", "(", "lines", "[", "0", "]", ".", "depth", "!=", "lines", "[", "2", "]", ".", "depth", ")", ")", ":", "return", "False", "if", "(", "lines", "[", "0", "]", ".", "first", ".", "value", "in", "_CLASS_OR_FUNC", ")", ":", "return", "False", "limit", "=", "(", "style", ".", "Get", "(", "'COLUMN_LIMIT'", ")", "-", "indent_amt", ")", "if", "(", "lines", "[", "0", "]", ".", "last", ".", "total_length", "<", "limit", ")", ":", "limit", "-=", "lines", "[", "0", "]", ".", "last", ".", "total_length", "if", "(", "lines", "[", "0", "]", ".", "first", ".", "value", "==", "'if'", ")", ":", "return", "_CanMergeLineIntoIfStatement", "(", "lines", ",", "limit", ")", "if", "(", "last_was_merged", "and", "(", "lines", "[", "0", "]", ".", "first", ".", "value", "in", "{", "'elif'", ",", "'else'", "}", ")", ")", ":", "return", "_CanMergeLineIntoIfStatement", "(", "lines", ",", "limit", ")", "return", "False" ]
determine if multiple lines can be joined into one .
train
false
9,726
def pack_context(msg, context): context_d = dict([(('_context_%s' % key), value) for (key, value) in context.to_dict().iteritems()]) msg.update(context_d)
[ "def", "pack_context", "(", "msg", ",", "context", ")", ":", "context_d", "=", "dict", "(", "[", "(", "(", "'_context_%s'", "%", "key", ")", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "context", ".", "to_dict", "(", ")", ".", "iteritems", "(", ")", "]", ")", "msg", ".", "update", "(", "context_d", ")" ]
pack context into msg .
train
false
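A hedged demo of pack_context with a minimal stand-in context object (the real RPC context type is assumed); note the snippet's iteritems targets Python 2.

class FakeContext(object):
    def to_dict(self):
        return {'request_id': 'req-1', 'user': 'alice'}

msg = {'method': 'ping'}
pack_context(msg, FakeContext())  # runs on Python 2, where dict.iteritems exists
print(msg)  # {'method': 'ping', '_context_request_id': 'req-1', '_context_user': 'alice'}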
9,728
@lru_cache() def timedelta_to_days(value): if (not hasattr(value, 'total_seconds')): secs = ((value.microseconds + ((value.seconds + (value.days * SECS_PER_DAY)) * (10 ** 6))) / (10 ** 6)) else: secs = value.total_seconds() return (secs / SECS_PER_DAY)
[ "@", "lru_cache", "(", ")", "def", "timedelta_to_days", "(", "value", ")", ":", "if", "(", "not", "hasattr", "(", "value", ",", "'total_seconds'", ")", ")", ":", "secs", "=", "(", "(", "value", ".", "microseconds", "+", "(", "(", "value", ".", "seconds", "+", "(", "value", ".", "days", "*", "SECS_PER_DAY", ")", ")", "*", "(", "10", "**", "6", ")", ")", ")", "/", "(", "10", "**", "6", ")", ")", "else", ":", "secs", "=", "value", ".", "total_seconds", "(", ")", "return", "(", "secs", "/", "SECS_PER_DAY", ")" ]
return the time delta as a number of days .
train
false
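A usage sketch for timedelta_to_days, assuming functools.lru_cache and SECS_PER_DAY = 86400 are in scope when the snippet is defined (neither is shown in the record); timedelta objects are hashable, which is what lets lru_cache work here.

from datetime import timedelta
SECS_PER_DAY = 86400  # assumed value of the module constant
print(timedelta_to_days(timedelta(days=1, hours=12)))  # 129600 / 86400 -> 1.5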
9,729
def _find_exe_version(cmd): executable = cmd.split()[0] if (find_executable(executable) is None): return None out = Popen(cmd, shell=True, stdout=PIPE).stdout try: out_string = out.read() finally: out.close() result = RE_VERSION.search(out_string) if (result is None): return None return LooseVersion(result.group(1).decode())
[ "def", "_find_exe_version", "(", "cmd", ")", ":", "executable", "=", "cmd", ".", "split", "(", ")", "[", "0", "]", "if", "(", "find_executable", "(", "executable", ")", "is", "None", ")", ":", "return", "None", "out", "=", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "PIPE", ")", ".", "stdout", "try", ":", "out_string", "=", "out", ".", "read", "(", ")", "finally", ":", "out", ".", "close", "(", ")", "result", "=", "RE_VERSION", ".", "search", "(", "out_string", ")", "if", "(", "result", "is", "None", ")", ":", "return", "None", "return", "LooseVersion", "(", "result", ".", "group", "(", "1", ")", ".", "decode", "(", ")", ")" ]
find the version of an executable by running cmd in the shell .
train
false
9,730
def _parse_pre_yarn_history_log(lines): result = {} task_to_counters = {} for record in _parse_pre_yarn_history_records(lines): fields = record['fields'] if ((record['type'] == 'Job') and ('COUNTERS' in fields)): result['counters'] = _parse_pre_yarn_counters(fields['COUNTERS']) elif ((record['type'] == 'Task') and ('COUNTERS' in fields) and ('TASKID' in fields)): task_id = fields['TASKID'] counters = _parse_pre_yarn_counters(fields['COUNTERS']) task_to_counters[task_id] = counters elif ((record['type'] in ('MapAttempt', 'ReduceAttempt')) and ('TASK_ATTEMPT_ID' in fields) and (fields.get('TASK_STATUS') == 'FAILED') and fields.get('ERROR')): result.setdefault('errors', []) result['errors'].append(dict(hadoop_error=dict(message=fields['ERROR'], start_line=record['start_line'], num_lines=record['num_lines']), attempt_id=fields['TASK_ATTEMPT_ID'])) if (('counters' not in result) and task_to_counters): result['counters'] = _sum_counters(*task_to_counters.values()) return result
[ "def", "_parse_pre_yarn_history_log", "(", "lines", ")", ":", "result", "=", "{", "}", "task_to_counters", "=", "{", "}", "for", "record", "in", "_parse_pre_yarn_history_records", "(", "lines", ")", ":", "fields", "=", "record", "[", "'fields'", "]", "if", "(", "(", "record", "[", "'type'", "]", "==", "'Job'", ")", "and", "(", "'COUNTERS'", "in", "fields", ")", ")", ":", "result", "[", "'counters'", "]", "=", "_parse_pre_yarn_counters", "(", "fields", "[", "'COUNTERS'", "]", ")", "elif", "(", "(", "record", "[", "'type'", "]", "==", "'Task'", ")", "and", "(", "'COUNTERS'", "in", "fields", ")", "and", "(", "'TASKID'", "in", "fields", ")", ")", ":", "task_id", "=", "fields", "[", "'TASKID'", "]", "counters", "=", "_parse_pre_yarn_counters", "(", "fields", "[", "'COUNTERS'", "]", ")", "task_to_counters", "[", "task_id", "]", "=", "counters", "elif", "(", "(", "record", "[", "'type'", "]", "in", "(", "'MapAttempt'", ",", "'ReduceAttempt'", ")", ")", "and", "(", "'TASK_ATTEMPT_ID'", "in", "fields", ")", "and", "(", "fields", ".", "get", "(", "'TASK_STATUS'", ")", "==", "'FAILED'", ")", "and", "fields", ".", "get", "(", "'ERROR'", ")", ")", ":", "result", ".", "setdefault", "(", "'errors'", ",", "[", "]", ")", "result", "[", "'errors'", "]", ".", "append", "(", "dict", "(", "hadoop_error", "=", "dict", "(", "message", "=", "fields", "[", "'ERROR'", "]", ",", "start_line", "=", "record", "[", "'start_line'", "]", ",", "num_lines", "=", "record", "[", "'num_lines'", "]", ")", ",", "attempt_id", "=", "fields", "[", "'TASK_ATTEMPT_ID'", "]", ")", ")", "if", "(", "(", "'counters'", "not", "in", "result", ")", "and", "task_to_counters", ")", ":", "result", "[", "'counters'", "]", "=", "_sum_counters", "(", "*", "task_to_counters", ".", "values", "(", ")", ")", "return", "result" ]
collect useful info from a pre-yarn history file .
train
false
9,731
def send_alerts(): products = Product.objects.filter(productalert__status=ProductAlert.ACTIVE).distinct() logger.info('Found %d products with active alerts', products.count()) for product in products: send_product_alerts(product)
[ "def", "send_alerts", "(", ")", ":", "products", "=", "Product", ".", "objects", ".", "filter", "(", "productalert__status", "=", "ProductAlert", ".", "ACTIVE", ")", ".", "distinct", "(", ")", "logger", ".", "info", "(", "'Found %d products with active alerts'", ",", "products", ".", "count", "(", ")", ")", "for", "product", "in", "products", ":", "send_product_alerts", "(", "product", ")" ]
send out product alerts .
train
false
9,732
@np.deprecate(message='scipy.stats.signaltonoise is deprecated in scipy 0.16.0') def signaltonoise(a, axis=0, ddof=0): a = np.asanyarray(a) m = a.mean(axis) sd = a.std(axis=axis, ddof=ddof) return np.where((sd == 0), 0, (m / sd))
[ "@", "np", ".", "deprecate", "(", "message", "=", "'scipy.stats.signaltonoise is deprecated in scipy 0.16.0'", ")", "def", "signaltonoise", "(", "a", ",", "axis", "=", "0", ",", "ddof", "=", "0", ")", ":", "a", "=", "np", ".", "asanyarray", "(", "a", ")", "m", "=", "a", ".", "mean", "(", "axis", ")", "sd", "=", "a", ".", "std", "(", "axis", "=", "axis", ",", "ddof", "=", "ddof", ")", "return", "np", ".", "where", "(", "(", "sd", "==", "0", ")", ",", "0", ",", "(", "m", "/", "sd", ")", ")" ]
the signal-to-noise ratio of the input data .
train
false
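A quick numeric check of the (deprecated) signaltonoise above; the value is the mean divided by the population standard deviation.

import numpy as np
a = np.array([1.0, 2.0, 3.0, 4.0])
print(signaltonoise(a))  # 2.5 / 1.1180... -> ~2.236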
9,734
@block_user_agents @require_POST @login_required @process_document_path def subscribe_to_tree(request, document_slug, document_locale): document = get_object_or_404(Document, locale=document_locale, slug=document_slug) status = 0 if EditDocumentInTreeEvent.is_notifying(request.user, document): EditDocumentInTreeEvent.stop_notifying(request.user, document) else: EditDocumentInTreeEvent.notify(request.user, document) status = 1 if request.is_ajax(): return JsonResponse({'status': status}) else: return redirect(document)
[ "@", "block_user_agents", "@", "require_POST", "@", "login_required", "@", "process_document_path", "def", "subscribe_to_tree", "(", "request", ",", "document_slug", ",", "document_locale", ")", ":", "document", "=", "get_object_or_404", "(", "Document", ",", "locale", "=", "document_locale", ",", "slug", "=", "document_slug", ")", "status", "=", "0", "if", "EditDocumentInTreeEvent", ".", "is_notifying", "(", "request", ".", "user", ",", "document", ")", ":", "EditDocumentInTreeEvent", ".", "stop_notifying", "(", "request", ".", "user", ",", "document", ")", "else", ":", "EditDocumentInTreeEvent", ".", "notify", "(", "request", ".", "user", ",", "document", ")", "status", "=", "1", "if", "request", ".", "is_ajax", "(", ")", ":", "return", "JsonResponse", "(", "{", "'status'", ":", "status", "}", ")", "else", ":", "return", "redirect", "(", "document", ")" ]
toggle watching a tree of documents for edits .
train
false
9,736
def Skewness(xs): return StandardizedMoment(xs, 3)
[ "def", "Skewness", "(", "xs", ")", ":", "return", "StandardizedMoment", "(", "xs", ",", "3", ")" ]
computes skewness .
train
false
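A hedged sketch for Skewness: StandardizedMoment is not included in the record, so the helpers below are assumed definitions (the k-th central moment divided by sigma**k) consistent with the call.

import math

def RawMoment(xs, k):
    return sum(x ** k for x in xs) / len(xs)

def CentralMoment(xs, k):
    mean = RawMoment(xs, 1)
    return sum((x - mean) ** k for x in xs) / len(xs)

def StandardizedMoment(xs, k):
    std = math.sqrt(CentralMoment(xs, 2))
    return CentralMoment(xs, k) / std ** k

print(Skewness([1, 2, 3, 4, 10]))  # long right tail -> ~1.14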
9,737
def _parse_selections(dpkgselection): ret = {} if isinstance(dpkgselection, six.string_types): dpkgselection = dpkgselection.split('\n') for line in dpkgselection: if line: (_pkg, _state) = line.split() if (_state in ret): ret[_state].append(_pkg) else: ret[_state] = [_pkg] return ret
[ "def", "_parse_selections", "(", "dpkgselection", ")", ":", "ret", "=", "{", "}", "if", "isinstance", "(", "dpkgselection", ",", "six", ".", "string_types", ")", ":", "dpkgselection", "=", "dpkgselection", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "dpkgselection", ":", "if", "line", ":", "(", "_pkg", ",", "_state", ")", "=", "line", ".", "split", "(", ")", "if", "(", "_state", "in", "ret", ")", ":", "ret", "[", "_state", "]", ".", "append", "(", "_pkg", ")", "else", ":", "ret", "[", "_state", "]", "=", "[", "_pkg", "]", "return", "ret" ]
parses the output of dpkg --get-selections and returns a dict mapping each selection state to its packages .
train
true
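A usage demo for _parse_selections with input shaped like dpkg --get-selections output, assuming the snippet's six import is in scope.

sample = 'vim\tinstall\npython3\tinstall\nold-pkg\tdeinstall'
print(_parse_selections(sample))
# -> {'install': ['vim', 'python3'], 'deinstall': ['old-pkg']}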
9,738
def set_input_value(page, css, value): input_element = page.q(css=css).results[0] input_element.click() input_element.send_keys((Keys.CONTROL + 'a')) input_element.send_keys(value) return input_element
[ "def", "set_input_value", "(", "page", ",", "css", ",", "value", ")", ":", "input_element", "=", "page", ".", "q", "(", "css", "=", "css", ")", ".", "results", "[", "0", "]", "input_element", ".", "click", "(", ")", "input_element", ".", "send_keys", "(", "(", "Keys", ".", "CONTROL", "+", "'a'", ")", ")", "input_element", ".", "send_keys", "(", "value", ")", "return", "input_element" ]
sets the input field matching the given css selector to the specified value .
train
false