Dataset schema (per-column type and observed range):
  id_within_dataset     int64    values 1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 distinct value
  is_duplicated         bool     2 classes
50,055
def rolling_count(arg, window, **kwargs): return ensure_compat('rolling', 'count', arg, window=window, **kwargs)
[ "def", "rolling_count", "(", "arg", ",", "window", ",", "**", "kwargs", ")", ":", "return", "ensure_compat", "(", "'rolling'", ",", "'count'", ",", "arg", ",", "window", "=", "window", ",", "**", "kwargs", ")" ]
rolling count of number of non-nan observations inside provided window .
train
false
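ensure_compat here appears to be a pre-0.18 pandas shim; in modern pandas the same rolling count is expressed directly on the Series. A minimal sketch of the present-day equivalent:

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 2.0, 3.0, np.nan])
# count of non-NaN observations in each trailing 2-element window
print(s.rolling(window=2, min_periods=0).count())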
50,056
def buildSimpleBorderSwipingNet(size=3, dim=3, hsize=1, predefined={}):
    dims = tuple(([size] * dim))
    hdims = tuple((list(dims) + [(2 ** dim)]))
    inmod = LinearLayer((size ** dim), name='input')
    inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')
    outmod = LinearLayer((size ** dim), name='output')
    outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')
    hiddenmesh = ModuleMesh.constructWithLayers(TanhLayer, hsize, hdims, 'hidden')
    return BorderSwipingNetwork(inmesh, hiddenmesh, outmesh, predefined=predefined)
[ "def", "buildSimpleBorderSwipingNet", "(", "size", "=", "3", ",", "dim", "=", "3", ",", "hsize", "=", "1", ",", "predefined", "=", "{", "}", ")", ":", "dims", "=", "tuple", "(", "(", "[", "size", "]", "*", "dim", ")", ")", "hdims", "=", "tuple", "(", "(", "list", "(", "dims", ")", "+", "[", "(", "2", "**", "dim", ")", "]", ")", ")", "inmod", "=", "LinearLayer", "(", "(", "size", "**", "dim", ")", ",", "name", "=", "'input'", ")", "inmesh", "=", "ModuleMesh", ".", "viewOnFlatLayer", "(", "inmod", ",", "dims", ",", "'inmesh'", ")", "outmod", "=", "LinearLayer", "(", "(", "size", "**", "dim", ")", ",", "name", "=", "'output'", ")", "outmesh", "=", "ModuleMesh", ".", "viewOnFlatLayer", "(", "outmod", ",", "dims", ",", "'outmesh'", ")", "hiddenmesh", "=", "ModuleMesh", ".", "constructWithLayers", "(", "TanhLayer", ",", "hsize", ",", "hdims", ",", "'hidden'", ")", "return", "BorderSwipingNetwork", "(", "inmesh", ",", "hiddenmesh", ",", "outmesh", ",", "predefined", "=", "predefined", ")" ]
build a simple swiping network .
train
false
50,057
def tag_source(source_name, tag_names=None):
    if ((not tag_names) or (tag_names is None)):
        return
    session = Session()
    try:
        source = session.query(ArchiveSource).filter((ArchiveSource.name == source_name)).first()
        if (not source):
            log.critical((u'Source `%s` does not exists' % source_name))
            srcs = u', '.join([s.name for s in session.query(ArchiveSource).order_by(ArchiveSource.name)])
            if srcs:
                log.info((u'Known sources: %s' % srcs))
            return
        tags = []
        for tag_name in tag_names:
            tags.append(get_tag(tag_name, session))
        log.verbose((u'Please wait while adding tags %s ...' % u', '.join(tag_names)))
        for a in session.query(ArchiveEntry).filter(ArchiveEntry.sources.any(name=source_name)).yield_per(5):
            a.tags.extend(tags)
    finally:
        session.commit()
        session.close()
[ "def", "tag_source", "(", "source_name", ",", "tag_names", "=", "None", ")", ":", "if", "(", "(", "not", "tag_names", ")", "or", "(", "tag_names", "is", "None", ")", ")", ":", "return", "session", "=", "Session", "(", ")", "try", ":", "source", "=", "session", ".", "query", "(", "ArchiveSource", ")", ".", "filter", "(", "(", "ArchiveSource", ".", "name", "==", "source_name", ")", ")", ".", "first", "(", ")", "if", "(", "not", "source", ")", ":", "log", ".", "critical", "(", "(", "u'Source `%s` does not exists'", "%", "source_name", ")", ")", "srcs", "=", "u', '", ".", "join", "(", "[", "s", ".", "name", "for", "s", "in", "session", ".", "query", "(", "ArchiveSource", ")", ".", "order_by", "(", "ArchiveSource", ".", "name", ")", "]", ")", "if", "srcs", ":", "log", ".", "info", "(", "(", "u'Known sources: %s'", "%", "srcs", ")", ")", "return", "tags", "=", "[", "]", "for", "tag_name", "in", "tag_names", ":", "tags", ".", "append", "(", "get_tag", "(", "tag_name", ",", "session", ")", ")", "log", ".", "verbose", "(", "(", "u'Please wait while adding tags %s ...'", "%", "u', '", ".", "join", "(", "tag_names", ")", ")", ")", "for", "a", "in", "session", ".", "query", "(", "ArchiveEntry", ")", ".", "filter", "(", "ArchiveEntry", ".", "sources", ".", "any", "(", "name", "=", "source_name", ")", ")", ".", "yield_per", "(", "5", ")", ":", "a", ".", "tags", ".", "extend", "(", "tags", ")", "finally", ":", "session", ".", "commit", "(", ")", "session", ".", "close", "(", ")" ]
tags all archived entries within a source with supplied tags .
train
false
50,058
def _make_faces_from_pb(faces): return [Face.from_pb(face) for face in faces]
[ "def", "_make_faces_from_pb", "(", "faces", ")", ":", "return", "[", "Face", ".", "from_pb", "(", "face", ")", "for", "face", "in", "faces", "]" ]
create face objects from a protobuf response .
train
false
50,062
def _rcm_estimate(G, nodelist):
    G = G.subgraph(nodelist)
    order = reverse_cuthill_mckee_ordering(G)
    n = len(nodelist)
    index = dict(zip(nodelist, range(n)))
    x = ndarray(n, dtype=float)
    for (i, u) in enumerate(order):
        x[index[u]] = i
    x -= ((n - 1) / 2.0)
    return x
[ "def", "_rcm_estimate", "(", "G", ",", "nodelist", ")", ":", "G", "=", "G", ".", "subgraph", "(", "nodelist", ")", "order", "=", "reverse_cuthill_mckee_ordering", "(", "G", ")", "n", "=", "len", "(", "nodelist", ")", "index", "=", "dict", "(", "zip", "(", "nodelist", ",", "range", "(", "n", ")", ")", ")", "x", "=", "ndarray", "(", "n", ",", "dtype", "=", "float", ")", "for", "(", "i", ",", "u", ")", "in", "enumerate", "(", "order", ")", ":", "x", "[", "index", "[", "u", "]", "]", "=", "i", "x", "-=", "(", "(", "n", "-", "1", ")", "/", "2.0", ")", "return", "x" ]
estimate the fiedler vector using the reverse cuthill-mckee ordering .
train
false
50,063
def _get_configdir(): return _get_config_or_cache_dir(_get_xdg_config_dir())
[ "def", "_get_configdir", "(", ")", ":", "return", "_get_config_or_cache_dir", "(", "_get_xdg_config_dir", "(", ")", ")" ]
return the string representing the configuration directory .
train
false
50,064
def draw_circular(G, **kwargs): draw(G, circular_layout(G), **kwargs)
[ "def", "draw_circular", "(", "G", ",", "**", "kwargs", ")", ":", "draw", "(", "G", ",", "circular_layout", "(", "G", ")", ",", "**", "kwargs", ")" ]
draw the graph g with a circular layout .
train
false
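draw_circular ships with networkx's drawing module; a minimal usage sketch (requires matplotlib):

import matplotlib.pyplot as plt
import networkx as nx

G = nx.cycle_graph(6)
nx.draw_circular(G, with_labels=True)  # nodes placed on a circle via circular_layout
plt.show()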
50,065
def merge_delete(del_chunks, doc):
    doc.append(DEL_START)
    doc.extend(del_chunks)
    doc.append(DEL_END)
[ "def", "merge_delete", "(", "del_chunks", ",", "doc", ")", ":", "doc", ".", "append", "(", "DEL_START", ")", "doc", ".", "extend", "(", "del_chunks", ")", "doc", ".", "append", "(", "DEL_END", ")" ]
adds the text chunks in del_chunks to the document doc with marker to show it is a delete .
train
true
50,068
@register.filter(is_safe=True)
@stringfilter
def truncatechars_html(value, arg):
    try:
        length = int(arg)
    except ValueError:
        return value
    return Truncator(value).chars(length, html=True)
[ "@", "register", ".", "filter", "(", "is_safe", "=", "True", ")", "@", "stringfilter", "def", "truncatechars_html", "(", "value", ",", "arg", ")", ":", "try", ":", "length", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "return", "value", "return", "Truncator", "(", "value", ")", ".", "chars", "(", "length", ",", "html", "=", "True", ")" ]
truncates html after a certain number of chars .
train
false
50,069
def warn_default(version):
    if (config.warn.ignore_bug_before == 'None'):
        return True
    if (config.warn.ignore_bug_before == 'all'):
        return False
    if (config.warn.ignore_bug_before >= version):
        return False
    return True
[ "def", "warn_default", "(", "version", ")", ":", "if", "(", "config", ".", "warn", ".", "ignore_bug_before", "==", "'None'", ")", ":", "return", "True", "if", "(", "config", ".", "warn", ".", "ignore_bug_before", "==", "'all'", ")", ":", "return", "False", "if", "(", "config", ".", "warn", ".", "ignore_bug_before", ">=", "version", ")", ":", "return", "False", "return", "True" ]
return true iff we should warn about bugs fixed after a given version .
train
false
50,070
def utc_millesecs_from_epoch(for_datetime=None):
    if (not for_datetime):
        for_datetime = datetime.datetime.now()
    return (calendar.timegm(for_datetime.utctimetuple()) * 1000)
[ "def", "utc_millesecs_from_epoch", "(", "for_datetime", "=", "None", ")", ":", "if", "(", "not", "for_datetime", ")", ":", "for_datetime", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "return", "(", "calendar", ".", "timegm", "(", "for_datetime", ".", "utctimetuple", "(", ")", ")", "*", "1000", ")" ]
returns milliseconds from the unix epoch in utc .
train
false
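The function is stdlib-only, so it runs as-is given two imports; a small check against the epoch itself:

import calendar
import datetime

# with utc_millesecs_from_epoch as defined above in scope:
epoch = datetime.datetime(1970, 1, 1)
print(utc_millesecs_from_epoch(epoch))  # 0
# calendar.timegm treats the struct_time as UTC (the inverse of time.gmtime),
# so naive datetimes are interpreted as UTC rather than local time.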
50,072
def clone_request(request, method):
    ret = Request(request=request._request, parsers=request.parsers, authenticators=request.authenticators, negotiator=request.negotiator, parser_context=request.parser_context)
    ret._data = request._data
    ret._files = request._files
    ret._content_type = request._content_type
    ret._stream = request._stream
    ret._method = method
    if hasattr(request, '_user'):
        ret._user = request._user
    if hasattr(request, '_auth'):
        ret._auth = request._auth
    if hasattr(request, '_authenticator'):
        ret._authenticator = request._authenticator
    return ret
[ "def", "clone_request", "(", "request", ",", "method", ")", ":", "ret", "=", "Request", "(", "request", "=", "request", ".", "_request", ",", "parsers", "=", "request", ".", "parsers", ",", "authenticators", "=", "request", ".", "authenticators", ",", "negotiator", "=", "request", ".", "negotiator", ",", "parser_context", "=", "request", ".", "parser_context", ")", "ret", ".", "_data", "=", "request", ".", "_data", "ret", ".", "_files", "=", "request", ".", "_files", "ret", ".", "_content_type", "=", "request", ".", "_content_type", "ret", ".", "_stream", "=", "request", ".", "_stream", "ret", ".", "_method", "=", "method", "if", "hasattr", "(", "request", ",", "'_user'", ")", ":", "ret", ".", "_user", "=", "request", ".", "_user", "if", "hasattr", "(", "request", ",", "'_auth'", ")", ":", "ret", ".", "_auth", "=", "request", ".", "_auth", "if", "hasattr", "(", "request", ",", "'_authenticator'", ")", ":", "ret", ".", "_authenticator", "=", "request", ".", "_authenticator", "return", "ret" ]
internal helper method to clone a request .
train
false
50,073
def _set_tcp_keepalive(zmq_socket, opts):
    if (hasattr(zmq, 'TCP_KEEPALIVE') and opts):
        if ('tcp_keepalive' in opts):
            zmq_socket.setsockopt(zmq.TCP_KEEPALIVE, opts['tcp_keepalive'])
        if ('tcp_keepalive_idle' in opts):
            zmq_socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE, opts['tcp_keepalive_idle'])
        if ('tcp_keepalive_cnt' in opts):
            zmq_socket.setsockopt(zmq.TCP_KEEPALIVE_CNT, opts['tcp_keepalive_cnt'])
        if ('tcp_keepalive_intvl' in opts):
            zmq_socket.setsockopt(zmq.TCP_KEEPALIVE_INTVL, opts['tcp_keepalive_intvl'])
[ "def", "_set_tcp_keepalive", "(", "zmq_socket", ",", "opts", ")", ":", "if", "(", "hasattr", "(", "zmq", ",", "'TCP_KEEPALIVE'", ")", "and", "opts", ")", ":", "if", "(", "'tcp_keepalive'", "in", "opts", ")", ":", "zmq_socket", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE", ",", "opts", "[", "'tcp_keepalive'", "]", ")", "if", "(", "'tcp_keepalive_idle'", "in", "opts", ")", ":", "zmq_socket", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE_IDLE", ",", "opts", "[", "'tcp_keepalive_idle'", "]", ")", "if", "(", "'tcp_keepalive_cnt'", "in", "opts", ")", ":", "zmq_socket", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE_CNT", ",", "opts", "[", "'tcp_keepalive_cnt'", "]", ")", "if", "(", "'tcp_keepalive_intvl'", "in", "opts", ")", ":", "zmq_socket", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE_INTVL", ",", "opts", "[", "'tcp_keepalive_intvl'", "]", ")" ]
ensure that tcp keepalives are set as specified in "opts" .
train
true
50,074
def DistEntry(): flags.StartMain(main)
[ "def", "DistEntry", "(", ")", ":", "flags", ".", "StartMain", "(", "main", ")" ]
the main entry point for packages .
train
false
50,075
def get_site_user(context, data_dict):
    _check_access('get_site_user', context, data_dict)
    model = context['model']
    site_id = config.get('ckan.site_id', 'ckan_site_user')
    user = model.User.get(site_id)
    if (not user):
        apikey = str(uuid.uuid4())
        user = model.User(name=site_id, password=apikey, apikey=apikey)
        user.sysadmin = True
        model.Session.add(user)
        model.Session.flush()
        if (not context.get('defer_commit')):
            model.repo.commit()
    return {'name': user.name, 'apikey': user.apikey}
[ "def", "get_site_user", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'get_site_user'", ",", "context", ",", "data_dict", ")", "model", "=", "context", "[", "'model'", "]", "site_id", "=", "config", ".", "get", "(", "'ckan.site_id'", ",", "'ckan_site_user'", ")", "user", "=", "model", ".", "User", ".", "get", "(", "site_id", ")", "if", "(", "not", "user", ")", ":", "apikey", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "user", "=", "model", ".", "User", "(", "name", "=", "site_id", ",", "password", "=", "apikey", ",", "apikey", "=", "apikey", ")", "user", ".", "sysadmin", "=", "True", "model", ".", "Session", ".", "add", "(", "user", ")", "model", ".", "Session", ".", "flush", "(", ")", "if", "(", "not", "context", ".", "get", "(", "'defer_commit'", ")", ")", ":", "model", ".", "repo", ".", "commit", "(", ")", "return", "{", "'name'", ":", "user", ".", "name", ",", "'apikey'", ":", "user", ".", "apikey", "}" ]
return the ckan site user .
train
false
50,076
def enforce_limit(limit):
    if (limit is None):
        limit = pecan.request.cfg.api.default_api_return_limit
        LOG.info(_LI('No limit value provided, result set will be limited to %(limit)d.'), {'limit': limit})
    if ((not limit) or (limit <= 0)):
        raise base.ClientSideError(_('Limit must be positive'))
    return limit
[ "def", "enforce_limit", "(", "limit", ")", ":", "if", "(", "limit", "is", "None", ")", ":", "limit", "=", "pecan", ".", "request", ".", "cfg", ".", "api", ".", "default_api_return_limit", "LOG", ".", "info", "(", "_LI", "(", "'No limit value provided, result set will be limited to %(limit)d.'", ")", ",", "{", "'limit'", ":", "limit", "}", ")", "if", "(", "(", "not", "limit", ")", "or", "(", "limit", "<=", "0", ")", ")", ":", "raise", "base", ".", "ClientSideError", "(", "_", "(", "'Limit must be positive'", ")", ")", "return", "limit" ]
ensure limit is defined and is valid .
train
false
50,077
def typecheck(expressions, signature=None):
    for expression in expressions:
        signature = expression.typecheck(signature)
    for expression in expressions[:(-1)]:
        expression.typecheck(signature)
    return signature
[ "def", "typecheck", "(", "expressions", ",", "signature", "=", "None", ")", ":", "for", "expression", "in", "expressions", ":", "signature", "=", "expression", ".", "typecheck", "(", "signature", ")", "for", "expression", "in", "expressions", "[", ":", "(", "-", "1", ")", "]", ":", "expression", ".", "typecheck", "(", "signature", ")", "return", "signature" ]
ensure correct typing across a collection of expression objects .
train
false
50,078
def _iterate_axial_slices(array, limits=None):
    shape = array.shape[1]
    for ind in range(shape):
        if (limits and (ind not in limits)):
            continue
        (yield (ind, array[:, ind, :]))
[ "def", "_iterate_axial_slices", "(", "array", ",", "limits", "=", "None", ")", ":", "shape", "=", "array", ".", "shape", "[", "1", "]", "for", "ind", "in", "range", "(", "shape", ")", ":", "if", "(", "limits", "and", "(", "ind", "not", "in", "limits", ")", ")", ":", "continue", "(", "yield", "(", "ind", ",", "array", "[", ":", ",", "ind", ",", ":", "]", ")", ")" ]
iterate axial slices .
train
false
50,079
def house_graph(create_using=None):
    description = ['adjacencylist', 'House Graph', 5, [[2, 3], [1, 4], [1, 4, 5], [2, 3, 5], [3, 4]]]
    G = make_small_undirected_graph(description, create_using)
    return G
[ "def", "house_graph", "(", "create_using", "=", "None", ")", ":", "description", "=", "[", "'adjacencylist'", ",", "'House Graph'", ",", "5", ",", "[", "[", "2", ",", "3", "]", ",", "[", "1", ",", "4", "]", ",", "[", "1", ",", "4", ",", "5", "]", ",", "[", "2", ",", "3", ",", "5", "]", ",", "[", "3", ",", "4", "]", "]", "]", "G", "=", "make_small_undirected_graph", "(", "description", ",", "create_using", ")", "return", "G" ]
return the house graph .
train
false
50,080
def create_dir(path):
    try:
        os.makedirs(path)
    except OSError as exception:
        if (exception.errno != errno.EEXIST):
            raise
[ "def", "create_dir", "(", "path", ")", ":", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "exception", ":", "if", "(", "exception", ".", "errno", "!=", "errno", ".", "EEXIST", ")", ":", "raise" ]
creates a directory atomically .
train
false
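The snippet needs only os and errno; a quick demonstration of the EEXIST-swallowing behavior:

import errno
import os
import tempfile

# with create_dir as defined above in scope:
target = os.path.join(tempfile.mkdtemp(), 'a', 'b')
create_dir(target)
create_dir(target)  # second call hits EEXIST and returns quietly
print(os.path.isdir(target))  # True

On Python 3.2+, os.makedirs(path, exist_ok=True) expresses the same intent directly.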
50,081
def view_as_blocks(arr_in, block_shape):
    if (not isinstance(block_shape, tuple)):
        raise TypeError('block needs to be a tuple')
    block_shape = np.array(block_shape)
    if (block_shape <= 0).any():
        raise ValueError("'block_shape' elements must be strictly positive")
    if (block_shape.size != arr_in.ndim):
        raise ValueError("'block_shape' must have the same length as 'arr_in.shape'")
    arr_shape = np.array(arr_in.shape)
    if ((arr_shape % block_shape).sum() != 0):
        raise ValueError("'block_shape' is not compatible with 'arr_in'")
    if (not arr_in.flags.contiguous):
        warn(RuntimeWarning('Cannot provide views on a non-contiguous input array without copying.'))
        arr_in = np.ascontiguousarray(arr_in)
    new_shape = (tuple((arr_shape // block_shape)) + tuple(block_shape))
    new_strides = (tuple((arr_in.strides * block_shape)) + arr_in.strides)
    arr_out = as_strided(arr_in, shape=new_shape, strides=new_strides)
    return arr_out
[ "def", "view_as_blocks", "(", "arr_in", ",", "block_shape", ")", ":", "if", "(", "not", "isinstance", "(", "block_shape", ",", "tuple", ")", ")", ":", "raise", "TypeError", "(", "'block needs to be a tuple'", ")", "block_shape", "=", "np", ".", "array", "(", "block_shape", ")", "if", "(", "block_shape", "<=", "0", ")", ".", "any", "(", ")", ":", "raise", "ValueError", "(", "\"'block_shape' elements must be strictly positive\"", ")", "if", "(", "block_shape", ".", "size", "!=", "arr_in", ".", "ndim", ")", ":", "raise", "ValueError", "(", "\"'block_shape' must have the same length as 'arr_in.shape'\"", ")", "arr_shape", "=", "np", ".", "array", "(", "arr_in", ".", "shape", ")", "if", "(", "(", "arr_shape", "%", "block_shape", ")", ".", "sum", "(", ")", "!=", "0", ")", ":", "raise", "ValueError", "(", "\"'block_shape' is not compatible with 'arr_in'\"", ")", "if", "(", "not", "arr_in", ".", "flags", ".", "contiguous", ")", ":", "warn", "(", "RuntimeWarning", "(", "'Cannot provide views on a non-contiguous input array without copying.'", ")", ")", "arr_in", "=", "np", ".", "ascontiguousarray", "(", "arr_in", ")", "new_shape", "=", "(", "tuple", "(", "(", "arr_shape", "//", "block_shape", ")", ")", "+", "tuple", "(", "block_shape", ")", ")", "new_strides", "=", "(", "tuple", "(", "(", "arr_in", ".", "strides", "*", "block_shape", ")", ")", "+", "arr_in", ".", "strides", ")", "arr_out", "=", "as_strided", "(", "arr_in", ",", "shape", "=", "new_shape", ",", "strides", "=", "new_strides", ")", "return", "arr_out" ]
block view of the input n-dimensional array .
train
false
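This is scikit-image's view_as_blocks; a usage sketch against the released API:

import numpy as np
from skimage.util import view_as_blocks

a = np.arange(16).reshape(4, 4)
blocks = view_as_blocks(a, block_shape=(2, 2))
print(blocks.shape)  # (2, 2, 2, 2): a 2x2 grid of 2x2 tiles
print(blocks[0, 1])  # the top-right tile: [[2 3] [6 7]]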
50,083
def process_movie_reviews(service, reader, sentiment_writer, entity_writer):
    collected_entities = {}
    for document in reader:
        try:
            (sentiment_total, entities) = get_sentiment_entities(service, document)
        except HttpError as e:
            logging.error('Error process_movie_reviews {}'.format(e.content))
            continue
        document.label = get_sentiment_label(sentiment_total)
        sentiment_writer.write(to_sentiment_json(document.doc_id, sentiment_total, document.label))
        sentiment_writer.write('\n')
        for ent in entities:
            (ent_sent, frequency) = collected_entities.get(ent, (0, 0))
            ent_sent += sentiment_total
            frequency += 1
            collected_entities[ent] = (ent_sent, frequency)
    for (entity, sentiment_frequency) in collected_entities.items():
        entity_writer.write(to_entity_json(entity, sentiment_frequency[0], sentiment_frequency[1]))
        entity_writer.write('\n')
    sentiment_writer.flush()
    entity_writer.flush()
[ "def", "process_movie_reviews", "(", "service", ",", "reader", ",", "sentiment_writer", ",", "entity_writer", ")", ":", "collected_entities", "=", "{", "}", "for", "document", "in", "reader", ":", "try", ":", "(", "sentiment_total", ",", "entities", ")", "=", "get_sentiment_entities", "(", "service", ",", "document", ")", "except", "HttpError", "as", "e", ":", "logging", ".", "error", "(", "'Error process_movie_reviews {}'", ".", "format", "(", "e", ".", "content", ")", ")", "continue", "document", ".", "label", "=", "get_sentiment_label", "(", "sentiment_total", ")", "sentiment_writer", ".", "write", "(", "to_sentiment_json", "(", "document", ".", "doc_id", ",", "sentiment_total", ",", "document", ".", "label", ")", ")", "sentiment_writer", ".", "write", "(", "'\\n'", ")", "for", "ent", "in", "entities", ":", "(", "ent_sent", ",", "frequency", ")", "=", "collected_entities", ".", "get", "(", "ent", ",", "(", "0", ",", "0", ")", ")", "ent_sent", "+=", "sentiment_total", "frequency", "+=", "1", "collected_entities", "[", "ent", "]", "=", "(", "ent_sent", ",", "frequency", ")", "for", "(", "entity", ",", "sentiment_frequency", ")", "in", "collected_entities", ".", "items", "(", ")", ":", "entity_writer", ".", "write", "(", "to_entity_json", "(", "entity", ",", "sentiment_frequency", "[", "0", "]", ",", "sentiment_frequency", "[", "1", "]", ")", ")", "entity_writer", ".", "write", "(", "'\\n'", ")", "sentiment_writer", ".", "flush", "(", ")", "entity_writer", ".", "flush", "(", ")" ]
perform some sentiment math and come up with a movie review .
train
false
50,084
def smoment(X, n, condition=None, **kwargs):
    sigma = std(X, condition, **kwargs)
    return (((1 / sigma) ** n) * cmoment(X, n, condition, **kwargs))
[ "def", "smoment", "(", "X", ",", "n", ",", "condition", "=", "None", ",", "**", "kwargs", ")", ":", "sigma", "=", "std", "(", "X", ",", "condition", ",", "**", "kwargs", ")", "return", "(", "(", "(", "1", "/", "sigma", ")", "**", "n", ")", "*", "cmoment", "(", "X", ",", "n", ",", "condition", ",", "**", "kwargs", ")", ")" ]
return the nth standardized moment of a random expression i .
train
false
50,085
def hash_shard(word): return ('server%d' % (hash(word) % 4))
[ "def", "hash_shard", "(", "word", ")", ":", "return", "(", "'server%d'", "%", "(", "hash", "(", "word", ")", "%", "4", ")", ")" ]
assign data to servers using python's built-in hash() function .
train
false
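A self-contained demo; note that on Python 3, hash() of a str is randomized per process (PYTHONHASHSEED), so the shard assignment is only stable within a single run:

def hash_shard(word):
    return ('server%d' % (hash(word) % 4))

for word in ('apple', 'banana', 'cherry'):
    print(word, '->', hash_shard(word))  # e.g. apple -> server2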
50,089
def PackBool(name, value, pbvalue): pbvalue.set_booleanvalue(value)
[ "def", "PackBool", "(", "name", ",", "value", ",", "pbvalue", ")", ":", "pbvalue", ".", "set_booleanvalue", "(", "value", ")" ]
packs a boolean property into a entity_pb .
train
false
50,091
def check_missing_generic(role, folder_base, generic_role_names, provider, constants, monkeypatch):
    assert (folder_base is not None)
    role_map = (constants['gmail_role_map'] if (provider == 'gmail') else constants['imap_role_map'])
    keys_to_remove = []
    for folder_name in role_map:
        if (role_map[folder_name] == role):
            keys_to_remove.append(folder_name)
    for key in keys_to_remove:
        del role_map[key]
    for role_alias in generic_role_names:
        folders = (folder_base + [('\\HasNoChildren', '/', role_alias)])
        client = (patch_gmail_client(monkeypatch, folders) if (provider == 'gmail') else patch_generic_client(monkeypatch, folders))
        raw_folders = client.folders()
        role_map[role_alias] = role
        generic_folder_checks(raw_folders, role_map, client, provider)
        del role_map[role_alias]
[ "def", "check_missing_generic", "(", "role", ",", "folder_base", ",", "generic_role_names", ",", "provider", ",", "constants", ",", "monkeypatch", ")", ":", "assert", "(", "folder_base", "is", "not", "None", ")", "role_map", "=", "(", "constants", "[", "'gmail_role_map'", "]", "if", "(", "provider", "==", "'gmail'", ")", "else", "constants", "[", "'imap_role_map'", "]", ")", "keys_to_remove", "=", "[", "]", "for", "folder_name", "in", "role_map", ":", "if", "(", "role_map", "[", "folder_name", "]", "==", "role", ")", ":", "keys_to_remove", ".", "append", "(", "folder_name", ")", "for", "key", "in", "keys_to_remove", ":", "del", "role_map", "[", "key", "]", "for", "role_alias", "in", "generic_role_names", ":", "folders", "=", "(", "folder_base", "+", "[", "(", "'\\\\HasNoChildren'", ",", "'/'", ",", "role_alias", ")", "]", ")", "client", "=", "(", "patch_gmail_client", "(", "monkeypatch", ",", "folders", ")", "if", "(", "provider", "==", "'gmail'", ")", "else", "patch_generic_client", "(", "monkeypatch", ",", "folders", ")", ")", "raw_folders", "=", "client", ".", "folders", "(", ")", "role_map", "[", "role_alias", "]", "=", "role", "generic_folder_checks", "(", "raw_folders", ",", "role_map", ",", "client", ",", "provider", ")", "del", "role_map", "[", "role_alias", "]" ]
check that clients label every folder in generic_role_names as the input role . role: the role that the generic_role_names should be assigned . folder_base: generic list of folders .
train
false
50,093
def Handle(environ):
    error = logservice.LogsBuffer()
    request_environment.current_request.Init(error, environ)
    response = {'error': 0, 'response_code': 200}
    try:
        request_id = environ[BACKGROUND_REQUEST_ID]
        _pending_background_threads.RunBackgroundThread(request_id)
        return response
    except:
        exception = sys.exc_info()
        tb = exception[2].tb_next
        if tb:
            tb = tb.tb_next
        message = ''.join(traceback.format_exception(exception[0], exception[1], tb))
        logging.error(message)
        response['response_code'] = 500
        response['error'] = 1
        return response
    finally:
        request_environment.current_request.Clear()
        response['logs'] = error.parse_logs()
[ "def", "Handle", "(", "environ", ")", ":", "error", "=", "logservice", ".", "LogsBuffer", "(", ")", "request_environment", ".", "current_request", ".", "Init", "(", "error", ",", "environ", ")", "response", "=", "{", "'error'", ":", "0", ",", "'response_code'", ":", "200", "}", "try", ":", "request_id", "=", "environ", "[", "BACKGROUND_REQUEST_ID", "]", "_pending_background_threads", ".", "RunBackgroundThread", "(", "request_id", ")", "return", "response", "except", ":", "exception", "=", "sys", ".", "exc_info", "(", ")", "tb", "=", "exception", "[", "2", "]", ".", "tb_next", "if", "tb", ":", "tb", "=", "tb", ".", "tb_next", "message", "=", "''", ".", "join", "(", "traceback", ".", "format_exception", "(", "exception", "[", "0", "]", ",", "exception", "[", "1", "]", ",", "tb", ")", ")", "logging", ".", "error", "(", "message", ")", "response", "[", "'response_code'", "]", "=", "500", "response", "[", "'error'", "]", "=", "1", "return", "response", "finally", ":", "request_environment", ".", "current_request", ".", "Clear", "(", ")", "response", "[", "'logs'", "]", "=", "error", ".", "parse_logs", "(", ")" ]
handles a background request .
train
false
50,095
def getTextComplexLoops(fontFamily, fontSize, text, yAxisPointingUpward=True):
    textComplexLoops = []
    fontReader = getFontReader(fontFamily)
    horizontalAdvanceX = 0.0
    for character in text:
        glyph = fontReader.getGlyph(character, yAxisPointingUpward)
        textComplexLoops += glyph.getSizedAdvancedLoops(fontSize, horizontalAdvanceX, yAxisPointingUpward)
        horizontalAdvanceX += glyph.horizontalAdvanceX
    return textComplexLoops
[ "def", "getTextComplexLoops", "(", "fontFamily", ",", "fontSize", ",", "text", ",", "yAxisPointingUpward", "=", "True", ")", ":", "textComplexLoops", "=", "[", "]", "fontReader", "=", "getFontReader", "(", "fontFamily", ")", "horizontalAdvanceX", "=", "0.0", "for", "character", "in", "text", ":", "glyph", "=", "fontReader", ".", "getGlyph", "(", "character", ",", "yAxisPointingUpward", ")", "textComplexLoops", "+=", "glyph", ".", "getSizedAdvancedLoops", "(", "fontSize", ",", "horizontalAdvanceX", ",", "yAxisPointingUpward", ")", "horizontalAdvanceX", "+=", "glyph", ".", "horizontalAdvanceX", "return", "textComplexLoops" ]
get text as complex loops .
train
false
50,098
def _gather_update_categories(updateCollection):
    categories = []
    for i in range(updateCollection.Count):
        update = updateCollection.Item(i)
        for j in range(update.Categories.Count):
            name = update.Categories.Item(j).Name
            if (name not in categories):
                log.debug('found category: {0}'.format(name))
                categories.append(name)
    return categories
[ "def", "_gather_update_categories", "(", "updateCollection", ")", ":", "categories", "=", "[", "]", "for", "i", "in", "range", "(", "updateCollection", ".", "Count", ")", ":", "update", "=", "updateCollection", ".", "Item", "(", "i", ")", "for", "j", "in", "range", "(", "update", ".", "Categories", ".", "Count", ")", ":", "name", "=", "update", ".", "Categories", ".", "Item", "(", "j", ")", ".", "Name", "if", "(", "name", "not", "in", "categories", ")", ":", "log", ".", "debug", "(", "'found category: {0}'", ".", "format", "(", "name", ")", ")", "categories", ".", "append", "(", "name", ")", "return", "categories" ]
this is a convenience method to gather what categories of updates are available in any update collection it is passed .
train
false
50,099
def check_versions(service, versions, required, recommended=None):
    must_upgrade = dict(filter((lambda x: (required > x[1])), versions.items()))
    if must_upgrade:
        raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
    if recommended:
        should_upgrade = dict(filter((lambda x: (recommended > x[1])), versions.items()))
        if should_upgrade:
            warnings.warn(make_upgrade_message(service, 'should', recommended, should_upgrade), PendingDeprecationWarning)
[ "def", "check_versions", "(", "service", ",", "versions", ",", "required", ",", "recommended", "=", "None", ")", ":", "must_upgrade", "=", "dict", "(", "filter", "(", "(", "lambda", "x", ":", "(", "required", ">", "x", "[", "1", "]", ")", ")", ",", "versions", ".", "items", "(", ")", ")", ")", "if", "must_upgrade", ":", "raise", "InvalidConfiguration", "(", "make_upgrade_message", "(", "service", ",", "'must'", ",", "required", ",", "must_upgrade", ")", ")", "if", "recommended", ":", "should_upgrade", "=", "dict", "(", "filter", "(", "(", "lambda", "x", ":", "(", "recommended", ">", "x", "[", "1", "]", ")", ")", ",", "versions", ".", "items", "(", ")", ")", ")", "if", "should_upgrade", ":", "warnings", ".", "warn", "(", "make_upgrade_message", "(", "service", ",", "'should'", ",", "recommended", ",", "should_upgrade", ")", ",", "PendingDeprecationWarning", ")" ]
check that hosts fulfill version requirements .
train
false
50,100
def test_fnpickling_simple(tmpdir):
    fn = str(tmpdir.join('test1.pickle'))
    obj1 = 'astring'
    fnpickle(obj1, fn)
    res = fnunpickle(fn)
    assert (obj1 == res)
    fnpickle(obj1, fn, usecPickle=False)
    res = fnunpickle(fn, usecPickle=False)
    assert (obj1 == res)
    with open(fn, 'wb') as f:
        fnpickle(obj1, f)
    with open(fn, 'rb') as f:
        res = fnunpickle(f)
    assert (obj1 == res)
    with open(fn, 'wb') as f:
        fnpickle(obj1, f, usecPickle=False)
    with open(fn, 'rb') as f:
        res = fnunpickle(f, usecPickle=False)
    assert (obj1 == res)
[ "def", "test_fnpickling_simple", "(", "tmpdir", ")", ":", "fn", "=", "str", "(", "tmpdir", ".", "join", "(", "'test1.pickle'", ")", ")", "obj1", "=", "'astring'", "fnpickle", "(", "obj1", ",", "fn", ")", "res", "=", "fnunpickle", "(", "fn", ")", "assert", "(", "obj1", "==", "res", ")", "fnpickle", "(", "obj1", ",", "fn", ",", "usecPickle", "=", "False", ")", "res", "=", "fnunpickle", "(", "fn", ",", "usecPickle", "=", "False", ")", "assert", "(", "obj1", "==", "res", ")", "with", "open", "(", "fn", ",", "'wb'", ")", "as", "f", ":", "fnpickle", "(", "obj1", ",", "f", ")", "with", "open", "(", "fn", ",", "'rb'", ")", "as", "f", ":", "res", "=", "fnunpickle", "(", "f", ")", "assert", "(", "obj1", "==", "res", ")", "with", "open", "(", "fn", ",", "'wb'", ")", "as", "f", ":", "fnpickle", "(", "obj1", ",", "f", ",", "usecPickle", "=", "False", ")", "with", "open", "(", "fn", ",", "'rb'", ")", "as", "f", ":", "res", "=", "fnunpickle", "(", "f", ",", "usecPickle", "=", "False", ")", "assert", "(", "obj1", "==", "res", ")" ]
tests the fnpickle and fnunpickle functions' basic operation by pickling and unpickling a string .
train
false
50,102
def cleanUp(*arg, **kw):
    package = kw.get('package', None)
    if (package is None):
        package = caller_package()
        kw['package'] = package
    return setUp(*arg, **kw)
[ "def", "cleanUp", "(", "*", "arg", ",", "**", "kw", ")", ":", "package", "=", "kw", ".", "get", "(", "'package'", ",", "None", ")", "if", "(", "package", "is", "None", ")", ":", "package", "=", "caller_package", "(", ")", "kw", "[", "'package'", "]", "=", "package", "return", "setUp", "(", "*", "arg", ",", "**", "kw", ")" ]
just removes the dlls directory we created .
train
false
50,103
def _prep_tuple(v):
    from sympy import unpolarify
    return TupleArg(*[unpolarify(x) for x in v])
[ "def", "_prep_tuple", "(", "v", ")", ":", "from", "sympy", "import", "unpolarify", "return", "TupleArg", "(", "*", "[", "unpolarify", "(", "x", ")", "for", "x", "in", "v", "]", ")" ]
turn an iterable argument v into a tuple and unpolarify .
train
false
50,104
def _make_lock_uri(cloud_tmp_dir, cluster_id, step_num): return ((((cloud_tmp_dir + 'locks/') + cluster_id) + '/') + str(step_num))
[ "def", "_make_lock_uri", "(", "cloud_tmp_dir", ",", "cluster_id", ",", "step_num", ")", ":", "return", "(", "(", "(", "(", "cloud_tmp_dir", "+", "'locks/'", ")", "+", "cluster_id", ")", "+", "'/'", ")", "+", "str", "(", "step_num", ")", ")" ]
generate the uri to lock the cluster cluster_id .
train
false
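A small driver showing the shape of the generated URI (the bucket and cluster id below are hypothetical):

# with _make_lock_uri as defined above in scope:
print(_make_lock_uri('s3://my-bucket/tmp/', 'j-CLUSTERID', 3))
# s3://my-bucket/tmp/locks/j-CLUSTERID/3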
50,105
def connectionCreatorFromEndpoint(memoryReactor, tlsEndpoint): return tlsEndpoint._wrapperFactory(None)._connectionCreator
[ "def", "connectionCreatorFromEndpoint", "(", "memoryReactor", ",", "tlsEndpoint", ")", ":", "return", "tlsEndpoint", ".", "_wrapperFactory", "(", "None", ")", ".", "_connectionCreator" ]
given a l{memoryreactor} and the result of calling l{wrapclienttls} .
train
false
50,106
def env(*args, **kwargs):
    for arg in args:
        value = os.environ.get(arg, None)
        if value:
            return value
    return kwargs.get('default', '')
[ "def", "env", "(", "*", "args", ",", "**", "kwargs", ")", ":", "for", "arg", "in", "args", ":", "value", "=", "os", ".", "environ", ".", "get", "(", "arg", ",", "None", ")", "if", "value", ":", "return", "value", "return", "kwargs", ".", "get", "(", "'default'", ",", "''", ")" ]
returns the first environment variable set .
train
true
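A self-contained demo (the variable names are made up for illustration):

import os

# with env as defined above in scope:
os.environ['MY_TOKEN'] = 'abc123'
print(env('MISSING_VAR', 'MY_TOKEN', default='fallback'))  # abc123
print(env('MISSING_VAR', default='fallback'))              # fallback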
50,107
def _argument_adapter(callback):
    def wrapper(*args, **kwargs):
        if (kwargs or (len(args) > 1)):
            callback(Arguments(args, kwargs))
        elif args:
            callback(args[0])
        else:
            callback(None)
    return wrapper
[ "def", "_argument_adapter", "(", "callback", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "kwargs", "or", "(", "len", "(", "args", ")", ">", "1", ")", ")", ":", "callback", "(", "Arguments", "(", "args", ",", "kwargs", ")", ")", "elif", "args", ":", "callback", "(", "args", "[", "0", "]", ")", "else", ":", "callback", "(", "None", ")", "return", "wrapper" ]
returns a function that when invoked runs callback with one arg .
train
true
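Arguments is tornado's namedtuple of positional and keyword arguments; a runnable sketch with a stand-in definition for it:

import collections

# stand-in for tornado.util.Arguments (an assumption for this demo)
Arguments = collections.namedtuple('Arguments', ['args', 'kwargs'])

# with _argument_adapter as defined above in scope:
adapted = _argument_adapter(print)
adapted()           # None
adapted('x')        # x
adapted('x', k=1)   # Arguments(args=('x',), kwargs={'k': 1})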
50,108
def should_vendor_libs():
    vendor_libs = (not os.getenv(u'GIT_COLA_NO_VENDOR_LIBS', u''))
    if (u'--no-vendor-libs' in sys.argv):
        sys.argv.remove(u'--no-vendor-libs')
        vendor_libs = False
    return vendor_libs
[ "def", "should_vendor_libs", "(", ")", ":", "vendor_libs", "=", "(", "not", "os", ".", "getenv", "(", "u'GIT_COLA_NO_VENDOR_LIBS'", ",", "u''", ")", ")", "if", "(", "u'--no-vendor-libs'", "in", "sys", ".", "argv", ")", ":", "sys", ".", "argv", ".", "remove", "(", "u'--no-vendor-libs'", ")", "vendor_libs", "=", "False", "return", "vendor_libs" ]
return true if we should include vendored libraries .
train
false
50,109
def visit_snippet_latex(self, node): self.verbatim = ''
[ "def", "visit_snippet_latex", "(", "self", ",", "node", ")", ":", "self", ".", "verbatim", "=", "''" ]
latex document generator visit handler .
train
false
50,110
def load_marked_modules(cr, graph, states, force, progressdict, report, loaded_modules, perform_checks):
    processed_modules = []
    while True:
        cr.execute('SELECT name from ir_module_module WHERE state IN %s', (tuple(states),))
        module_list = [name for (name,) in cr.fetchall() if (name not in graph)]
        if (not module_list):
            break
        graph.add_modules(cr, module_list, force)
        _logger.debug('Updating graph with %d more modules', len(module_list))
        (loaded, processed) = load_module_graph(cr, graph, progressdict, report=report, skip_modules=loaded_modules, perform_checks=perform_checks)
        processed_modules.extend(processed)
        loaded_modules.extend(loaded)
        if (not processed):
            break
    return processed_modules
[ "def", "load_marked_modules", "(", "cr", ",", "graph", ",", "states", ",", "force", ",", "progressdict", ",", "report", ",", "loaded_modules", ",", "perform_checks", ")", ":", "processed_modules", "=", "[", "]", "while", "True", ":", "cr", ".", "execute", "(", "'SELECT name from ir_module_module WHERE state IN %s'", ",", "(", "tuple", "(", "states", ")", ",", ")", ")", "module_list", "=", "[", "name", "for", "(", "name", ",", ")", "in", "cr", ".", "fetchall", "(", ")", "if", "(", "name", "not", "in", "graph", ")", "]", "if", "(", "not", "module_list", ")", ":", "break", "graph", ".", "add_modules", "(", "cr", ",", "module_list", ",", "force", ")", "_logger", ".", "debug", "(", "'Updating graph with %d more modules'", ",", "len", "(", "module_list", ")", ")", "(", "loaded", ",", "processed", ")", "=", "load_module_graph", "(", "cr", ",", "graph", ",", "progressdict", ",", "report", "=", "report", ",", "skip_modules", "=", "loaded_modules", ",", "perform_checks", "=", "perform_checks", ")", "processed_modules", ".", "extend", "(", "processed", ")", "loaded_modules", ".", "extend", "(", "loaded", ")", "if", "(", "not", "processed", ")", ":", "break", "return", "processed_modules" ]
loads modules marked with states .
train
false
50,111
def slow(slowness_reason):
    def decorator(f):
        f.slowness_reason = slowness_reason
        return f
    return decorator
[ "def", "slow", "(", "slowness_reason", ")", ":", "def", "decorator", "(", "f", ")", ":", "f", ".", "slowness_reason", "=", "slowness_reason", "return", "f", "return", "decorator" ]
this is a decorator that annotates a test as being "known to be slow" .
train
false
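A self-contained demo of the annotation:

def slow(slowness_reason):
    def decorator(f):
        f.slowness_reason = slowness_reason
        return f
    return decorator

@slow('does a full table scan')
def test_rebuild_index():
    pass

print(test_rebuild_index.slowness_reason)  # a runner can filter on this attribute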
50,112
def _xfs_info_get_kv(serialized):
    if serialized.startswith('='):
        serialized = serialized[1:].strip()
    serialized = serialized.replace(' = ', '=*** ').replace(' =', '=')
    opt = []
    for tkn in serialized.split(' '):
        if ((not opt) or ('=' in tkn)):
            opt.append(tkn)
        else:
            opt[(len(opt) - 1)] = ((opt[(len(opt) - 1)] + ' ') + tkn)
    return [tuple(items.split('=')) for items in opt]
[ "def", "_xfs_info_get_kv", "(", "serialized", ")", ":", "if", "serialized", ".", "startswith", "(", "'='", ")", ":", "serialized", "=", "serialized", "[", "1", ":", "]", ".", "strip", "(", ")", "serialized", "=", "serialized", ".", "replace", "(", "' = '", ",", "'=*** '", ")", ".", "replace", "(", "' ='", ",", "'='", ")", "opt", "=", "[", "]", "for", "tkn", "in", "serialized", ".", "split", "(", "' '", ")", ":", "if", "(", "(", "not", "opt", ")", "or", "(", "'='", "in", "tkn", ")", ")", ":", "opt", ".", "append", "(", "tkn", ")", "else", ":", "opt", "[", "(", "len", "(", "opt", ")", "-", "1", ")", "]", "=", "(", "(", "opt", "[", "(", "len", "(", "opt", ")", "-", "1", ")", "]", "+", "' '", ")", "+", "tkn", ")", "return", "[", "tuple", "(", "items", ".", "split", "(", "'='", ")", ")", "for", "items", "in", "opt", "]" ]
parse one line of the xfs info output .
train
true
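The parser is pure Python, so it can be exercised directly on a typical xfs_info line (the device and values below are made up):

# with _xfs_info_get_kv as defined above in scope:
line = 'meta-data=/dev/sda1 isize=256 agcount=4, agsize=6400 blks'
print(_xfs_info_get_kv(line))
# [('meta-data', '/dev/sda1'), ('isize', '256'), ('agcount', '4,'), ('agsize', '6400 blks')]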
50,113
def get_is_file_present(name):
    global NAME_PREFIX
    name = name.replace(NAME_PREFIX, '')
    filename = ('/' + name.replace('_present', '').replace('_', '/'))
    if os.path.isfile(filename):
        return 1
    else:
        return 0
[ "def", "get_is_file_present", "(", "name", ")", ":", "global", "NAME_PREFIX", "name", "=", "name", ".", "replace", "(", "NAME_PREFIX", ",", "''", ")", "filename", "=", "(", "'/'", "+", "name", ".", "replace", "(", "'_present'", ",", "''", ")", ".", "replace", "(", "'_'", ",", "'/'", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "return", "1", "else", ":", "return", "0" ]
find whether file exists .
train
false
50,114
def bridge_create(br, may_exist=True):
    param_may_exist = _param_may_exist(may_exist)
    cmd = 'ovs-vsctl {1}add-br {0}'.format(br, param_may_exist)
    result = __salt__['cmd.run_all'](cmd)
    return _retcode_to_bool(result['retcode'])
[ "def", "bridge_create", "(", "br", ",", "may_exist", "=", "True", ")", ":", "param_may_exist", "=", "_param_may_exist", "(", "may_exist", ")", "cmd", "=", "'ovs-vsctl {1}add-br {0}'", ".", "format", "(", "br", ",", "param_may_exist", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "return", "_retcode_to_bool", "(", "result", "[", "'retcode'", "]", ")" ]
creates a new bridge .
train
false
50,116
def stripEscapes(s):
    result = ''
    show = 1
    i = 0
    L = len(s)
    while (i < L):
        if ((show == 0) and (s[i] in ANSI_TERMINATORS)):
            show = 1
        elif show:
            n = s.find(ANSI_ESCAPE_BEGIN, i)
            if (n == (-1)):
                return (result + s[i:])
            else:
                result = (result + s[i:n])
                i = n
                show = 0
        i += 1
    return result
[ "def", "stripEscapes", "(", "s", ")", ":", "result", "=", "''", "show", "=", "1", "i", "=", "0", "L", "=", "len", "(", "s", ")", "while", "(", "i", "<", "L", ")", ":", "if", "(", "(", "show", "==", "0", ")", "and", "(", "s", "[", "i", "]", "in", "ANSI_TERMINATORS", ")", ")", ":", "show", "=", "1", "elif", "show", ":", "n", "=", "s", ".", "find", "(", "ANSI_ESCAPE_BEGIN", ",", "i", ")", "if", "(", "n", "==", "(", "-", "1", ")", ")", ":", "return", "(", "result", "+", "s", "[", "i", ":", "]", ")", "else", ":", "result", "=", "(", "result", "+", "s", "[", "i", ":", "n", "]", ")", "i", "=", "n", "show", "=", "0", "i", "+=", "1", "return", "result" ]
remove all ansi color escapes from the given string .
train
false
50,117
def beneficiary_type(): return s3_rest_controller()
[ "def", "beneficiary_type", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
50,118
def should_skip_file(name):
    if name.startswith('.'):
        return 'Skipping hidden file %(filename)s'
    if name.endswith(('~', '.bak')):
        return 'Skipping backup file %(filename)s'
    if name.endswith(('.pyc', '.pyo')):
        return (('Skipping %s file ' % os.path.splitext(name)[1]) + '%(filename)s')
    if name.endswith('$py.class'):
        return 'Skipping $py.class file %(filename)s'
    if (name in ('CVS', '_darcs')):
        return 'Skipping version control directory %(filename)s'
    return None
[ "def", "should_skip_file", "(", "name", ")", ":", "if", "name", ".", "startswith", "(", "'.'", ")", ":", "return", "'Skipping hidden file %(filename)s'", "if", "name", ".", "endswith", "(", "(", "'~'", ",", "'.bak'", ")", ")", ":", "return", "'Skipping backup file %(filename)s'", "if", "name", ".", "endswith", "(", "(", "'.pyc'", ",", "'.pyo'", ")", ")", ":", "return", "(", "(", "'Skipping %s file '", "%", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "1", "]", ")", "+", "'%(filename)s'", ")", "if", "name", ".", "endswith", "(", "'$py.class'", ")", ":", "return", "'Skipping $py.class file %(filename)s'", "if", "(", "name", "in", "(", "'CVS'", ",", "'_darcs'", ")", ")", ":", "return", "'Skipping version control directory %(filename)s'", "return", "None" ]
checks if a file should be skipped based on its name .
train
true
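The function needs only os; a quick pass over some representative names:

import os

# with should_skip_file as defined above in scope:
for name in ('.hidden', 'notes.bak', 'mod.pyc', 'CVS', 'keep.py'):
    print(repr(name), '->', should_skip_file(name))
# keep.py maps to None, i.e. the file is processed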
50,123
@np.deprecate(message='spleval is deprecated in scipy 0.19.0, use BSpline instead.')
def spleval(xck, xnew, deriv=0):
    (xj, cvals, k) = xck
    oldshape = np.shape(xnew)
    xx = np.ravel(xnew)
    sh = cvals.shape[1:]
    res = np.empty((xx.shape + sh), dtype=cvals.dtype)
    for index in np.ndindex(*sh):
        sl = ((slice(None),) + index)
        if issubclass(cvals.dtype.type, np.complexfloating):
            res[sl].real = _fitpack._bspleval(xx, xj, cvals.real[sl], k, deriv)
            res[sl].imag = _fitpack._bspleval(xx, xj, cvals.imag[sl], k, deriv)
        else:
            res[sl] = _fitpack._bspleval(xx, xj, cvals[sl], k, deriv)
    res.shape = (oldshape + sh)
    return res
[ "@", "np", ".", "deprecate", "(", "message", "=", "'spleval is deprecated in scipy 0.19.0, use BSpline instead.'", ")", "def", "spleval", "(", "xck", ",", "xnew", ",", "deriv", "=", "0", ")", ":", "(", "xj", ",", "cvals", ",", "k", ")", "=", "xck", "oldshape", "=", "np", ".", "shape", "(", "xnew", ")", "xx", "=", "np", ".", "ravel", "(", "xnew", ")", "sh", "=", "cvals", ".", "shape", "[", "1", ":", "]", "res", "=", "np", ".", "empty", "(", "(", "xx", ".", "shape", "+", "sh", ")", ",", "dtype", "=", "cvals", ".", "dtype", ")", "for", "index", "in", "np", ".", "ndindex", "(", "*", "sh", ")", ":", "sl", "=", "(", "(", "slice", "(", "None", ")", ",", ")", "+", "index", ")", "if", "issubclass", "(", "cvals", ".", "dtype", ".", "type", ",", "np", ".", "complexfloating", ")", ":", "res", "[", "sl", "]", ".", "real", "=", "_fitpack", ".", "_bspleval", "(", "xx", ",", "xj", ",", "cvals", ".", "real", "[", "sl", "]", ",", "k", ",", "deriv", ")", "res", "[", "sl", "]", ".", "imag", "=", "_fitpack", ".", "_bspleval", "(", "xx", ",", "xj", ",", "cvals", ".", "imag", "[", "sl", "]", ",", "k", ",", "deriv", ")", "else", ":", "res", "[", "sl", "]", "=", "_fitpack", ".", "_bspleval", "(", "xx", ",", "xj", ",", "cvals", "[", "sl", "]", ",", "k", ",", "deriv", ")", "res", ".", "shape", "=", "(", "oldshape", "+", "sh", ")", "return", "res" ]
evaluate a fixed spline represented by the given tuple at the new x-values . the xj values are the interior knot points .
train
false
50,124
def is_iterator(obj):
    from types import GeneratorType
    if isinstance(obj, GeneratorType):
        return True
    elif (not hasattr(obj, '__iter__')):
        return False
    else:
        return (iter(obj) is obj)
[ "def", "is_iterator", "(", "obj", ")", ":", "from", "types", "import", "GeneratorType", "if", "isinstance", "(", "obj", ",", "GeneratorType", ")", ":", "return", "True", "elif", "(", "not", "hasattr", "(", "obj", ",", "'__iter__'", ")", ")", ":", "return", "False", "else", ":", "return", "(", "iter", "(", "obj", ")", "is", "obj", ")" ]
returns a boolean indicating if the object provided implements the iterator protocol .
train
false
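A self-contained sketch (import moved to module level) distinguishing iterables from iterators:

from types import GeneratorType

def is_iterator(obj):
    if isinstance(obj, GeneratorType):
        return True
    elif not hasattr(obj, '__iter__'):
        return False
    else:
        return iter(obj) is obj

print(is_iterator([1, 2, 3]))        # False: a list is iterable, not an iterator
print(is_iterator(iter([1, 2, 3])))  # True
print(is_iterator(x for x in ()))    # True: generators are iterators
print(is_iterator(42))               # False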
50,125
def phooky():
    exclusion_list = ['lafin', 'daniel_dressler', 'phooky']
    g = globals()
    flist = [(k, v) for (k, v) in g.items() if (isinstance(v, type(phooky)) and (k not in exclusion_list) and (v.func_code.co_argcount == 0))]
    (names, bodies) = map(list, zip(*flist))
    random.shuffle(bodies)
    for (name, body) in zip(names, bodies):
        g[name] = body
    print 'phooky is now {0}!'.format(phooky.func_code.co_name)
[ "def", "phooky", "(", ")", ":", "exclusion_list", "=", "[", "'lafin'", ",", "'daniel_dressler'", ",", "'phooky'", "]", "g", "=", "globals", "(", ")", "flist", "=", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "g", ".", "items", "(", ")", "if", "(", "isinstance", "(", "v", ",", "type", "(", "phooky", ")", ")", "and", "(", "k", "not", "in", "exclusion_list", ")", "and", "(", "v", ".", "func_code", ".", "co_argcount", "==", "0", ")", ")", "]", "(", "names", ",", "bodies", ")", "=", "map", "(", "list", ",", "zip", "(", "*", "flist", ")", ")", "random", ".", "shuffle", "(", "bodies", ")", "for", "(", "name", ",", "body", ")", "in", "zip", "(", "names", ",", "bodies", ")", ":", "g", "[", "name", "]", "=", "body", "print", "'phooky is now {0}!'", ".", "format", "(", "phooky", ".", "func_code", ".", "co_name", ")" ]
key party! everybody's function goes home with a different name than it showed up with .
train
false
50,126
def get_redis_server():
    global redis_server
    if (not redis_server):
        from redis import Redis
        redis_server = Redis.from_url((conf.get(u'redis_socketio') or u'redis://localhost:12311'))
    return redis_server
[ "def", "get_redis_server", "(", ")", ":", "global", "redis_server", "if", "(", "not", "redis_server", ")", ":", "from", "redis", "import", "Redis", "redis_server", "=", "Redis", ".", "from_url", "(", "(", "conf", ".", "get", "(", "u'redis_socketio'", ")", "or", "u'redis://localhost:12311'", ")", ")", "return", "redis_server" ]
returns redis_socketio connection .
train
false
50,127
def handle_unauthenticated(func):
    @functools.wraps(func)
    def wrapped(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except exception.NotAuthenticated:
            self._authenticate(force_reauth=True)
            return func(self, *args, **kwargs)
    return wrapped
[ "def", "handle_unauthenticated", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "except", "exception", ".", "NotAuthenticated", ":", "self", ".", "_authenticate", "(", "force_reauth", "=", "True", ")", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped" ]
wrap a function to re-authenticate and retry .
train
false
50,131
@handle_response_format
@treeio_login_required
@_process_mass_form
def index_resolved(request, response_format='html'):
    query = (Q(object__in=Object.filter_by_request(request, Object.objects)) & Q(resolved_by=request.user.profile))
    if request.GET:
        query = (query & _get_filter_query(request.GET))
        filters = FilterForm(request.user.profile, 'resolved_by', request.GET)
    else:
        filters = FilterForm(request.user.profile, 'resolved_by')
    changesets = ChangeSet.objects.filter(query)
    context = _get_default_context(request)
    context.update({'filters': filters, 'changesets': changesets})
    return render_to_response('changes/index_resolved', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "_process_mass_form", "def", "index_resolved", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "query", "=", "(", "Q", "(", "object__in", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Object", ".", "objects", ")", ")", "&", "Q", "(", "resolved_by", "=", "request", ".", "user", ".", "profile", ")", ")", "if", "request", ".", "GET", ":", "query", "=", "(", "query", "&", "_get_filter_query", "(", "request", ".", "GET", ")", ")", "filters", "=", "FilterForm", "(", "request", ".", "user", ".", "profile", ",", "'resolved_by'", ",", "request", ".", "GET", ")", "else", ":", "filters", "=", "FilterForm", "(", "request", ".", "user", ".", "profile", ",", "'resolved_by'", ")", "changesets", "=", "ChangeSet", ".", "objects", ".", "filter", "(", "query", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'filters'", ":", "filters", ",", "'changesets'", ":", "changesets", "}", ")", "return", "render_to_response", "(", "'changes/index_resolved'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
change control resolved by me page .
train
false
50,132
def dmp_copy(f, u):
    if (not u):
        return list(f)
    v = (u - 1)
    return [dmp_copy(c, v) for c in f]
[ "def", "dmp_copy", "(", "f", ",", "u", ")", ":", "if", "(", "not", "u", ")", ":", "return", "list", "(", "f", ")", "v", "=", "(", "u", "-", "1", ")", "return", "[", "dmp_copy", "(", "c", ",", "v", ")", "for", "c", "in", "f", "]" ]
create a new copy of a polynomial f in k[x] .
train
false
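In sympy's dense representation, u is the number of variables minus one; a quick check that the copy is deep down to the coefficient lists:

# with dmp_copy as defined above in scope, u=1 means two variables
f = [[1, 0], [2, 3]]
g = dmp_copy(f, 1)
g[0][0] = 99
print(f)  # [[1, 0], [2, 3]] -- the original is unchanged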
50,133
@process_request_body
def yaml_processor(entity):
    body = entity.fp.read()
    try:
        cherrypy.serving.request.unserialized_data = yaml.safe_load(body)
    except ValueError:
        raise cherrypy.HTTPError(400, 'Invalid YAML document')
    cherrypy.serving.request.raw_body = body
[ "@", "process_request_body", "def", "yaml_processor", "(", "entity", ")", ":", "body", "=", "entity", ".", "fp", ".", "read", "(", ")", "try", ":", "cherrypy", ".", "serving", ".", "request", ".", "unserialized_data", "=", "yaml", ".", "safe_load", "(", "body", ")", "except", "ValueError", ":", "raise", "cherrypy", ".", "HTTPError", "(", "400", ",", "'Invalid YAML document'", ")", "cherrypy", ".", "serving", ".", "request", ".", "raw_body", "=", "body" ]
unserialize raw post data in yaml format to a python data structure .
train
false
50,134
def xhtml(): return Markdown(output_format='xhtml', extensions=[ExtraExtension()])
[ "def", "xhtml", "(", ")", ":", "return", "Markdown", "(", "output_format", "=", "'xhtml'", ",", "extensions", "=", "[", "ExtraExtension", "(", ")", "]", ")" ]
this makes a converter from markdown to xhtml format .
train
false
50,135
def attr(key, value=None):
    key = target(key)
    if (key is False):
        return False
    elif os.path.isdir(key):
        return key
    elif (value is not None):
        return write(key, value)
    else:
        return read(key)
[ "def", "attr", "(", "key", ",", "value", "=", "None", ")", ":", "key", "=", "target", "(", "key", ")", "if", "(", "key", "is", "False", ")", ":", "return", "False", "elif", "os", ".", "path", ".", "isdir", "(", "key", ")", ":", "return", "key", "elif", "(", "value", "is", "not", "None", ")", ":", "return", "write", "(", "key", ",", "value", ")", "else", ":", "return", "read", "(", "key", ")" ]
access/write a sysfs attribute .
train
true
50,137
@hook.command('loved')
def loved(db):
    query = db.execute(select([karma_table]).order_by(karma_table.c.total_karma.desc()).limit(5)).fetchall()
    if (not query):
        return '??'
    else:
        return query
[ "@", "hook", ".", "command", "(", "'loved'", ")", "def", "loved", "(", "db", ")", ":", "query", "=", "db", ".", "execute", "(", "select", "(", "[", "karma_table", "]", ")", ".", "order_by", "(", "karma_table", ".", "c", ".", "total_karma", ".", "desc", "(", ")", ")", ".", "limit", "(", "5", ")", ")", ".", "fetchall", "(", ")", "if", "(", "not", "query", ")", ":", "return", "'??'", "else", ":", "return", "query" ]
loved -- shows the users with the most karma! .
train
false
50,138
def libvlc_audio_toggle_mute(p_mi):
    f = (_Cfunctions.get('libvlc_audio_toggle_mute', None) or _Cfunction('libvlc_audio_toggle_mute', ((1,),), None, None, MediaPlayer))
    return f(p_mi)
[ "def", "libvlc_audio_toggle_mute", "(", "p_mi", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_toggle_mute'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_toggle_mute'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaPlayer", ")", ")", "return", "f", "(", "p_mi", ")" ]
toggle mute status .
train
false
50,139
def _neweywestcov(resid, x):
    nobs = resid.shape[0]
    nlags = int(round((4 * ((nobs / 100.0) ** (2 / 9.0)))))
    hhat = (resid * x.T)
    xuux = np.dot(hhat, hhat.T)
    for lag in range(nlags):
        za = np.dot(hhat[:, lag:nobs], hhat[:, :(nobs - lag)].T)
        w = (1 - (lag / (nobs + 1.0)))
        xuux = (xuux + np.dot(w, (za + za.T)))
    xtxi = np.linalg.inv(np.dot(x.T, x))
    covbNW = np.dot(xtxi, np.dot(xuux, xtxi))
    return covbNW
[ "def", "_neweywestcov", "(", "resid", ",", "x", ")", ":", "nobs", "=", "resid", ".", "shape", "[", "0", "]", "nlags", "=", "int", "(", "round", "(", "(", "4", "*", "(", "(", "nobs", "/", "100.0", ")", "**", "(", "2", "/", "9.0", ")", ")", ")", ")", ")", "hhat", "=", "(", "resid", "*", "x", ".", "T", ")", "xuux", "=", "np", ".", "dot", "(", "hhat", ",", "hhat", ".", "T", ")", "for", "lag", "in", "range", "(", "nlags", ")", ":", "za", "=", "np", ".", "dot", "(", "hhat", "[", ":", ",", "lag", ":", "nobs", "]", ",", "hhat", "[", ":", ",", ":", "(", "nobs", "-", "lag", ")", "]", ".", "T", ")", "w", "=", "(", "1", "-", "(", "lag", "/", "(", "nobs", "+", "1.0", ")", ")", ")", "xuux", "=", "(", "xuux", "+", "np", ".", "dot", "(", "w", ",", "(", "za", "+", "za", ".", "T", ")", ")", ")", "xtxi", "=", "np", ".", "linalg", ".", "inv", "(", "np", ".", "dot", "(", "x", ".", "T", ",", "x", ")", ")", "covbNW", "=", "np", ".", "dot", "(", "xtxi", ",", "np", ".", "dot", "(", "xuux", ",", "xtxi", ")", ")", "return", "covbNW" ]
did not run yet . ported from regstats2 (hac / newey-west): lag length l = round(4*(nobs/100)^(2/9)) , with l = nobs^(1/4) as an alternative .
train
false
50,140
def test_no_data_with_none(Chart):
    chart = Chart()
    chart.add('Serie', None)
    q = chart.render_pyquery()
    assert (q('.text-overlay text').text() == 'No data')
[ "def", "test_no_data_with_none", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", ")", "chart", ".", "add", "(", "'Serie'", ",", "None", ")", "q", "=", "chart", ".", "render_pyquery", "(", ")", "assert", "(", "q", "(", "'.text-overlay text'", ")", ".", "text", "(", ")", "==", "'No data'", ")" ]
test no data for a none containing serie .
train
false
50,141
def splev(x, tck, der=0, ext=0):
    (t, c, k) = tck
    try:
        c[0][0]
        parametric = True
    except:
        parametric = False
    if parametric:
        return list(map((lambda c, x=x, t=t, k=k, der=der: splev(x, [t, c, k], der, ext)), c))
    else:
        if (not (0 <= der <= k)):
            raise ValueError(('0<=der=%d<=k=%d must hold' % (der, k)))
        if (ext not in (0, 1, 2, 3)):
            raise ValueError(('ext = %s not in (0, 1, 2, 3) ' % ext))
        x = asarray(x)
        shape = x.shape
        x = atleast_1d(x).ravel()
        (y, ier) = _fitpack._spl_(x, der, t, c, k, ext)
        if (ier == 10):
            raise ValueError('Invalid input data')
        if (ier == 1):
            raise ValueError('Found x value not in the domain')
        if ier:
            raise TypeError('An error occurred')
        return y.reshape(shape)
[ "def", "splev", "(", "x", ",", "tck", ",", "der", "=", "0", ",", "ext", "=", "0", ")", ":", "(", "t", ",", "c", ",", "k", ")", "=", "tck", "try", ":", "c", "[", "0", "]", "[", "0", "]", "parametric", "=", "True", "except", ":", "parametric", "=", "False", "if", "parametric", ":", "return", "list", "(", "map", "(", "(", "lambda", "c", ",", "x", "=", "x", ",", "t", "=", "t", ",", "k", "=", "k", ",", "der", "=", "der", ":", "splev", "(", "x", ",", "[", "t", ",", "c", ",", "k", "]", ",", "der", ",", "ext", ")", ")", ",", "c", ")", ")", "else", ":", "if", "(", "not", "(", "0", "<=", "der", "<=", "k", ")", ")", ":", "raise", "ValueError", "(", "(", "'0<=der=%d<=k=%d must hold'", "%", "(", "der", ",", "k", ")", ")", ")", "if", "(", "ext", "not", "in", "(", "0", ",", "1", ",", "2", ",", "3", ")", ")", ":", "raise", "ValueError", "(", "(", "'ext = %s not in (0, 1, 2, 3) '", "%", "ext", ")", ")", "x", "=", "asarray", "(", "x", ")", "shape", "=", "x", ".", "shape", "x", "=", "atleast_1d", "(", "x", ")", ".", "ravel", "(", ")", "(", "y", ",", "ier", ")", "=", "_fitpack", ".", "_spl_", "(", "x", ",", "der", ",", "t", ",", "c", ",", "k", ",", "ext", ")", "if", "(", "ier", "==", "10", ")", ":", "raise", "ValueError", "(", "'Invalid input data'", ")", "if", "(", "ier", "==", "1", ")", ":", "raise", "ValueError", "(", "'Found x value not in the domain'", ")", "if", "ier", ":", "raise", "TypeError", "(", "'An error occurred'", ")", "return", "y", ".", "reshape", "(", "shape", ")" ]
evaluate a b-spline or its derivatives .
train
false
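a short usage sketch for the splev record above, pairing it with splrep from scipy.interpolate:

import numpy as np
from scipy.interpolate import splrep, splev
x = np.linspace(0, 10, 30)
tck = splrep(x, np.sin(x))          # fit a cubic B-spline, returns (t, c, k)
xnew = np.linspace(0, 10, 100)
ynew = splev(xnew, tck)             # evaluate the spline at new points
dydx = splev(xnew, tck, der=1)      # evaluate its first derivative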
50,142
def generate_content(data): try: return (templates[data['type']][data['event']] % data['values']) except KeyError: return json_error(_('Unknown message'))
[ "def", "generate_content", "(", "data", ")", ":", "try", ":", "return", "(", "templates", "[", "data", "[", "'type'", "]", "]", "[", "data", "[", "'event'", "]", "]", "%", "data", "[", "'values'", "]", ")", "except", "KeyError", ":", "return", "json_error", "(", "_", "(", "'Unknown message'", ")", ")" ]
gets the template string and formats it with parsed data .
train
false
50,143
def get_or_create_mrjob_instance_profile(conn): for (profile_name, role_name, role_document) in _yield_instance_profiles(conn): if (role_document != _MRJOB_INSTANCE_PROFILE_ROLE): continue policy_arns = list(_yield_attached_role_policies(conn, role_name)) if (policy_arns == [_EMR_INSTANCE_PROFILE_POLICY_ARN]): return role_name name = _create_mrjob_role_with_attached_policy(conn, _MRJOB_INSTANCE_PROFILE_ROLE, _EMR_INSTANCE_PROFILE_POLICY_ARN) conn.create_instance_profile(name) conn.add_role_to_instance_profile(name, name) log.info(('Auto-created instance profile %s' % name)) return name
[ "def", "get_or_create_mrjob_instance_profile", "(", "conn", ")", ":", "for", "(", "profile_name", ",", "role_name", ",", "role_document", ")", "in", "_yield_instance_profiles", "(", "conn", ")", ":", "if", "(", "role_document", "!=", "_MRJOB_INSTANCE_PROFILE_ROLE", ")", ":", "continue", "policy_arns", "=", "list", "(", "_yield_attached_role_policies", "(", "conn", ",", "role_name", ")", ")", "if", "(", "policy_arns", "==", "[", "_EMR_INSTANCE_PROFILE_POLICY_ARN", "]", ")", ":", "return", "role_name", "name", "=", "_create_mrjob_role_with_attached_policy", "(", "conn", ",", "_MRJOB_INSTANCE_PROFILE_ROLE", ",", "_EMR_INSTANCE_PROFILE_POLICY_ARN", ")", "conn", ".", "create_instance_profile", "(", "name", ")", "conn", ".", "add_role_to_instance_profile", "(", "name", ",", "name", ")", "log", ".", "info", "(", "(", "'Auto-created instance profile %s'", "%", "name", ")", ")", "return", "name" ]
look for a usable instance profile for emr , creating and attaching one if none exists .
train
false
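a hedged usage sketch for the record above, assuming a boto 2 IAM connection as mrjob's helpers expect (credentials taken from the environment):

import boto
conn = boto.connect_iam()
profile_name = get_or_create_mrjob_instance_profile(conn)  # reuses or auto-creates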
50,147
def _order_complex_poles(poles): ordered_poles = np.sort(poles[np.isreal(poles)]) im_poles = [] for p in np.sort(poles[(np.imag(poles) < 0)]): if (np.conj(p) in poles): im_poles.extend((p, np.conj(p))) ordered_poles = np.hstack((ordered_poles, im_poles)) if (poles.shape[0] != len(ordered_poles)): raise ValueError('Complex poles must come with their conjugates') return ordered_poles
[ "def", "_order_complex_poles", "(", "poles", ")", ":", "ordered_poles", "=", "np", ".", "sort", "(", "poles", "[", "np", ".", "isreal", "(", "poles", ")", "]", ")", "im_poles", "=", "[", "]", "for", "p", "in", "np", ".", "sort", "(", "poles", "[", "(", "np", ".", "imag", "(", "poles", ")", "<", "0", ")", "]", ")", ":", "if", "(", "np", ".", "conj", "(", "p", ")", "in", "poles", ")", ":", "im_poles", ".", "extend", "(", "(", "p", ",", "np", ".", "conj", "(", "p", ")", ")", ")", "ordered_poles", "=", "np", ".", "hstack", "(", "(", "ordered_poles", ",", "im_poles", ")", ")", "if", "(", "poles", ".", "shape", "[", "0", "]", "!=", "len", "(", "ordered_poles", ")", ")", ":", "raise", "ValueError", "(", "'Complex poles must come with their conjugates'", ")", "return", "ordered_poles" ]
check that complex poles come in conjugate pairs and reorder them so real poles come first .
train
false
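a worked example for the _order_complex_poles record above; real poles come first, then each conjugate pair:

import numpy as np
poles = np.array([-2.0, -1.0 + 1.0j, -1.0 - 1.0j, -3.0])
_order_complex_poles(poles)
# array([-3.+0.j, -2.+0.j, -1.-1.j, -1.+1.j])
_order_complex_poles(np.array([-1.0 + 1.0j, -2.0]))  # raises ValueError: missing conjugate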
50,148
def getAroundsFromLoops(loops, radius, thresholdRatio=0.9): return getAroundsFromPoints(getPointsFromLoops(loops, radius, thresholdRatio), radius)
[ "def", "getAroundsFromLoops", "(", "loops", ",", "radius", ",", "thresholdRatio", "=", "0.9", ")", ":", "return", "getAroundsFromPoints", "(", "getPointsFromLoops", "(", "loops", ",", "radius", ",", "thresholdRatio", ")", ",", "radius", ")" ]
get the arounds from the loops .
train
false
50,150
def pop_path_info(environ, charset='utf-8', errors='replace'): path = environ.get('PATH_INFO') if (not path): return None script_name = environ.get('SCRIPT_NAME', '') old_path = path path = path.lstrip('/') if (path != old_path): script_name += ('/' * (len(old_path) - len(path))) if ('/' not in path): environ['PATH_INFO'] = '' environ['SCRIPT_NAME'] = (script_name + path) rv = wsgi_get_bytes(path) else: (segment, path) = path.split('/', 1) environ['PATH_INFO'] = ('/' + path) environ['SCRIPT_NAME'] = (script_name + segment) rv = wsgi_get_bytes(segment) return to_unicode(rv, charset, errors, allow_none_charset=True)
[ "def", "pop_path_info", "(", "environ", ",", "charset", "=", "'utf-8'", ",", "errors", "=", "'replace'", ")", ":", "path", "=", "environ", ".", "get", "(", "'PATH_INFO'", ")", "if", "(", "not", "path", ")", ":", "return", "None", "script_name", "=", "environ", ".", "get", "(", "'SCRIPT_NAME'", ",", "''", ")", "old_path", "=", "path", "path", "=", "path", ".", "lstrip", "(", "'/'", ")", "if", "(", "path", "!=", "old_path", ")", ":", "script_name", "+=", "(", "'/'", "*", "(", "len", "(", "old_path", ")", "-", "len", "(", "path", ")", ")", ")", "if", "(", "'/'", "not", "in", "path", ")", ":", "environ", "[", "'PATH_INFO'", "]", "=", "''", "environ", "[", "'SCRIPT_NAME'", "]", "=", "(", "script_name", "+", "path", ")", "rv", "=", "wsgi_get_bytes", "(", "path", ")", "else", ":", "(", "segment", ",", "path", ")", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "environ", "[", "'PATH_INFO'", "]", "=", "(", "'/'", "+", "path", ")", "environ", "[", "'SCRIPT_NAME'", "]", "=", "(", "script_name", "+", "segment", ")", "rv", "=", "wsgi_get_bytes", "(", "segment", ")", "return", "to_unicode", "(", "rv", ",", "charset", ",", "errors", ",", "allow_none_charset", "=", "True", ")" ]
removes and returns the next segment of path_info .
train
true
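a worked example for the pop_path_info record above, showing how the environ is mutated in place:

env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
pop_path_info(env)   # -> 'a'; env becomes {'SCRIPT_NAME': '/foo/a', 'PATH_INFO': '/b'}
pop_path_info(env)   # -> 'b'; env becomes {'SCRIPT_NAME': '/foo/a/b', 'PATH_INFO': ''}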
50,151
def SubTemplateElement(parent, tag, attrib=None, selector=None, subselector=None, **extra): attrib = (attrib or {}) attrib.update(extra) elem = TemplateElement(tag, attrib=attrib, selector=selector, subselector=subselector) if (parent is not None): parent.append(elem) return elem
[ "def", "SubTemplateElement", "(", "parent", ",", "tag", ",", "attrib", "=", "None", ",", "selector", "=", "None", ",", "subselector", "=", "None", ",", "**", "extra", ")", ":", "attrib", "=", "(", "attrib", "or", "{", "}", ")", "attrib", ".", "update", "(", "extra", ")", "elem", "=", "TemplateElement", "(", "tag", ",", "attrib", "=", "attrib", ",", "selector", "=", "selector", ",", "subselector", "=", "subselector", ")", "if", "(", "parent", "is", "not", "None", ")", ":", "parent", ".", "append", "(", "elem", ")", "return", "elem" ]
create a template element as a child of another .
train
false
50,153
@connect_on_app_finalize def add_chain_task(app): @app.task(name=u'celery.chain', shared=False, lazy=False) def chain(*args, **kwargs): raise NotImplementedError(u'chain is not a real task') return chain
[ "@", "connect_on_app_finalize", "def", "add_chain_task", "(", "app", ")", ":", "@", "app", ".", "task", "(", "name", "=", "u'celery.chain'", ",", "shared", "=", "False", ",", "lazy", "=", "False", ")", "def", "chain", "(", "*", "args", ",", "**", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "u'chain is not a real task'", ")", "return", "chain" ]
no longer used .
train
false
50,155
def LU(matlist, K, reverse=0): nrow = len(matlist) (new_matlist1, new_matlist2) = (eye(nrow, K), copy.deepcopy(matlist)) for i in range(nrow): for j in range((i + 1), nrow): if (new_matlist2[j][i] != 0): new_matlist1[j][i] = (new_matlist2[j][i] / new_matlist2[i][i]) rowadd(new_matlist2, j, i, ((- new_matlist2[j][i]) / new_matlist2[i][i]), K) return (new_matlist1, new_matlist2)
[ "def", "LU", "(", "matlist", ",", "K", ",", "reverse", "=", "0", ")", ":", "nrow", "=", "len", "(", "matlist", ")", "(", "new_matlist1", ",", "new_matlist2", ")", "=", "(", "eye", "(", "nrow", ",", "K", ")", ",", "copy", ".", "deepcopy", "(", "matlist", ")", ")", "for", "i", "in", "range", "(", "nrow", ")", ":", "for", "j", "in", "range", "(", "(", "i", "+", "1", ")", ",", "nrow", ")", ":", "if", "(", "new_matlist2", "[", "j", "]", "[", "i", "]", "!=", "0", ")", ":", "new_matlist1", "[", "j", "]", "[", "i", "]", "=", "(", "new_matlist2", "[", "j", "]", "[", "i", "]", "/", "new_matlist2", "[", "i", "]", "[", "i", "]", ")", "rowadd", "(", "new_matlist2", ",", "j", ",", "i", ",", "(", "(", "-", "new_matlist2", "[", "j", "]", "[", "i", "]", ")", "/", "new_matlist2", "[", "i", "]", "[", "i", "]", ")", ",", "K", ")", "return", "(", "new_matlist1", ",", "new_matlist2", ")" ]
computes the lu decomposition of a matrix and returns the l and u matrices .
train
false
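a worked example for the LU record above, with entries from sympy's rational domain QQ (the function assumes no pivoting is needed, i.e. no zero pivots):

from sympy import QQ
a = [[QQ(1), QQ(2), QQ(3)], [QQ(2), QQ(-4), QQ(6)], [QQ(3), QQ(-9), QQ(-3)]]
L, U = LU(a, QQ)
# L == [[1, 0, 0], [2, 1, 0], [3, 15/8, 1]]
# U == [[1, 2, 3], [0, -8, 0], [0, 0, -12]]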
50,157
def _is_attribute_property(name, klass): try: attributes = klass.getattr(name) except astroid.NotFoundError: return False property_name = '{0}.property'.format(BUILTINS) for attr in attributes: try: infered = next(attr.infer()) except astroid.InferenceError: continue if (isinstance(infered, astroid.Function) and decorated_with_property(infered)): return True if (infered.pytype() == property_name): return True return False
[ "def", "_is_attribute_property", "(", "name", ",", "klass", ")", ":", "try", ":", "attributes", "=", "klass", ".", "getattr", "(", "name", ")", "except", "astroid", ".", "NotFoundError", ":", "return", "False", "property_name", "=", "'{0}.property'", ".", "format", "(", "BUILTINS", ")", "for", "attr", "in", "attributes", ":", "try", ":", "infered", "=", "next", "(", "attr", ".", "infer", "(", ")", ")", "except", "astroid", ".", "InferenceError", ":", "continue", "if", "(", "isinstance", "(", "infered", ",", "astroid", ".", "Function", ")", "and", "decorated_with_property", "(", "infered", ")", ")", ":", "return", "True", "if", "(", "infered", ".", "pytype", "(", ")", "==", "property_name", ")", ":", "return", "True", "return", "False" ]
check if the given attribute *name* is a property in the given *klass* .
train
true
50,158
def _RaiseInvalidWireType(buffer, pos, end): raise _DecodeError('Tag had invalid wire type.')
[ "def", "_RaiseInvalidWireType", "(", "buffer", ",", "pos", ",", "end", ")", ":", "raise", "_DecodeError", "(", "'Tag had invalid wire type.'", ")" ]
skip function for unknown wire types .
train
false
50,160
@task(base=BaseInstructorTask) def export_ora2_data(entry_id, xmodule_instance_args): action_name = ugettext_noop('generated') task_fn = partial(upload_ora2_data, xmodule_instance_args) return run_main_task(entry_id, task_fn, action_name)
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "export_ora2_data", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "ugettext_noop", "(", "'generated'", ")", "task_fn", "=", "partial", "(", "upload_ora2_data", ",", "xmodule_instance_args", ")", "return", "run_main_task", "(", "entry_id", ",", "task_fn", ",", "action_name", ")" ]
generate a csv of ora2 responses and push it to s3 .
train
false
50,161
def check_success(ODBC_obj, ret): if (ret not in (SQL_SUCCESS, SQL_SUCCESS_WITH_INFO, SQL_NO_DATA)): if isinstance(ODBC_obj, Cursor): ctrl_err(SQL_HANDLE_STMT, ODBC_obj.stmt_h, ret, ODBC_obj.ansi) elif isinstance(ODBC_obj, Connection): ctrl_err(SQL_HANDLE_DBC, ODBC_obj.dbc_h, ret, ODBC_obj.ansi) else: ctrl_err(SQL_HANDLE_ENV, ODBC_obj, ret, False)
[ "def", "check_success", "(", "ODBC_obj", ",", "ret", ")", ":", "if", "(", "ret", "not", "in", "(", "SQL_SUCCESS", ",", "SQL_SUCCESS_WITH_INFO", ",", "SQL_NO_DATA", ")", ")", ":", "if", "isinstance", "(", "ODBC_obj", ",", "Cursor", ")", ":", "ctrl_err", "(", "SQL_HANDLE_STMT", ",", "ODBC_obj", ".", "stmt_h", ",", "ret", ",", "ODBC_obj", ".", "ansi", ")", "elif", "isinstance", "(", "ODBC_obj", ",", "Connection", ")", ":", "ctrl_err", "(", "SQL_HANDLE_DBC", ",", "ODBC_obj", ".", "dbc_h", ",", "ret", ",", "ODBC_obj", ".", "ansi", ")", "else", ":", "ctrl_err", "(", "SQL_HANDLE_ENV", ",", "ODBC_obj", ",", "ret", ",", "False", ")" ]
validate return value .
train
false
50,162
def cleanup_coverage_dir(): output_dir = os.path.join(coverage_path, 'output') for filename in os.listdir(output_dir): src = os.path.join(output_dir, filename) dst = os.path.join(os.getcwd(), 'test', 'results', 'coverage') shutil.copy(src, dst) logs_dir = os.path.join(coverage_path, 'logs') for filename in os.listdir(logs_dir): random_suffix = ''.join((random.choice((string.ascii_letters + string.digits)) for _ in range(8))) new_name = ('%s.%s.log' % (os.path.splitext(os.path.basename(filename))[0], random_suffix)) src = os.path.join(logs_dir, filename) dst = os.path.join(os.getcwd(), 'test', 'results', 'logs', new_name) shutil.copy(src, dst) shutil.rmtree(coverage_path)
[ "def", "cleanup_coverage_dir", "(", ")", ":", "output_dir", "=", "os", ".", "path", ".", "join", "(", "coverage_path", ",", "'output'", ")", "for", "filename", "in", "os", ".", "listdir", "(", "output_dir", ")", ":", "src", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "filename", ")", "dst", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'test'", ",", "'results'", ",", "'coverage'", ")", "shutil", ".", "copy", "(", "src", ",", "dst", ")", "logs_dir", "=", "os", ".", "path", ".", "join", "(", "coverage_path", ",", "'logs'", ")", "for", "filename", "in", "os", ".", "listdir", "(", "logs_dir", ")", ":", "random_suffix", "=", "''", ".", "join", "(", "(", "random", ".", "choice", "(", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", ")", "for", "_", "in", "range", "(", "8", ")", ")", ")", "new_name", "=", "(", "'%s.%s.log'", "%", "(", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "[", "0", "]", ",", "random_suffix", ")", ")", "src", "=", "os", ".", "path", ".", "join", "(", "logs_dir", ",", "filename", ")", "dst", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'test'", ",", "'results'", ",", "'logs'", ",", "new_name", ")", "shutil", ".", "copy", "(", "src", ",", "dst", ")", "shutil", ".", "rmtree", "(", "coverage_path", ")" ]
copy coverage data and logs out of the temporary directory , then purge it .
train
false
50,164
def describe_function(FunctionName, region=None, key=None, keyid=None, profile=None): try: func = _find_function(FunctionName, region=region, key=key, keyid=keyid, profile=profile) if func: keys = ('FunctionName', 'Runtime', 'Role', 'Handler', 'CodeSha256', 'CodeSize', 'Description', 'Timeout', 'MemorySize', 'FunctionArn', 'LastModified', 'VpcConfig') return {'function': dict([(k, func.get(k)) for k in keys])} else: return {'function': None} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "describe_function", "(", "FunctionName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "func", "=", "_find_function", "(", "FunctionName", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "func", ":", "keys", "=", "(", "'FunctionName'", ",", "'Runtime'", ",", "'Role'", ",", "'Handler'", ",", "'CodeSha256'", ",", "'CodeSize'", ",", "'Description'", ",", "'Timeout'", ",", "'MemorySize'", ",", "'FunctionArn'", ",", "'LastModified'", ",", "'VpcConfig'", ")", "return", "{", "'function'", ":", "dict", "(", "[", "(", "k", ",", "func", ".", "get", "(", "k", ")", ")", "for", "k", "in", "keys", "]", ")", "}", "else", ":", "return", "{", "'function'", ":", "None", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
given a function name , describe its properties .
train
false
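a hedged usage sketch for the describe_function record above; the function name, region, and profile are illustrative, with credentials resolved by the usual salt/boto3 conventions:

ret = describe_function('myfunction', region='us-east-1', profile='mylambdaprofile')
if ret.get('function'):
    print(ret['function']['FunctionArn'])   # metadata keys listed in the snippet
else:
    print(ret.get('error') or 'no such function')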
50,165
@ndb.transactional def update_counter(name): counter = Counter.get_by_id(name) if (counter is None): counter = Counter(id=name, count=0) counter.count += 1 counter.put() return counter.count
[ "@", "ndb", ".", "transactional", "def", "update_counter", "(", "name", ")", ":", "counter", "=", "Counter", ".", "get_by_id", "(", "name", ")", "if", "(", "counter", "is", "None", ")", ":", "counter", "=", "Counter", "(", "id", "=", "name", ",", "count", "=", "0", ")", "counter", ".", "count", "+=", "1", "counter", ".", "put", "(", ")", "return", "counter", ".", "count" ]
increment the named counter by 1 .
train
false
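a minimal usage sketch for the update_counter record above; each call runs in its own ndb transaction, so concurrent increments stay consistent:

hits = update_counter('page-hits')   # creates the counter on first use
print('seen %d hits' % hits)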
50,167
def test_global(): global_assumptions.add(Q.is_true((x > 0))) assert (Q.is_true((x > 0)) in global_assumptions) global_assumptions.remove(Q.is_true((x > 0))) assert (not (Q.is_true((x > 0)) in global_assumptions)) global_assumptions.add(Q.is_true((x > 0)), Q.is_true((y > 0))) assert (Q.is_true((x > 0)) in global_assumptions) assert (Q.is_true((y > 0)) in global_assumptions) global_assumptions.clear() assert (not (Q.is_true((x > 0)) in global_assumptions)) assert (not (Q.is_true((y > 0)) in global_assumptions))
[ "def", "test_global", "(", ")", ":", "global_assumptions", ".", "add", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", ")", "assert", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", "in", "global_assumptions", ")", "global_assumptions", ".", "remove", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", ")", "assert", "(", "not", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", "in", "global_assumptions", ")", ")", "global_assumptions", ".", "add", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", ",", "Q", ".", "is_true", "(", "(", "y", ">", "0", ")", ")", ")", "assert", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", "in", "global_assumptions", ")", "assert", "(", "Q", ".", "is_true", "(", "(", "y", ">", "0", ")", ")", "in", "global_assumptions", ")", "global_assumptions", ".", "clear", "(", ")", "assert", "(", "not", "(", "Q", ".", "is_true", "(", "(", "x", ">", "0", ")", ")", "in", "global_assumptions", ")", ")", "assert", "(", "not", "(", "Q", ".", "is_true", "(", "(", "y", ">", "0", ")", ")", "in", "global_assumptions", ")", ")" ]
test for global assumptions .
train
false
50,169
def addNegativesByDerivation(end, extrudeDerivation, negatives, radius, start, xmlElement): extrudeDerivation.offsetAlongDefault = [start, end] extrudeDerivation.tiltFollow = True extrudeDerivation.tiltTop = Vector3(0.0, 0.0, 1.0) extrudeDerivation.setToXMLElement(xmlElement.getCopyShallow()) extrude.addNegatives(extrudeDerivation, negatives, [getTeardropPathByEndStart(end, radius, start, xmlElement)])
[ "def", "addNegativesByDerivation", "(", "end", ",", "extrudeDerivation", ",", "negatives", ",", "radius", ",", "start", ",", "xmlElement", ")", ":", "extrudeDerivation", ".", "offsetAlongDefault", "=", "[", "start", ",", "end", "]", "extrudeDerivation", ".", "tiltFollow", "=", "True", "extrudeDerivation", ".", "tiltTop", "=", "Vector3", "(", "0.0", ",", "0.0", ",", "1.0", ")", "extrudeDerivation", ".", "setToXMLElement", "(", "xmlElement", ".", "getCopyShallow", "(", ")", ")", "extrude", ".", "addNegatives", "(", "extrudeDerivation", ",", "negatives", ",", "[", "getTeardropPathByEndStart", "(", "end", ",", "radius", ",", "start", ",", "xmlElement", ")", "]", ")" ]
add a teardrop drill hole to the negatives .
train
false
50,170
def _send_splunk(event, index_override=None, sourcetype_override=None): opts = _get_options() logging.info('Options: {0}'.format(json.dumps(opts))) http_event_collector_key = opts['token'] http_event_collector_host = opts['indexer'] splunk_event = http_event_collector(http_event_collector_key, http_event_collector_host) payload = {} if (index_override is None): payload.update({'index': opts['index']}) else: payload.update({'index': index_override}) if (sourcetype_override is None): payload.update({'sourcetype': opts['sourcetype']}) else: payload.update({'sourcetype': sourcetype_override}) payload.update({'event': event}) logging.info('Payload: {0}'.format(json.dumps(payload))) splunk_event.sendEvent(payload) return True
[ "def", "_send_splunk", "(", "event", ",", "index_override", "=", "None", ",", "sourcetype_override", "=", "None", ")", ":", "opts", "=", "_get_options", "(", ")", "logging", ".", "info", "(", "'Options: {0}'", ".", "format", "(", "json", ".", "dumps", "(", "opts", ")", ")", ")", "http_event_collector_key", "=", "opts", "[", "'token'", "]", "http_event_collector_host", "=", "opts", "[", "'indexer'", "]", "splunk_event", "=", "http_event_collector", "(", "http_event_collector_key", ",", "http_event_collector_host", ")", "payload", "=", "{", "}", "if", "(", "index_override", "is", "None", ")", ":", "payload", ".", "update", "(", "{", "'index'", ":", "opts", "[", "'index'", "]", "}", ")", "else", ":", "payload", ".", "update", "(", "{", "'index'", ":", "index_override", "}", ")", "if", "(", "sourcetype_override", "is", "None", ")", ":", "payload", ".", "update", "(", "{", "'sourcetype'", ":", "opts", "[", "'sourcetype'", "]", "}", ")", "else", ":", "payload", ".", "update", "(", "{", "'sourcetype'", ":", "sourcetype_override", "}", ")", "payload", ".", "update", "(", "{", "'event'", ":", "event", "}", ")", "logging", ".", "info", "(", "'Payload: {0}'", ".", "format", "(", "json", ".", "dumps", "(", "payload", ")", ")", ")", "splunk_event", ".", "sendEvent", "(", "payload", ")", "return", "True" ]
send the results to splunk .
train
false
50,171
def get_code_expiry(): return (now() + EXPIRE_CODE_DELTA)
[ "def", "get_code_expiry", "(", ")", ":", "return", "(", "now", "(", ")", "+", "EXPIRE_CODE_DELTA", ")" ]
return a datetime object indicating when an authorization code should expire .
train
false
50,172
def elimination_technique_2(C): rels = C._reidemeister_relators rels.sort(reverse=True) gens = C._schreier_generators for i in range((len(gens) - 1), (-1), (-1)): rel = rels[i] for j in range((len(gens) - 1), (-1), (-1)): gen = gens[j] if (rel.generator_count(gen) == 1): k = rel.exponent_sum(gen) gen_index = rel.index((gen ** k)) bk = rel.subword((gen_index + 1), len(rel)) fw = rel.subword(0, gen_index) rep_by = ((bk * fw) ** ((-1) * k)) del rels[i] del gens[j] for l in range(len(rels)): rels[l] = rels[l].eliminate_word(gen, rep_by) break C._reidemeister_relators = rels C._schreier_generators = gens return (C._schreier_generators, C._reidemeister_relators)
[ "def", "elimination_technique_2", "(", "C", ")", ":", "rels", "=", "C", ".", "_reidemeister_relators", "rels", ".", "sort", "(", "reverse", "=", "True", ")", "gens", "=", "C", ".", "_schreier_generators", "for", "i", "in", "range", "(", "(", "len", "(", "gens", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "rel", "=", "rels", "[", "i", "]", "for", "j", "in", "range", "(", "(", "len", "(", "gens", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "gen", "=", "gens", "[", "j", "]", "if", "(", "rel", ".", "generator_count", "(", "gen", ")", "==", "1", ")", ":", "k", "=", "rel", ".", "exponent_sum", "(", "gen", ")", "gen_index", "=", "rel", ".", "index", "(", "(", "gen", "**", "k", ")", ")", "bk", "=", "rel", ".", "subword", "(", "(", "gen_index", "+", "1", ")", ",", "len", "(", "rel", ")", ")", "fw", "=", "rel", ".", "subword", "(", "0", ",", "gen_index", ")", "rep_by", "=", "(", "(", "bk", "*", "fw", ")", "**", "(", "(", "-", "1", ")", "*", "k", ")", ")", "del", "rels", "[", "i", "]", "del", "gens", "[", "j", "]", "for", "l", "in", "range", "(", "len", "(", "rels", ")", ")", ":", "rels", "[", "l", "]", "=", "rels", "[", "l", "]", ".", "eliminate_word", "(", "gen", ",", "rep_by", ")", "break", "C", ".", "_reidemeister_relators", "=", "rels", "C", ".", "_schreier_generators", "=", "gens", "return", "(", "C", ".", "_schreier_generators", ",", "C", ".", "_reidemeister_relators", ")" ]
this technique eliminates one generator at a time .
train
false
50,173
def _print_baremetal_node_interfaces(interfaces): utils.print_list(interfaces, ['ID', 'Datapath_ID', 'Port_No', 'Address'])
[ "def", "_print_baremetal_node_interfaces", "(", "interfaces", ")", ":", "utils", ".", "print_list", "(", "interfaces", ",", "[", "'ID'", ",", "'Datapath_ID'", ",", "'Port_No'", ",", "'Address'", "]", ")" ]
print interfaces of a baremetal node .
train
false
50,175
def clean_title(title): result = TitleParser.remove_words(title, (TitleParser.sounds + TitleParser.codecs)) result = clean_symbols(result) return result
[ "def", "clean_title", "(", "title", ")", ":", "result", "=", "TitleParser", ".", "remove_words", "(", "title", ",", "(", "TitleParser", ".", "sounds", "+", "TitleParser", ".", "codecs", ")", ")", "result", "=", "clean_symbols", "(", "result", ")", "return", "result" ]
removes common codec and sound keywords from the title , then cleans symbols .
train
false
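a hedged usage sketch for the clean_title record above; the exact output depends on flexget's TitleParser word lists, but codec/sound tokens such as x264 and DTS are stripped and separators normalized:

raw = 'Some.Movie.2011.x264.DTS'
print(clean_title(raw))   # roughly 'Some Movie 2011' after symbol cleanup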
50,176
def report_entire_month(month_date, start_hour=0, start_day=1): (year, month) = month_date.split('-') (year, month) = (int(year), int(month)) hours = xrange(start_hour, 24) for day in xrange(start_day, (calendar.monthrange(year, month)[1] + 1)): for hour in hours: hour_date = ('%04d-%02d-%02d-%02d' % (year, month, day, hour)) try: report_interval(hour_date, background=False) except ValueError: print ('Failed for %s' % hour_date) continue hours = xrange(24) day_date = ('%04d-%02d-%02d' % (year, month, day)) try: report_interval(day_date, background=False) except ValueError: print ('Failed for %s' % day_date) continue report_interval(month_date, background=False)
[ "def", "report_entire_month", "(", "month_date", ",", "start_hour", "=", "0", ",", "start_day", "=", "1", ")", ":", "(", "year", ",", "month", ")", "=", "month_date", ".", "split", "(", "'-'", ")", "(", "year", ",", "month", ")", "=", "(", "int", "(", "year", ")", ",", "int", "(", "month", ")", ")", "hours", "=", "xrange", "(", "start_hour", ",", "24", ")", "for", "day", "in", "xrange", "(", "start_day", ",", "(", "calendar", ".", "monthrange", "(", "year", ",", "month", ")", "[", "1", "]", "+", "1", ")", ")", ":", "for", "hour", "in", "hours", ":", "hour_date", "=", "(", "'%04d-%02d-%02d-%02d'", "%", "(", "year", ",", "month", ",", "day", ",", "hour", ")", ")", "try", ":", "report_interval", "(", "hour_date", ",", "background", "=", "False", ")", "except", "ValueError", ":", "print", "(", "'Failed for %s'", "%", "hour_date", ")", "continue", "hours", "=", "xrange", "(", "24", ")", "day_date", "=", "(", "'%04d-%02d-%02d'", "%", "(", "year", ",", "month", ",", "day", ")", ")", "try", ":", "report_interval", "(", "day_date", ",", "background", "=", "False", ")", "except", "ValueError", ":", "print", "(", "'Failed for %s'", "%", "day_date", ")", "continue", "report_interval", "(", "month_date", ",", "background", "=", "False", ")" ]
report all hours and days of the given month .
train
false
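usage sketches for the report_entire_month record above; dates use the 'YYYY-MM' form the parser expects:

report_entire_month('2014-06')                 # every hour and day of June 2014
report_entire_month('2014-06', start_day=15)   # resume a partial run from the 15th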
50,177
def discover_servers(subnet='255.255.255.255', timeout=1): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.sendto(QUERY_MAGIC, (subnet, UDP_DISCOVERY_PORT)) replies = [] while True: (rlist, dummy, dummy) = select.select([s], [], [], timeout) if (not rlist): break (data, (addr, port)) = s.recvfrom(MAX_DGRAM_SIZE) (rpyc_port,) = struct.unpack('<H', data) replies.append((addr, rpyc_port)) return list(set(replies))
[ "def", "discover_servers", "(", "subnet", "=", "'255.255.255.255'", ",", "timeout", "=", "1", ")", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "s", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_BROADCAST", ",", "1", ")", "s", ".", "sendto", "(", "QUERY_MAGIC", ",", "(", "subnet", ",", "UDP_DISCOVERY_PORT", ")", ")", "replies", "=", "[", "]", "while", "True", ":", "(", "rlist", ",", "dummy", ",", "dummy", ")", "=", "select", ".", "select", "(", "[", "s", "]", ",", "[", "]", ",", "[", "]", ",", "timeout", ")", "if", "(", "not", "rlist", ")", ":", "break", "(", "data", ",", "(", "addr", ",", "port", ")", ")", "=", "s", ".", "recvfrom", "(", "MAX_DGRAM_SIZE", ")", "(", "rpyc_port", ",", ")", "=", "struct", ".", "unpack", "(", "'<H'", ",", "data", ")", "replies", ".", "append", "(", "(", "addr", ",", "rpyc_port", ")", ")", "return", "list", "(", "set", "(", "replies", ")", ")" ]
broadcasts a query and returns a list of (addr, port) pairs of running servers .
train
false
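a minimal usage sketch for the discover_servers record above; the addresses shown are illustrative:

servers = discover_servers(timeout=2)   # broadcast, then wait up to 2s per reply
# e.g. [('192.168.1.17', 18812), ('192.168.1.22', 18861)]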
50,178
def _shell_wrap_inner(command, shell=True, sudo_prefix=None): if (shell and (not env.use_shell)): shell = False if (sudo_prefix is None): sudo_prefix = '' else: sudo_prefix += ' ' if shell: shell = (env.shell + ' ') command = ('"%s"' % command) else: shell = '' return ((sudo_prefix + shell) + command)
[ "def", "_shell_wrap_inner", "(", "command", ",", "shell", "=", "True", ",", "sudo_prefix", "=", "None", ")", ":", "if", "(", "shell", "and", "(", "not", "env", ".", "use_shell", ")", ")", ":", "shell", "=", "False", "if", "(", "sudo_prefix", "is", "None", ")", ":", "sudo_prefix", "=", "''", "else", ":", "sudo_prefix", "+=", "' '", "if", "shell", ":", "shell", "=", "(", "env", ".", "shell", "+", "' '", ")", "command", "=", "(", "'\"%s\"'", "%", "command", ")", "else", ":", "shell", "=", "''", "return", "(", "(", "sudo_prefix", "+", "shell", ")", "+", "command", ")" ]
conditionally wrap the given command in env.shell , while honoring the sudo prefix .
train
false
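a worked example for the _shell_wrap_inner record above, assuming fabric's defaults of env.use_shell = True and env.shell = '/bin/bash -l -c':

_shell_wrap_inner('ls /tmp', shell=True, sudo_prefix='sudo')
# -> 'sudo /bin/bash -l -c "ls /tmp"'
_shell_wrap_inner('ls /tmp', shell=False)
# -> 'ls /tmp'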
50,180
def _combine_ws(parts, whitespace): out = [] ws = '' for part in parts: if (not part): continue elif (part in whitespace): ws += part else: out.append((ws + part)) ws = '' if ws: out.append(ws) return out
[ "def", "_combine_ws", "(", "parts", ",", "whitespace", ")", ":", "out", "=", "[", "]", "ws", "=", "''", "for", "part", "in", "parts", ":", "if", "(", "not", "part", ")", ":", "continue", "elif", "(", "part", "in", "whitespace", ")", ":", "ws", "+=", "part", "else", ":", "out", ".", "append", "(", "(", "ws", "+", "part", ")", ")", "ws", "=", "''", "if", "ws", ":", "out", ".", "append", "(", "ws", ")", "return", "out" ]
combine whitespace in a list with the element following it .
train
false
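a worked example for the _combine_ws record above; whitespace parts are folded into the element that follows, and trailing whitespace survives as its own element:

_combine_ws(['foo', ' ', 'bar', '', ' '], ' \t')
# -> ['foo', ' bar', ' ']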
50,181
def poweroff(): return shutdown()
[ "def", "poweroff", "(", ")", ":", "return", "shutdown", "(", ")" ]
poweroff a running system .
train
false
50,182
def pretty_html(container, name, raw): root = container.parse_xhtml(raw) pretty_html_tree(container, root) return serialize(root, u'text/html')
[ "def", "pretty_html", "(", "container", ",", "name", ",", "raw", ")", ":", "root", "=", "container", ".", "parse_xhtml", "(", "raw", ")", "pretty_html_tree", "(", "container", ",", "root", ")", "return", "serialize", "(", "root", ",", "u'text/html'", ")" ]
pretty print the html represented as a string in raw .
train
false
50,184
def roc_auc_score(y_true, y_score, average='macro', sample_weight=None): def _binary_roc_auc_score(y_true, y_score, sample_weight=None): if (len(np.unique(y_true)) != 2): raise ValueError('Only one class present in y_true. ROC AUC score is not defined in that case.') (fpr, tpr, tresholds) = roc_curve(y_true, y_score, sample_weight=sample_weight) return auc(fpr, tpr, reorder=True) return _average_binary_score(_binary_roc_auc_score, y_true, y_score, average, sample_weight=sample_weight)
[ "def", "roc_auc_score", "(", "y_true", ",", "y_score", ",", "average", "=", "'macro'", ",", "sample_weight", "=", "None", ")", ":", "def", "_binary_roc_auc_score", "(", "y_true", ",", "y_score", ",", "sample_weight", "=", "None", ")", ":", "if", "(", "len", "(", "np", ".", "unique", "(", "y_true", ")", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "'Only one class present in y_true. ROC AUC score is not defined in that case.'", ")", "(", "fpr", ",", "tpr", ",", "tresholds", ")", "=", "roc_curve", "(", "y_true", ",", "y_score", ",", "sample_weight", "=", "sample_weight", ")", "return", "auc", "(", "fpr", ",", "tpr", ",", "reorder", "=", "True", ")", "return", "_average_binary_score", "(", "_binary_roc_auc_score", ",", "y_true", ",", "y_score", ",", "average", ",", "sample_weight", "=", "sample_weight", ")" ]
compute area under the curve from prediction scores . note: this implementation is restricted to the binary classification task or multilabel classification task in label indicator format .
train
false
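a worked example for the roc_auc_score record above, mirroring the classic binary case:

import numpy as np
y_true = np.array([0, 0, 1, 1])
y_scores = np.array([0.1, 0.4, 0.35, 0.8])
roc_auc_score(y_true, y_scores)   # -> 0.75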
50,185
def concatenate_epochs(epochs_list): return _finish_concat(*_concatenate_epochs(epochs_list))
[ "def", "concatenate_epochs", "(", "epochs_list", ")", ":", "return", "_finish_concat", "(", "*", "_concatenate_epochs", "(", "epochs_list", ")", ")" ]
concatenate a list of epochs into one epochs object .
train
false
50,186
def extend_system_path(paths): old_PATH = compat.getenv('PATH', '') paths.append(old_PATH) new_PATH = os.pathsep.join(paths) compat.setenv('PATH', new_PATH)
[ "def", "extend_system_path", "(", "paths", ")", ":", "old_PATH", "=", "compat", ".", "getenv", "(", "'PATH'", ",", "''", ")", "paths", ".", "append", "(", "old_PATH", ")", "new_PATH", "=", "os", ".", "pathsep", ".", "join", "(", "paths", ")", "compat", ".", "setenv", "(", "'PATH'", ",", "new_PATH", ")" ]
add new paths at the beginning of the path environment variable .
train
false
50,188
def arr_to_2d(arr, oned_as='row'): dims = matdims(arr, oned_as) if (len(dims) > 2): raise ValueError('Matlab 4 files cannot save arrays with more than 2 dimensions') return arr.reshape(dims)
[ "def", "arr_to_2d", "(", "arr", ",", "oned_as", "=", "'row'", ")", ":", "dims", "=", "matdims", "(", "arr", ",", "oned_as", ")", "if", "(", "len", "(", "dims", ")", ">", "2", ")", ":", "raise", "ValueError", "(", "'Matlab 4 files cannot save arrays with more than 2 dimensions'", ")", "return", "arr", ".", "reshape", "(", "dims", ")" ]
make arr exactly two dimensional ; raise valueerror if arr has more than 2 dimensions .
train
false
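worked examples for the arr_to_2d record above, following scipy's matdims conventions for 1-d input:

import numpy as np
arr_to_2d(np.array([1, 2, 3])).shape                     # (1, 3), oned_as='row'
arr_to_2d(np.array([1, 2, 3]), oned_as='column').shape   # (3, 1)
arr_to_2d(np.zeros((2, 3))).shape                        # (2, 3), already 2-d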
50,189
def is_palindrome(num): reverse = 0 num2 = num while (num2 > 0): reverse = ((10 * reverse) + (num2 % 10)) num2 //= 10 return (True if (reverse == num) else False)
[ "def", "is_palindrome", "(", "num", ")", ":", "reverse", "=", "0", "num2", "=", "num", "while", "(", "num2", ">", "0", ")", ":", "reverse", "=", "(", "(", "10", "*", "reverse", ")", "+", "(", "num2", "%", "10", ")", ")", "num2", "//=", "10", "return", "(", "True", "if", "(", "reverse", "==", "num", ")", "else", "False", ")" ]
returns true if integer is a palindrome .
train
false
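worked examples for the is_palindrome record above:

is_palindrome(121)   # True  (the reverse built digit by digit equals the input)
is_palindrome(123)   # False
is_palindrome(7)     # True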
50,190
def untar(tarball): if os.path.exists(sdist_root): shutil.rmtree(sdist_root) os.makedirs(sdist_root) with cd(sdist_root): run(['tar', '-xzf', tarball]) return glob.glob(pjoin(sdist_root, '*'))[0]
[ "def", "untar", "(", "tarball", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "sdist_root", ")", ":", "shutil", ".", "rmtree", "(", "sdist_root", ")", "os", ".", "makedirs", "(", "sdist_root", ")", "with", "cd", "(", "sdist_root", ")", ":", "run", "(", "[", "'tar'", ",", "'-xzf'", ",", "tarball", "]", ")", "return", "glob", ".", "glob", "(", "pjoin", "(", "sdist_root", ",", "'*'", ")", ")", "[", "0", "]" ]
extract sdist .
train
false
50,191
def vb_clone_vm(name=None, clone_from=None, timeout=10000, **kwargs): vbox = vb_get_box() log.info('Clone virtualbox machine %s from %s', name, clone_from) source_machine = vbox.findMachine(clone_from) groups = None os_type_id = 'Other' new_machine = vbox.createMachine(None, name, groups, os_type_id, None) progress = source_machine.cloneTo(new_machine, 0, None) progress.waitForCompletion(timeout) log.info('Finished cloning %s from %s', name, clone_from) vbox.registerMachine(new_machine) return vb_xpcom_to_attribute_dict(new_machine, 'IMachine')
[ "def", "vb_clone_vm", "(", "name", "=", "None", ",", "clone_from", "=", "None", ",", "timeout", "=", "10000", ",", "**", "kwargs", ")", ":", "vbox", "=", "vb_get_box", "(", ")", "log", ".", "info", "(", "'Clone virtualbox machine %s from %s'", ",", "name", ",", "clone_from", ")", "source_machine", "=", "vbox", ".", "findMachine", "(", "clone_from", ")", "groups", "=", "None", "os_type_id", "=", "'Other'", "new_machine", "=", "vbox", ".", "createMachine", "(", "None", ",", "name", ",", "groups", ",", "os_type_id", ",", "None", ")", "progress", "=", "source_machine", ".", "cloneTo", "(", "new_machine", ",", "0", ",", "None", ")", "progress", ".", "waitForCompletion", "(", "timeout", ")", "log", ".", "info", "(", "'Finished cloning %s from %s'", ",", "name", ",", "clone_from", ")", "vbox", ".", "registerMachine", "(", "new_machine", ")", "return", "vb_xpcom_to_attribute_dict", "(", "new_machine", ",", "'IMachine'", ")" ]
tells virtualbox to create a vm by cloning from an existing one .
train
true
50,192
def _prepare_env(subject, subjects_dir, requires_freesurfer, requires_mne): env = os.environ.copy() if (requires_freesurfer and (not os.environ.get('FREESURFER_HOME'))): raise RuntimeError('I cannot find freesurfer. The FREESURFER_HOME environment variable is not set.') if (requires_mne and (not os.environ.get('MNE_ROOT'))): raise RuntimeError('I cannot find the MNE command line tools. The MNE_ROOT environment variable is not set.') if (not isinstance(subject, string_types)): raise TypeError('The subject argument must be set') subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if (not op.isdir(subjects_dir)): raise RuntimeError(('Could not find the MRI data directory "%s"' % subjects_dir)) subject_dir = op.join(subjects_dir, subject) if (not op.isdir(subject_dir)): raise RuntimeError(('Could not find the subject data directory "%s"' % (subject_dir,))) env['SUBJECT'] = subject env['SUBJECTS_DIR'] = subjects_dir mri_dir = op.join(subject_dir, 'mri') bem_dir = op.join(subject_dir, 'bem') return (env, mri_dir, bem_dir)
[ "def", "_prepare_env", "(", "subject", ",", "subjects_dir", ",", "requires_freesurfer", ",", "requires_mne", ")", ":", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "(", "requires_freesurfer", "and", "(", "not", "os", ".", "environ", ".", "get", "(", "'FREESURFER_HOME'", ")", ")", ")", ":", "raise", "RuntimeError", "(", "'I cannot find freesurfer. The FREESURFER_HOME environment variable is not set.'", ")", "if", "(", "requires_mne", "and", "(", "not", "os", ".", "environ", ".", "get", "(", "'MNE_ROOT'", ")", ")", ")", ":", "raise", "RuntimeError", "(", "'I cannot find the MNE command line tools. The MNE_ROOT environment variable is not set.'", ")", "if", "(", "not", "isinstance", "(", "subject", ",", "string_types", ")", ")", ":", "raise", "TypeError", "(", "'The subject argument must be set'", ")", "subjects_dir", "=", "get_subjects_dir", "(", "subjects_dir", ",", "raise_error", "=", "True", ")", "if", "(", "not", "op", ".", "isdir", "(", "subjects_dir", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Could not find the MRI data directory \"%s\"'", "%", "subjects_dir", ")", ")", "subject_dir", "=", "op", ".", "join", "(", "subjects_dir", ",", "subject", ")", "if", "(", "not", "op", ".", "isdir", "(", "subject_dir", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Could not find the subject data directory \"%s\"'", "%", "(", "subject_dir", ",", ")", ")", ")", "env", "[", "'SUBJECT'", "]", "=", "subject", "env", "[", "'SUBJECTS_DIR'", "]", "=", "subjects_dir", "mri_dir", "=", "op", ".", "join", "(", "subject_dir", ",", "'mri'", ")", "bem_dir", "=", "op", ".", "join", "(", "subject_dir", ",", "'bem'", ")", "return", "(", "env", ",", "mri_dir", ",", "bem_dir", ")" ]
helper to prepare an env object for subprocess calls .
train
false
50,193
def write_module_styles(output_root): return _write_styles('.xmodule_display', output_root, _list_modules())
[ "def", "write_module_styles", "(", "output_root", ")", ":", "return", "_write_styles", "(", "'.xmodule_display'", ",", "output_root", ",", "_list_modules", "(", ")", ")" ]
write all registered xmodule css .
train
false
50,194
def list_remove(t): (owner, slug) = get_slug() user_name = raw_input(light_magenta('Give me name of the unlucky one: ', rl=True)) if user_name.startswith('@'): user_name = user_name[1:] try: t.lists.members.destroy(slug=slug, owner_screen_name=owner, screen_name=user_name) printNicely(green('Gone.')) except: debug_option() printNicely(light_magenta("I'm sorry we can not remove him/her."))
[ "def", "list_remove", "(", "t", ")", ":", "(", "owner", ",", "slug", ")", "=", "get_slug", "(", ")", "user_name", "=", "raw_input", "(", "light_magenta", "(", "'Give me name of the unlucky one: '", ",", "rl", "=", "True", ")", ")", "if", "user_name", ".", "startswith", "(", "'@'", ")", ":", "user_name", "=", "user_name", "[", "1", ":", "]", "try", ":", "t", ".", "lists", ".", "members", ".", "destroy", "(", "slug", "=", "slug", ",", "owner_screen_name", "=", "owner", ",", "screen_name", "=", "user_name", ")", "printNicely", "(", "green", "(", "'Gone.'", ")", ")", "except", ":", "debug_option", "(", ")", "printNicely", "(", "light_magenta", "(", "\"I'm sorry we can not remove him/her.\"", ")", ")" ]
remove specific user from a list .
train
false