Dataset schema (one row per example; ranges are min to max as reported by the dataset viewer):

  id_within_dataset     int64    1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 class
  is_duplicated         bool     2 classes
50,195
def makePersonalServerFactory(site):
    return pb.PBServerFactory(distrib.ResourcePublisher(site))
[ "def", "makePersonalServerFactory", "(", "site", ")", ":", "return", "pb", ".", "PBServerFactory", "(", "distrib", ".", "ResourcePublisher", "(", "site", ")", ")" ]
create and return a factory which will respond to i{distrib} requests against the given site .
train
false
50,196
def a_to_s(A):
    strs = []
    for row in A:
        x = ([None] * len(row))
        for (i, num) in enumerate(row):
            if (num != 0):
                x[i] = '@'
            else:
                x[i] = ' '
        strs.append(''.join(x))
    return '\n'.join(strs)
[ "def", "a_to_s", "(", "A", ")", ":", "strs", "=", "[", "]", "for", "row", "in", "A", ":", "x", "=", "(", "[", "None", "]", "*", "len", "(", "row", ")", ")", "for", "(", "i", ",", "num", ")", "in", "enumerate", "(", "row", ")", ":", "if", "(", "num", "!=", "0", ")", ":", "x", "[", "i", "]", "=", "'@'", "else", ":", "x", "[", "i", "]", "=", "' '", "strs", ".", "append", "(", "''", ".", "join", "(", "x", ")", ")", "return", "'\\n'", ".", "join", "(", "strs", ")" ]
renders a binary array as a string: '@' for nonzero entries and ' ' for zeros .
train
false
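A quick usage sketch for the a_to_s snippet above (an editor's illustration; assumes the definition is in scope, and the grid input is invented):

grid = [[0, 1, 0],
        [1, 1, 1]]
print(a_to_s(grid))
# a_to_s(grid) == ' @ \n@@@'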
50,197
def _get_blobs(im, rois):
    blobs = {'data': None, 'rois': None}
    (blobs['data'], im_scale_factors) = _get_image_blob(im)
    blobs['rois'] = _get_rois_blob(rois, im_scale_factors)
    return (blobs, im_scale_factors)
[ "def", "_get_blobs", "(", "im", ",", "rois", ")", ":", "blobs", "=", "{", "'data'", ":", "None", ",", "'rois'", ":", "None", "}", "(", "blobs", "[", "'data'", "]", ",", "im_scale_factors", ")", "=", "_get_image_blob", "(", "im", ")", "blobs", "[", "'rois'", "]", "=", "_get_rois_blob", "(", "rois", ",", "im_scale_factors", ")", "return", "(", "blobs", ",", "im_scale_factors", ")" ]
convert an image and rois within that image into network inputs .
train
false
50,199
def available_extensions(user=None, host=None, port=None, maintenance_db=None, password=None, runas=None):
    exts = []
    query = 'select * from pg_available_extensions();'
    ret = psql_query(query, user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas)
    exts = {}
    for row in ret:
        if (('default_version' in row) and ('name' in row)):
            exts[row['name']] = row
    return exts
[ "def", "available_extensions", "(", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "runas", "=", "None", ")", ":", "exts", "=", "[", "]", "query", "=", "'select * from pg_available_extensions();'", "ret", "=", "psql_query", "(", "query", ",", "user", "=", "user", ",", "host", "=", "host", ",", "port", "=", "port", ",", "maintenance_db", "=", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", "exts", "=", "{", "}", "for", "row", "in", "ret", ":", "if", "(", "(", "'default_version'", "in", "row", ")", "and", "(", "'name'", "in", "row", ")", ")", ":", "exts", "[", "row", "[", "'name'", "]", "]", "=", "row", "return", "exts" ]
list available postgresql extensions cli example: .
train
true
50,200
def test_box():
    (vertices, filled, outline) = create_box()
    assert_array_equal(np.arange(len(vertices)), np.unique(filled))
    assert_array_equal(np.arange(len(vertices)), np.unique(outline))
[ "def", "test_box", "(", ")", ":", "(", "vertices", ",", "filled", ",", "outline", ")", "=", "create_box", "(", ")", "assert_array_equal", "(", "np", ".", "arange", "(", "len", "(", "vertices", ")", ")", ",", "np", ".", "unique", "(", "filled", ")", ")", "assert_array_equal", "(", "np", ".", "arange", "(", "len", "(", "vertices", ")", ")", ",", "np", ".", "unique", "(", "outline", ")", ")" ]
test box function .
train
false
50,201
def init_save_key(key_size, key_dir, keyname='key-certbot.pem'):
    try:
        key_pem = make_key(key_size)
    except ValueError as err:
        logger.exception(err)
        raise err
    config = zope.component.getUtility(interfaces.IConfig)
    util.make_or_verify_dir(key_dir, 448, os.geteuid(), config.strict_permissions)
    (key_f, key_path) = util.unique_file(os.path.join(key_dir, keyname), 384, 'wb')
    with key_f:
        key_f.write(key_pem)
    logger.info('Generating key (%d bits): %s', key_size, key_path)
    return util.Key(key_path, key_pem)
[ "def", "init_save_key", "(", "key_size", ",", "key_dir", ",", "keyname", "=", "'key-certbot.pem'", ")", ":", "try", ":", "key_pem", "=", "make_key", "(", "key_size", ")", "except", "ValueError", "as", "err", ":", "logger", ".", "exception", "(", "err", ")", "raise", "err", "config", "=", "zope", ".", "component", ".", "getUtility", "(", "interfaces", ".", "IConfig", ")", "util", ".", "make_or_verify_dir", "(", "key_dir", ",", "448", ",", "os", ".", "geteuid", "(", ")", ",", "config", ".", "strict_permissions", ")", "(", "key_f", ",", "key_path", ")", "=", "util", ".", "unique_file", "(", "os", ".", "path", ".", "join", "(", "key_dir", ",", "keyname", ")", ",", "384", ",", "'wb'", ")", "with", "key_f", ":", "key_f", ".", "write", "(", "key_pem", ")", "logger", ".", "info", "(", "'Generating key (%d bits): %s'", ",", "key_size", ",", "key_path", ")", "return", "util", ".", "Key", "(", "key_path", ",", "key_pem", ")" ]
initializes and saves a privkey .
train
false
50,202
def concat_all(*args):
    if (len(args) != 1):
        return concat_all(list(args))
    if (not isinstance(args[0], (tuple, list))):
        return [args[0]]
    res = []
    for k in args[0]:
        res.extend(concat_all(k))
    return res
[ "def", "concat_all", "(", "*", "args", ")", ":", "if", "(", "len", "(", "args", ")", "!=", "1", ")", ":", "return", "concat_all", "(", "list", "(", "args", ")", ")", "if", "(", "not", "isinstance", "(", "args", "[", "0", "]", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "return", "[", "args", "[", "0", "]", "]", "res", "=", "[", "]", "for", "k", "in", "args", "[", "0", "]", ":", "res", ".", "extend", "(", "concat_all", "(", "k", ")", ")", "return", "res" ]
concat_all(*args) -> list: concats all the arguments together .
train
false
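A usage sketch for concat_all (editor's illustration; assumes the definition above): it flattens arbitrarily nested lists and tuples into one flat list.

flat = concat_all(1, [2, [3, 4]], (5,))
# flat == [1, 2, 3, 4, 5]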
50,203
@aborts
@with_settings(foo=set())
def test_require_key_exists_empty_set():
    require('foo')
[ "@", "aborts", "@", "with_settings", "(", "foo", "=", "set", "(", ")", ")", "def", "test_require_key_exists_empty_set", "(", ")", ":", "require", "(", "'foo'", ")" ]
when given a single existing key but the value is an empty set .
train
false
50,204
def _kwargs_to_nodes(kwargs):
    node = (kwargs.get('node') or kwargs.get('project'))
    parent = kwargs.get('parent')
    if node:
        return (parent, node)
    pid = kwargs.get('pid')
    nid = kwargs.get('nid')
    if (pid and nid):
        node = _load_node_or_fail(nid)
        parent = _load_node_or_fail(pid)
    elif (pid and (not nid)):
        node = _load_node_or_fail(pid)
    elif (nid and (not pid)):
        node = _load_node_or_fail(nid)
    elif ((not pid) and (not nid)):
        raise HTTPError(http.NOT_FOUND, data={'message_short': 'Node not found', 'message_long': 'No Node with that primary key could be found'})
    return (parent, node)
[ "def", "_kwargs_to_nodes", "(", "kwargs", ")", ":", "node", "=", "(", "kwargs", ".", "get", "(", "'node'", ")", "or", "kwargs", ".", "get", "(", "'project'", ")", ")", "parent", "=", "kwargs", ".", "get", "(", "'parent'", ")", "if", "node", ":", "return", "(", "parent", ",", "node", ")", "pid", "=", "kwargs", ".", "get", "(", "'pid'", ")", "nid", "=", "kwargs", ".", "get", "(", "'nid'", ")", "if", "(", "pid", "and", "nid", ")", ":", "node", "=", "_load_node_or_fail", "(", "nid", ")", "parent", "=", "_load_node_or_fail", "(", "pid", ")", "elif", "(", "pid", "and", "(", "not", "nid", ")", ")", ":", "node", "=", "_load_node_or_fail", "(", "pid", ")", "elif", "(", "nid", "and", "(", "not", "pid", ")", ")", ":", "node", "=", "_load_node_or_fail", "(", "nid", ")", "elif", "(", "(", "not", "pid", ")", "and", "(", "not", "nid", ")", ")", ":", "raise", "HTTPError", "(", "http", ".", "NOT_FOUND", ",", "data", "=", "{", "'message_short'", ":", "'Node not found'", ",", "'message_long'", ":", "'No Node with that primary key could be found'", "}", ")", "return", "(", "parent", ",", "node", ")" ]
retrieve project and component objects from keyword arguments .
train
false
50,205
def _get_dev_url(backend, instance=None):
    return ('http://%s' % _get_dev_hostname(backend, instance))
[ "def", "_get_dev_url", "(", "backend", ",", "instance", "=", "None", ")", ":", "return", "(", "'http://%s'", "%", "_get_dev_hostname", "(", "backend", ",", "instance", ")", ")" ]
returns the url of a backend [instance] in the dev_appserver .
train
false
50,206
def parse_row(input_row, parsers):
    return [(parser(value) if (parser is not None) else value) for (value, parser) in zip(input_row, parsers)]
[ "def", "parse_row", "(", "input_row", ",", "parsers", ")", ":", "return", "[", "(", "parser", "(", "value", ")", "if", "(", "parser", "is", "not", "None", ")", "else", "value", ")", "for", "(", "value", ",", "parser", ")", "in", "zip", "(", "input_row", ",", "parsers", ")", "]" ]
given a list of parsers apply the appropriate one to each element of the input_row .
train
false
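A usage sketch for parse_row (editor's illustration; the row data is invented): a None parser passes the value through unchanged.

row = ['2014-06-20', '90.91', 'AAPL']
parsed = parse_row(row, [None, float, None])
# parsed == ['2014-06-20', 90.91, 'AAPL']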
50,208
def extract_zip(source, remove=False, fatal=True):
    tempdir = tempfile.mkdtemp()
    zip_file = SafeUnzip(source)
    try:
        if zip_file.is_valid(fatal):
            zip_file.extract_to_dest(tempdir)
    except:
        rm_local_tmp_dir(tempdir)
        raise
    if remove:
        os.remove(source)
    return tempdir
[ "def", "extract_zip", "(", "source", ",", "remove", "=", "False", ",", "fatal", "=", "True", ")", ":", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "zip_file", "=", "SafeUnzip", "(", "source", ")", "try", ":", "if", "zip_file", ".", "is_valid", "(", "fatal", ")", ":", "zip_file", ".", "extract_to_dest", "(", "tempdir", ")", "except", ":", "rm_local_tmp_dir", "(", "tempdir", ")", "raise", "if", "remove", ":", "os", ".", "remove", "(", "source", ")", "return", "tempdir" ]
extracts the zip file .
train
false
50,209
def false_():
    return False
[ "def", "false_", "(", ")", ":", "return", "False" ]
always return false cli example: .
train
false
50,211
def ode_nth_linear_constant_coeff_variation_of_parameters(eq, func, order, match):
    gensol = ode_nth_linear_constant_coeff_homogeneous(eq, func, order, match, returns='both')
    match.update(gensol)
    return _solve_variation_of_parameters(eq, func, order, match)
[ "def", "ode_nth_linear_constant_coeff_variation_of_parameters", "(", "eq", ",", "func", ",", "order", ",", "match", ")", ":", "gensol", "=", "ode_nth_linear_constant_coeff_homogeneous", "(", "eq", ",", "func", ",", "order", ",", "match", ",", "returns", "=", "'both'", ")", "match", ".", "update", "(", "gensol", ")", "return", "_solve_variation_of_parameters", "(", "eq", ",", "func", ",", "order", ",", "match", ")" ]
solves an nth order linear differential equation with constant coefficients using the method of variation of parameters .
train
false
50,212
def db_del_user(username):
    user = get_object(User, username=username)
    if user:
        user.delete()
[ "def", "db_del_user", "(", "username", ")", ":", "user", "=", "get_object", "(", "User", ",", "username", "=", "username", ")", "if", "user", ":", "user", ".", "delete", "(", ")" ]
delete a user from database .
train
false
50,213
def _removeBackrefs(senderkey):
    try:
        signals = connections[senderkey]
    except KeyError:
        signals = None
    else:
        items = signals.items()

        def allReceivers():
            for (signal, set) in items:
                for item in set:
                    (yield item)
        for receiver in allReceivers():
            _killBackref(receiver, senderkey)
[ "def", "_removeBackrefs", "(", "senderkey", ")", ":", "try", ":", "signals", "=", "connections", "[", "senderkey", "]", "except", "KeyError", ":", "signals", "=", "None", "else", ":", "items", "=", "signals", ".", "items", "(", ")", "def", "allReceivers", "(", ")", ":", "for", "(", "signal", ",", "set", ")", "in", "items", ":", "for", "item", "in", "set", ":", "(", "yield", "item", ")", "for", "receiver", "in", "allReceivers", "(", ")", ":", "_killBackref", "(", "receiver", ",", "senderkey", ")" ]
remove all back-references to this senderkey .
train
true
50,214
def test_indentation():
    count = 40
    gotoutput = pretty.pretty(MyList(range(count)))
    expectedoutput = (('MyList(\n' + ',\n'.join(((' %d' % i) for i in range(count)))) + ')')
    nt.assert_equal(gotoutput, expectedoutput)
[ "def", "test_indentation", "(", ")", ":", "count", "=", "40", "gotoutput", "=", "pretty", ".", "pretty", "(", "MyList", "(", "range", "(", "count", ")", ")", ")", "expectedoutput", "=", "(", "(", "'MyList(\\n'", "+", "',\\n'", ".", "join", "(", "(", "(", "' %d'", "%", "i", ")", "for", "i", "in", "range", "(", "count", ")", ")", ")", ")", "+", "')'", ")", "nt", ".", "assert_equal", "(", "gotoutput", ",", "expectedoutput", ")" ]
test correct indentation in groups .
train
false
50,215
def runWithConfig(swarmConfig, options, outDir=None, outputLabel='default', permWorkDir=None, verbosity=1):
    global g_currentVerbosityLevel
    g_currentVerbosityLevel = verbosity
    if (outDir is None):
        outDir = os.getcwd()
    if (permWorkDir is None):
        permWorkDir = os.getcwd()
    _checkOverwrite(options, outDir)
    _generateExpFilesFromSwarmDescription(swarmConfig, outDir)
    options['expDescConfig'] = swarmConfig
    options['outputLabel'] = outputLabel
    options['outDir'] = outDir
    options['permWorkDir'] = permWorkDir
    runOptions = _injectDefaultOptions(options)
    _validateOptions(runOptions)
    return _runAction(runOptions)
[ "def", "runWithConfig", "(", "swarmConfig", ",", "options", ",", "outDir", "=", "None", ",", "outputLabel", "=", "'default'", ",", "permWorkDir", "=", "None", ",", "verbosity", "=", "1", ")", ":", "global", "g_currentVerbosityLevel", "g_currentVerbosityLevel", "=", "verbosity", "if", "(", "outDir", "is", "None", ")", ":", "outDir", "=", "os", ".", "getcwd", "(", ")", "if", "(", "permWorkDir", "is", "None", ")", ":", "permWorkDir", "=", "os", ".", "getcwd", "(", ")", "_checkOverwrite", "(", "options", ",", "outDir", ")", "_generateExpFilesFromSwarmDescription", "(", "swarmConfig", ",", "outDir", ")", "options", "[", "'expDescConfig'", "]", "=", "swarmConfig", "options", "[", "'outputLabel'", "]", "=", "outputLabel", "options", "[", "'outDir'", "]", "=", "outDir", "options", "[", "'permWorkDir'", "]", "=", "permWorkDir", "runOptions", "=", "_injectDefaultOptions", "(", "options", ")", "_validateOptions", "(", "runOptions", ")", "return", "_runAction", "(", "runOptions", ")" ]
starts a swarm .
train
true
50,217
def get_page_context(page, content, toc, meta, config):
    if config[u'site_url']:
        page.set_canonical_url(config[u'site_url'])
    if config[u'repo_url']:
        page.set_edit_url(config[u'repo_url'], config[u'edit_uri'])
    page.content = content
    page.toc = toc
    page.meta = meta
    if (page.is_homepage or (page.title is None)):
        page_title = None
    else:
        page_title = page.title
    if page.is_homepage:
        page_description = config[u'site_description']
    else:
        page_description = None
    return {u'page': page, u'page_title': page_title, u'page_description': page_description, u'content': content, u'toc': toc, u'meta': meta, u'canonical_url': page.canonical_url, u'current_page': page, u'previous_page': page.previous_page, u'next_page': page.next_page}
[ "def", "get_page_context", "(", "page", ",", "content", ",", "toc", ",", "meta", ",", "config", ")", ":", "if", "config", "[", "u'site_url'", "]", ":", "page", ".", "set_canonical_url", "(", "config", "[", "u'site_url'", "]", ")", "if", "config", "[", "u'repo_url'", "]", ":", "page", ".", "set_edit_url", "(", "config", "[", "u'repo_url'", "]", ",", "config", "[", "u'edit_uri'", "]", ")", "page", ".", "content", "=", "content", "page", ".", "toc", "=", "toc", "page", ".", "meta", "=", "meta", "if", "(", "page", ".", "is_homepage", "or", "(", "page", ".", "title", "is", "None", ")", ")", ":", "page_title", "=", "None", "else", ":", "page_title", "=", "page", ".", "title", "if", "page", ".", "is_homepage", ":", "page_description", "=", "config", "[", "u'site_description'", "]", "else", ":", "page_description", "=", "None", "return", "{", "u'page'", ":", "page", ",", "u'page_title'", ":", "page_title", ",", "u'page_description'", ":", "page_description", ",", "u'content'", ":", "content", ",", "u'toc'", ":", "toc", ",", "u'meta'", ":", "meta", ",", "u'canonical_url'", ":", "page", ".", "canonical_url", ",", "u'current_page'", ":", "page", ",", "u'previous_page'", ":", "page", ".", "previous_page", ",", "u'next_page'", ":", "page", ".", "next_page", "}" ]
generate the page context by extending the global context and adding page specific variables .
train
false
50,218
@RegisterWithArgChecks(name='neighbor.in_filter.set', req_args=[neighbors.IP_ADDRESS, neighbors.IN_FILTER])
def set_neighbor_in_filter(neigh_ip_address, filters):
    core = CORE_MANAGER.get_core_service()
    peer = core.peer_manager.get_by_addr(neigh_ip_address)
    peer.in_filters = filters
    return True
[ "@", "RegisterWithArgChecks", "(", "name", "=", "'neighbor.in_filter.set'", ",", "req_args", "=", "[", "neighbors", ".", "IP_ADDRESS", ",", "neighbors", ".", "IN_FILTER", "]", ")", "def", "set_neighbor_in_filter", "(", "neigh_ip_address", ",", "filters", ")", ":", "core", "=", "CORE_MANAGER", ".", "get_core_service", "(", ")", "peer", "=", "core", ".", "peer_manager", ".", "get_by_addr", "(", "neigh_ip_address", ")", "peer", ".", "in_filters", "=", "filters", "return", "True" ]
returns a neighbor in_filter for given ip address if exists .
train
false
50,220
def create_cache(size):
    if (size == 0):
        return None
    if (size < 0):
        return {}
    return LRUCache(size)
[ "def", "create_cache", "(", "size", ")", ":", "if", "(", "size", "==", "0", ")", ":", "return", "None", "if", "(", "size", "<", "0", ")", ":", "return", "{", "}", "return", "LRUCache", "(", "size", ")" ]
return the cache class for the given size .
train
false
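A sketch of the three branches (editor's illustration; LRUCache stands for whatever LRU implementation the surrounding module imports):

assert create_cache(0) is None    # caching disabled
assert create_cache(-1) == {}     # unbounded plain-dict cache
bounded = create_cache(400)       # LRUCache holding at most 400 entries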
50,221
def isLoopListIntersecting(loops):
    for loopIndex in xrange((len(loops) - 1)):
        loop = loops[loopIndex]
        if isLoopIntersectingLoops(loop, loops[(loopIndex + 1):]):
            return True
    return False
[ "def", "isLoopListIntersecting", "(", "loops", ")", ":", "for", "loopIndex", "in", "xrange", "(", "(", "len", "(", "loops", ")", "-", "1", ")", ")", ":", "loop", "=", "loops", "[", "loopIndex", "]", "if", "isLoopIntersectingLoops", "(", "loop", ",", "loops", "[", "(", "loopIndex", "+", "1", ")", ":", "]", ")", ":", "return", "True", "return", "False" ]
determine if a loop in the list is intersecting the other loops .
train
false
50,222
def test_multiseries_donut():
    chart = Pie(inner_radius=0.3, pretty_print=True)
    chart.title = 'Browser usage by version in February 2012 (in %)'
    chart.add('IE', [5.7, 10.2, 2.6, 1])
    chart.add('Firefox', [0.6, 16.8, 7.4, 2.2, 1.2, 1, 1, 1.1, 4.3, 1])
    chart.add('Chrome', [0.3, 0.9, 17.1, 15.3, 0.6, 0.5, 1.6])
    chart.add('Safari', [4.4, 0.1])
    chart.add('Opera', [0.1, 1.6, 0.1, 0.5])
    assert chart.render()
[ "def", "test_multiseries_donut", "(", ")", ":", "chart", "=", "Pie", "(", "inner_radius", "=", "0.3", ",", "pretty_print", "=", "True", ")", "chart", ".", "title", "=", "'Browser usage by version in February 2012 (in %)'", "chart", ".", "add", "(", "'IE'", ",", "[", "5.7", ",", "10.2", ",", "2.6", ",", "1", "]", ")", "chart", ".", "add", "(", "'Firefox'", ",", "[", "0.6", ",", "16.8", ",", "7.4", ",", "2.2", ",", "1.2", ",", "1", ",", "1", ",", "1.1", ",", "4.3", ",", "1", "]", ")", "chart", ".", "add", "(", "'Chrome'", ",", "[", "0.3", ",", "0.9", ",", "17.1", ",", "15.3", ",", "0.6", ",", "0.5", ",", "1.6", "]", ")", "chart", ".", "add", "(", "'Safari'", ",", "[", "4.4", ",", "0.1", "]", ")", "chart", ".", "add", "(", "'Opera'", ",", "[", "0.1", ",", "1.6", ",", "0.1", ",", "0.5", "]", ")", "assert", "chart", ".", "render", "(", ")" ]
test a donut pie chart with multiserie .
train
false
50,223
def from_bytes(klass, data):
    obj = klass()
    b = TMemoryBuffer(data)
    p = TBinaryProtocol(b)
    obj.read(p)
    return obj
[ "def", "from_bytes", "(", "klass", ",", "data", ")", ":", "obj", "=", "klass", "(", ")", "b", "=", "TMemoryBuffer", "(", "data", ")", "p", "=", "TBinaryProtocol", "(", "b", ")", "obj", ".", "read", "(", "p", ")", "return", "obj" ]
returns thrift object from a string .
train
false
50,226
def render_template_multiple_times(request):
    return HttpResponse((render_to_string('base.html') + render_to_string('base.html')))
[ "def", "render_template_multiple_times", "(", "request", ")", ":", "return", "HttpResponse", "(", "(", "render_to_string", "(", "'base.html'", ")", "+", "render_to_string", "(", "'base.html'", ")", ")", ")" ]
a view that renders a template multiple times .
train
false
50,228
def remote(expr):
    return _annotate_columns(expression._clause_element_as_expr(expr), {'remote': True})
[ "def", "remote", "(", "expr", ")", ":", "return", "_annotate_columns", "(", "expression", ".", "_clause_element_as_expr", "(", "expr", ")", ",", "{", "'remote'", ":", "True", "}", ")" ]
annotate a portion of a primaryjoin expression with a remote annotation .
train
false
50,229
def get_models(app_labels):
    from django.db.models import get_app, get_apps, get_model
    from django.db.models import get_models as get_all_models
    from django.contrib.contenttypes.models import ContentType
    EXCLUDED_MODELS = (ContentType,)
    models = []
    if (not app_labels):
        for app in get_apps():
            models += [m for m in get_all_models(app) if (m not in EXCLUDED_MODELS)]
    for app_label in app_labels:
        if ('.' in app_label):
            (app_label, model_name) = app_label.split('.', 1)
            models.append(get_model(app_label, model_name))
        else:
            models += [m for m in get_all_models(get_app(app_label)) if (m not in EXCLUDED_MODELS)]
    return models
[ "def", "get_models", "(", "app_labels", ")", ":", "from", "django", ".", "db", ".", "models", "import", "get_app", ",", "get_apps", ",", "get_model", "from", "django", ".", "db", ".", "models", "import", "get_models", "as", "get_all_models", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "EXCLUDED_MODELS", "=", "(", "ContentType", ",", ")", "models", "=", "[", "]", "if", "(", "not", "app_labels", ")", ":", "for", "app", "in", "get_apps", "(", ")", ":", "models", "+=", "[", "m", "for", "m", "in", "get_all_models", "(", "app", ")", "if", "(", "m", "not", "in", "EXCLUDED_MODELS", ")", "]", "for", "app_label", "in", "app_labels", ":", "if", "(", "'.'", "in", "app_label", ")", ":", "(", "app_label", ",", "model_name", ")", "=", "app_label", ".", "split", "(", "'.'", ",", "1", ")", "models", ".", "append", "(", "get_model", "(", "app_label", ",", "model_name", ")", ")", "else", ":", "models", "+=", "[", "m", "for", "m", "in", "get_all_models", "(", "get_app", "(", "app_label", ")", ")", "if", "(", "m", "not", "in", "EXCLUDED_MODELS", ")", "]", "return", "models" ]
gets a list of models for the given app labels .
train
false
50,230
def yaml_dquote(text):
    with io.StringIO() as ostream:
        yemitter = yaml.emitter.Emitter(ostream, width=six.MAXSIZE)
        yemitter.write_double_quoted(six.text_type(text))
        return ostream.getvalue()
[ "def", "yaml_dquote", "(", "text", ")", ":", "with", "io", ".", "StringIO", "(", ")", "as", "ostream", ":", "yemitter", "=", "yaml", ".", "emitter", ".", "Emitter", "(", "ostream", ",", "width", "=", "six", ".", "MAXSIZE", ")", "yemitter", ".", "write_double_quoted", "(", "six", ".", "text_type", "(", "text", ")", ")", "return", "ostream", ".", "getvalue", "(", ")" ]
make text into a double-quoted yaml string with correct escaping for special characters .
train
true
50,231
def safe_quote(s):
    return (urllib.quote(s) if (s == urllib.unquote(s)) else s)
[ "def", "safe_quote", "(", "s", ")", ":", "return", "(", "urllib", ".", "quote", "(", "s", ")", "if", "(", "s", "==", "urllib", ".", "unquote", "(", "s", ")", ")", "else", "s", ")" ]
url-encode strings that are not already url-encoded .
train
false
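A usage sketch (editor's illustration; this snippet targets python 2, where quote and unquote live in the urllib module):

import urllib
safe_quote('foo bar')     # -> 'foo%20bar' (not yet encoded, so quote it)
safe_quote('foo%20bar')   # -> 'foo%20bar' (already encoded, returned as-is)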
50,232
def slugify_uniquely(value, model, slugfield='slug'):
    suffix = 0
    potential = base = django_slugify(unidecode(value))
    if (len(potential) == 0):
        potential = 'null'
    while True:
        if suffix:
            potential = '-'.join([base, str(suffix)])
        if (not model.objects.filter(**{slugfield: potential}).exists()):
            return potential
        suffix += 1
[ "def", "slugify_uniquely", "(", "value", ",", "model", ",", "slugfield", "=", "'slug'", ")", ":", "suffix", "=", "0", "potential", "=", "base", "=", "django_slugify", "(", "unidecode", "(", "value", ")", ")", "if", "(", "len", "(", "potential", ")", "==", "0", ")", ":", "potential", "=", "'null'", "while", "True", ":", "if", "suffix", ":", "potential", "=", "'-'", ".", "join", "(", "[", "base", ",", "str", "(", "suffix", ")", "]", ")", "if", "(", "not", "model", ".", "objects", ".", "filter", "(", "**", "{", "slugfield", ":", "potential", "}", ")", ".", "exists", "(", ")", ")", ":", "return", "potential", "suffix", "+=", "1" ]
returns a slug on a name which is unique within a models table .
train
false
50,233
def no_auth_required(fn):
    fn._no_auth_required = True
    return fn
[ "def", "no_auth_required", "(", "fn", ")", ":", "fn", ".", "_no_auth_required", "=", "True", "return", "fn" ]
authentication is not required to use the given method .
train
false
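A usage sketch for the marker decorator (editor's illustration): a dispatcher would typically check the flag with getattr(handler, '_no_auth_required', False).

@no_auth_required
def ping():
    return 'pong'

assert ping._no_auth_required is True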
50,236
def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None):
    if (s > n):
        raise nx.NetworkXError('s must be <= n')
    assigned = 0
    sizes = []
    while True:
        size = int(random.normalvariate(s, ((float(s) / v) + 0.5)))
        if (size < 1):
            continue
        if ((assigned + size) >= n):
            sizes.append((n - assigned))
            break
        assigned += size
        sizes.append(size)
    return random_partition_graph(sizes, p_in, p_out, directed, seed)
[ "def", "gaussian_random_partition_graph", "(", "n", ",", "s", ",", "v", ",", "p_in", ",", "p_out", ",", "directed", "=", "False", ",", "seed", "=", "None", ")", ":", "if", "(", "s", ">", "n", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'s must be <= n'", ")", "assigned", "=", "0", "sizes", "=", "[", "]", "while", "True", ":", "size", "=", "int", "(", "random", ".", "normalvariate", "(", "s", ",", "(", "(", "float", "(", "s", ")", "/", "v", ")", "+", "0.5", ")", ")", ")", "if", "(", "size", "<", "1", ")", ":", "continue", "if", "(", "(", "assigned", "+", "size", ")", ">=", "n", ")", ":", "sizes", ".", "append", "(", "(", "n", "-", "assigned", ")", ")", "break", "assigned", "+=", "size", "sizes", ".", "append", "(", "size", ")", "return", "random_partition_graph", "(", "sizes", ",", "p_in", ",", "p_out", ",", "directed", ",", "seed", ")" ]
generate a gaussian random partition graph .
train
false
50,239
def Newline():
    return Leaf(token.NEWLINE, '\n')
[ "def", "Newline", "(", ")", ":", "return", "Leaf", "(", "token", ".", "NEWLINE", ",", "'\\n'", ")" ]
a newline literal .
train
false
50,240
@step((CHECK_PREFIX + 'I have sent an email with the following in the body:'))
def mail_sent_content_multiline(step):
    return mail_sent_content(step, step.multiline, 'body')
[ "@", "step", "(", "(", "CHECK_PREFIX", "+", "'I have sent an email with the following in the body:'", ")", ")", "def", "mail_sent_content_multiline", "(", "step", ")", ":", "return", "mail_sent_content", "(", "step", ",", "step", ".", "multiline", ",", "'body'", ")" ]
i have sent an email with the following in the body: name: mr .
train
false
50,241
def award_program_certificate(client, username, program_uuid):
    client.credentials.post({'username': username, 'credential': {'program_uuid': program_uuid}, 'attributes': []})
[ "def", "award_program_certificate", "(", "client", ",", "username", ",", "program_uuid", ")", ":", "client", ".", "credentials", ".", "post", "(", "{", "'username'", ":", "username", ",", "'credential'", ":", "{", "'program_uuid'", ":", "program_uuid", "}", ",", "'attributes'", ":", "[", "]", "}", ")" ]
issue a new certificate of completion to the given student for the given program .
train
false
50,242
def available_content_databases():
    pattern = re.compile('content_(?P<channel>[^_]+)_(?P<language>[^_]+).sqlite')
    for filename in glob.iglob(django_settings.DEFAULT_DATABASE_DIR):
        match = pattern.search(filename)
        if match:
            (yield match.group(1, 2))
[ "def", "available_content_databases", "(", ")", ":", "pattern", "=", "re", ".", "compile", "(", "'content_(?P<channel>[^_]+)_(?P<language>[^_]+).sqlite'", ")", "for", "filename", "in", "glob", ".", "iglob", "(", "django_settings", ".", "DEFAULT_DATABASE_DIR", ")", ":", "match", "=", "pattern", ".", "search", "(", "filename", ")", "if", "match", ":", "(", "yield", "match", ".", "group", "(", "1", ",", "2", ")", ")" ]
generator to return the channel and language for every content database that exists in the system .
train
false
50,244
def init(mpstate):
    return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
50,245
def filter_roidb(roidb):
    def is_valid(entry):
        overlaps = entry['max_overlaps']
        fg_inds = np.where((overlaps >= cfg.TRAIN.FG_THRESH))[0]
        bg_inds = np.where(((overlaps < cfg.TRAIN.BG_THRESH_HI) & (overlaps >= cfg.TRAIN.BG_THRESH_LO)))[0]
        valid = ((len(fg_inds) > 0) or (len(bg_inds) > 0))
        return valid
    num = len(roidb)
    filtered_roidb = [entry for entry in roidb if is_valid(entry)]
    num_after = len(filtered_roidb)
    print 'Filtered {} roidb entries: {} -> {}'.format((num - num_after), num, num_after)
    return filtered_roidb
[ "def", "filter_roidb", "(", "roidb", ")", ":", "def", "is_valid", "(", "entry", ")", ":", "overlaps", "=", "entry", "[", "'max_overlaps'", "]", "fg_inds", "=", "np", ".", "where", "(", "(", "overlaps", ">=", "cfg", ".", "TRAIN", ".", "FG_THRESH", ")", ")", "[", "0", "]", "bg_inds", "=", "np", ".", "where", "(", "(", "(", "overlaps", "<", "cfg", ".", "TRAIN", ".", "BG_THRESH_HI", ")", "&", "(", "overlaps", ">=", "cfg", ".", "TRAIN", ".", "BG_THRESH_LO", ")", ")", ")", "[", "0", "]", "valid", "=", "(", "(", "len", "(", "fg_inds", ")", ">", "0", ")", "or", "(", "len", "(", "bg_inds", ")", ">", "0", ")", ")", "return", "valid", "num", "=", "len", "(", "roidb", ")", "filtered_roidb", "=", "[", "entry", "for", "entry", "in", "roidb", "if", "is_valid", "(", "entry", ")", "]", "num_after", "=", "len", "(", "filtered_roidb", ")", "print", "'Filtered {} roidb entries: {} -> {}'", ".", "format", "(", "(", "num", "-", "num_after", ")", ",", "num", ",", "num_after", ")", "return", "filtered_roidb" ]
remove roidb entries that have no usable rois .
train
false
50,246
def logical_xor(image1, image2):
    image1.load()
    image2.load()
    return image1._new(image1.im.chop_xor(image2.im))
[ "def", "logical_xor", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_xor", "(", "image2", ".", "im", ")", ")" ]
logical xor between two images .
train
false
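A usage sketch (editor's illustration, assuming Pillow/PIL is available; for mode '1' images, getdata() yields 0 or 255):

from PIL import Image
white = Image.new('1', (4, 4), 1)
black = Image.new('1', (4, 4), 0)
out = logical_xor(white, black)   # white wherever exactly one input is white
assert list(out.getdata()) == [255] * 16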
50,247
def case_insensitive_update(dct1, dct2):
    lowkeys = dict([(key.lower(), key) for key in dct1])
    for (key, val) in dct2.items():
        d1_key = lowkeys.get(key.lower(), key)
        dct1[d1_key] = val
[ "def", "case_insensitive_update", "(", "dct1", ",", "dct2", ")", ":", "lowkeys", "=", "dict", "(", "[", "(", "key", ".", "lower", "(", ")", ",", "key", ")", "for", "key", "in", "dct1", "]", ")", "for", "(", "key", ",", "val", ")", "in", "dct2", ".", "items", "(", ")", ":", "d1_key", "=", "lowkeys", ".", "get", "(", "key", ".", "lower", "(", ")", ",", "key", ")", "dct1", "[", "d1_key", "]", "=", "val" ]
given two dicts, update the first in place with the second, matching keys case-insensitively .
train
true
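A usage sketch (editor's illustration): the first dict keeps its original key spellings while taking the second dict's values.

headers = {'Content-Type': 'text/html'}
case_insensitive_update(headers, {'content-type': 'application/json', 'X-Trace': '1'})
# headers == {'Content-Type': 'application/json', 'X-Trace': '1'}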
50,248
def get_threads(exploration_id):
    thread_models = feedback_models.FeedbackThreadModel.get_threads(exploration_id)
    return [_get_thread_from_model(model) for model in thread_models]
[ "def", "get_threads", "(", "exploration_id", ")", ":", "thread_models", "=", "feedback_models", ".", "FeedbackThreadModel", ".", "get_threads", "(", "exploration_id", ")", "return", "[", "_get_thread_from_model", "(", "model", ")", "for", "model", "in", "thread_models", "]" ]
fetches all the threads for the given exploration id .
train
false
50,249
@dec.skip_without('sqlite3')
def test_macro_run():
    ip = get_ipython()
    ip.history_manager.reset()
    cmds = ['a=10', 'a+=1', py3compat.doctest_refactor_print('print a'), '%macro test 2-3']
    for cmd in cmds:
        ip.run_cell(cmd, store_history=True)
    nt.assert_equal(ip.user_ns['test'].value, py3compat.doctest_refactor_print('a+=1\nprint a\n'))
    with tt.AssertPrints('12'):
        ip.run_cell('test')
    with tt.AssertPrints('13'):
        ip.run_cell('test')
[ "@", "dec", ".", "skip_without", "(", "'sqlite3'", ")", "def", "test_macro_run", "(", ")", ":", "ip", "=", "get_ipython", "(", ")", "ip", ".", "history_manager", ".", "reset", "(", ")", "cmds", "=", "[", "'a=10'", ",", "'a+=1'", ",", "py3compat", ".", "doctest_refactor_print", "(", "'print a'", ")", ",", "'%macro test 2-3'", "]", "for", "cmd", "in", "cmds", ":", "ip", ".", "run_cell", "(", "cmd", ",", "store_history", "=", "True", ")", "nt", ".", "assert_equal", "(", "ip", ".", "user_ns", "[", "'test'", "]", ".", "value", ",", "py3compat", ".", "doctest_refactor_print", "(", "'a+=1\\nprint a\\n'", ")", ")", "with", "tt", ".", "AssertPrints", "(", "'12'", ")", ":", "ip", ".", "run_cell", "(", "'test'", ")", "with", "tt", ".", "AssertPrints", "(", "'13'", ")", ":", "ip", ".", "run_cell", "(", "'test'", ")" ]
test that we can run a multi-line macro successfully .
train
false
50,250
def set_style(style=None, rc=None):
    style_object = axes_style(style, rc)
    mpl.rcParams.update(style_object)
[ "def", "set_style", "(", "style", "=", "None", ",", "rc", "=", "None", ")", ":", "style_object", "=", "axes_style", "(", "style", ",", "rc", ")", "mpl", ".", "rcParams", ".", "update", "(", "style_object", ")" ]
set the aesthetic style of the plots .
train
true
50,251
def subtractXIntersectionsTable(subtractFromTable, subtractTable):
    subtractFromTableKeys = subtractFromTable.keys()
    subtractFromTableKeys.sort()
    for subtractFromTableKey in subtractFromTableKeys:
        xIntersectionIndexList = []
        addXIntersectionIndexesFromXIntersections((-1), xIntersectionIndexList, subtractFromTable[subtractFromTableKey])
        if (subtractFromTableKey in subtractTable):
            addXIntersectionIndexesFromXIntersections(0, xIntersectionIndexList, subtractTable[subtractFromTableKey])
        xIntersections = getXIntersectionsFromIntersections(xIntersectionIndexList)
        if (len(xIntersections) > 0):
            subtractFromTable[subtractFromTableKey] = xIntersections
        else:
            del subtractFromTable[subtractFromTableKey]
[ "def", "subtractXIntersectionsTable", "(", "subtractFromTable", ",", "subtractTable", ")", ":", "subtractFromTableKeys", "=", "subtractFromTable", ".", "keys", "(", ")", "subtractFromTableKeys", ".", "sort", "(", ")", "for", "subtractFromTableKey", "in", "subtractFromTableKeys", ":", "xIntersectionIndexList", "=", "[", "]", "addXIntersectionIndexesFromXIntersections", "(", "(", "-", "1", ")", ",", "xIntersectionIndexList", ",", "subtractFromTable", "[", "subtractFromTableKey", "]", ")", "if", "(", "subtractFromTableKey", "in", "subtractTable", ")", ":", "addXIntersectionIndexesFromXIntersections", "(", "0", ",", "xIntersectionIndexList", ",", "subtractTable", "[", "subtractFromTableKey", "]", ")", "xIntersections", "=", "getXIntersectionsFromIntersections", "(", "xIntersectionIndexList", ")", "if", "(", "len", "(", "xIntersections", ")", ">", "0", ")", ":", "subtractFromTable", "[", "subtractFromTableKey", "]", "=", "xIntersections", "else", ":", "del", "subtractFromTable", "[", "subtractFromTableKey", "]" ]
subtract the subtracttable from the subtractfromtable .
train
false
50,252
def ROCP(ds, count, timeperiod=(- (2 ** 31))):
    return call_talib_with_ds(ds, count, talib.ROCP, timeperiod)
[ "def", "ROCP", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "ROCP", ",", "timeperiod", ")" ]
rate of change percentage: (price - prevprice) / prevprice .
train
false
50,253
@register.filter
def app_label(obj):
    try:
        return lower(obj._meta.object_name)
    except AttributeError:
        return ''
[ "@", "register", ".", "filter", "def", "app_label", "(", "obj", ")", ":", "try", ":", "return", "lower", "(", "obj", ".", "_meta", ".", "object_name", ")", "except", "AttributeError", ":", "return", "''" ]
returns an objects app label .
train
false
50,254
def PackLocation(location):
    packed = struct.pack('>ddd', *[float(x) for x in location])
    return base64hex.B64HexEncode(packed)
[ "def", "PackLocation", "(", "location", ")", ":", "packed", "=", "struct", ".", "pack", "(", "'>ddd'", ",", "*", "[", "float", "(", "x", ")", "for", "x", "in", "location", "]", ")", "return", "base64hex", ".", "B64HexEncode", "(", "packed", ")" ]
converts location named tuple into a packed, base64hex-encoded string .
train
false
50,255
def _is_epsilon_nash(x, g, epsilon, indptr=None):
    if (indptr is None):
        indptr = np.empty((g.N + 1), dtype=int)
        indptr[0] = 0
        indptr[1:] = np.cumsum(g.nums_actions)
    action_profile = _get_action_profile(x, indptr)
    return g.is_nash(action_profile, tol=epsilon)
[ "def", "_is_epsilon_nash", "(", "x", ",", "g", ",", "epsilon", ",", "indptr", "=", "None", ")", ":", "if", "(", "indptr", "is", "None", ")", ":", "indptr", "=", "np", ".", "empty", "(", "(", "g", ".", "N", "+", "1", ")", ",", "dtype", "=", "int", ")", "indptr", "[", "0", "]", "=", "0", "indptr", "[", "1", ":", "]", "=", "np", ".", "cumsum", "(", "g", ".", "nums_actions", ")", "action_profile", "=", "_get_action_profile", "(", "x", ",", "indptr", ")", "return", "g", ".", "is_nash", "(", "action_profile", ",", "tol", "=", "epsilon", ")" ]
determine whether x is an epsilon-nash equilibrium of g .
train
true
50,257
def _property_name_to_value(entities):
    return {key: values[0] for (key, values) in _property_name_to_values(entities).iteritems()}
[ "def", "_property_name_to_value", "(", "entities", ")", ":", "return", "{", "key", ":", "values", "[", "0", "]", "for", "(", "key", ",", "values", ")", "in", "_property_name_to_values", "(", "entities", ")", ".", "iteritems", "(", ")", "}" ]
returns a mapping of entity property names to a sample value .
train
false
50,259
def event2json(event):
    return jdumps(dict(type=event.type, send_event=event.send_event, time=event.time, root=str(event.root), window=str(event.window), same_screen=event.same_screen, child=str(event.child), root_x=event.root_x, root_y=event.root_y, event_x=event.event_x, event_y=event.event_y, state=event.state, detail=event.detail))
[ "def", "event2json", "(", "event", ")", ":", "return", "jdumps", "(", "dict", "(", "type", "=", "event", ".", "type", ",", "send_event", "=", "event", ".", "send_event", ",", "time", "=", "event", ".", "time", ",", "root", "=", "str", "(", "event", ".", "root", ")", ",", "window", "=", "str", "(", "event", ".", "window", ")", ",", "same_screen", "=", "event", ".", "same_screen", ",", "child", "=", "str", "(", "event", ".", "child", ")", ",", "root_x", "=", "event", ".", "root_x", ",", "root_y", "=", "event", ".", "root_y", ",", "event_x", "=", "event", ".", "event_x", ",", "event_y", "=", "event", ".", "event_y", ",", "state", "=", "event", ".", "state", ",", "detail", "=", "event", ".", "detail", ")", ")" ]
serializes a keybuttonpointerevent (including the server x time when the event was generated) to a json string .
train
false
50,260
def dummy_product(d1, d2, method='full'):
    if (method == 'full'):
        dd = (d1[:, :, None] * d2[:, None, :]).reshape(d1.shape[0], (-1))
    elif (method == 'drop-last'):
        d12rl = dummy_product(d1[:, :(-1)], d2[:, :(-1)])
        dd = np.column_stack((np.ones(d1.shape[0], int), d1[:, :(-1)], d2[:, :(-1)], d12rl))
    elif (method == 'drop-first'):
        d12r = dummy_product(d1[:, 1:], d2[:, 1:])
        dd = np.column_stack((np.ones(d1.shape[0], int), d1[:, 1:], d2[:, 1:], d12r))
    else:
        raise ValueError('method not recognized')
    return dd
[ "def", "dummy_product", "(", "d1", ",", "d2", ",", "method", "=", "'full'", ")", ":", "if", "(", "method", "==", "'full'", ")", ":", "dd", "=", "(", "d1", "[", ":", ",", ":", ",", "None", "]", "*", "d2", "[", ":", ",", "None", ",", ":", "]", ")", ".", "reshape", "(", "d1", ".", "shape", "[", "0", "]", ",", "(", "-", "1", ")", ")", "elif", "(", "method", "==", "'drop-last'", ")", ":", "d12rl", "=", "dummy_product", "(", "d1", "[", ":", ",", ":", "(", "-", "1", ")", "]", ",", "d2", "[", ":", ",", ":", "(", "-", "1", ")", "]", ")", "dd", "=", "np", ".", "column_stack", "(", "(", "np", ".", "ones", "(", "d1", ".", "shape", "[", "0", "]", ",", "int", ")", ",", "d1", "[", ":", ",", ":", "(", "-", "1", ")", "]", ",", "d2", "[", ":", ",", ":", "(", "-", "1", ")", "]", ",", "d12rl", ")", ")", "elif", "(", "method", "==", "'drop-first'", ")", ":", "d12r", "=", "dummy_product", "(", "d1", "[", ":", ",", "1", ":", "]", ",", "d2", "[", ":", ",", "1", ":", "]", ")", "dd", "=", "np", ".", "column_stack", "(", "(", "np", ".", "ones", "(", "d1", ".", "shape", "[", "0", "]", ",", "int", ")", ",", "d1", "[", ":", ",", "1", ":", "]", ",", "d2", "[", ":", ",", "1", ":", "]", ",", "d12r", ")", ")", "else", ":", "raise", "ValueError", "(", "'method not recognized'", ")", "return", "dd" ]
dummy variable from the product of two dummy variables .
train
false
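A shape check for the 'full' method (editor's illustration; d1 and d2 are invented one-hot matrices for two 2-level factors over 3 observations):

import numpy as np
d1 = np.array([[1, 0], [0, 1], [1, 0]])
d2 = np.array([[1, 0], [1, 0], [0, 1]])
dd = dummy_product(d1, d2)
# dd.shape == (3, 4): one column per (level of d1, level of d2) pair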
50,262
def _iter_contrasts(n_subjects, factor_levels, effect_picks):
    from scipy.signal import detrend
    sc = []
    n_factors = len(factor_levels)
    for n_levels in factor_levels:
        sc.append([np.ones([n_levels, 1]), detrend(np.eye(n_levels), type='constant')])
    for this_effect in effect_picks:
        contrast_idx = _get_contrast_indices((this_effect + 1), n_factors)
        c_ = sc[0][contrast_idx[(n_factors - 1)]]
        for i_contrast in range(1, n_factors):
            this_contrast = contrast_idx[((n_factors - 1) - i_contrast)]
            c_ = np.kron(c_, sc[i_contrast][this_contrast])
        df1 = np.linalg.matrix_rank(c_)
        df2 = (df1 * (n_subjects - 1))
        (yield (c_, df1, df2))
[ "def", "_iter_contrasts", "(", "n_subjects", ",", "factor_levels", ",", "effect_picks", ")", ":", "from", "scipy", ".", "signal", "import", "detrend", "sc", "=", "[", "]", "n_factors", "=", "len", "(", "factor_levels", ")", "for", "n_levels", "in", "factor_levels", ":", "sc", ".", "append", "(", "[", "np", ".", "ones", "(", "[", "n_levels", ",", "1", "]", ")", ",", "detrend", "(", "np", ".", "eye", "(", "n_levels", ")", ",", "type", "=", "'constant'", ")", "]", ")", "for", "this_effect", "in", "effect_picks", ":", "contrast_idx", "=", "_get_contrast_indices", "(", "(", "this_effect", "+", "1", ")", ",", "n_factors", ")", "c_", "=", "sc", "[", "0", "]", "[", "contrast_idx", "[", "(", "n_factors", "-", "1", ")", "]", "]", "for", "i_contrast", "in", "range", "(", "1", ",", "n_factors", ")", ":", "this_contrast", "=", "contrast_idx", "[", "(", "(", "n_factors", "-", "1", ")", "-", "i_contrast", ")", "]", "c_", "=", "np", ".", "kron", "(", "c_", ",", "sc", "[", "i_contrast", "]", "[", "this_contrast", "]", ")", "df1", "=", "np", ".", "linalg", ".", "matrix_rank", "(", "c_", ")", "df2", "=", "(", "df1", "*", "(", "n_subjects", "-", "1", ")", ")", "(", "yield", "(", "c_", ",", "df1", ",", "df2", ")", ")" ]
setup contrasts .
train
false
50,264
def membership():
    redirect(URL(c='admin', args=request.args, vars=request.vars))
[ "def", "membership", "(", ")", ":", "redirect", "(", "URL", "(", "c", "=", "'admin'", ",", "args", "=", "request", ".", "args", ",", "vars", "=", "request", ".", "vars", ")", ")" ]
defined in admin module .
train
false
50,265
def _handle_following_field(form_value, user, cc_content):
    if form_value:
        user.follow(cc_content)
    else:
        user.unfollow(cc_content)
[ "def", "_handle_following_field", "(", "form_value", ",", "user", ",", "cc_content", ")", ":", "if", "form_value", ":", "user", ".", "follow", "(", "cc_content", ")", "else", ":", "user", ".", "unfollow", "(", "cc_content", ")" ]
follow/unfollow thread for the user .
train
false
50,270
def _deprecateAttribute(proxy, name, version, message):
    _module = object.__getattribute__(proxy, '_module')
    attr = _DeprecatedAttribute(_module, name, version, message)
    _deprecatedAttributes = object.__getattribute__(proxy, '_deprecatedAttributes')
    _deprecatedAttributes[name] = attr
[ "def", "_deprecateAttribute", "(", "proxy", ",", "name", ",", "version", ",", "message", ")", ":", "_module", "=", "object", ".", "__getattribute__", "(", "proxy", ",", "'_module'", ")", "attr", "=", "_DeprecatedAttribute", "(", "_module", ",", "name", ",", "version", ",", "message", ")", "_deprecatedAttributes", "=", "object", ".", "__getattribute__", "(", "proxy", ",", "'_deprecatedAttributes'", ")", "_deprecatedAttributes", "[", "name", "]", "=", "attr" ]
mark a module-level attribute as being deprecated .
train
false
50,273
def generate_auth_sub_url(next, scopes, secure=False, session=True, request_url='https://www.google.com/accounts/AuthSubRequest', domain='default', scopes_param_prefix='auth_sub_scopes'):
    if isinstance(next, (str, unicode)):
        next = atom.url.parse_url(next)
    scopes_string = ' '.join([str(scope) for scope in scopes])
    next.params[scopes_param_prefix] = scopes_string
    if isinstance(request_url, (str, unicode)):
        request_url = atom.url.parse_url(request_url)
    request_url.params['next'] = str(next)
    request_url.params['scope'] = scopes_string
    if session:
        request_url.params['session'] = 1
    else:
        request_url.params['session'] = 0
    if secure:
        request_url.params['secure'] = 1
    else:
        request_url.params['secure'] = 0
    request_url.params['hd'] = domain
    return request_url
[ "def", "generate_auth_sub_url", "(", "next", ",", "scopes", ",", "secure", "=", "False", ",", "session", "=", "True", ",", "request_url", "=", "'https://www.google.com/accounts/AuthSubRequest'", ",", "domain", "=", "'default'", ",", "scopes_param_prefix", "=", "'auth_sub_scopes'", ")", ":", "if", "isinstance", "(", "next", ",", "(", "str", ",", "unicode", ")", ")", ":", "next", "=", "atom", ".", "url", ".", "parse_url", "(", "next", ")", "scopes_string", "=", "' '", ".", "join", "(", "[", "str", "(", "scope", ")", "for", "scope", "in", "scopes", "]", ")", "next", ".", "params", "[", "scopes_param_prefix", "]", "=", "scopes_string", "if", "isinstance", "(", "request_url", ",", "(", "str", ",", "unicode", ")", ")", ":", "request_url", "=", "atom", ".", "url", ".", "parse_url", "(", "request_url", ")", "request_url", ".", "params", "[", "'next'", "]", "=", "str", "(", "next", ")", "request_url", ".", "params", "[", "'scope'", "]", "=", "scopes_string", "if", "session", ":", "request_url", ".", "params", "[", "'session'", "]", "=", "1", "else", ":", "request_url", ".", "params", "[", "'session'", "]", "=", "0", "if", "secure", ":", "request_url", ".", "params", "[", "'secure'", "]", "=", "1", "else", ":", "request_url", ".", "params", "[", "'secure'", "]", "=", "0", "request_url", ".", "params", "[", "'hd'", "]", "=", "domain", "return", "request_url" ]
constructs a url string for requesting a multiscope authsub token .
train
false
50,274
def version_setting(parser, token):
    try:
        (tag, version_suffix) = token.split_contents()
    except:
        raise TemplateSyntaxError(('%s tag requires 1 argument' % token.contents.split()[0]))
    if (((version_suffix[0] == version_suffix[(-1)]) and (version_suffix[0] in ('"', "'"))) and (version_suffix.lower()[1:(-1)] not in VERSIONS)):
        raise TemplateSyntaxError(('%s tag received bad version_suffix %s' % (tag, version_suffix)))
    return VersionSettingNode(version_suffix)
[ "def", "version_setting", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "tag", ",", "version_suffix", ")", "=", "token", ".", "split_contents", "(", ")", "except", ":", "raise", "TemplateSyntaxError", "(", "(", "'%s tag requires 1 argument'", "%", "token", ".", "contents", ".", "split", "(", ")", "[", "0", "]", ")", ")", "if", "(", "(", "(", "version_suffix", "[", "0", "]", "==", "version_suffix", "[", "(", "-", "1", ")", "]", ")", "and", "(", "version_suffix", "[", "0", "]", "in", "(", "'\"'", ",", "\"'\"", ")", ")", ")", "and", "(", "version_suffix", ".", "lower", "(", ")", "[", "1", ":", "(", "-", "1", ")", "]", "not", "in", "VERSIONS", ")", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "'%s tag received bad version_suffix %s'", "%", "(", "tag", ",", "version_suffix", ")", ")", ")", "return", "VersionSettingNode", "(", "version_suffix", ")" ]
get information about a version setting .
train
false
50,277
@pytest.mark.network
def test_install_global_option_using_editable(script, tmpdir):
    url = 'hg+http://bitbucket.org/runeh/anyjson'
    result = script.pip('install', '--global-option=--version', '-e', ('%s@0.2.5#egg=anyjson' % local_checkout(url, tmpdir.join('cache'))), expect_stderr=True)
    assert ('Successfully installed anyjson' in result.stdout)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_install_global_option_using_editable", "(", "script", ",", "tmpdir", ")", ":", "url", "=", "'hg+http://bitbucket.org/runeh/anyjson'", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--global-option=--version'", ",", "'-e'", ",", "(", "'%s@0.2.5#egg=anyjson'", "%", "local_checkout", "(", "url", ",", "tmpdir", ".", "join", "(", "'cache'", ")", ")", ")", ",", "expect_stderr", "=", "True", ")", "assert", "(", "'Successfully installed anyjson'", "in", "result", ".", "stdout", ")" ]
test using global distutils options .
train
false
50,280
def do_scrape():
    sc = SlackClient(settings.SLACK_TOKEN)
    all_results = []
    for area in settings.AREAS:
        all_results += scrape_area(area)
    print '{}: Got {} results'.format(time.ctime(), len(all_results))
    for result in all_results:
        post_listing_to_slack(sc, result)
[ "def", "do_scrape", "(", ")", ":", "sc", "=", "SlackClient", "(", "settings", ".", "SLACK_TOKEN", ")", "all_results", "=", "[", "]", "for", "area", "in", "settings", ".", "AREAS", ":", "all_results", "+=", "scrape_area", "(", "area", ")", "print", "'{}: Got {} results'", ".", "format", "(", "time", ".", "ctime", "(", ")", ",", "len", "(", "all_results", ")", ")", "for", "result", "in", "all_results", ":", "post_listing_to_slack", "(", "sc", ",", "result", ")" ]
runs the craigslist scraper .
train
false
50,281
def is_s3_uri(uri):
    try:
        parse_s3_uri(uri)
        return True
    except ValueError:
        return False
[ "def", "is_s3_uri", "(", "uri", ")", ":", "try", ":", "parse_s3_uri", "(", "uri", ")", "return", "True", "except", "ValueError", ":", "return", "False" ]
return true if *uri* can be parsed into an s3 uri .
train
false
50,282
def debug_repr(obj):
    return DebugReprGenerator().repr(obj)
[ "def", "debug_repr", "(", "obj", ")", ":", "return", "DebugReprGenerator", "(", ")", ".", "repr", "(", "obj", ")" ]
creates a debug repr of an object as html unicode string .
train
false
50,284
def from_file(filepath, delimiter='', blanklines=False):
    data = []
    try:
        with open(filepath, 'r') as f:
            for line in f:
                if (blanklines and (line.strip('\n\r ') == '')):
                    continue
                data.append(line)
    except IOError:
        raise IOError('Error opening or reading file: {}'.format(filepath))
    return Base64(Join(delimiter, data))
[ "def", "from_file", "(", "filepath", ",", "delimiter", "=", "''", ",", "blanklines", "=", "False", ")", ":", "data", "=", "[", "]", "try", ":", "with", "open", "(", "filepath", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "(", "blanklines", "and", "(", "line", ".", "strip", "(", "'\\n\\r '", ")", "==", "''", ")", ")", ":", "continue", "data", ".", "append", "(", "line", ")", "except", "IOError", ":", "raise", "IOError", "(", "'Error opening or reading file: {}'", ".", "format", "(", "filepath", ")", ")", "return", "Base64", "(", "Join", "(", "delimiter", ",", "data", ")", ")" ]
imports userdata from a file .
train
true
50,285
def _findNewest():
    path = os.path.expanduser((('~/Library/Application Support/Skype/' + getUserName()) + '/main.db'))
    with contextlib.closing(sqlite3.connect(path).cursor()) as db:
        db.execute('SELECT skypename,fullname,displayname FROM Contacts WHERE skypename IN (SELECT identity FROM Conversations ORDER BY last_activity_timestamp DESC LIMIT 5)')
        return _listFriends(db.fetchall(), ': ', True, True)
[ "def", "_findNewest", "(", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "(", "(", "'~/Library/Application Support/Skype/'", "+", "getUserName", "(", ")", ")", "+", "'/main.db'", ")", ")", "with", "contextlib", ".", "closing", "(", "sqlite3", ".", "connect", "(", "path", ")", ".", "cursor", "(", ")", ")", "as", "db", ":", "db", ".", "execute", "(", "'SELECT skypename,fullname,displayname FROM Contacts WHERE skypename IN (SELECT identity FROM Conversations ORDER BY last_activity_timestamp DESC LIMIT 5)'", ")", "return", "_listFriends", "(", "db", ".", "fetchall", "(", ")", ",", "': '", ",", "True", ",", "True", ")" ]
get newest messages from skype database; returns a list of friend tuples .
train
false
50,286
def order_by_name(query, table, language=None, *extra_languages, **kwargs):
    name_attribute = kwargs.pop('name', 'name')
    if kwargs:
        raise ValueError(('Unexpected keyword arguments: %s' % kwargs.keys()))
    order_columns = []
    if (language is None):
        query = query.outerjoin(table.names_local)
        order_columns.append(func.lower(getattr(table.names_table, name_attribute)))
    else:
        extra_languages = ((language,) + extra_languages)
        for language in extra_languages:
            names_table = aliased(table.names_table)
            query = query.outerjoin(names_table)
            query = query.filter((names_table.foreign_id == table.id))
            query = query.filter((names_table.local_language_id == language.id))
            order_columns.append(func.lower(getattr(names_table, name_attribute)))
    order_columns.append(table.identifier)
    query = query.order_by(coalesce(*order_columns))
    return query
[ "def", "order_by_name", "(", "query", ",", "table", ",", "language", "=", "None", ",", "*", "extra_languages", ",", "**", "kwargs", ")", ":", "name_attribute", "=", "kwargs", ".", "pop", "(", "'name'", ",", "'name'", ")", "if", "kwargs", ":", "raise", "ValueError", "(", "(", "'Unexpected keyword arguments: %s'", "%", "kwargs", ".", "keys", "(", ")", ")", ")", "order_columns", "=", "[", "]", "if", "(", "language", "is", "None", ")", ":", "query", "=", "query", ".", "outerjoin", "(", "table", ".", "names_local", ")", "order_columns", ".", "append", "(", "func", ".", "lower", "(", "getattr", "(", "table", ".", "names_table", ",", "name_attribute", ")", ")", ")", "else", ":", "extra_languages", "=", "(", "(", "language", ",", ")", "+", "extra_languages", ")", "for", "language", "in", "extra_languages", ":", "names_table", "=", "aliased", "(", "table", ".", "names_table", ")", "query", "=", "query", ".", "outerjoin", "(", "names_table", ")", "query", "=", "query", ".", "filter", "(", "(", "names_table", ".", "foreign_id", "==", "table", ".", "id", ")", ")", "query", "=", "query", ".", "filter", "(", "(", "names_table", ".", "local_language_id", "==", "language", ".", "id", ")", ")", "order_columns", ".", "append", "(", "func", ".", "lower", "(", "getattr", "(", "names_table", ",", "name_attribute", ")", ")", ")", "order_columns", ".", "append", "(", "table", ".", "identifier", ")", "query", "=", "query", ".", "order_by", "(", "coalesce", "(", "*", "order_columns", ")", ")", "return", "query" ]
order a query by name .
train
false
50,288
def GetFileLength(fh):
    pos = fh.tell()
    fh.seek(0, 2)
    length = fh.tell()
    fh.seek(pos, 0)
    return length
[ "def", "GetFileLength", "(", "fh", ")", ":", "pos", "=", "fh", ".", "tell", "(", ")", "fh", ".", "seek", "(", "0", ",", "2", ")", "length", "=", "fh", ".", "tell", "(", ")", "fh", ".", "seek", "(", "pos", ",", "0", ")", "return", "length" ]
returns the length of the file represented by fh .
train
false
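A usage sketch (editor's illustration) showing that the read position is restored; io.BytesIO supports the same tell/seek protocol as a real file:

import io
fh = io.BytesIO(b'hello world')
fh.seek(3)
assert GetFileLength(fh) == 11
assert fh.tell() == 3   # position unchanged by the length query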
50,289
def host_to_device(dst, src, size, stream=0):
    varargs = []
    if stream:
        assert isinstance(stream, Stream)
        fn = driver.cuMemcpyHtoDAsync
        varargs.append(stream.handle)
    else:
        fn = driver.cuMemcpyHtoD
    fn(device_pointer(dst), host_pointer(src), size, *varargs)
[ "def", "host_to_device", "(", "dst", ",", "src", ",", "size", ",", "stream", "=", "0", ")", ":", "varargs", "=", "[", "]", "if", "stream", ":", "assert", "isinstance", "(", "stream", ",", "Stream", ")", "fn", "=", "driver", ".", "cuMemcpyHtoDAsync", "varargs", ".", "append", "(", "stream", ".", "handle", ")", "else", ":", "fn", "=", "driver", ".", "cuMemcpyHtoD", "fn", "(", "device_pointer", "(", "dst", ")", ",", "host_pointer", "(", "src", ")", ",", "size", ",", "*", "varargs", ")" ]
note: the underlying data pointer from the host buffer is used and should not be changed until the operation (which can be asynchronous) completes .
train
false
50,290
def project_has_valid_admins(project, exclude_user=None):
    admin_memberships = project.memberships.filter(is_admin=True, user__is_active=True)
    if exclude_user:
        admin_memberships = admin_memberships.exclude(user=exclude_user)
    return (admin_memberships.count() > 0)
[ "def", "project_has_valid_admins", "(", "project", ",", "exclude_user", "=", "None", ")", ":", "admin_memberships", "=", "project", ".", "memberships", ".", "filter", "(", "is_admin", "=", "True", ",", "user__is_active", "=", "True", ")", "if", "exclude_user", ":", "admin_memberships", "=", "admin_memberships", ".", "exclude", "(", "user", "=", "exclude_user", ")", "return", "(", "admin_memberships", ".", "count", "(", ")", ">", "0", ")" ]
checks if the project has any owner membership with a user different than the specified .
train
false
50,291
def cycles(seq):
    n = len(seq)
    cycled_seq = cycle(seq)
    for x in seq:
        (yield tuple(islice(cycled_seq, n)))
        next(cycled_seq)
[ "def", "cycles", "(", "seq", ")", ":", "n", "=", "len", "(", "seq", ")", "cycled_seq", "=", "cycle", "(", "seq", ")", "for", "x", "in", "seq", ":", "(", "yield", "tuple", "(", "islice", "(", "cycled_seq", ",", "n", ")", ")", ")", "next", "(", "cycled_seq", ")" ]
yields cyclic permutations of the given sequence .
train
false
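a runnable illustration of the generator:

from itertools import cycle, islice   # required by cycles()
list(cycles('abc'))
# -> [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')]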
50,292
@verbose def _download_all_example_data(verbose=True): from . import sample, testing, misc, spm_face, somato, brainstorm, megsim, eegbci, multimodal sample.data_path() testing.data_path() misc.data_path() spm_face.data_path() somato.data_path() multimodal.data_path() sys.argv += ['--accept-brainstorm-license'] try: brainstorm.bst_raw.data_path() brainstorm.bst_auditory.data_path() brainstorm.bst_phantom_elekta.data_path() brainstorm.bst_phantom_ctf.data_path() finally: sys.argv.pop((-1)) megsim.load_data(condition='visual', data_format='single-trial', data_type='simulation', update_path=True) megsim.load_data(condition='visual', data_format='raw', data_type='experimental', update_path=True) megsim.load_data(condition='visual', data_format='evoked', data_type='simulation', update_path=True) eegbci.load_data(1, [6, 10, 14], update_path=True) sys.argv += ['--accept-hcpmmp-license'] try: fetch_hcp_mmp_parcellation() finally: sys.argv.pop((-1))
[ "@", "verbose", "def", "_download_all_example_data", "(", "verbose", "=", "True", ")", ":", "from", ".", "import", "sample", ",", "testing", ",", "misc", ",", "spm_face", ",", "somato", ",", "brainstorm", ",", "megsim", ",", "eegbci", ",", "multimodal", "sample", ".", "data_path", "(", ")", "testing", ".", "data_path", "(", ")", "misc", ".", "data_path", "(", ")", "spm_face", ".", "data_path", "(", ")", "somato", ".", "data_path", "(", ")", "multimodal", ".", "data_path", "(", ")", "sys", ".", "argv", "+=", "[", "'--accept-brainstorm-license'", "]", "try", ":", "brainstorm", ".", "bst_raw", ".", "data_path", "(", ")", "brainstorm", ".", "bst_auditory", ".", "data_path", "(", ")", "brainstorm", ".", "bst_phantom_elekta", ".", "data_path", "(", ")", "brainstorm", ".", "bst_phantom_ctf", ".", "data_path", "(", ")", "finally", ":", "sys", ".", "argv", ".", "pop", "(", "(", "-", "1", ")", ")", "megsim", ".", "load_data", "(", "condition", "=", "'visual'", ",", "data_format", "=", "'single-trial'", ",", "data_type", "=", "'simulation'", ",", "update_path", "=", "True", ")", "megsim", ".", "load_data", "(", "condition", "=", "'visual'", ",", "data_format", "=", "'raw'", ",", "data_type", "=", "'experimental'", ",", "update_path", "=", "True", ")", "megsim", ".", "load_data", "(", "condition", "=", "'visual'", ",", "data_format", "=", "'evoked'", ",", "data_type", "=", "'simulation'", ",", "update_path", "=", "True", ")", "eegbci", ".", "load_data", "(", "1", ",", "[", "6", ",", "10", ",", "14", "]", ",", "update_path", "=", "True", ")", "sys", ".", "argv", "+=", "[", "'--accept-hcpmmp-license'", "]", "try", ":", "fetch_hcp_mmp_parcellation", "(", ")", "finally", ":", "sys", ".", "argv", ".", "pop", "(", "(", "-", "1", ")", ")" ]
helper to download all datasets used in examples and tutorials .
train
false
50,294
def get_builtin_gnu_translations(languages=None): import gettext return gettext.translation('wtforms', messages_path(), languages)
[ "def", "get_builtin_gnu_translations", "(", "languages", "=", "None", ")", ":", "import", "gettext", "return", "gettext", ".", "translation", "(", "'wtforms'", ",", "messages_path", "(", ")", ",", "languages", ")" ]
get a gettext translation object for the built-in wtforms message catalogs .
train
false
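a hedged usage sketch; it assumes a german catalog is installed where messages_path() points, which may not hold on every system:

translations = get_builtin_gnu_translations(['de'])
translations.gettext('Not a valid choice')   # -> the german catalog entry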
50,295
@step(((STEP_PREFIX + '([A-Z][a-z0-9_ ]*) with ([a-z]+) "([^"]*)"') + ' is linked to ([A-Z][a-z0-9_ ]*) in the database:')) def create_m2m_links(step, rel_model_name, rel_key, rel_value, relation_name): lookup = {rel_key: rel_value} rel_model = get_model(rel_model_name).objects.get(**lookup) relation = None for m2m in rel_model._meta.many_to_many: if (relation_name in (m2m.name, m2m.verbose_name)): relation = getattr(rel_model, m2m.name) break if (not relation): try: relation = getattr(rel_model, relation_name) except AttributeError: pass assert relation, ("%s does not have a many-to-many relation named '%s'" % (rel_model._meta.verbose_name.capitalize(), relation_name)) m2m_model = relation.model for hash_ in step.hashes: relation.add(m2m_model.objects.get(**hash_))
[ "@", "step", "(", "(", "(", "STEP_PREFIX", "+", "'([A-Z][a-z0-9_ ]*) with ([a-z]+) \"([^\"]*)\"'", ")", "+", "' is linked to ([A-Z][a-z0-9_ ]*) in the database:'", ")", ")", "def", "create_m2m_links", "(", "step", ",", "rel_model_name", ",", "rel_key", ",", "rel_value", ",", "relation_name", ")", ":", "lookup", "=", "{", "rel_key", ":", "rel_value", "}", "rel_model", "=", "get_model", "(", "rel_model_name", ")", ".", "objects", ".", "get", "(", "**", "lookup", ")", "relation", "=", "None", "for", "m2m", "in", "rel_model", ".", "_meta", ".", "many_to_many", ":", "if", "(", "relation_name", "in", "(", "m2m", ".", "name", ",", "m2m", ".", "verbose_name", ")", ")", ":", "relation", "=", "getattr", "(", "rel_model", ",", "m2m", ".", "name", ")", "break", "if", "(", "not", "relation", ")", ":", "try", ":", "relation", "=", "getattr", "(", "rel_model", ",", "relation_name", ")", "except", "AttributeError", ":", "pass", "assert", "relation", ",", "(", "\"%s does not have a many-to-many relation named '%s'\"", "%", "(", "rel_model", ".", "_meta", ".", "verbose_name", ".", "capitalize", "(", ")", ",", "relation_name", ")", ")", "m2m_model", "=", "relation", ".", "model", "for", "hash_", "in", "step", ".", "hashes", ":", "relation", ".", "add", "(", "m2m_model", ".", "objects", ".", "get", "(", "**", "hash_", ")", ")" ]
step that links a model instance to related objects through a many-to-many relation , e.g.: and article with name "guidelines" is linked to tags in the database: | name | | coding | | style | .
train
true
50,296
def create_dataset(dataset_name, project=None): bigquery_client = bigquery.Client(project=project) dataset = bigquery_client.dataset(dataset_name) dataset.create() print 'Created dataset {}.'.format(dataset_name)
[ "def", "create_dataset", "(", "dataset_name", ",", "project", "=", "None", ")", ":", "bigquery_client", "=", "bigquery", ".", "Client", "(", "project", "=", "project", ")", "dataset", "=", "bigquery_client", ".", "dataset", "(", "dataset_name", ")", "dataset", ".", "create", "(", ")", "print", "'Created dataset {}.'", ".", "format", "(", "dataset_name", ")" ]
create a dataset .
train
false
50,298
def SetNamespace(proto, namespace): if (not namespace): proto.clear_name_space() else: proto.set_name_space(namespace)
[ "def", "SetNamespace", "(", "proto", ",", "namespace", ")", ":", "if", "(", "not", "namespace", ")", ":", "proto", ".", "clear_name_space", "(", ")", "else", ":", "proto", ".", "set_name_space", "(", "namespace", ")" ]
sets the namespace for a protocol buffer or clears the field .
train
false
50,299
def ireplace(text, old, new, count=None): pattern = re.compile(re.escape(old), re.IGNORECASE) if count: return pattern.sub(new, text, count=count) else: return pattern.sub(new, text)
[ "def", "ireplace", "(", "text", ",", "old", ",", "new", ",", "count", "=", "None", ")", ":", "pattern", "=", "re", ".", "compile", "(", "re", ".", "escape", "(", "old", ")", ",", "re", ".", "IGNORECASE", ")", "if", "count", ":", "return", "pattern", ".", "sub", "(", "new", ",", "text", ",", "count", "=", "count", ")", "else", ":", "return", "pattern", ".", "sub", "(", "new", ",", "text", ")" ]
a case-insensitive replace() clone .
train
false
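a runnable illustration:

import re   # ireplace() depends on the re module
ireplace('Hello World, hello world', 'world', 'there')
# -> 'Hello there, hello there'   (matches regardless of case)
ireplace('aAaA', 'a', 'b', count=2)
# -> 'bbaA'   (only the first two case-insensitive matches replaced)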
50,300
def _grid_from_X(X, percentiles=(0.05, 0.95), grid_resolution=100): if (len(percentiles) != 2): raise ValueError('percentile must be tuple of len 2') if (not all(((0.0 <= x <= 1.0) for x in percentiles))): raise ValueError('percentile values must be in [0, 1]') axes = [] for col in range(X.shape[1]): uniques = np.unique(X[:, col]) if (uniques.shape[0] < grid_resolution): axis = uniques else: emp_percentiles = mquantiles(X, prob=percentiles, axis=0) axis = np.linspace(emp_percentiles[(0, col)], emp_percentiles[(1, col)], num=grid_resolution, endpoint=True) axes.append(axis) return (cartesian(axes), axes)
[ "def", "_grid_from_X", "(", "X", ",", "percentiles", "=", "(", "0.05", ",", "0.95", ")", ",", "grid_resolution", "=", "100", ")", ":", "if", "(", "len", "(", "percentiles", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "'percentile must be tuple of len 2'", ")", "if", "(", "not", "all", "(", "(", "(", "0.0", "<=", "x", "<=", "1.0", ")", "for", "x", "in", "percentiles", ")", ")", ")", ":", "raise", "ValueError", "(", "'percentile values must be in [0, 1]'", ")", "axes", "=", "[", "]", "for", "col", "in", "range", "(", "X", ".", "shape", "[", "1", "]", ")", ":", "uniques", "=", "np", ".", "unique", "(", "X", "[", ":", ",", "col", "]", ")", "if", "(", "uniques", ".", "shape", "[", "0", "]", "<", "grid_resolution", ")", ":", "axis", "=", "uniques", "else", ":", "emp_percentiles", "=", "mquantiles", "(", "X", ",", "prob", "=", "percentiles", ",", "axis", "=", "0", ")", "axis", "=", "np", ".", "linspace", "(", "emp_percentiles", "[", "(", "0", ",", "col", ")", "]", ",", "emp_percentiles", "[", "(", "1", ",", "col", ")", "]", ",", "num", "=", "grid_resolution", ",", "endpoint", "=", "True", ")", "axes", ".", "append", "(", "axis", ")", "return", "(", "cartesian", "(", "axes", ")", ",", "axes", ")" ]
generate a grid of points based on the percentiles of x .
train
false
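a small sketch of the two branches, assuming numpy plus the mquantiles/cartesian helpers the function relies on are available:

import numpy as np
X = np.array([[0.0, 10.0], [1.0, 20.0], [2.0, 30.0]])
grid, axes = _grid_from_X(X, grid_resolution=5)
# each column has only 3 unique values (< 5), so each axis is just the uniques
# and grid is their cartesian product, shape (9, 2); with grid_resolution <= 3
# the linspace-between-percentiles branch would be used instead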
50,301
def _bytes2int(LSB, MSB): return int(((256 * int(MSB)) | int(LSB)))
[ "def", "_bytes2int", "(", "LSB", ",", "MSB", ")", ":", "return", "int", "(", "(", "(", "256", "*", "int", "(", "MSB", ")", ")", "|", "int", "(", "LSB", ")", ")", ")" ]
convert two 8-bit bytes (lsb first) to one 16-bit integer .
train
false
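a one-line check of the byte order:

assert _bytes2int(0x34, 0x12) == 0x1234   # MSB shifted into the high byte: (256 * MSB) | LSB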
50,306
@pytest.mark.parametrize('value', [[u'foo\\bar'], [u' DCTB foo\\bar\n'], [u'fo\xf6\r\nb\xe4r'], [u'fo\xf6\\r\\nb\xe4r'], [u'fo\xf6\r\n\\r\\nb\xe4r', u'b\xe4r\r\n\\r\\nb\xe4z'], [u'nfo\xf6\nb\xe4r'], [u'nfo\xf6\\nb\xe4r'], [u'fo\xf6\n\\nb\xe4r', u'b\xe4r\n\\nb\xe4z']]) def test_form_multistringformfield(value): def test_form_factory(nplurals): class TestForm(Form, ): value = MultiStringFormField(nplurals=nplurals) return TestForm data = {('value_%d' % i): val for (i, val) in enumerate(value)} form_class = test_form_factory(nplurals=len(value)) form = form_class(data=data) assert form.is_valid() assert (form.cleaned_data == {'value': value})
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'value'", ",", "[", "[", "u'foo\\\\bar'", "]", ",", "[", "u' DCTB foo\\\\bar\\n'", "]", ",", "[", "u'fo\\xf6\\r\\nb\\xe4r'", "]", ",", "[", "u'fo\\xf6\\\\r\\\\nb\\xe4r'", "]", ",", "[", "u'fo\\xf6\\r\\n\\\\r\\\\nb\\xe4r'", ",", "u'b\\xe4r\\r\\n\\\\r\\\\nb\\xe4z'", "]", ",", "[", "u'nfo\\xf6\\nb\\xe4r'", "]", ",", "[", "u'nfo\\xf6\\\\nb\\xe4r'", "]", ",", "[", "u'fo\\xf6\\n\\\\nb\\xe4r'", ",", "u'b\\xe4r\\n\\\\nb\\xe4z'", "]", "]", ")", "def", "test_form_multistringformfield", "(", "value", ")", ":", "def", "test_form_factory", "(", "nplurals", ")", ":", "class", "TestForm", "(", "Form", ",", ")", ":", "value", "=", "MultiStringFormField", "(", "nplurals", "=", "nplurals", ")", "return", "TestForm", "data", "=", "{", "(", "'value_%d'", "%", "i", ")", ":", "val", "for", "(", "i", ",", "val", ")", "in", "enumerate", "(", "value", ")", "}", "form_class", "=", "test_form_factory", "(", "nplurals", "=", "len", "(", "value", ")", ")", "form", "=", "form_class", "(", "data", "=", "data", ")", "assert", "form", ".", "is_valid", "(", ")", "assert", "(", "form", ".", "cleaned_data", "==", "{", "'value'", ":", "value", "}", ")" ]
tests multistringformfield's value compression in a form .
train
false
50,307
def is_class_instance(obj): try: cls = obj.__class__ except AttributeError: return False else: return ((cls != type) and (not issubclass(cls, NOT_CLASS_TYPES)))
[ "def", "is_class_instance", "(", "obj", ")", ":", "try", ":", "cls", "=", "obj", ".", "__class__", "except", "AttributeError", ":", "return", "False", "else", ":", "return", "(", "(", "cls", "!=", "type", ")", "and", "(", "not", "issubclass", "(", "cls", ",", "NOT_CLASS_TYPES", ")", ")", ")" ]
like the inspect module's predicates: returns whether obj is an instance of a class , excluding builtin non-class types .
train
false
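a short sketch; the exact set of excluded builtins depends on the module-level NOT_CLASS_TYPES constant:

class Foo(object):
    pass

assert is_class_instance(Foo())       # instance of a user-defined class
assert not is_class_instance(Foo)     # the class itself: obj.__class__ is type
# values like 42 or 'x' are rejected too, assuming NOT_CLASS_TYPES covers builtins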
50,309
def insert_into_module(func, module_name, base_name, prefix): func.__module__ = module_name module = sys.modules[module_name] for i in range(1000): func.__name__ = str((u'%s_%s_%s' % (prefix, base_name, i))) if (not hasattr(module, func.__name__)): break setattr(module, func.__name__, func)
[ "def", "insert_into_module", "(", "func", ",", "module_name", ",", "base_name", ",", "prefix", ")", ":", "func", ".", "__module__", "=", "module_name", "module", "=", "sys", ".", "modules", "[", "module_name", "]", "for", "i", "in", "range", "(", "1000", ")", ":", "func", ".", "__name__", "=", "str", "(", "(", "u'%s_%s_%s'", "%", "(", "prefix", ",", "base_name", ",", "i", ")", ")", ")", "if", "(", "not", "hasattr", "(", "module", ",", "func", ".", "__name__", ")", ")", ":", "break", "setattr", "(", "module", ",", "func", ".", "__name__", ",", "func", ")" ]
add a function into a module .
train
false
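a runnable sketch; the suffix counter picks the first free name on the target module:

def helper():
    return 42

insert_into_module(helper, __name__, 'answer', 'generated')
# the function is now reachable as generated_answer_0 on this module
# (or _1, _2, ... if earlier names were already taken)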
50,310
def list_loadbalancers(call=None): if (call == 'action'): raise SaltCloudSystemExit('The avail_images function must be called with -f or --function, or with the --list-loadbalancers option') ret = {} conn = get_conn() datacenter = get_datacenter(conn) for item in conn.list_loadbalancers(datacenter['id'])['items']: lb = {'id': item['id']} lb.update(item['properties']) ret[lb['name']] = lb return ret
[ "def", "list_loadbalancers", "(", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The avail_images function must be called with -f or --function, or with the --list-loadbalancers option'", ")", "ret", "=", "{", "}", "conn", "=", "get_conn", "(", ")", "datacenter", "=", "get_datacenter", "(", "conn", ")", "for", "item", "in", "conn", ".", "list_loadbalancers", "(", "datacenter", "[", "'id'", "]", ")", "[", "'items'", "]", ":", "lb", "=", "{", "'id'", ":", "item", "[", "'id'", "]", "}", "lb", ".", "update", "(", "item", "[", "'properties'", "]", ")", "ret", "[", "lb", "[", "'name'", "]", "]", "=", "lb", "return", "ret" ]
return a list of the loadbalancers that are on the provider .
train
true
50,316
def test_cache_get_nonexistent_data(config_stub, tmpdir): config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': False}} disk_cache = cache.DiskCache(str(tmpdir)) preload_cache(disk_cache, 'https://qutebrowser.org') assert (disk_cache.data(QUrl('http://qutebrowser.org')) is None)
[ "def", "test_cache_get_nonexistent_data", "(", "config_stub", ",", "tmpdir", ")", ":", "config_stub", ".", "data", "=", "{", "'storage'", ":", "{", "'cache-size'", ":", "1024", "}", ",", "'general'", ":", "{", "'private-browsing'", ":", "False", "}", "}", "disk_cache", "=", "cache", ".", "DiskCache", "(", "str", "(", "tmpdir", ")", ")", "preload_cache", "(", "disk_cache", ",", "'https://qutebrowser.org'", ")", "assert", "(", "disk_cache", ".", "data", "(", "QUrl", "(", "'http://qutebrowser.org'", ")", ")", "is", "None", ")" ]
test querying some data that was never inserted .
train
false
50,319
def disaggregate(statistic, data): new_statistic = {} for (loc, details) in data.items(): new_statistic[loc] = {} prev_value = 0 for key in sorted(details.keys()): new_value = details[key][0] if (prev_value != new_value): if (new_value is None): if prev_value: rejected_data[loc] = [key, 'None value', statistic, new_value, prev_value] continue else: new_value = 0 if ((new_value < prev_value) and (statistic not in ('Suspected cases', 'Probable cases'))): suspect_data[loc] = [key, 'Value went down', statistic, new_value, prev_value] details[key][0] = (new_value - prev_value) prev_value = new_value new_statistic[loc][key] = details[key] return new_statistic
[ "def", "disaggregate", "(", "statistic", ",", "data", ")", ":", "new_statistic", "=", "{", "}", "for", "(", "loc", ",", "details", ")", "in", "data", ".", "items", "(", ")", ":", "new_statistic", "[", "loc", "]", "=", "{", "}", "prev_value", "=", "0", "for", "key", "in", "sorted", "(", "details", ".", "keys", "(", ")", ")", ":", "new_value", "=", "details", "[", "key", "]", "[", "0", "]", "if", "(", "prev_value", "!=", "new_value", ")", ":", "if", "(", "new_value", "is", "None", ")", ":", "if", "prev_value", ":", "rejected_data", "[", "loc", "]", "=", "[", "key", ",", "'None value'", ",", "statistic", ",", "new_value", ",", "prev_value", "]", "continue", "else", ":", "new_value", "=", "0", "if", "(", "(", "new_value", "<", "prev_value", ")", "and", "(", "statistic", "not", "in", "(", "'Suspected cases'", ",", "'Probable cases'", ")", ")", ")", ":", "suspect_data", "[", "loc", "]", "=", "[", "key", ",", "'Value went down'", ",", "statistic", ",", "new_value", ",", "prev_value", "]", "details", "[", "key", "]", "[", "0", "]", "=", "(", "new_value", "-", "prev_value", ")", "prev_value", "=", "new_value", "new_statistic", "[", "loc", "]", "[", "key", "]", "=", "details", "[", "key", "]", "return", "new_statistic" ]
disaggregate the running total for each statistic into per-period values .
train
false
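a hedged sketch with strictly increasing running totals; note the function records anomalies into module-level rejected_data and suspect_data dicts, which must exist:

rejected_data, suspect_data = {}, {}   # sinks the function writes anomalies into
data = {'Monrovia': {'2014-09-01': [10], '2014-09-08': [25]}}
result = disaggregate('Deaths', data)
# the week-two running total 25 becomes the per-week delta 15:
# result['Monrovia'] -> {'2014-09-01': [10], '2014-09-08': [15]}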
50,321
def capture_payment(is_sandbox=False, sanbox_response=None): controller = frappe.get_doc(u'Razorpay Settings') for doc in frappe.get_all(u'Integration Request', filters={u'status': u'Authorized', u'integration_request_service': u'Razorpay'}, fields=[u'name', u'data']): try: if is_sandbox: resp = sanbox_response else: data = json.loads(doc.data) settings = controller.get_settings(data) resp = controller.post_request(u'https://api.razorpay.com/v1/payments/{0}/capture'.format(data.get(u'razorpay_payment_id')), auth=(settings.api_key, settings.api_secret), data={u'amount': data.get(u'amount')}) if (resp.get(u'status') == u'captured'): frappe.db.set_value(u'Integration Request', doc.name, u'status', u'Completed') except Exception: doc = frappe.get_doc(u'Integration Request', doc.name) doc.status = u'Failed' doc.error = frappe.get_traceback() frappe.log_error(doc.error, u'{0} Failed'.format(doc.name))
[ "def", "capture_payment", "(", "is_sandbox", "=", "False", ",", "sanbox_response", "=", "None", ")", ":", "controller", "=", "frappe", ".", "get_doc", "(", "u'Razorpay Settings'", ")", "for", "doc", "in", "frappe", ".", "get_all", "(", "u'Integration Request'", ",", "filters", "=", "{", "u'status'", ":", "u'Authorized'", ",", "u'integration_request_service'", ":", "u'Razorpay'", "}", ",", "fields", "=", "[", "u'name'", ",", "u'data'", "]", ")", ":", "try", ":", "if", "is_sandbox", ":", "resp", "=", "sanbox_response", "else", ":", "data", "=", "json", ".", "loads", "(", "doc", ".", "data", ")", "settings", "=", "controller", ".", "get_settings", "(", "data", ")", "resp", "=", "controller", ".", "post_request", "(", "u'https://api.razorpay.com/v1/payments/{0}/capture'", ".", "format", "(", "data", ".", "get", "(", "u'razorpay_payment_id'", ")", ")", ",", "auth", "=", "(", "settings", ".", "api_key", ",", "settings", ".", "api_secret", ")", ",", "data", "=", "{", "u'amount'", ":", "data", ".", "get", "(", "u'amount'", ")", "}", ")", "if", "(", "resp", ".", "get", "(", "u'status'", ")", "==", "u'captured'", ")", ":", "frappe", ".", "db", ".", "set_value", "(", "u'Integration Request'", ",", "doc", ".", "name", ",", "u'status'", ",", "u'Completed'", ")", "except", "Exception", ":", "doc", "=", "frappe", ".", "get_doc", "(", "u'Integration Request'", ",", "doc", ".", "name", ")", "doc", ".", "status", "=", "u'Failed'", "doc", ".", "error", "=", "frappe", ".", "get_traceback", "(", ")", "frappe", ".", "log_error", "(", "doc", ".", "error", ",", "u'{0} Failed'", ".", "format", "(", "doc", ".", "name", ")", ")" ]
captures the authorized payment , verifying the purchase as complete by the merchant .
train
false
50,322
def parse_hybi00_frames(buf): start = buf.find('\x00') tail = 0 frames = [] while (start != (-1)): end = buf.find('\xff', (start + 1)) if (end == (-1)): break else: frame = buf[(start + 1):end] frames.append((NORMAL, frame)) tail = (end + 1) start = buf.find('\x00', (end + 1)) buf = buf[tail:] return (frames, buf)
[ "def", "parse_hybi00_frames", "(", "buf", ")", ":", "start", "=", "buf", ".", "find", "(", "'\\x00'", ")", "tail", "=", "0", "frames", "=", "[", "]", "while", "(", "start", "!=", "(", "-", "1", ")", ")", ":", "end", "=", "buf", ".", "find", "(", "'\\xff'", ",", "(", "start", "+", "1", ")", ")", "if", "(", "end", "==", "(", "-", "1", ")", ")", ":", "break", "else", ":", "frame", "=", "buf", "[", "(", "start", "+", "1", ")", ":", "end", "]", "frames", ".", "append", "(", "(", "NORMAL", ",", "frame", ")", ")", "tail", "=", "(", "end", "+", "1", ")", "start", "=", "buf", ".", "find", "(", "'\\x00'", ",", "(", "end", "+", "1", ")", ")", "buf", "=", "buf", "[", "tail", ":", "]", "return", "(", "frames", ",", "buf", ")" ]
parse hybi-00 frames .
train
false
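a runnable illustration; NORMAL is the module-level frame-type constant the function tags frames with:

frames, rest = parse_hybi00_frames('\x00hello\xff\x00wor')
# -> frames == [(NORMAL, 'hello')], rest == '\x00wor'
# the complete \x00...\xff frame is extracted; the unterminated tail
# is kept in the buffer for the next read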
50,323
def generate_patch_base(x, y, base=0.0): x = x.values y = y.values y0 = np.insert(y, 0, base) y0 = np.append(y0, base) x0 = np.insert(x, 0, x[0]) x0 = np.append(x0, x0[(-1)]) return (x0, y0)
[ "def", "generate_patch_base", "(", "x", ",", "y", ",", "base", "=", "0.0", ")", ":", "x", "=", "x", ".", "values", "y", "=", "y", ".", "values", "y0", "=", "np", ".", "insert", "(", "y", ",", "0", ",", "base", ")", "y0", "=", "np", ".", "append", "(", "y0", ",", "base", ")", "x0", "=", "np", ".", "insert", "(", "x", ",", "0", ",", "x", "[", "0", "]", ")", "x0", "=", "np", ".", "append", "(", "x0", ",", "x0", "[", "(", "-", "1", ")", "]", ")", "return", "(", "x0", ",", "y0", ")" ]
adds base to the start and end of y and duplicates the endpoints of x , producing a closed patch outline .
train
false
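a runnable illustration with pandas Series inputs:

import pandas as pd
x0, y0 = generate_patch_base(pd.Series([1, 2, 3]), pd.Series([4, 5, 6]))
# x0 -> [1, 1, 2, 3, 3]   (endpoints duplicated)
# y0 -> [0, 4, 5, 6, 0]   (dropped to the base on both ends, closing the patch)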
50,324
def get_public_ip(): return file_io.read(constants.PUBLIC_IP_LOC).rstrip()
[ "def", "get_public_ip", "(", ")", ":", "return", "file_io", ".", "read", "(", "constants", ".", "PUBLIC_IP_LOC", ")", ".", "rstrip", "(", ")" ]
get the public ip of the current machine .
train
false
50,325
def usb_devices(attrs=None, where=None): return _osquery_cmd(table='usb_devices', attrs=attrs, where=where)
[ "def", "usb_devices", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'usb_devices'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return usb_devices information from osquery .
train
false
50,326
def _whatsnd(data): hdr = data[:512] fakefile = StringIO(hdr) for testfn in sndhdr.tests: res = testfn(hdr, fakefile) if (res is not None): return _sndhdr_MIMEmap.get(res[0]) return None
[ "def", "_whatsnd", "(", "data", ")", ":", "hdr", "=", "data", "[", ":", "512", "]", "fakefile", "=", "StringIO", "(", "hdr", ")", "for", "testfn", "in", "sndhdr", ".", "tests", ":", "res", "=", "testfn", "(", "hdr", ",", "fakefile", ")", "if", "(", "res", "is", "not", "None", ")", ":", "return", "_sndhdr_MIMEmap", ".", "get", "(", "res", "[", "0", "]", ")", "return", "None" ]
try to identify a sound file type .
train
true
50,327
def attr_call(): return __salt__.grains.items()
[ "def", "attr_call", "(", ")", ":", "return", "__salt__", ".", "grains", ".", "items", "(", ")" ]
call grains.items() on __salt__ via attribute-style access .
train
false
50,329
def survey_getAllQuestionsForTemplate(template_id): s3db = current.s3db sectable = s3db.survey_section q_ltable = s3db.survey_question_list qsntable = s3db.survey_question query = (((q_ltable.template_id == template_id) & (q_ltable.section_id == sectable.id)) & (q_ltable.question_id == qsntable.id)) rows = current.db(query).select(qsntable.id, qsntable.code, qsntable.name, qsntable.type, sectable.name, q_ltable.posn, orderby=q_ltable.posn) questions = [] for row in rows: question = {} question_row = row.survey_question question['qstn_id'] = question_row.id question['code'] = question_row.code question['name'] = s3db.survey_qstn_name_represent(question_row.name) question['type'] = question_row.type question['posn'] = row.survey_question_list.posn question['section'] = row.survey_section.name questions.append(question) return questions
[ "def", "survey_getAllQuestionsForTemplate", "(", "template_id", ")", ":", "s3db", "=", "current", ".", "s3db", "sectable", "=", "s3db", ".", "survey_section", "q_ltable", "=", "s3db", ".", "survey_question_list", "qsntable", "=", "s3db", ".", "survey_question", "query", "=", "(", "(", "(", "q_ltable", ".", "template_id", "==", "template_id", ")", "&", "(", "q_ltable", ".", "section_id", "==", "sectable", ".", "id", ")", ")", "&", "(", "q_ltable", ".", "question_id", "==", "qsntable", ".", "id", ")", ")", "rows", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "qsntable", ".", "id", ",", "qsntable", ".", "code", ",", "qsntable", ".", "name", ",", "qsntable", ".", "type", ",", "sectable", ".", "name", ",", "q_ltable", ".", "posn", ",", "orderby", "=", "q_ltable", ".", "posn", ")", "questions", "=", "[", "]", "for", "row", "in", "rows", ":", "question", "=", "{", "}", "question_row", "=", "row", ".", "survey_question", "question", "[", "'qstn_id'", "]", "=", "question_row", ".", "id", "question", "[", "'code'", "]", "=", "question_row", ".", "code", "question", "[", "'name'", "]", "=", "s3db", ".", "survey_qstn_name_represent", "(", "question_row", ".", "name", ")", "question", "[", "'type'", "]", "=", "question_row", ".", "type", "question", "[", "'posn'", "]", "=", "row", ".", "survey_question_list", ".", "posn", "question", "[", "'section'", "]", "=", "row", ".", "survey_section", ".", "name", "questions", ".", "append", "(", "question", ")", "return", "questions" ]
function to return the list of questions for the given template ; the questions are returned in the order of their position in the template .
train
false
50,331
def _get_time(time, tzinfo=None): if (time is None): time = datetime.utcnow() elif isinstance(time, number_types): time = datetime.utcfromtimestamp(time) if (time.tzinfo is None): time = time.replace(tzinfo=UTC) if isinstance(time, datetime): if (tzinfo is not None): time = time.astimezone(tzinfo) if hasattr(tzinfo, 'normalize'): time = tzinfo.normalize(time) time = time.timetz() elif (tzinfo is not None): time = time.replace(tzinfo=tzinfo) return time
[ "def", "_get_time", "(", "time", ",", "tzinfo", "=", "None", ")", ":", "if", "(", "time", "is", "None", ")", ":", "time", "=", "datetime", ".", "utcnow", "(", ")", "elif", "isinstance", "(", "time", ",", "number_types", ")", ":", "time", "=", "datetime", ".", "utcfromtimestamp", "(", "time", ")", "if", "(", "time", ".", "tzinfo", "is", "None", ")", ":", "time", "=", "time", ".", "replace", "(", "tzinfo", "=", "UTC", ")", "if", "isinstance", "(", "time", ",", "datetime", ")", ":", "if", "(", "tzinfo", "is", "not", "None", ")", ":", "time", "=", "time", ".", "astimezone", "(", "tzinfo", ")", "if", "hasattr", "(", "tzinfo", ",", "'normalize'", ")", ":", "time", "=", "tzinfo", ".", "normalize", "(", "time", ")", "time", "=", "time", ".", "timetz", "(", ")", "elif", "(", "tzinfo", "is", "not", "None", ")", ":", "time", "=", "time", ".", "replace", "(", "tzinfo", "=", "tzinfo", ")", "return", "time" ]
get a timezone-aware time from a given instant .
train
false
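a hedged sketch of the numeric-timestamp path; it relies on the module-level UTC and number_types definitions:

_get_time(0)
# -> datetime.time(0, 0, tzinfo=UTC): the unix epoch instant as a
#    timezone-aware time-of-day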
50,332
def rmsdiff_2011(im1, im2): diff = ImageChops.difference(im1, im2) h = diff.histogram() sq = ((value * (idx ** 2)) for (idx, value) in enumerate(h)) sum_of_squares = sum(sq) rms = math.sqrt((sum_of_squares / float((im1.size[0] * im1.size[1])))) return rms
[ "def", "rmsdiff_2011", "(", "im1", ",", "im2", ")", ":", "diff", "=", "ImageChops", ".", "difference", "(", "im1", ",", "im2", ")", "h", "=", "diff", ".", "histogram", "(", ")", "sq", "=", "(", "(", "value", "*", "(", "idx", "**", "2", ")", ")", "for", "(", "idx", ",", "value", ")", "in", "enumerate", "(", "h", ")", ")", "sum_of_squares", "=", "sum", "(", "sq", ")", "rms", "=", "math", ".", "sqrt", "(", "(", "sum_of_squares", "/", "float", "(", "(", "im1", ".", "size", "[", "0", "]", "*", "im1", ".", "size", "[", "1", "]", ")", ")", ")", ")", "return", "rms" ]
calculate the root-mean-square difference between two images .
train
false
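a runnable check with flat grayscale images, assuming ImageChops and math are imported alongside the function:

from PIL import Image
a = Image.new('L', (10, 10), 0)
b = Image.new('L', (10, 10), 10)
rmsdiff_2011(a, a)   # -> 0.0   identical images
rmsdiff_2011(a, b)   # -> 10.0  every pixel differs by exactly 10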
50,333
def vereq(a, b): if (not (a == b)): raise TestFailed(('%r == %r' % (a, b)))
[ "def", "vereq", "(", "a", ",", "b", ")", ":", "if", "(", "not", "(", "a", "==", "b", ")", ")", ":", "raise", "TestFailed", "(", "(", "'%r == %r'", "%", "(", "a", ",", "b", ")", ")", ")" ]
raise testfailed if a == b is false .
train
false
50,335
def checkHandleAndSynchronize(default=None): def wrap(f): def invoke_func(*args, **kwargs): download = args[0] with download.dllock: if (download.handle and download.handle.is_valid()): return f(*args, **kwargs) return default return invoke_func return wrap
[ "def", "checkHandleAndSynchronize", "(", "default", "=", "None", ")", ":", "def", "wrap", "(", "f", ")", ":", "def", "invoke_func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "download", "=", "args", "[", "0", "]", "with", "download", ".", "dllock", ":", "if", "(", "download", ".", "handle", "and", "download", ".", "handle", ".", "is_valid", "(", ")", ")", ":", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "default", "return", "invoke_func", "return", "wrap" ]
decorator that calls the wrapped method only if the download's libtorrent handle is available and valid , returning default otherwise .
train
false
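a hedged decorator sketch; FakeDownload stands in for the real Download class and only needs the dllock/handle attributes the wrapper touches:

import threading

class FakeDownload(object):
    def __init__(self, handle):
        self.dllock = threading.RLock()
        self.handle = handle

    @checkHandleAndSynchronize(default=0.0)
    def get_progress(self):
        return self.handle.status().progress

FakeDownload(None).get_progress()   # -> 0.0, the handle check short-circuits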
50,336
def get_auth_settings(): from mezzanine.conf import settings try: auth_settings = (settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET, settings.TWITTER_ACCESS_TOKEN_KEY, settings.TWITTER_ACCESS_TOKEN_SECRET) except AttributeError: return None else: return (auth_settings if all(auth_settings) else None)
[ "def", "get_auth_settings", "(", ")", ":", "from", "mezzanine", ".", "conf", "import", "settings", "try", ":", "auth_settings", "=", "(", "settings", ".", "TWITTER_CONSUMER_KEY", ",", "settings", ".", "TWITTER_CONSUMER_SECRET", ",", "settings", ".", "TWITTER_ACCESS_TOKEN_KEY", ",", "settings", ".", "TWITTER_ACCESS_TOKEN_SECRET", ")", "except", "AttributeError", ":", "return", "None", "else", ":", "return", "(", "auth_settings", "if", "all", "(", "auth_settings", ")", "else", "None", ")" ]
returns all the key/secret settings for twitter access .
train
true
50,337
def AllTargets(target_list, target_dicts, build_file): bftargets = BuildFileTargets(target_list, build_file) deptargets = DeepDependencyTargets(target_dicts, bftargets) return (bftargets + deptargets)
[ "def", "AllTargets", "(", "target_list", ",", "target_dicts", ",", "build_file", ")", ":", "bftargets", "=", "BuildFileTargets", "(", "target_list", ",", "build_file", ")", "deptargets", "=", "DeepDependencyTargets", "(", "target_dicts", ",", "bftargets", ")", "return", "(", "bftargets", "+", "deptargets", ")" ]
returns all targets for the specified build_file .
train
false
50,339
def set_package(fxn): @functools.wraps(fxn) def set_package_wrapper(*args, **kwargs): warnings.warn('The import system now takes care of this automatically.', DeprecationWarning, stacklevel=2) module = fxn(*args, **kwargs) if (getattr(module, '__package__', None) is None): module.__package__ = module.__name__ if (not hasattr(module, '__path__')): module.__package__ = module.__package__.rpartition('.')[0] return module return set_package_wrapper
[ "def", "set_package", "(", "fxn", ")", ":", "@", "functools", ".", "wraps", "(", "fxn", ")", "def", "set_package_wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "warnings", ".", "warn", "(", "'The import system now takes care of this automatically.'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "module", "=", "fxn", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "getattr", "(", "module", ",", "'__package__'", ",", "None", ")", "is", "None", ")", ":", "module", ".", "__package__", "=", "module", ".", "__name__", "if", "(", "not", "hasattr", "(", "module", ",", "'__path__'", ")", ")", ":", "module", ".", "__package__", "=", "module", ".", "__package__", ".", "rpartition", "(", "'.'", ")", "[", "0", "]", "return", "module", "return", "set_package_wrapper" ]
set __package__ on the returned module .
train
true
50,340
def likelihood_ratio_statistic(x, y): x[(x == 0)] = 1e-05 y[(y == 0)] = 1e-05 y *= (x.sum() / y.sum()) lrs = (2 * (x * np.log((x / y))).sum()) return lrs
[ "def", "likelihood_ratio_statistic", "(", "x", ",", "y", ")", ":", "x", "[", "(", "x", "==", "0", ")", "]", "=", "1e-05", "y", "[", "(", "y", "==", "0", ")", "]", "=", "1e-05", "y", "*=", "(", "x", ".", "sum", "(", ")", "/", "y", ".", "sum", "(", ")", ")", "lrs", "=", "(", "2", "*", "(", "x", "*", "np", ".", "log", "(", "(", "x", "/", "y", ")", ")", ")", ".", "sum", "(", ")", ")", "return", "lrs" ]
calculate likelihood ratio statistic for given distributions .
train
false
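a runnable sketch; note both inputs are modified in place (zeros replaced, y rescaled):

import numpy as np
x = np.array([10.0, 20.0, 30.0])   # observed counts
y = np.array([1.0, 1.0, 1.0])      # expected shape, rescaled to x's total inside
likelihood_ratio_statistic(x, y)
# -> about 10.46, i.e. 2 * sum(x * log(x / y)) with y scaled to [20, 20, 20]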
50,341
def judge(zcontext, in_url, pythagoras_url, out_url): isock = zcontext.socket(zmq.SUB) isock.connect(in_url) for prefix in ('01', '10', '11'): isock.setsockopt(zmq.SUBSCRIBE, prefix) psock = zcontext.socket(zmq.REQ) psock.connect(pythagoras_url) osock = zcontext.socket(zmq.PUSH) osock.connect(out_url) unit = (2 ** (B * 2)) while True: bits = isock.recv_string() (n, m) = (int(bits[::2], 2), int(bits[1::2], 2)) psock.send_json((n, m)) sumsquares = psock.recv_json() osock.send_string(('Y' if (sumsquares < unit) else 'N'))
[ "def", "judge", "(", "zcontext", ",", "in_url", ",", "pythagoras_url", ",", "out_url", ")", ":", "isock", "=", "zcontext", ".", "socket", "(", "zmq", ".", "SUB", ")", "isock", ".", "connect", "(", "in_url", ")", "for", "prefix", "in", "(", "'01'", ",", "'10'", ",", "'11'", ")", ":", "isock", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "prefix", ")", "psock", "=", "zcontext", ".", "socket", "(", "zmq", ".", "REQ", ")", "psock", ".", "connect", "(", "pythagoras_url", ")", "osock", "=", "zcontext", ".", "socket", "(", "zmq", ".", "PUSH", ")", "osock", ".", "connect", "(", "out_url", ")", "unit", "=", "(", "2", "**", "(", "B", "*", "2", ")", ")", "while", "True", ":", "bits", "=", "isock", ".", "recv_string", "(", ")", "(", "n", ",", "m", ")", "=", "(", "int", "(", "bits", "[", ":", ":", "2", "]", ",", "2", ")", ",", "int", "(", "bits", "[", "1", ":", ":", "2", "]", ",", "2", ")", ")", "psock", ".", "send_json", "(", "(", "n", ",", "m", ")", ")", "sumsquares", "=", "psock", ".", "recv_json", "(", ")", "osock", ".", "send_string", "(", "(", "'Y'", "if", "(", "sumsquares", "<", "unit", ")", "else", "'N'", ")", ")" ]
determine whether each input coordinate is inside the unit circle .
train
false
50,342
def MakeComparison(firsts, others): first_hist = thinkstats2.Hist(firsts.prglngth, label='first') other_hist = thinkstats2.Hist(others.prglngth, label='other') width = 0.45 thinkplot.PrePlot(2) thinkplot.Hist(first_hist, align='right', width=width) thinkplot.Hist(other_hist, align='left', width=width) thinkplot.Save(root='first_nsfg_hist', title='Histogram', xlabel='weeks', ylabel='frequency', axis=[27, 46, 0, 2700])
[ "def", "MakeComparison", "(", "firsts", ",", "others", ")", ":", "first_hist", "=", "thinkstats2", ".", "Hist", "(", "firsts", ".", "prglngth", ",", "label", "=", "'first'", ")", "other_hist", "=", "thinkstats2", ".", "Hist", "(", "others", ".", "prglngth", ",", "label", "=", "'other'", ")", "width", "=", "0.45", "thinkplot", ".", "PrePlot", "(", "2", ")", "thinkplot", ".", "Hist", "(", "first_hist", ",", "align", "=", "'right'", ",", "width", "=", "width", ")", "thinkplot", ".", "Hist", "(", "other_hist", ",", "align", "=", "'left'", ",", "width", "=", "width", ")", "thinkplot", ".", "Save", "(", "root", "=", "'first_nsfg_hist'", ",", "title", "=", "'Histogram'", ",", "xlabel", "=", "'weeks'", ",", "ylabel", "=", "'frequency'", ",", "axis", "=", "[", "27", ",", "46", ",", "0", ",", "2700", "]", ")" ]
plots histograms of pregnancy length for first babies and others .
train
false