Column                 Type            Values
id_within_dataset      int64           1 – 55.5k
snippet                stringlengths   19 – 14.2k
tokens                 listlengths     6 – 1.63k
nl                     stringlengths   6 – 352
split_within_dataset   stringclasses   1 value
is_duplicated          bool            2 classes
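Before the rows, a minimal sketch of how a split with this schema could be iterated, assuming the dump corresponds to a dataset loadable through the Hugging Face datasets library; the identifier "code-to-nl" is a placeholder, not the real dataset name:

    # Hypothetical loading sketch; "code-to-nl" is a placeholder identifier.
    from datasets import load_dataset

    ds = load_dataset("code-to-nl", split="train")
    for row in ds.select(range(3)):
        # "tokens" duplicates "snippet" in tokenized form.
        assert isinstance(row["tokens"], list)
        print(row["id_within_dataset"], row["is_duplicated"])
        print(row["nl"])             # natural-language summary
        print(row["snippet"][:80])   # raw source code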
54,419
def create_gzip_message(payloads, key=None, compresslevel=None):
    message_set = KafkaProtocol._encode_message_set(
        [create_message(payload, pl_key) for (payload, pl_key) in payloads])
    gzipped = gzip_encode(message_set, compresslevel=compresslevel)
    codec = (ATTRIBUTE_CODEC_MASK & CODEC_GZIP)
    return kafka.structs.Message(0, (0 | codec), key, gzipped)
[ "def", "create_gzip_message", "(", "payloads", ",", "key", "=", "None", ",", "compresslevel", "=", "None", ")", ":", "message_set", "=", "KafkaProtocol", ".", "_encode_message_set", "(", "[", "create_message", "(", "payload", ",", "pl_key", ")", "for", "(", "payload", ",", "pl_key", ")", "in", "payloads", "]", ")", "gzipped", "=", "gzip_encode", "(", "message_set", ",", "compresslevel", "=", "compresslevel", ")", "codec", "=", "(", "ATTRIBUTE_CODEC_MASK", "&", "CODEC_GZIP", ")", "return", "kafka", ".", "structs", ".", "Message", "(", "0", ",", "(", "0", "|", "codec", ")", ",", "key", ",", "gzipped", ")" ]
construct a gzipped message containing multiple messages; the given payloads will be encoded .
train
true
54,420
def unpublish_exploration(committer_id, exploration_id):
    _unpublish_activity(committer_id, exploration_id,
                        feconf.ACTIVITY_TYPE_EXPLORATION)
[ "def", "unpublish_exploration", "(", "committer_id", ",", "exploration_id", ")", ":", "_unpublish_activity", "(", "committer_id", ",", "exploration_id", ",", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ")" ]
unpublishes an exploration .
train
false
54,421
def custom_create(request):
    class SlugChangingArticleForm(forms.ModelForm):
        'Custom form class to overwrite the slug.'
        class Meta:
            model = Article
        def save(self, *args, **kwargs):
            self.instance.slug = 'some-other-slug'
            return super(SlugChangingArticleForm, self).save(*args, **kwargs)
    from django.views.generic.create_update import create_object
    return create_object(
        request,
        post_save_redirect='/create_update/view/article/%(slug)s/',
        form_class=SlugChangingArticleForm)
[ "def", "custom_create", "(", "request", ")", ":", "class", "SlugChangingArticleForm", "(", "forms", ".", "ModelForm", ",", ")", ":", "class", "Meta", ":", "model", "=", "Article", "def", "save", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "self", ".", "instance", ".", "slug", "=", "'some-other-slug'", "return", "super", "(", "SlugChangingArticleForm", ",", "self", ")", ".", "save", "(", "*", "args", ",", "**", "kwargs", ")", "from", "django", ".", "views", ".", "generic", ".", "create_update", "import", "create_object", "return", "create_object", "(", "request", ",", "post_save_redirect", "=", "'/create_update/view/article/%(slug)s/'", ",", "form_class", "=", "SlugChangingArticleForm", ")" ]
calls create_object generic view with a custom form class .
train
false
54,422
def iter_texts_from_json_bz2(loc):
    with bz2.BZ2File(loc) as file_:
        for (i, line) in enumerate(file_):
            yield ujson.loads(line)[u'body']
[ "def", "iter_texts_from_json_bz2", "(", "loc", ")", ":", "with", "bz2", ".", "BZ2File", "(", "loc", ")", "as", "file_", ":", "for", "(", "i", ",", "line", ")", "in", "enumerate", "(", "file_", ")", ":", "(", "yield", "ujson", ".", "loads", "(", "line", ")", "[", "u'body'", "]", ")" ]
iterator of unicode strings .
train
false
54,423
def find_indentation(node):
    while (node is not None):
        if ((node.type == syms.suite) and (len(node.children) > 2)):
            indent = node.children[1]
            if (indent.type == token.INDENT):
                return indent.value
        node = node.parent
    return u''
[ "def", "find_indentation", "(", "node", ")", ":", "while", "(", "node", "is", "not", "None", ")", ":", "if", "(", "(", "node", ".", "type", "==", "syms", ".", "suite", ")", "and", "(", "len", "(", "node", ".", "children", ")", ">", "2", ")", ")", ":", "indent", "=", "node", ".", "children", "[", "1", "]", "if", "(", "indent", ".", "type", "==", "token", ".", "INDENT", ")", ":", "return", "indent", ".", "value", "node", "=", "node", ".", "parent", "return", "u''" ]
find the indentation of *node* .
train
true
54,425
def disabled_xblocks():
    return XBlockConfiguration.objects.current_set().filter(enabled=False)
[ "def", "disabled_xblocks", "(", ")", ":", "return", "XBlockConfiguration", ".", "objects", ".", "current_set", "(", ")", ".", "filter", "(", "enabled", "=", "False", ")" ]
return the queryset of disabled xblock types .
train
false
54,426
@register.tag(name='crispy')
def do_uni_form(parser, token):
    token = token.split_contents()
    form = token.pop(1)
    helper = None
    template_pack = ("'%s'" % get_template_pack())
    try:
        helper = token.pop(1)
    except IndexError:
        pass
    try:
        template_pack = token.pop(1)
    except IndexError:
        pass
    if ((helper is not None) and isinstance(helper, string_types)
            and (("'" in helper) or ('"' in helper))):
        template_pack = helper
        helper = None
    if (template_pack is not None):
        template_pack = template_pack[1:(-1)]
        ALLOWED_TEMPLATE_PACKS = getattr(
            settings, 'CRISPY_ALLOWED_TEMPLATE_PACKS',
            ('bootstrap', 'uni_form', 'bootstrap3', 'bootstrap4'))
        if (template_pack not in ALLOWED_TEMPLATE_PACKS):
            raise template.TemplateSyntaxError(
                "crispy tag's template_pack argument should be in %s"
                % str(ALLOWED_TEMPLATE_PACKS))
    return CrispyFormNode(form, helper, template_pack=template_pack)
[ "@", "register", ".", "tag", "(", "name", "=", "'crispy'", ")", "def", "do_uni_form", "(", "parser", ",", "token", ")", ":", "token", "=", "token", ".", "split_contents", "(", ")", "form", "=", "token", ".", "pop", "(", "1", ")", "helper", "=", "None", "template_pack", "=", "(", "\"'%s'\"", "%", "get_template_pack", "(", ")", ")", "try", ":", "helper", "=", "token", ".", "pop", "(", "1", ")", "except", "IndexError", ":", "pass", "try", ":", "template_pack", "=", "token", ".", "pop", "(", "1", ")", "except", "IndexError", ":", "pass", "if", "(", "(", "helper", "is", "not", "None", ")", "and", "isinstance", "(", "helper", ",", "string_types", ")", "and", "(", "(", "\"'\"", "in", "helper", ")", "or", "(", "'\"'", "in", "helper", ")", ")", ")", ":", "template_pack", "=", "helper", "helper", "=", "None", "if", "(", "template_pack", "is", "not", "None", ")", ":", "template_pack", "=", "template_pack", "[", "1", ":", "(", "-", "1", ")", "]", "ALLOWED_TEMPLATE_PACKS", "=", "getattr", "(", "settings", ",", "'CRISPY_ALLOWED_TEMPLATE_PACKS'", ",", "(", "'bootstrap'", ",", "'uni_form'", ",", "'bootstrap3'", ",", "'bootstrap4'", ")", ")", "if", "(", "template_pack", "not", "in", "ALLOWED_TEMPLATE_PACKS", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"crispy tag's template_pack argument should be in %s\"", "%", "str", "(", "ALLOWED_TEMPLATE_PACKS", ")", ")", ")", "return", "CrispyFormNode", "(", "form", ",", "helper", ",", "template_pack", "=", "template_pack", ")" ]
you need to pass in at least the form/formset object .
train
true
54,427
def _CreateDbOperationMap(entry_list):
    map = dict()
    for entry in entry_list:
        handler = entry.handler
        method_str = ((handler.im_self.__name__ + '.') + handler.im_func.__name__)
        map[method_str] = entry
    return map
[ "def", "_CreateDbOperationMap", "(", "entry_list", ")", ":", "map", "=", "dict", "(", ")", "for", "entry", "in", "entry_list", ":", "handler", "=", "entry", ".", "handler", "method_str", "=", "(", "(", "handler", ".", "im_self", ".", "__name__", "+", "'.'", ")", "+", "handler", ".", "im_func", ".", "__name__", ")", "map", "[", "method_str", "]", "=", "entry", "return", "map" ]
create an operation map from a list of opmapentry objects .
train
false
54,428
def mkAssocResponse(*keys):
    args = dict([(key, association_response_values[key]) for key in keys])
    return Message.fromOpenIDArgs(args)
[ "def", "mkAssocResponse", "(", "*", "keys", ")", ":", "args", "=", "dict", "(", "[", "(", "key", ",", "association_response_values", "[", "key", "]", ")", "for", "key", "in", "keys", "]", ")", "return", "Message", ".", "fromOpenIDArgs", "(", "args", ")" ]
build an association response message that contains the specified subset of keys .
train
false
54,429
def check_header_validity(header):
    (name, value) = header
    if isinstance(value, bytes):
        pat = _CLEAN_HEADER_REGEX_BYTE
    else:
        pat = _CLEAN_HEADER_REGEX_STR
    try:
        if (not pat.match(value)):
            raise InvalidHeader(
                'Invalid return character or leading space in header: %s' % name)
    except TypeError:
        raise InvalidHeader(
            'Header value %s must be of type str or bytes, not %s'
            % (value, type(value)))
[ "def", "check_header_validity", "(", "header", ")", ":", "(", "name", ",", "value", ")", "=", "header", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "pat", "=", "_CLEAN_HEADER_REGEX_BYTE", "else", ":", "pat", "=", "_CLEAN_HEADER_REGEX_STR", "try", ":", "if", "(", "not", "pat", ".", "match", "(", "value", ")", ")", ":", "raise", "InvalidHeader", "(", "(", "'Invalid return character or leading space in header: %s'", "%", "name", ")", ")", "except", "TypeError", ":", "raise", "InvalidHeader", "(", "(", "'Header value %s must be of type str or bytes, not %s'", "%", "(", "value", ",", "type", "(", "value", ")", ")", ")", ")" ]
verifies that header value is a string which doesn't contain leading whitespace or return characters .
train
true
54,432
def assert_regex(result, expected, msg=''):
    assert re.search(expected, result), \
        ('%s%r not found in %r' % (_fmt_msg(msg), expected, result))
[ "def", "assert_regex", "(", "result", ",", "expected", ",", "msg", "=", "''", ")", ":", "assert", "re", ".", "search", "(", "expected", ",", "result", ")", ",", "(", "'%s%r not found in %r'", "%", "(", "_fmt_msg", "(", "msg", ")", ",", "expected", ",", "result", ")", ")" ]
assert that expected matches the result .
train
false
54,433
def scatter_plot():
    point_count = 1000
    x_index = np.random.random(point_count)
    y_index = np.random.random(point_count)
    color_list = np.random.random(point_count)
    scale_list = (np.random.random(point_count) * 100)
    plt.scatter(x_index, y_index, s=scale_list, c=color_list, marker='o')
    plt.show()
    return
[ "def", "scatter_plot", "(", ")", ":", "point_count", "=", "1000", "x_index", "=", "np", ".", "random", ".", "random", "(", "point_count", ")", "y_index", "=", "np", ".", "random", ".", "random", "(", "point_count", ")", "color_list", "=", "np", ".", "random", ".", "random", "(", "point_count", ")", "scale_list", "=", "(", "np", ".", "random", ".", "random", "(", "point_count", ")", "*", "100", ")", "plt", ".", "scatter", "(", "x_index", ",", "y_index", ",", "s", "=", "scale_list", ",", "c", "=", "color_list", ",", "marker", "=", "'o'", ")", "plt", ".", "show", "(", ")", "return" ]
scatter plot .
train
false
54,435
def dmp_rem(f, g, u, K):
    return dmp_div(f, g, u, K)[1]
[ "def", "dmp_rem", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "return", "dmp_div", "(", "f", ",", "g", ",", "u", ",", "K", ")", "[", "1", "]" ]
returns polynomial remainder in k[x] .
train
false
54,436
def import_key(extern_key, passphrase=None):
    extern_key = tobytes(extern_key)
    if (passphrase is not None):
        passphrase = tobytes(passphrase)
    if extern_key.startswith(b('-----')):
        (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase)
        if enc_flag:
            passphrase = None
        return _import_keyDER(der, passphrase)
    if extern_key.startswith(b('ssh-rsa ')):
        keystring = binascii.a2b_base64(extern_key.split(b(' '))[1])
        keyparts = []
        while (len(keystring) > 4):
            l = struct.unpack('>I', keystring[:4])[0]
            keyparts.append(keystring[4:(4 + l)])
            keystring = keystring[(4 + l):]
        e = Integer.from_bytes(keyparts[1])
        n = Integer.from_bytes(keyparts[2])
        return construct([n, e])
    if (bord(extern_key[0]) == 48):
        return _import_keyDER(extern_key, passphrase)
    raise ValueError('RSA key format is not supported')
[ "def", "import_key", "(", "extern_key", ",", "passphrase", "=", "None", ")", ":", "extern_key", "=", "tobytes", "(", "extern_key", ")", "if", "(", "passphrase", "is", "not", "None", ")", ":", "passphrase", "=", "tobytes", "(", "passphrase", ")", "if", "extern_key", ".", "startswith", "(", "b", "(", "'-----'", ")", ")", ":", "(", "der", ",", "marker", ",", "enc_flag", ")", "=", "PEM", ".", "decode", "(", "tostr", "(", "extern_key", ")", ",", "passphrase", ")", "if", "enc_flag", ":", "passphrase", "=", "None", "return", "_import_keyDER", "(", "der", ",", "passphrase", ")", "if", "extern_key", ".", "startswith", "(", "b", "(", "'ssh-rsa '", ")", ")", ":", "keystring", "=", "binascii", ".", "a2b_base64", "(", "extern_key", ".", "split", "(", "b", "(", "' '", ")", ")", "[", "1", "]", ")", "keyparts", "=", "[", "]", "while", "(", "len", "(", "keystring", ")", ">", "4", ")", ":", "l", "=", "struct", ".", "unpack", "(", "'>I'", ",", "keystring", "[", ":", "4", "]", ")", "[", "0", "]", "keyparts", ".", "append", "(", "keystring", "[", "4", ":", "(", "4", "+", "l", ")", "]", ")", "keystring", "=", "keystring", "[", "(", "4", "+", "l", ")", ":", "]", "e", "=", "Integer", ".", "from_bytes", "(", "keyparts", "[", "1", "]", ")", "n", "=", "Integer", ".", "from_bytes", "(", "keyparts", "[", "2", "]", ")", "return", "construct", "(", "[", "n", ",", "e", "]", ")", "if", "(", "bord", "(", "extern_key", "[", "0", "]", ")", "==", "48", ")", ":", "return", "_import_keyDER", "(", "extern_key", ",", "passphrase", ")", "raise", "ValueError", "(", "'RSA key format is not supported'", ")" ]
import an rsa key .
train
false
54,437
def buildAllTarballs(checkout, destination):
    if (not checkout.child('.svn').exists()):
        raise NotWorkingDirectory(
            '%s does not appear to be an SVN working directory.'
            % (checkout.path,))
    if runCommand(['svn', 'st', checkout.path]).strip():
        raise UncleanWorkingDirectory(
            'There are local modifications to the SVN checkout in %s.'
            % (checkout.path,))
    workPath = FilePath(mkdtemp())
    export = workPath.child('export')
    runCommand(['svn', 'export', checkout.path, export.path])
    twistedPath = export.child('twisted')
    version = Project(twistedPath).getVersion()
    versionString = version.base()
    apiBaseURL = ('http://twistedmatrix.com/documents/%s/api/%%s.html'
                  % versionString)
    if (not destination.exists()):
        destination.createDirectory()
    db = DistributionBuilder(export, destination, apiBaseURL=apiBaseURL)
    db.buildCore(versionString)
    for subproject in twisted_subprojects:
        if twistedPath.child(subproject).exists():
            db.buildSubProject(subproject, versionString)
    db.buildTwisted(versionString)
    workPath.remove()
[ "def", "buildAllTarballs", "(", "checkout", ",", "destination", ")", ":", "if", "(", "not", "checkout", ".", "child", "(", "'.svn'", ")", ".", "exists", "(", ")", ")", ":", "raise", "NotWorkingDirectory", "(", "(", "'%s does not appear to be an SVN working directory.'", "%", "(", "checkout", ".", "path", ",", ")", ")", ")", "if", "runCommand", "(", "[", "'svn'", ",", "'st'", ",", "checkout", ".", "path", "]", ")", ".", "strip", "(", ")", ":", "raise", "UncleanWorkingDirectory", "(", "(", "'There are local modifications to the SVN checkout in %s.'", "%", "(", "checkout", ".", "path", ",", ")", ")", ")", "workPath", "=", "FilePath", "(", "mkdtemp", "(", ")", ")", "export", "=", "workPath", ".", "child", "(", "'export'", ")", "runCommand", "(", "[", "'svn'", ",", "'export'", ",", "checkout", ".", "path", ",", "export", ".", "path", "]", ")", "twistedPath", "=", "export", ".", "child", "(", "'twisted'", ")", "version", "=", "Project", "(", "twistedPath", ")", ".", "getVersion", "(", ")", "versionString", "=", "version", ".", "base", "(", ")", "apiBaseURL", "=", "(", "'http://twistedmatrix.com/documents/%s/api/%%s.html'", "%", "versionString", ")", "if", "(", "not", "destination", ".", "exists", "(", ")", ")", ":", "destination", ".", "createDirectory", "(", ")", "db", "=", "DistributionBuilder", "(", "export", ",", "destination", ",", "apiBaseURL", "=", "apiBaseURL", ")", "db", ".", "buildCore", "(", "versionString", ")", "for", "subproject", "in", "twisted_subprojects", ":", "if", "twistedPath", ".", "child", "(", "subproject", ")", ".", "exists", "(", ")", ":", "db", ".", "buildSubProject", "(", "subproject", ",", "versionString", ")", "db", ".", "buildTwisted", "(", "versionString", ")", "workPath", ".", "remove", "(", ")" ]
build complete tarballs for twisted and all subprojects .
train
false
54,438
def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None):
    if undirected:
        if (distance is not None):
            (sp, _) = nx.single_source_dijkstra(G.to_undirected(), n,
                                                cutoff=radius, weight=distance)
        else:
            sp = dict(nx.single_source_shortest_path_length(
                G.to_undirected(), n, cutoff=radius))
    elif (distance is not None):
        (sp, _) = nx.single_source_dijkstra(G, n, cutoff=radius, weight=distance)
    else:
        sp = dict(nx.single_source_shortest_path_length(G, n, cutoff=radius))
    H = G.subgraph(sp).copy()
    if (not center):
        H.remove_node(n)
    return H
[ "def", "ego_graph", "(", "G", ",", "n", ",", "radius", "=", "1", ",", "center", "=", "True", ",", "undirected", "=", "False", ",", "distance", "=", "None", ")", ":", "if", "undirected", ":", "if", "(", "distance", "is", "not", "None", ")", ":", "(", "sp", ",", "_", ")", "=", "nx", ".", "single_source_dijkstra", "(", "G", ".", "to_undirected", "(", ")", ",", "n", ",", "cutoff", "=", "radius", ",", "weight", "=", "distance", ")", "else", ":", "sp", "=", "dict", "(", "nx", ".", "single_source_shortest_path_length", "(", "G", ".", "to_undirected", "(", ")", ",", "n", ",", "cutoff", "=", "radius", ")", ")", "elif", "(", "distance", "is", "not", "None", ")", ":", "(", "sp", ",", "_", ")", "=", "nx", ".", "single_source_dijkstra", "(", "G", ",", "n", ",", "cutoff", "=", "radius", ",", "weight", "=", "distance", ")", "else", ":", "sp", "=", "dict", "(", "nx", ".", "single_source_shortest_path_length", "(", "G", ",", "n", ",", "cutoff", "=", "radius", ")", ")", "H", "=", "G", ".", "subgraph", "(", "sp", ")", ".", "copy", "(", ")", "if", "(", "not", "center", ")", ":", "H", ".", "remove_node", "(", "n", ")", "return", "H" ]
returns induced subgraph of neighbors centered at node n within a given radius .
train
false
54,442
def GetResources(filename, types=None, names=None, languages=None):
    hsrc = win32api.LoadLibraryEx(filename, 0, LOAD_LIBRARY_AS_DATAFILE)
    res = _GetResources(hsrc, types, names, languages)
    win32api.FreeLibrary(hsrc)
    return res
[ "def", "GetResources", "(", "filename", ",", "types", "=", "None", ",", "names", "=", "None", ",", "languages", "=", "None", ")", ":", "hsrc", "=", "win32api", ".", "LoadLibraryEx", "(", "filename", ",", "0", ",", "LOAD_LIBRARY_AS_DATAFILE", ")", "res", "=", "_GetResources", "(", "hsrc", ",", "types", ",", "names", ",", "languages", ")", "win32api", ".", "FreeLibrary", "(", "hsrc", ")", "return", "res" ]
get resources from dll/exe file .
train
true
54,443
def test_duplicate_output():
    assert_raises(BundleError, bundle_to_joblist,
                  Bundle(Bundle('s1', output='foo'), Bundle('s2', output='foo')))
[ "def", "test_duplicate_output", "(", ")", ":", "assert_raises", "(", "BundleError", ",", "bundle_to_joblist", ",", "Bundle", "(", "Bundle", "(", "'s1'", ",", "output", "=", "'foo'", ")", ",", "Bundle", "(", "'s2'", ",", "output", "=", "'foo'", ")", ")", ")" ]
an error is raised if two bundles within a single bundle declare the same output .
train
false
54,444
def classof(A, B):
    try:
        if (A._class_priority > B._class_priority):
            return A.__class__
        else:
            return B.__class__
    except Exception:
        pass
    try:
        import numpy
        if isinstance(A, numpy.ndarray):
            return B.__class__
        if isinstance(B, numpy.ndarray):
            return A.__class__
    except Exception:
        pass
    raise TypeError('Incompatible classes %s, %s' % (A.__class__, B.__class__))
[ "def", "classof", "(", "A", ",", "B", ")", ":", "try", ":", "if", "(", "A", ".", "_class_priority", ">", "B", ".", "_class_priority", ")", ":", "return", "A", ".", "__class__", "else", ":", "return", "B", ".", "__class__", "except", "Exception", ":", "pass", "try", ":", "import", "numpy", "if", "isinstance", "(", "A", ",", "numpy", ".", "ndarray", ")", ":", "return", "B", ".", "__class__", "if", "isinstance", "(", "B", ",", "numpy", ".", "ndarray", ")", ":", "return", "A", ".", "__class__", "except", "Exception", ":", "pass", "raise", "TypeError", "(", "(", "'Incompatible classes %s, %s'", "%", "(", "A", ".", "__class__", ",", "B", ".", "__class__", ")", ")", ")" ]
get the type of the result when combining matrices of different types .
train
false
54,445
def _sparse_blockify(tuples, dtype=None):
    new_blocks = []
    for (i, names, array) in tuples:
        array = _maybe_to_sparse(array)
        block = make_block(array, klass=SparseBlock, fastpath=True, placement=[i])
        new_blocks.append(block)
    return new_blocks
[ "def", "_sparse_blockify", "(", "tuples", ",", "dtype", "=", "None", ")", ":", "new_blocks", "=", "[", "]", "for", "(", "i", ",", "names", ",", "array", ")", "in", "tuples", ":", "array", "=", "_maybe_to_sparse", "(", "array", ")", "block", "=", "make_block", "(", "array", ",", "klass", "=", "SparseBlock", ",", "fastpath", "=", "True", ",", "placement", "=", "[", "i", "]", ")", "new_blocks", ".", "append", "(", "block", ")", "return", "new_blocks" ]
return an array of blocks that potentially have different dtypes .
train
false
54,446
def compose_all(stream, Loader=Loader):
    loader = Loader(stream)
    while loader.check_node():
        yield loader.get_node()
[ "def", "compose_all", "(", "stream", ",", "Loader", "=", "Loader", ")", ":", "loader", "=", "Loader", "(", "stream", ")", "while", "loader", ".", "check_node", "(", ")", ":", "(", "yield", "loader", ".", "get_node", "(", ")", ")" ]
parse all yaml documents in a stream and produce corresponding representation trees .
train
true
54,447
def get_item_inventory_size():
    _inventory.retrieve_inventories_size()
    return _inventory.item_inventory_size
[ "def", "get_item_inventory_size", "(", ")", ":", "_inventory", ".", "retrieve_inventories_size", "(", ")", "return", "_inventory", ".", "item_inventory_size" ]
access to the item inventory size .
train
false
54,448
def test_private_browsing(qtbot, tmpdir, fake_save_manager, config_stub):
    config_stub.data = {'general': {'private-browsing': True}}
    private_hist = history.WebHistory(hist_dir=str(tmpdir), hist_name='history')
    with qtbot.assertNotEmitted(private_hist.add_completion_item):
        with qtbot.assertNotEmitted(private_hist.item_added):
            private_hist.add_url(QUrl('http://www.example.com/'))
    assert (not private_hist._temp_history)
    with qtbot.assertNotEmitted(private_hist.add_completion_item):
        with qtbot.assertNotEmitted(private_hist.item_added):
            with qtbot.waitSignals([private_hist.async_read_done], order='strict'):
                list(private_hist.async_read())
    with qtbot.assertNotEmitted(private_hist.add_completion_item):
        with qtbot.assertNotEmitted(private_hist.item_added):
            private_hist.add_url(QUrl('http://www.example.com/'))
    assert (not private_hist._temp_history)
    assert (not private_hist._new_history)
    assert (not private_hist.history_dict)
[ "def", "test_private_browsing", "(", "qtbot", ",", "tmpdir", ",", "fake_save_manager", ",", "config_stub", ")", ":", "config_stub", ".", "data", "=", "{", "'general'", ":", "{", "'private-browsing'", ":", "True", "}", "}", "private_hist", "=", "history", ".", "WebHistory", "(", "hist_dir", "=", "str", "(", "tmpdir", ")", ",", "hist_name", "=", "'history'", ")", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "add_completion_item", ")", ":", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "item_added", ")", ":", "private_hist", ".", "add_url", "(", "QUrl", "(", "'http://www.example.com/'", ")", ")", "assert", "(", "not", "private_hist", ".", "_temp_history", ")", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "add_completion_item", ")", ":", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "item_added", ")", ":", "with", "qtbot", ".", "waitSignals", "(", "[", "private_hist", ".", "async_read_done", "]", ",", "order", "=", "'strict'", ")", ":", "list", "(", "private_hist", ".", "async_read", "(", ")", ")", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "add_completion_item", ")", ":", "with", "qtbot", ".", "assertNotEmitted", "(", "private_hist", ".", "item_added", ")", ":", "private_hist", ".", "add_url", "(", "QUrl", "(", "'http://www.example.com/'", ")", ")", "assert", "(", "not", "private_hist", ".", "_temp_history", ")", "assert", "(", "not", "private_hist", ".", "_new_history", ")", "assert", "(", "not", "private_hist", ".", "history_dict", ")" ]
make sure no data is saved at all with private browsing .
train
false
54,449
def metric_cleanup():
    logging.debug('metric_cleanup')
    pass
[ "def", "metric_cleanup", "(", ")", ":", "logging", ".", "debug", "(", "'metric_cleanup'", ")", "pass" ]
clean up the metric module .
train
false
54,450
def _copy_py_state(r, ptr):
    mt = r.getstate()[1]
    (ints, index) = (mt[:-1], mt[-1])
    _helperlib.rnd_set_state(ptr, (index, list(ints)))
    return (ints, index)
[ "def", "_copy_py_state", "(", "r", ",", "ptr", ")", ":", "mt", "=", "r", ".", "getstate", "(", ")", "[", "1", "]", "(", "ints", ",", "index", ")", "=", "(", "mt", "[", ":", "(", "-", "1", ")", "]", ",", "mt", "[", "(", "-", "1", ")", "]", ")", "_helperlib", ".", "rnd_set_state", "(", "ptr", ",", "(", "index", ",", "list", "(", "ints", ")", ")", ")", "return", "(", "ints", ",", "index", ")" ]
copy state of python random *r* to numba state *ptr* .
train
false
54,451
def parse_xreply(xreply):
    try:
        xreply = json.loads(xreply)
    except ValueError as err:
        log.error(err)
        return (1, 'unexpected reply from server')
    return_code = xreply['return_code']
    content = xreply['content']
    return (return_code, content)
[ "def", "parse_xreply", "(", "xreply", ")", ":", "try", ":", "xreply", "=", "json", ".", "loads", "(", "xreply", ")", "except", "ValueError", "as", "err", ":", "log", ".", "error", "(", "err", ")", "return", "(", "1", ",", "'unexpected reply from server'", ")", "return_code", "=", "xreply", "[", "'return_code'", "]", "content", "=", "xreply", "[", "'content'", "]", "return", "(", "return_code", ",", "content", ")" ]
parse the reply from xqueue .
train
false
54,452
def rand_text_alphanumeric(length, bad=''):
    chars = (upperAlpha + lowerAlpha + numerals)
    return rand_base(length, bad, set(chars))
[ "def", "rand_text_alphanumeric", "(", "length", ",", "bad", "=", "''", ")", ":", "chars", "=", "(", "(", "upperAlpha", "+", "lowerAlpha", ")", "+", "numerals", ")", "return", "rand_base", "(", "length", ",", "bad", ",", "set", "(", "chars", ")", ")" ]
generate a random string with alpha and numerals chars .
train
false
54,453
@app.route('/raise-500', methods=['GET'])
@requires_auth
def raise_500():
    raise ValueError('Foo!')
[ "@", "app", ".", "route", "(", "'/raise-500'", ",", "methods", "=", "[", "'GET'", "]", ")", "@", "requires_auth", "def", "raise_500", "(", ")", ":", "raise", "ValueError", "(", "'Foo!'", ")" ]
this exists for testing error_500_handler .
train
false
54,454
def _unary_int_input_wrapper_impl(wrapped_impl):
    def implementer(context, builder, sig, args):
        (val,) = args
        input_type = sig.args[0]
        fpval = context.cast(builder, val, input_type, types.float64)
        inner_sig = signature(types.float64, types.float64)
        res = wrapped_impl(context, builder, inner_sig, (fpval,))
        return context.cast(builder, res, types.float64, sig.return_type)
    return implementer
[ "def", "_unary_int_input_wrapper_impl", "(", "wrapped_impl", ")", ":", "def", "implementer", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "(", "val", ",", ")", "=", "args", "input_type", "=", "sig", ".", "args", "[", "0", "]", "fpval", "=", "context", ".", "cast", "(", "builder", ",", "val", ",", "input_type", ",", "types", ".", "float64", ")", "inner_sig", "=", "signature", "(", "types", ".", "float64", ",", "types", ".", "float64", ")", "res", "=", "wrapped_impl", "(", "context", ",", "builder", ",", "inner_sig", ",", "(", "fpval", ",", ")", ")", "return", "context", ".", "cast", "(", "builder", ",", "res", ",", "types", ".", "float64", ",", "sig", ".", "return_type", ")", "return", "implementer" ]
return an implementation factory to convert the single integral input argument to a float64 .
train
false
54,455
def unintegrate_levels(x, d):
    x = x[:d]
    return np.asarray([np.diff(x, (d - i))[0] for i in range(d, 0, -1)])
[ "def", "unintegrate_levels", "(", "x", ",", "d", ")", ":", "x", "=", "x", "[", ":", "d", "]", "return", "np", ".", "asarray", "(", "[", "np", ".", "diff", "(", "x", ",", "(", "d", "-", "i", ")", ")", "[", "0", "]", "for", "i", "in", "range", "(", "d", ",", "0", ",", "(", "-", "1", ")", ")", "]", ")" ]
returns the successive differences needed to unintegrate the series .
train
false
54,456
def _fileobj_normalize_mode(f):
    mode = f.mode
    if isinstance(f, gzip.GzipFile):
        if (mode == gzip.READ):
            return 'rb'
        elif (mode == gzip.WRITE):
            return 'wb'
        else:
            return None
    if ('+' in mode):
        mode = mode.replace('+', '')
        mode += '+'
    return mode
[ "def", "_fileobj_normalize_mode", "(", "f", ")", ":", "mode", "=", "f", ".", "mode", "if", "isinstance", "(", "f", ",", "gzip", ".", "GzipFile", ")", ":", "if", "(", "mode", "==", "gzip", ".", "READ", ")", ":", "return", "'rb'", "elif", "(", "mode", "==", "gzip", ".", "WRITE", ")", ":", "return", "'wb'", "else", ":", "return", "None", "if", "(", "'+'", "in", "mode", ")", ":", "mode", "=", "mode", ".", "replace", "(", "'+'", ",", "''", ")", "mode", "+=", "'+'", "return", "mode" ]
takes care of some corner cases in python where the mode string is either oddly formatted or does not truly represent the file mode .
train
false
54,458
def _pop_header_name(row, index_col):
    none_fill = (lambda x: (None if (x == '') else x))
    if (index_col is None):
        return (none_fill(row[0]), row[1:])
    else:
        i = (index_col if (not is_list_like(index_col)) else max(index_col))
        return (none_fill(row[i]), ((row[:i] + ['']) + row[(i + 1):]))
[ "def", "_pop_header_name", "(", "row", ",", "index_col", ")", ":", "none_fill", "=", "(", "lambda", "x", ":", "(", "None", "if", "(", "x", "==", "''", ")", "else", "x", ")", ")", "if", "(", "index_col", "is", "None", ")", ":", "return", "(", "none_fill", "(", "row", "[", "0", "]", ")", ",", "row", "[", "1", ":", "]", ")", "else", ":", "i", "=", "(", "index_col", "if", "(", "not", "is_list_like", "(", "index_col", ")", ")", "else", "max", "(", "index_col", ")", ")", "return", "(", "none_fill", "(", "row", "[", "i", "]", ")", ",", "(", "(", "row", "[", ":", "i", "]", "+", "[", "''", "]", ")", "+", "row", "[", "(", "i", "+", "1", ")", ":", "]", ")", ")" ]
for header rows in multiindex parsing .
train
false
54,459
def remux_audio(filename, title):
    util.dbg('starting remux')
    temp_file = (filename + '.' + str(random.randint(10000, 99999)))
    os.rename(filename, temp_file)
    meta = extract_metadata(title)
    metadata = [('title=%s' % meta['title'])]
    if meta['artist']:
        metadata = [('title=%s' % meta['title']), '-metadata',
                    ('artist=%s' % meta['artist'])]
    cmd = [g.muxapp, '-y', '-i', temp_file, '-acodec', 'copy', '-metadata']
    cmd += (metadata + ['-vn', filename])
    util.dbg(cmd)
    try:
        with open(os.devnull, 'w') as devnull:
            subprocess.call(cmd, stdout=devnull, stderr=subprocess.STDOUT)
    except OSError:
        util.dbg('Failed to remux audio using %s', g.muxapp)
        os.rename(temp_file, filename)
    else:
        os.unlink(temp_file)
        util.dbg('remuxed audio file using %s' % g.muxapp)
[ "def", "remux_audio", "(", "filename", ",", "title", ")", ":", "util", ".", "dbg", "(", "'starting remux'", ")", "temp_file", "=", "(", "(", "filename", "+", "'.'", ")", "+", "str", "(", "random", ".", "randint", "(", "10000", ",", "99999", ")", ")", ")", "os", ".", "rename", "(", "filename", ",", "temp_file", ")", "meta", "=", "extract_metadata", "(", "title", ")", "metadata", "=", "[", "(", "'title=%s'", "%", "meta", "[", "'title'", "]", ")", "]", "if", "meta", "[", "'artist'", "]", ":", "metadata", "=", "[", "(", "'title=%s'", "%", "meta", "[", "'title'", "]", ")", ",", "'-metadata'", ",", "(", "'artist=%s'", "%", "meta", "[", "'artist'", "]", ")", "]", "cmd", "=", "[", "g", ".", "muxapp", ",", "'-y'", ",", "'-i'", ",", "temp_file", ",", "'-acodec'", ",", "'copy'", ",", "'-metadata'", "]", "cmd", "+=", "(", "metadata", "+", "[", "'-vn'", ",", "filename", "]", ")", "util", ".", "dbg", "(", "cmd", ")", "try", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "devnull", ":", "subprocess", ".", "call", "(", "cmd", ",", "stdout", "=", "devnull", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "except", "OSError", ":", "util", ".", "dbg", "(", "'Failed to remux audio using %s'", ",", "g", ".", "muxapp", ")", "os", ".", "rename", "(", "temp_file", ",", "filename", ")", "else", ":", "os", ".", "unlink", "(", "temp_file", ")", "util", ".", "dbg", "(", "(", "'remuxed audio file using %s'", "%", "g", ".", "muxapp", ")", ")" ]
remux audio file .
train
false
54,460
def make_history_query(cls, interval):
    time_points = get_time_points(interval)
    q = Session.query(cls).filter(cls.date.in_(time_points))
    if hasattr(cls, 'interval'):
        q = q.filter(cls.interval == interval)
    q = q.order_by(desc(cls.date))
    return (time_points, q)
[ "def", "make_history_query", "(", "cls", ",", "interval", ")", ":", "time_points", "=", "get_time_points", "(", "interval", ")", "q", "=", "Session", ".", "query", "(", "cls", ")", ".", "filter", "(", "cls", ".", "date", ".", "in_", "(", "time_points", ")", ")", "if", "hasattr", "(", "cls", ",", "'interval'", ")", ":", "q", "=", "q", ".", "filter", "(", "(", "cls", ".", "interval", "==", "interval", ")", ")", "q", "=", "q", ".", "order_by", "(", "desc", "(", "cls", ".", "date", ")", ")", "return", "(", "time_points", ",", "q", ")" ]
build a generic query showing the history of a given aggregate .
train
false
54,461
def is_cached(path, saltenv='base'):
    return _client().is_cached(path, saltenv)
[ "def", "is_cached", "(", "path", ",", "saltenv", "=", "'base'", ")", ":", "return", "_client", "(", ")", ".", "is_cached", "(", "path", ",", "saltenv", ")" ]
return a boolean indicating whether the given path on the master has been cached on the minion .
train
false
54,462
def rs_LambertW(p, x, prec):
    if rs_is_puiseux(p, x):
        return rs_puiseux(rs_LambertW, p, x, prec)
    R = p.ring
    p1 = R(0)
    if _has_constant_term(p, x):
        raise NotImplementedError(
            'Polynomial must not have constant term in the series variables')
    if (x in R.gens):
        for precx in _giant_steps(prec):
            e = rs_exp(p1, x, precx)
            p2 = (rs_mul(e, p1, x, precx) - p)
            p3 = rs_mul(e, (p1 + 1), x, precx)
            p3 = rs_series_inversion(p3, x, precx)
            tmp = rs_mul(p2, p3, x, precx)
            p1 -= tmp
        return p1
    else:
        raise NotImplementedError
[ "def", "rs_LambertW", "(", "p", ",", "x", ",", "prec", ")", ":", "if", "rs_is_puiseux", "(", "p", ",", "x", ")", ":", "return", "rs_puiseux", "(", "rs_LambertW", ",", "p", ",", "x", ",", "prec", ")", "R", "=", "p", ".", "ring", "p1", "=", "R", "(", "0", ")", "if", "_has_constant_term", "(", "p", ",", "x", ")", ":", "raise", "NotImplementedError", "(", "'Polynomial must not have constant term in the series variables'", ")", "if", "(", "x", "in", "R", ".", "gens", ")", ":", "for", "precx", "in", "_giant_steps", "(", "prec", ")", ":", "e", "=", "rs_exp", "(", "p1", ",", "x", ",", "precx", ")", "p2", "=", "(", "rs_mul", "(", "e", ",", "p1", ",", "x", ",", "precx", ")", "-", "p", ")", "p3", "=", "rs_mul", "(", "e", ",", "(", "p1", "+", "1", ")", ",", "x", ",", "precx", ")", "p3", "=", "rs_series_inversion", "(", "p3", ",", "x", ",", "precx", ")", "tmp", "=", "rs_mul", "(", "p2", ",", "p3", ",", "x", ",", "precx", ")", "p1", "-=", "tmp", "return", "p1", "else", ":", "raise", "NotImplementedError" ]
calculate the series expansion of the principal branch of the lambert w function .
train
false
54,464
def require_driver_initialized(driver):
    if (not driver.initialized):
        driver_name = driver.__class__.__name__
        LOG.error(_LE('Volume driver %s not initialized'), driver_name)
        raise exception.DriverNotInitialized()
    else:
        log_unsupported_driver_warning(driver)
[ "def", "require_driver_initialized", "(", "driver", ")", ":", "if", "(", "not", "driver", ".", "initialized", ")", ":", "driver_name", "=", "driver", ".", "__class__", ".", "__name__", "LOG", ".", "error", "(", "_LE", "(", "'Volume driver %s not initialized'", ")", ",", "driver_name", ")", "raise", "exception", ".", "DriverNotInitialized", "(", ")", "else", ":", "log_unsupported_driver_warning", "(", "driver", ")" ]
verifies if the driver is initialized; if the driver is not initialized , an exception is raised .
train
false
54,465
def copy_asset_file(source, destination, context=None, renderer=None):
    if (not os.path.exists(source)):
        return
    if (os.path.exists(destination) and os.path.isdir(destination)):
        destination = os.path.join(destination, os.path.basename(source))
    if (source.lower().endswith('_t') and context):
        if (renderer is None):
            from sphinx.util.template import SphinxRenderer
            renderer = SphinxRenderer()
        with codecs.open(source, 'r', encoding='utf-8') as fsrc:
            if destination.lower().endswith('_t'):
                destination = destination[:-2]
            with codecs.open(destination, 'w', encoding='utf-8') as fdst:
                fdst.write(renderer.render_string(fsrc.read(), context))
    else:
        copyfile(source, destination)
[ "def", "copy_asset_file", "(", "source", ",", "destination", ",", "context", "=", "None", ",", "renderer", "=", "None", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "source", ")", ")", ":", "return", "if", "(", "os", ".", "path", ".", "exists", "(", "destination", ")", "and", "os", ".", "path", ".", "isdir", "(", "destination", ")", ")", ":", "destination", "=", "os", ".", "path", ".", "join", "(", "destination", ",", "os", ".", "path", ".", "basename", "(", "source", ")", ")", "if", "(", "source", ".", "lower", "(", ")", ".", "endswith", "(", "'_t'", ")", "and", "context", ")", ":", "if", "(", "renderer", "is", "None", ")", ":", "from", "sphinx", ".", "util", ".", "template", "import", "SphinxRenderer", "renderer", "=", "SphinxRenderer", "(", ")", "with", "codecs", ".", "open", "(", "source", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "fsrc", ":", "if", "destination", ".", "lower", "(", ")", ".", "endswith", "(", "'_t'", ")", ":", "destination", "=", "destination", "[", ":", "(", "-", "2", ")", "]", "with", "codecs", ".", "open", "(", "destination", ",", "'w'", ",", "encoding", "=", "'utf-8'", ")", "as", "fdst", ":", "fdst", ".", "write", "(", "renderer", ".", "render_string", "(", "fsrc", ".", "read", "(", ")", ",", "context", ")", ")", "else", ":", "copyfile", "(", "source", ",", "destination", ")" ]
copy an asset file to destination .
train
false
54,466
def test_invalid_config():
    with pytest.raises(InvalidConfiguration) as excinfo:
        config.get_config('tests/test-config/invalid-config.yaml')
    expected_error_msg = 'Unable to parse YAML file tests/test-config/invalid-config.yaml. Error: '
    assert (expected_error_msg in str(excinfo.value))
[ "def", "test_invalid_config", "(", ")", ":", "with", "pytest", ".", "raises", "(", "InvalidConfiguration", ")", "as", "excinfo", ":", "config", ".", "get_config", "(", "'tests/test-config/invalid-config.yaml'", ")", "expected_error_msg", "=", "'Unable to parse YAML file tests/test-config/invalid-config.yaml. Error: '", "assert", "(", "expected_error_msg", "in", "str", "(", "excinfo", ".", "value", ")", ")" ]
an invalid config file should raise an invalidconfiguration exception .
train
false
54,467
def automodel(doctype):
    pass
[ "def", "automodel", "(", "doctype", ")", ":", "pass" ]
return doctype template .
train
false
54,468
@pytest.mark.not_frozen
def test_python2():
    try:
        proc = subprocess.Popen(
            ['python2', checkpyver.__file__, '--no-err-windows'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
    except FileNotFoundError:
        pytest.skip('python2 not found')
    assert (not stdout)
    stderr = stderr.decode('utf-8')
    assert re.match(TEXT, stderr), stderr
    assert (proc.returncode == 1)
[ "@", "pytest", ".", "mark", ".", "not_frozen", "def", "test_python2", "(", ")", ":", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "[", "'python2'", ",", "checkpyver", ".", "__file__", ",", "'--no-err-windows'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "stdout", ",", "stderr", ")", "=", "proc", ".", "communicate", "(", ")", "except", "FileNotFoundError", ":", "pytest", ".", "skip", "(", "'python2 not found'", ")", "assert", "(", "not", "stdout", ")", "stderr", "=", "stderr", ".", "decode", "(", "'utf-8'", ")", "assert", "re", ".", "match", "(", "TEXT", ",", "stderr", ")", ",", "stderr", "assert", "(", "proc", ".", "returncode", "==", "1", ")" ]
run checkpyver with python 2 .
train
false
54,470
def candidates(items, artist, album, va_likely):
    for plugin in find_plugins():
        for candidate in plugin.candidates(items, artist, album, va_likely):
            yield candidate
[ "def", "candidates", "(", "items", ",", "artist", ",", "album", ",", "va_likely", ")", ":", "for", "plugin", "in", "find_plugins", "(", ")", ":", "for", "candidate", "in", "plugin", ".", "candidates", "(", "items", ",", "artist", ",", "album", ",", "va_likely", ")", ":", "(", "yield", "candidate", ")" ]
gets musicbrainz candidates for an album from each plugin .
train
false
54,472
def file_list_emptydirs(load):
    return _file_lists(load, 'empty_dirs')
[ "def", "file_list_emptydirs", "(", "load", ")", ":", "return", "_file_lists", "(", "load", ",", "'empty_dirs'", ")" ]
return a list of all empty directories on the master .
train
false
54,473
def hasDependency(module, fake_module=None):
    import mock
    import sys
    if (fake_module is None):
        fake_module = mock.MagicMock()
    if fake_module:
        sys.modules[module] = fake_module
[ "def", "hasDependency", "(", "module", ",", "fake_module", "=", "None", ")", ":", "import", "mock", "import", "sys", "if", "(", "fake_module", "is", "None", ")", ":", "fake_module", "=", "mock", ".", "MagicMock", "(", ")", "if", "fake_module", ":", "sys", ".", "modules", "[", "module", "]", "=", "fake_module" ]
use this function in your test class setup to mock modules into your namespace .
train
false
54,476
def get_all_credentials(tenant_id):
    session = db.get_session()
    try:
        creds = session.query(l2network_models.Credential).filter_by(
            tenant_id=tenant_id).all()
        return creds
    except exc.NoResultFound:
        return []
[ "def", "get_all_credentials", "(", "tenant_id", ")", ":", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "creds", "=", "session", ".", "query", "(", "l2network_models", ".", "Credential", ")", ".", "filter_by", "(", "tenant_id", "=", "tenant_id", ")", ".", "all", "(", ")", "return", "creds", "except", "exc", ".", "NoResultFound", ":", "return", "[", "]" ]
lists all the creds for a tenant .
train
false
54,479
def const(result):
    def constResult(*args, **kwargs):
        return result
    return constResult
[ "def", "const", "(", "result", ")", ":", "def", "constResult", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "result", "return", "constResult" ]
return a function that ignores any arguments and just returns the specified result .
train
false
54,480
def fast_denoiser(sff_fps, fasta_fp, tmp_outdir, num_cpus, primer,
                  verbose=True, titanium=False):
    if (num_cpus > 1):
        denoise_seqs(sff_fps, fasta_fp, tmp_outdir, primer=primer, cluster=True,
                     num_cpus=num_cpus, verbose=verbose, titanium=titanium)
    else:
        denoise_seqs(sff_fps, fasta_fp, tmp_outdir, primer=primer,
                     verbose=verbose, titanium=titanium)
    centroids = parse_fasta(open(tmp_outdir + '/centroids.fasta'))
    singletons = parse_fasta(open(tmp_outdir + '/singletons.fasta'))
    seqs = chain(centroids, singletons)
    mapping = {}
    cluster_mapping = open(tmp_outdir + '/denoiser_mapping.txt')
    for (i, cluster) in enumerate(cluster_mapping):
        (cluster, members) = cluster.split(':')
        members = members.split()
        clust = [cluster]
        clust.extend(members)
        mapping[i] = clust
    return (seqs, mapping)
[ "def", "fast_denoiser", "(", "sff_fps", ",", "fasta_fp", ",", "tmp_outdir", ",", "num_cpus", ",", "primer", ",", "verbose", "=", "True", ",", "titanium", "=", "False", ")", ":", "if", "(", "num_cpus", ">", "1", ")", ":", "denoise_seqs", "(", "sff_fps", ",", "fasta_fp", ",", "tmp_outdir", ",", "primer", "=", "primer", ",", "cluster", "=", "True", ",", "num_cpus", "=", "num_cpus", ",", "verbose", "=", "verbose", ",", "titanium", "=", "titanium", ")", "else", ":", "denoise_seqs", "(", "sff_fps", ",", "fasta_fp", ",", "tmp_outdir", ",", "primer", "=", "primer", ",", "verbose", "=", "verbose", ",", "titanium", "=", "titanium", ")", "centroids", "=", "parse_fasta", "(", "open", "(", "(", "tmp_outdir", "+", "'/centroids.fasta'", ")", ")", ")", "singletons", "=", "parse_fasta", "(", "open", "(", "(", "tmp_outdir", "+", "'/singletons.fasta'", ")", ")", ")", "seqs", "=", "chain", "(", "centroids", ",", "singletons", ")", "mapping", "=", "{", "}", "cluster_mapping", "=", "open", "(", "(", "tmp_outdir", "+", "'/denoiser_mapping.txt'", ")", ")", "for", "(", "i", ",", "cluster", ")", "in", "enumerate", "(", "cluster_mapping", ")", ":", "(", "cluster", ",", "members", ")", "=", "cluster", ".", "split", "(", "':'", ")", "members", "=", "members", ".", "split", "(", ")", "clust", "=", "[", "cluster", "]", "clust", ".", "extend", "(", "members", ")", "mapping", "[", "i", "]", "=", "clust", "return", "(", "seqs", ",", "mapping", ")" ]
wrapper function calling methods from the denoiser package .
train
false
54,482
def str_translate(arr, table, deletechars=None):
    if (deletechars is None):
        f = (lambda x: x.translate(table))
    else:
        from pandas import compat
        if compat.PY3:
            raise ValueError('deletechars is not a valid argument for str.translate in python 3. You should simply specify character deletions in the table argument')
        f = (lambda x: x.translate(table, deletechars))
    return _na_map(f, arr)
[ "def", "str_translate", "(", "arr", ",", "table", ",", "deletechars", "=", "None", ")", ":", "if", "(", "deletechars", "is", "None", ")", ":", "f", "=", "(", "lambda", "x", ":", "x", ".", "translate", "(", "table", ")", ")", "else", ":", "from", "pandas", "import", "compat", "if", "compat", ".", "PY3", ":", "raise", "ValueError", "(", "'deletechars is not a valid argument for str.translate in python 3. You should simply specify character deletions in the table argument'", ")", "f", "=", "(", "lambda", "x", ":", "x", ".", "translate", "(", "table", ",", "deletechars", ")", ")", "return", "_na_map", "(", "f", ",", "arr", ")" ]
map all characters in the string through the given mapping table .
train
false
54,483
def _mobius_from_interval(I, field):
    (s, t) = I
    (a, c) = (field.numer(s), field.denom(s))
    (b, d) = (field.numer(t), field.denom(t))
    return (a, b, c, d)
[ "def", "_mobius_from_interval", "(", "I", ",", "field", ")", ":", "(", "s", ",", "t", ")", "=", "I", "(", "a", ",", "c", ")", "=", "(", "field", ".", "numer", "(", "s", ")", ",", "field", ".", "denom", "(", "s", ")", ")", "(", "b", ",", "d", ")", "=", "(", "field", ".", "numer", "(", "t", ")", ",", "field", ".", "denom", "(", "t", ")", ")", "return", "(", "a", ",", "b", ",", "c", ",", "d", ")" ]
convert an open interval to a mobius transform .
train
false
54,484
def assert_any_equal(output, alternatives):
    one_equal = False
    for expected in alternatives:
        if np.all(output == expected):
            one_equal = True
            break
    assert_(one_equal)
[ "def", "assert_any_equal", "(", "output", ",", "alternatives", ")", ":", "one_equal", "=", "False", "for", "expected", "in", "alternatives", ":", "if", "np", ".", "all", "(", "(", "output", "==", "expected", ")", ")", ":", "one_equal", "=", "True", "break", "assert_", "(", "one_equal", ")" ]
assert output is equal to at least one element in alternatives .
train
false
54,485
def s3_URLise(text):
    output = URLSCHEMA.sub(
        (lambda m: ('<a href="%s" target="_blank">%s</a>'
                    % (m.group(0), m.group(0)))),
        text)
    return output
[ "def", "s3_URLise", "(", "text", ")", ":", "output", "=", "URLSCHEMA", ".", "sub", "(", "(", "lambda", "m", ":", "(", "'<a href=\"%s\" target=\"_blank\">%s</a>'", "%", "(", "m", ".", "group", "(", "0", ")", ",", "m", ".", "group", "(", "0", ")", ")", ")", ")", ",", "text", ")", "return", "output" ]
convert all urls in a text into an html <a> tag .
train
false
54,486
def runSubprocess(command, return_code=False, **kwargs):
    use_kwargs = dict(stderr=None, stdout=sp.PIPE)
    use_kwargs.update(kwargs)
    p = sp.Popen(command, **use_kwargs)
    output = p.communicate()[0]
    output = ('' if (output is None) else output)
    output = (output.decode('utf-8') if isinstance(output, bytes) else output)
    if (p.returncode != 0):
        print output
        err_fun = sp.CalledProcessError.__init__
        if ('output' in inspect.getargspec(err_fun).args):
            raise sp.CalledProcessError(p.returncode, command, output)
        else:
            raise sp.CalledProcessError(p.returncode, command)
    return output
[ "def", "runSubprocess", "(", "command", ",", "return_code", "=", "False", ",", "**", "kwargs", ")", ":", "use_kwargs", "=", "dict", "(", "stderr", "=", "None", ",", "stdout", "=", "sp", ".", "PIPE", ")", "use_kwargs", ".", "update", "(", "kwargs", ")", "p", "=", "sp", ".", "Popen", "(", "command", ",", "**", "use_kwargs", ")", "output", "=", "p", ".", "communicate", "(", ")", "[", "0", "]", "output", "=", "(", "''", "if", "(", "output", "is", "None", ")", "else", "output", ")", "output", "=", "(", "output", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "output", ",", "bytes", ")", "else", "output", ")", "if", "(", "p", ".", "returncode", "!=", "0", ")", ":", "print", "output", "err_fun", "=", "sp", ".", "CalledProcessError", ".", "__init__", "if", "(", "'output'", "in", "inspect", ".", "getargspec", "(", "err_fun", ")", ".", "args", ")", ":", "raise", "sp", ".", "CalledProcessError", "(", "p", ".", "returncode", ",", "command", ",", "output", ")", "else", ":", "raise", "sp", ".", "CalledProcessError", "(", "p", ".", "returncode", ",", "command", ")", "return", "output" ]
run command using subprocess .
train
false
54,487
def xmlsec():
    try:
        proc = subprocess.Popen(['which', 'xmlsec1'], stdout=subprocess.PIPE)
        return proc.stdout.read().strip()
    except subprocess.CalledProcessError:
        return '/usr/local/bin/xmlsec1'
[ "def", "xmlsec", "(", ")", ":", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "[", "'which'", ",", "'xmlsec1'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "return", "proc", ".", "stdout", ".", "read", "(", ")", ".", "strip", "(", ")", "except", "subprocess", ".", "CalledProcessError", ":", "return", "'/usr/local/bin/xmlsec1'" ]
xmlsec path .
train
false
54,489
def network_disassociate(context, network_id, disassociate_host=True,
                         disassociate_project=True):
    return IMPL.network_disassociate(context, network_id, disassociate_host,
                                     disassociate_project)
[ "def", "network_disassociate", "(", "context", ",", "network_id", ",", "disassociate_host", "=", "True", ",", "disassociate_project", "=", "True", ")", ":", "return", "IMPL", ".", "network_disassociate", "(", "context", ",", "network_id", ",", "disassociate_host", ",", "disassociate_project", ")" ]
disassociate the network from project or host and raise if it does not exist .
train
false
54,490
def _maybe_encrypt_password(role, password,
                            encrypted=_DEFAULT_PASSWORDS_ENCRYPTION):
    if (password is not None):
        password = str(password)
    if (encrypted and password and (not password.startswith('md5'))):
        password = 'md5{0}'.format(hashlib.md5(salt.utils.to_bytes(
            '{0}{1}'.format(password, role))).hexdigest())
    return password
[ "def", "_maybe_encrypt_password", "(", "role", ",", "password", ",", "encrypted", "=", "_DEFAULT_PASSWORDS_ENCRYPTION", ")", ":", "if", "(", "password", "is", "not", "None", ")", ":", "password", "=", "str", "(", "password", ")", "if", "(", "encrypted", "and", "password", "and", "(", "not", "password", ".", "startswith", "(", "'md5'", ")", ")", ")", ":", "password", "=", "'md5{0}'", ".", "format", "(", "hashlib", ".", "md5", "(", "salt", ".", "utils", ".", "to_bytes", "(", "'{0}{1}'", ".", "format", "(", "password", ",", "role", ")", ")", ")", ".", "hexdigest", "(", ")", ")", "return", "password" ]
pgsql passwords are md5 hashes of the string: md5{password}{rolename} .
train
false
54,491
def get_pr_info(num):
    url = u'https://api.github.com/repos/edx/edx-platform/pulls/{num}'.format(num=num)
    (username, token) = get_github_creds()
    headers = {u'Authorization': u'token {}'.format(token),
               u'User-Agent': u'edx-release'}
    response = requests.get(url, headers=headers)
    result = response.json()
    if (not response.ok):
        raise requests.exceptions.RequestException(result[u'message'])
    return result
[ "def", "get_pr_info", "(", "num", ")", ":", "url", "=", "u'https://api.github.com/repos/edx/edx-platform/pulls/{num}'", ".", "format", "(", "num", "=", "num", ")", "(", "username", ",", "token", ")", "=", "get_github_creds", "(", ")", "headers", "=", "{", "u'Authorization'", ":", "u'token {}'", ".", "format", "(", "token", ")", ",", "u'User-Agent'", ":", "u'edx-release'", "}", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "result", "=", "response", ".", "json", "(", ")", "if", "(", "not", "response", ".", "ok", ")", ":", "raise", "requests", ".", "exceptions", ".", "RequestException", "(", "result", "[", "u'message'", "]", ")", "return", "result" ]
returns the info from the github api .
train
false
54,492
def _is_ignorable_404(uri):
    for start in settings.IGNORABLE_404_STARTS:
        if uri.startswith(start):
            return True
    for end in settings.IGNORABLE_404_ENDS:
        if uri.endswith(end):
            return True
    return False
[ "def", "_is_ignorable_404", "(", "uri", ")", ":", "for", "start", "in", "settings", ".", "IGNORABLE_404_STARTS", ":", "if", "uri", ".", "startswith", "(", "start", ")", ":", "return", "True", "for", "end", "in", "settings", ".", "IGNORABLE_404_ENDS", ":", "if", "uri", ".", "endswith", "(", "end", ")", ":", "return", "True", "return", "False" ]
returns true if a 404 at the given url *shouldn't* notify the site managers .
train
false
54,493
def qt4():
    try:
        allow_CTRL_C()
        app = QtCore.QCoreApplication.instance()
        if (not app):
            return 0
        app.processEvents(QtCore.QEventLoop.AllEvents, 300)
        if (not stdin_ready()):
            timer = QtCore.QTimer()
            event_loop = QtCore.QEventLoop()
            timer.timeout.connect(event_loop.quit)
            while (not stdin_ready()):
                timer.start(50)
                event_loop.exec_()
                timer.stop()
    except KeyboardInterrupt:
        print '\nKeyboardInterrupt - Press Enter for new prompt'
    except:
        ignore_CTRL_C()
        from traceback import print_exc
        print_exc()
        print 'Got exception from inputhook, unregistering.'
        clear_inputhook()
    finally:
        allow_CTRL_C()
    return 0
[ "def", "qt4", "(", ")", ":", "try", ":", "allow_CTRL_C", "(", ")", "app", "=", "QtCore", ".", "QCoreApplication", ".", "instance", "(", ")", "if", "(", "not", "app", ")", ":", "return", "0", "app", ".", "processEvents", "(", "QtCore", ".", "QEventLoop", ".", "AllEvents", ",", "300", ")", "if", "(", "not", "stdin_ready", "(", ")", ")", ":", "timer", "=", "QtCore", ".", "QTimer", "(", ")", "event_loop", "=", "QtCore", ".", "QEventLoop", "(", ")", "timer", ".", "timeout", ".", "connect", "(", "event_loop", ".", "quit", ")", "while", "(", "not", "stdin_ready", "(", ")", ")", ":", "timer", ".", "start", "(", "50", ")", "event_loop", ".", "exec_", "(", ")", "timer", ".", "stop", "(", ")", "except", "KeyboardInterrupt", ":", "print", "'\\nKeyboardInterrupt - Press Enter for new prompt'", "except", ":", "ignore_CTRL_C", "(", ")", "from", "traceback", "import", "print_exc", "print_exc", "(", ")", "print", "'Got exception from inputhook, unregistering.'", "clear_inputhook", "(", ")", "finally", ":", "allow_CTRL_C", "(", ")", "return", "0" ]
pyos_inputhook python hook for qt4 .
train
false
54,494
@register.filter(name='str_to_list')
def str_to_list(info):
    print ast.literal_eval(info), type(ast.literal_eval(info))
    return ast.literal_eval(info)
[ "@", "register", ".", "filter", "(", "name", "=", "'str_to_list'", ")", "def", "str_to_list", "(", "info", ")", ":", "print", "ast", ".", "literal_eval", "(", "info", ")", ",", "type", "(", "ast", ".", "literal_eval", "(", "info", ")", ")", "return", "ast", ".", "literal_eval", "(", "info", ")" ]
convert string to list .
train
false
54,495
@contextlib.contextmanager
def capture_output(stream, loglevel=None):
    root_logger = logging.getLogger()
    old_level = root_logger.getEffectiveLevel()
    old_id = getattr(local_context, u'session_id', None)
    local_context.session_id = (old_id or uuid.uuid4())
    old_output = getattr(local_context, u'output', None)
    old_loglevel = getattr(local_context, u'loglevel', None)
    streamhandler = logging.StreamHandler(stream)
    streamhandler.setFormatter(FlexGetFormatter())
    streamhandler.addFilter(SessionFilter(local_context.session_id))
    if (loglevel is not None):
        loglevel = get_level_no(loglevel)
        streamhandler.setLevel(loglevel)
        if (not root_logger.isEnabledFor(loglevel)):
            root_logger.setLevel(loglevel)
    local_context.output = stream
    local_context.loglevel = loglevel
    root_logger.addHandler(streamhandler)
    try:
        yield
    finally:
        root_logger.removeHandler(streamhandler)
        root_logger.setLevel(old_level)
        local_context.session_id = old_id
        local_context.output = old_output
        local_context.loglevel = old_loglevel
[ "@", "contextlib", ".", "contextmanager", "def", "capture_output", "(", "stream", ",", "loglevel", "=", "None", ")", ":", "root_logger", "=", "logging", ".", "getLogger", "(", ")", "old_level", "=", "root_logger", ".", "getEffectiveLevel", "(", ")", "old_id", "=", "getattr", "(", "local_context", ",", "u'session_id'", ",", "None", ")", "local_context", ".", "session_id", "=", "(", "old_id", "or", "uuid", ".", "uuid4", "(", ")", ")", "old_output", "=", "getattr", "(", "local_context", ",", "u'output'", ",", "None", ")", "old_loglevel", "=", "getattr", "(", "local_context", ",", "u'loglevel'", ",", "None", ")", "streamhandler", "=", "logging", ".", "StreamHandler", "(", "stream", ")", "streamhandler", ".", "setFormatter", "(", "FlexGetFormatter", "(", ")", ")", "streamhandler", ".", "addFilter", "(", "SessionFilter", "(", "local_context", ".", "session_id", ")", ")", "if", "(", "loglevel", "is", "not", "None", ")", ":", "loglevel", "=", "get_level_no", "(", "loglevel", ")", "streamhandler", ".", "setLevel", "(", "loglevel", ")", "if", "(", "not", "root_logger", ".", "isEnabledFor", "(", "loglevel", ")", ")", ":", "root_logger", ".", "setLevel", "(", "loglevel", ")", "local_context", ".", "output", "=", "stream", "local_context", ".", "loglevel", "=", "loglevel", "root_logger", ".", "addHandler", "(", "streamhandler", ")", "try", ":", "(", "yield", ")", "finally", ":", "root_logger", ".", "removeHandler", "(", "streamhandler", ")", "root_logger", ".", "setLevel", "(", "old_level", ")", "local_context", ".", "session_id", "=", "old_id", "local_context", ".", "output", "=", "old_output", "local_context", ".", "loglevel", "=", "old_loglevel" ]
context manager which captures all log and console output to given stream while in scope .
train
false
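A minimal usage sketch for the capture_output context manager above. It only runs inside FlexGet itself, where FlexGetFormatter, SessionFilter and local_context live, and whether a given record passes the session filter depends on those internals; the buffer and logger name here are illustrative only.
import io
import logging

buf = io.StringIO()
with capture_output(buf, loglevel='INFO'):
    # log records emitted while in scope are also routed into buf
    logging.getLogger('demo').info('captured line')
print(buf.getvalue())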
54,496
def valid_doc_types(): return ', '.join(DOC_PATHS.keys())
[ "def", "valid_doc_types", "(", ")", ":", "return", "', '", ".", "join", "(", "DOC_PATHS", ".", "keys", "(", ")", ")" ]
return a comma-separated string of valid doc types .
train
false
54,497
def issues_closed_since(period=timedelta(days=365), project='statsmodels/statsmodels', pulls=False): which = ('pulls' if pulls else 'issues') if isinstance(period, timedelta): since = round_hour((datetime.utcnow() - period)) else: since = period url = ('https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i' % (project, which, since.strftime(ISO8601), PER_PAGE)) allclosed = get_paged_request(url, headers=make_auth_header()) filtered = [i for i in allclosed if (_parse_datetime(i['closed_at']) > since)] if pulls: filtered = [i for i in filtered if (_parse_datetime(i['merged_at']) > since)] filtered = [i for i in filtered if (i['base']['ref'] == 'master')] else: filtered = [i for i in filtered if (not is_pull_request(i))] return filtered
[ "def", "issues_closed_since", "(", "period", "=", "timedelta", "(", "days", "=", "365", ")", ",", "project", "=", "'statsmodels/statsmodels'", ",", "pulls", "=", "False", ")", ":", "which", "=", "(", "'pulls'", "if", "pulls", "else", "'issues'", ")", "if", "isinstance", "(", "period", ",", "timedelta", ")", ":", "since", "=", "round_hour", "(", "(", "datetime", ".", "utcnow", "(", ")", "-", "period", ")", ")", "else", ":", "since", "=", "period", "url", "=", "(", "'https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i'", "%", "(", "project", ",", "which", ",", "since", ".", "strftime", "(", "ISO8601", ")", ",", "PER_PAGE", ")", ")", "allclosed", "=", "get_paged_request", "(", "url", ",", "headers", "=", "make_auth_header", "(", ")", ")", "filtered", "=", "[", "i", "for", "i", "in", "allclosed", "if", "(", "_parse_datetime", "(", "i", "[", "'closed_at'", "]", ")", ">", "since", ")", "]", "if", "pulls", ":", "filtered", "=", "[", "i", "for", "i", "in", "filtered", "if", "(", "_parse_datetime", "(", "i", "[", "'merged_at'", "]", ")", ">", "since", ")", "]", "filtered", "=", "[", "i", "for", "i", "in", "filtered", "if", "(", "i", "[", "'base'", "]", "[", "'ref'", "]", "==", "'master'", ")", "]", "else", ":", "filtered", "=", "[", "i", "for", "i", "in", "filtered", "if", "(", "not", "is_pull_request", "(", "i", ")", ")", "]", "return", "filtered" ]
get all issues closed since a particular point in time .
train
true
54,498
@contextmanager def expected_warnings(matching): with all_warnings() as w: (yield w) remaining = [m for m in matching if ('\\A\\Z' not in m.split('|'))] for warn in w: found = False for match in matching: if (re.search(match, str(warn.message)) is not None): found = True if (match in remaining): remaining.remove(match) if (not found): raise ValueError(('Unexpected warning: %s' % str(warn.message))) if (len(remaining) > 0): msg = ('No warning raised matching:\n%s' % '\n'.join(remaining)) raise ValueError(msg)
[ "@", "contextmanager", "def", "expected_warnings", "(", "matching", ")", ":", "with", "all_warnings", "(", ")", "as", "w", ":", "(", "yield", "w", ")", "remaining", "=", "[", "m", "for", "m", "in", "matching", "if", "(", "'\\\\A\\\\Z'", "not", "in", "m", ".", "split", "(", "'|'", ")", ")", "]", "for", "warn", "in", "w", ":", "found", "=", "False", "for", "match", "in", "matching", ":", "if", "(", "re", ".", "search", "(", "match", ",", "str", "(", "warn", ".", "message", ")", ")", "is", "not", "None", ")", ":", "found", "=", "True", "if", "(", "match", "in", "remaining", ")", ":", "remaining", ".", "remove", "(", "match", ")", "if", "(", "not", "found", ")", ":", "raise", "ValueError", "(", "(", "'Unexpected warning: %s'", "%", "str", "(", "warn", ".", "message", ")", ")", ")", "if", "(", "len", "(", "remaining", ")", ">", "0", ")", ":", "msg", "=", "(", "'No warning raised matching:\\n%s'", "%", "'\\n'", ".", "join", "(", "remaining", ")", ")", "raise", "ValueError", "(", "msg", ")" ]
context manager for use in testing to catch known warnings matching regexes . the matching parameter is a list of strings or compiled regexes for the desired warnings to catch .
train
false
54,499
def pending_deprecation(version, message=None, add_deprecation_to_docstring=True): if add_deprecation_to_docstring: header = ('.. deprecated:: %s (pending) %s' % (version, (message or ''))) else: header = None if (message is None): message = 'Call to deprecated function %(func)s' def decorate(fn): return _decorate_with_warning(fn, exc.SAPendingDeprecationWarning, (message % dict(func=fn.__name__)), header) return decorate
[ "def", "pending_deprecation", "(", "version", ",", "message", "=", "None", ",", "add_deprecation_to_docstring", "=", "True", ")", ":", "if", "add_deprecation_to_docstring", ":", "header", "=", "(", "'.. deprecated:: %s (pending) %s'", "%", "(", "version", ",", "(", "message", "or", "''", ")", ")", ")", "else", ":", "header", "=", "None", "if", "(", "message", "is", "None", ")", ":", "message", "=", "'Call to deprecated function %(func)s'", "def", "decorate", "(", "fn", ")", ":", "return", "_decorate_with_warning", "(", "fn", ",", "exc", ".", "SAPendingDeprecationWarning", ",", "(", "message", "%", "dict", "(", "func", "=", "fn", ".", "__name__", ")", ")", ",", "header", ")", "return", "decorate" ]
decorates a function and issues a pending deprecation warning on use .
train
false
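A sketch of applying the pending_deprecation decorator above; the version string and replacement function are made up. Note that the %(func)s placeholder is interpolated with the wrapped function's name at decoration time.
@pending_deprecation('0.9', message='%(func)s is pending removal; use new_api() instead')
def old_api():
    return 42

old_api()  # emits SAPendingDeprecationWarning with the interpolated message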
54,500
def capture_exceptions(async_result): def capture(function): @functools.wraps(function) def captured_function(*args, **kwargs): try: return function(*args, **kwargs) except Exception as exc: async_result.set_exception(exc) return captured_function return capture
[ "def", "capture_exceptions", "(", "async_result", ")", ":", "def", "capture", "(", "function", ")", ":", "@", "functools", ".", "wraps", "(", "function", ")", "def", "captured_function", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "function", "(", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as", "exc", ":", "async_result", ".", "set_exception", "(", "exc", ")", "return", "captured_function", "return", "capture" ]
return a new decorated function that propagates the exceptions of the wrapped function to an async_result .
train
false
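A usage sketch for capture_exceptions, assuming an async-result object that exposes a set_exception method; the stand-in class and worker function are hypothetical.
class DummyAsyncResult(object):
    # minimal stand-in exposing the set_exception interface the decorator needs
    def __init__(self):
        self.exception = None
    def set_exception(self, exc):
        self.exception = exc

result = DummyAsyncResult()

@capture_exceptions(result)
def worker():
    raise RuntimeError('boom')

worker()                 # the exception is swallowed here ...
print(result.exception)  # ... and propagated to the async result instead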
54,501
def get_catalog_by_name(name): kwargs = {'name': name} result = util.callm(('%s/%s' % ('catalog', 'profile')), kwargs) return Catalog(**util.fix(result['response']['catalog']))
[ "def", "get_catalog_by_name", "(", "name", ")", ":", "kwargs", "=", "{", "'name'", ":", "name", "}", "result", "=", "util", ".", "callm", "(", "(", "'%s/%s'", "%", "(", "'catalog'", ",", "'profile'", ")", ")", ",", "kwargs", ")", "return", "Catalog", "(", "**", "util", ".", "fix", "(", "result", "[", "'response'", "]", "[", "'catalog'", "]", ")", ")" ]
grabs a catalog by name .
train
true
54,502
def run_epoch(sess, cost_op, ops, reset, num_unrolls): start = timer() sess.run(reset) for _ in xrange(num_unrolls): cost = sess.run(([cost_op] + ops))[0] return ((timer() - start), cost)
[ "def", "run_epoch", "(", "sess", ",", "cost_op", ",", "ops", ",", "reset", ",", "num_unrolls", ")", ":", "start", "=", "timer", "(", ")", "sess", ".", "run", "(", "reset", ")", "for", "_", "in", "xrange", "(", "num_unrolls", ")", ":", "cost", "=", "sess", ".", "run", "(", "(", "[", "cost_op", "]", "+", "ops", ")", ")", "[", "0", "]", "return", "(", "(", "timer", "(", ")", "-", "start", ")", ",", "cost", ")" ]
runs one optimization epoch .
train
false
54,503
def _update_usage_plan_apis(plan_id, apis, op, region=None, key=None, keyid=None, profile=None): try: patchOperations = [] for api in apis: patchOperations.append({'op': op, 'path': '/apiStages', 'value': '{0}:{1}'.format(api['apiId'], api['stage'])}) res = None if patchOperations: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) res = conn.update_usage_plan(usagePlanId=plan_id, patchOperations=patchOperations) return {'success': True, 'result': res} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)} except Exception as e: return {'error': e}
[ "def", "_update_usage_plan_apis", "(", "plan_id", ",", "apis", ",", "op", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "patchOperations", "=", "[", "]", "for", "api", "in", "apis", ":", "patchOperations", ".", "append", "(", "{", "'op'", ":", "op", ",", "'path'", ":", "'/apiStages'", ",", "'value'", ":", "'{0}:{1}'", ".", "format", "(", "api", "[", "'apiId'", "]", ",", "api", "[", "'stage'", "]", ")", "}", ")", "res", "=", "None", "if", "patchOperations", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "res", "=", "conn", ".", "update_usage_plan", "(", "usagePlanId", "=", "plan_id", ",", "patchOperations", "=", "patchOperations", ")", "return", "{", "'success'", ":", "True", ",", "'result'", ":", "res", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}", "except", "Exception", "as", "e", ":", "return", "{", "'error'", ":", "e", "}" ]
helper function that updates the usage plan identified by plan_id by adding it to or removing it from each of the given api stages .
train
true
54,504
def try_alt_nzb(nzo): url = nzo.nzo_info.get('failure') if (url and cfg.new_nzb_on_failure()): sabnzbd.add_url(url, nzo.pp, nzo.script, nzo.cat, nzo.priority)
[ "def", "try_alt_nzb", "(", "nzo", ")", ":", "url", "=", "nzo", ".", "nzo_info", ".", "get", "(", "'failure'", ")", "if", "(", "url", "and", "cfg", ".", "new_nzb_on_failure", "(", ")", ")", ":", "sabnzbd", ".", "add_url", "(", "url", ",", "nzo", ".", "pp", ",", "nzo", ".", "script", ",", "nzo", ".", "cat", ",", "nzo", ".", "priority", ")" ]
try to get a new nzb if available .
train
false
54,505
def log_1_plus_exp(mat, target=None, exact=False): if (not target): target = mat if exact: err_code = _cudamat.apply_log_1_plus_exp_exact(mat.p_mat, target.p_mat) else: err_code = _cudamat.apply_log_1_plus_exp(mat.p_mat, target.p_mat) if err_code: raise generate_exception(err_code) return target
[ "def", "log_1_plus_exp", "(", "mat", ",", "target", "=", "None", ",", "exact", "=", "False", ")", ":", "if", "(", "not", "target", ")", ":", "target", "=", "mat", "if", "exact", ":", "err_code", "=", "_cudamat", ".", "apply_log_1_plus_exp_exact", "(", "mat", ".", "p_mat", ",", "target", ".", "p_mat", ")", "else", ":", "err_code", "=", "_cudamat", ".", "apply_log_1_plus_exp", "(", "mat", ".", "p_mat", ",", "target", ".", "p_mat", ")", "if", "err_code", ":", "raise", "generate_exception", "(", "err_code", ")", "return", "target" ]
apply log(1+exp(x)) to each element of the matrix mat .
train
false
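The log_1_plus_exp record above applies the softplus function log(1 + exp(x)) elementwise on the GPU. A plain numpy CPU analogue, written in the numerically stable form max(x, 0) + log1p(exp(-|x|)), is sketched below; it is a reference for the math, not the cudamat call itself.
import numpy as np

def softplus_reference(x):
    # stable log(1 + exp(x)): avoids overflow for large positive x
    return np.maximum(x, 0) + np.log1p(np.exp(-np.abs(x)))

print(softplus_reference(np.array([-30.0, 0.0, 30.0])))  # ~[0, log(2), 30]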
54,506
@connect_on_app_finalize def add_backend_cleanup_task(app): @app.task(name=u'celery.backend_cleanup', shared=False, lazy=False) def backend_cleanup(): app.backend.cleanup() return backend_cleanup
[ "@", "connect_on_app_finalize", "def", "add_backend_cleanup_task", "(", "app", ")", ":", "@", "app", ".", "task", "(", "name", "=", "u'celery.backend_cleanup'", ",", "shared", "=", "False", ",", "lazy", "=", "False", ")", "def", "backend_cleanup", "(", ")", ":", "app", ".", "backend", ".", "cleanup", "(", ")", "return", "backend_cleanup" ]
task used to clean up expired results .
train
false
54,508
def remove_comments(string): pattern = '(\\".*?\\"|\\\'.*?\\\')|(<#.*?#>|#[^\\r\\n]*$)' regex = re.compile(pattern, (re.MULTILINE | re.DOTALL)) def _replacer(match): if (match.group(2) is not None): return '' else: return match.group(1) return regex.sub(_replacer, string)
[ "def", "remove_comments", "(", "string", ")", ":", "pattern", "=", "'(\\\\\".*?\\\\\"|\\\\\\'.*?\\\\\\')|(<#.*?#>|#[^\\\\r\\\\n]*$)'", "regex", "=", "re", ".", "compile", "(", "pattern", ",", "(", "re", ".", "MULTILINE", "|", "re", ".", "DOTALL", ")", ")", "def", "_replacer", "(", "match", ")", ":", "if", "(", "match", ".", "group", "(", "2", ")", "is", "not", "None", ")", ":", "return", "''", "else", ":", "return", "match", ".", "group", "(", "1", ")", "return", "regex", ".", "sub", "(", "_replacer", ",", "string", ")" ]
remove comments from a powershell script .
train
true
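The remove_comments regex above relies on alternation order: quoted strings are captured in group 1 and preserved by the replacer, so a '#' inside a string survives, while line comments and <# ... #> block comments land in group 2 and are dropped. A small sketch with a made-up script:
script = 'Write-Host "color # not a comment"  # line comment\n<# block\ncomment #>\nGet-Date'
print(remove_comments(script))
# the quoted '#' survives because the string alternative matches first;
# the trailing line comment and the block comment are both stripped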
54,511
def get_subscribers_query(stream, requesting_user): validate_user_access_to_subscribers(requesting_user, stream) subscriptions = Subscription.objects.filter(recipient__type=Recipient.STREAM, recipient__type_id=stream.id, user_profile__is_active=True, active=True) return subscriptions
[ "def", "get_subscribers_query", "(", "stream", ",", "requesting_user", ")", ":", "validate_user_access_to_subscribers", "(", "requesting_user", ",", "stream", ")", "subscriptions", "=", "Subscription", ".", "objects", ".", "filter", "(", "recipient__type", "=", "Recipient", ".", "STREAM", ",", "recipient__type_id", "=", "stream", ".", "id", ",", "user_profile__is_active", "=", "True", ",", "active", "=", "True", ")", "return", "subscriptions" ]
build a query to get the subscribers list for a stream .
train
false
54,513
def get_parent_xblock(xblock): locator = xblock.location parent_location = modulestore().get_parent_location(locator) if (parent_location is None): return None return modulestore().get_item(parent_location)
[ "def", "get_parent_xblock", "(", "xblock", ")", ":", "locator", "=", "xblock", ".", "location", "parent_location", "=", "modulestore", "(", ")", ".", "get_parent_location", "(", "locator", ")", "if", "(", "parent_location", "is", "None", ")", ":", "return", "None", "return", "modulestore", "(", ")", ".", "get_item", "(", "parent_location", ")" ]
returns the xblock that is the parent of the specified xblock .
train
false
54,516
def test_install_from_wheel_installs_deps(script, data): package = data.packages.join('requires_source-1.0-py2.py3-none-any.whl') result = script.pip('install', '--no-index', '--find-links', data.find_links, package) result.assert_installed('source', editable=False)
[ "def", "test_install_from_wheel_installs_deps", "(", "script", ",", "data", ")", ":", "package", "=", "data", ".", "packages", ".", "join", "(", "'requires_source-1.0-py2.py3-none-any.whl'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--no-index'", ",", "'--find-links'", ",", "data", ".", "find_links", ",", "package", ")", "result", ".", "assert_installed", "(", "'source'", ",", "editable", "=", "False", ")" ]
test that dependencies of wheels can be installed .
train
false
54,517
def _save_attachment(attachment, filepath): filepath = private_storage.save(filepath, attachment) return filepath.split('/')[(-1)]
[ "def", "_save_attachment", "(", "attachment", ",", "filepath", ")", ":", "filepath", "=", "private_storage", ".", "save", "(", "filepath", ",", "attachment", ")", "return", "filepath", ".", "split", "(", "'/'", ")", "[", "(", "-", "1", ")", "]" ]
saves an attachment and returns the filename .
train
false
54,518
def restoreConfigZip(archive, targetDir): try: if (not ek(os.path.exists, targetDir)): ek(os.mkdir, targetDir) else: def path_leaf(path): (head, tail) = ek(os.path.split, path) return (tail or ek(os.path.basename, head)) bakFilename = u'{0}-{1}'.format(path_leaf(targetDir), datetime.datetime.now().strftime(u'%Y%m%d_%H%M%S')) shutil.move(targetDir, ek(os.path.join, ek(os.path.dirname, targetDir), bakFilename)) zip_file = zipfile.ZipFile(archive, u'r', allowZip64=True) for member in zip_file.namelist(): zip_file.extract(member, targetDir) zip_file.close() return True except Exception as error: logger.log(u'Zip extraction error: {0}'.format(error), logger.ERROR) shutil.rmtree(targetDir) return False
[ "def", "restoreConfigZip", "(", "archive", ",", "targetDir", ")", ":", "try", ":", "if", "(", "not", "ek", "(", "os", ".", "path", ".", "exists", ",", "targetDir", ")", ")", ":", "ek", "(", "os", ".", "mkdir", ",", "targetDir", ")", "else", ":", "def", "path_leaf", "(", "path", ")", ":", "(", "head", ",", "tail", ")", "=", "ek", "(", "os", ".", "path", ".", "split", ",", "path", ")", "return", "(", "tail", "or", "ek", "(", "os", ".", "path", ".", "basename", ",", "head", ")", ")", "bakFilename", "=", "u'{0}-{1}'", ".", "format", "(", "path_leaf", "(", "targetDir", ")", ",", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "u'%Y%m%d_%H%M%S'", ")", ")", "shutil", ".", "move", "(", "targetDir", ",", "ek", "(", "os", ".", "path", ".", "join", ",", "ek", "(", "os", ".", "path", ".", "dirname", ",", "targetDir", ")", ",", "bakFilename", ")", ")", "zip_file", "=", "zipfile", ".", "ZipFile", "(", "archive", ",", "u'r'", ",", "allowZip64", "=", "True", ")", "for", "member", "in", "zip_file", ".", "namelist", "(", ")", ":", "zip_file", ".", "extract", "(", "member", ",", "targetDir", ")", "zip_file", ".", "close", "(", ")", "return", "True", "except", "Exception", "as", "error", ":", "logger", ".", "log", "(", "u'Zip extraction error: {0}'", ".", "format", "(", "error", ")", ",", "logger", ".", "ERROR", ")", "shutil", ".", "rmtree", "(", "targetDir", ")", "return", "False" ]
restores a config backup zip file back into place .
train
false
54,519
@frappe.whitelist() def setup_complete(args): if cint(frappe.db.get_single_value(u'System Settings', u'setup_complete')): return args = process_args(args) try: if (args.language and (args.language != u'english')): set_default_language(get_language_code(args.lang)) frappe.clear_cache() update_system_settings(args) update_user_name(args) for method in frappe.get_hooks(u'setup_wizard_complete'): frappe.get_attr(method)(args) disable_future_access() frappe.db.commit() frappe.clear_cache() except: frappe.db.rollback() if args: traceback = frappe.get_traceback() for hook in frappe.get_hooks(u'setup_wizard_exception'): frappe.get_attr(hook)(traceback, args) raise else: for hook in frappe.get_hooks(u'setup_wizard_success'): frappe.get_attr(hook)(args)
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "setup_complete", "(", "args", ")", ":", "if", "cint", "(", "frappe", ".", "db", ".", "get_single_value", "(", "u'System Settings'", ",", "u'setup_complete'", ")", ")", ":", "return", "args", "=", "process_args", "(", "args", ")", "try", ":", "if", "(", "args", ".", "language", "and", "(", "args", ".", "language", "!=", "u'english'", ")", ")", ":", "set_default_language", "(", "get_language_code", "(", "args", ".", "lang", ")", ")", "frappe", ".", "clear_cache", "(", ")", "update_system_settings", "(", "args", ")", "update_user_name", "(", "args", ")", "for", "method", "in", "frappe", ".", "get_hooks", "(", "u'setup_wizard_complete'", ")", ":", "frappe", ".", "get_attr", "(", "method", ")", "(", "args", ")", "disable_future_access", "(", ")", "frappe", ".", "db", ".", "commit", "(", ")", "frappe", ".", "clear_cache", "(", ")", "except", ":", "frappe", ".", "db", ".", "rollback", "(", ")", "if", "args", ":", "traceback", "=", "frappe", ".", "get_traceback", "(", ")", "for", "hook", "in", "frappe", ".", "get_hooks", "(", "u'setup_wizard_exception'", ")", ":", "frappe", ".", "get_attr", "(", "hook", ")", "(", "traceback", ",", "args", ")", "raise", "else", ":", "for", "hook", "in", "frappe", ".", "get_hooks", "(", "u'setup_wizard_success'", ")", ":", "frappe", ".", "get_attr", "(", "hook", ")", "(", "args", ")" ]
calls hooks for setup_wizard_complete .
train
false
54,521
def __guess_dataset_by_filename(filename): try: fields = os.path.split(filename) if fields: if (fields[(-1)].startswith('dataset_') and fields[(-1)].endswith('.dat')): return Dataset.get(int(fields[(-1)][len('dataset_'):(- len('.dat'))])) except: pass return None
[ "def", "__guess_dataset_by_filename", "(", "filename", ")", ":", "try", ":", "fields", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "if", "fields", ":", "if", "(", "fields", "[", "(", "-", "1", ")", "]", ".", "startswith", "(", "'dataset_'", ")", "and", "fields", "[", "(", "-", "1", ")", "]", ".", "endswith", "(", "'.dat'", ")", ")", ":", "return", "Dataset", ".", "get", "(", "int", "(", "fields", "[", "(", "-", "1", ")", "]", "[", "len", "(", "'dataset_'", ")", ":", "(", "-", "len", "(", "'.dat'", ")", ")", "]", ")", ")", "except", ":", "pass", "return", "None" ]
return a guessed dataset by filename .
train
false
54,522
def prepare_to_run(): (is_travis, non_pr) = check_environment() if (not is_travis): return if (not non_pr): print('Running in Travis during non-merge to master, doing nothing.') sys.exit(0) decrypt_keyfile()
[ "def", "prepare_to_run", "(", ")", ":", "(", "is_travis", ",", "non_pr", ")", "=", "check_environment", "(", ")", "if", "(", "not", "is_travis", ")", ":", "return", "if", "(", "not", "non_pr", ")", ":", "print", "(", "'Running in Travis during non-merge to master, doing nothing.'", ")", "sys", ".", "exit", "(", "0", ")", "decrypt_keyfile", "(", ")" ]
prepare to run system tests .
train
false
54,524
def test_pix2world(): filename = get_pkg_data_filename(u'data/sip2.fits') with catch_warnings(wcs.wcs.FITSFixedWarning) as caught_warnings: ww = wcs.WCS(filename) assert (len(caught_warnings) == 1) n = 3 pixels = (np.arange(n) * np.ones((2, n))).T result = ww.wcs_pix2world(pixels, 0, ra_dec_order=True) ww.wcs_pix2world(pixels[..., 0], pixels[..., 1], 0, ra_dec_order=True) close_enough = 1e-08 answer = np.array([[0.00024976, 0.00023018], [0.00023043, (-0.00024997)]]) assert np.all((np.abs((ww.wcs.pc - answer)) < close_enough)) answer = np.array([[202.39265216, 47.17756518], [202.39335826, 47.17754619], [202.39406436, 47.1775272]]) assert np.all((np.abs((result - answer)) < close_enough))
[ "def", "test_pix2world", "(", ")", ":", "filename", "=", "get_pkg_data_filename", "(", "u'data/sip2.fits'", ")", "with", "catch_warnings", "(", "wcs", ".", "wcs", ".", "FITSFixedWarning", ")", "as", "caught_warnings", ":", "ww", "=", "wcs", ".", "WCS", "(", "filename", ")", "assert", "(", "len", "(", "caught_warnings", ")", "==", "1", ")", "n", "=", "3", "pixels", "=", "(", "np", ".", "arange", "(", "n", ")", "*", "np", ".", "ones", "(", "(", "2", ",", "n", ")", ")", ")", ".", "T", "result", "=", "ww", ".", "wcs_pix2world", "(", "pixels", ",", "0", ",", "ra_dec_order", "=", "True", ")", "ww", ".", "wcs_pix2world", "(", "pixels", "[", "...", ",", "0", "]", ",", "pixels", "[", "...", ",", "1", "]", ",", "0", ",", "ra_dec_order", "=", "True", ")", "close_enough", "=", "1e-08", "answer", "=", "np", ".", "array", "(", "[", "[", "0.00024976", ",", "0.00023018", "]", ",", "[", "0.00023043", ",", "(", "-", "0.00024997", ")", "]", "]", ")", "assert", "np", ".", "all", "(", "(", "np", ".", "abs", "(", "(", "ww", ".", "wcs", ".", "pc", "-", "answer", ")", ")", "<", "close_enough", ")", ")", "answer", "=", "np", ".", "array", "(", "[", "[", "202.39265216", ",", "47.17756518", "]", ",", "[", "202.39335826", ",", "47.17754619", "]", ",", "[", "202.39406436", ",", "47.1775272", "]", "]", ")", "assert", "np", ".", "all", "(", "(", "np", ".", "abs", "(", "(", "result", "-", "answer", ")", ")", "<", "close_enough", ")", ")" ]
regression test for wcs_pix2world, from github issue #1463 .
train
false
54,526
def block_device_mapping_update_or_create(context, values): return IMPL.block_device_mapping_update_or_create(context, values)
[ "def", "block_device_mapping_update_or_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "block_device_mapping_update_or_create", "(", "context", ",", "values", ")" ]
update an entry of block device mapping, creating it if it does not exist .
train
false
54,528
def build_content_type(format, encoding=u'utf-8'): if (u'charset' in format): return format if (format in (u'application/json', u'text/javascript')): return format return (u'%s; charset=%s' % (format, encoding))
[ "def", "build_content_type", "(", "format", ",", "encoding", "=", "u'utf-8'", ")", ":", "if", "(", "u'charset'", "in", "format", ")", ":", "return", "format", "if", "(", "format", "in", "(", "u'application/json'", ",", "u'text/javascript'", ")", ")", ":", "return", "format", "return", "(", "u'%s; charset=%s'", "%", "(", "format", ",", "encoding", ")", ")" ]
appends character encoding to the provided format if not already present .
train
false
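A quick sketch of build_content_type's three branches, with made-up media types:
print(build_content_type('application/xml'))           # 'application/xml; charset=utf-8'
print(build_content_type('application/json'))          # left bare on purpose
print(build_content_type('text/html; charset=ascii'))  # already has a charset, unchanged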
54,529
def assign_random_category(resource): random_index = randint(0, (TopicCategory.objects.all().count() - 1)) tc = TopicCategory.objects.all()[random_index] resource.category = tc resource.save()
[ "def", "assign_random_category", "(", "resource", ")", ":", "random_index", "=", "randint", "(", "0", ",", "(", "TopicCategory", ".", "objects", ".", "all", "(", ")", ".", "count", "(", ")", "-", "1", ")", ")", "tc", "=", "TopicCategory", ".", "objects", ".", "all", "(", ")", "[", "random_index", "]", "resource", ".", "category", "=", "tc", "resource", ".", "save", "(", ")" ]
assign a random category to a resource .
train
false
54,531
def is_harvard_notes_enabled(course): modules = set(['textannotation', 'imageannotation', 'videoannotation']) return bool(modules.intersection(course.advanced_modules))
[ "def", "is_harvard_notes_enabled", "(", "course", ")", ":", "modules", "=", "set", "(", "[", "'textannotation'", ",", "'imageannotation'", ",", "'videoannotation'", "]", ")", "return", "bool", "(", "modules", ".", "intersection", "(", "course", ".", "advanced_modules", ")", ")" ]
returns true if harvard annotation tool is enabled for the course .
train
false
54,532
def seqs_from_file(ids, file_lines): for (label, seq) in parse_fasta(file_lines): if (id_from_fasta_label_line(label) in ids): (yield (label, seq))
[ "def", "seqs_from_file", "(", "ids", ",", "file_lines", ")", ":", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "file_lines", ")", ":", "if", "(", "id_from_fasta_label_line", "(", "label", ")", "in", "ids", ")", ":", "(", "yield", "(", "label", ",", "seq", ")", ")" ]
extract labels and seqs from file .
train
false
54,533
def dice_coe(output, target, epsilon=1e-10): inse = tf.reduce_sum((output * target)) l = tf.reduce_sum((output * output)) r = tf.reduce_sum((target * target)) dice = ((2 * inse) / (l + r)) if (epsilon == 0): return dice else: return tf.clip_by_value(dice, 0, (1.0 - epsilon))
[ "def", "dice_coe", "(", "output", ",", "target", ",", "epsilon", "=", "1e-10", ")", ":", "inse", "=", "tf", ".", "reduce_sum", "(", "(", "output", "*", "target", ")", ")", "l", "=", "tf", ".", "reduce_sum", "(", "(", "output", "*", "output", ")", ")", "r", "=", "tf", ".", "reduce_sum", "(", "(", "target", "*", "target", ")", ")", "dice", "=", "(", "(", "2", "*", "inse", ")", "/", "(", "l", "+", "r", ")", ")", "if", "(", "epsilon", "==", "0", ")", ":", "return", "dice", "else", ":", "return", "tf", ".", "clip_by_value", "(", "dice", ",", "0", ",", "(", "1.0", "-", "epsilon", ")", ")" ]
sørensen–dice coefficient for comparing the similarity of two distributions .
train
false
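The dice_coe record above computes the Sørensen–Dice coefficient as 2·⟨o, t⟩ / (⟨o, o⟩ + ⟨t, t⟩), clipped away from 1 by epsilon. A plain numpy analogue for reference:
import numpy as np

def dice_reference(output, target, epsilon=1e-10):
    # 2 * <o, t> / (<o, o> + <t, t>); note both this sketch and the
    # tensorflow version divide by zero when both inputs are all zeros
    inse = np.sum(output * target)
    denom = np.sum(output * output) + np.sum(target * target)
    dice = 2.0 * inse / denom
    return dice if epsilon == 0 else np.clip(dice, 0, 1.0 - epsilon)

a = np.array([1.0, 0.0, 1.0])
print(dice_reference(a, a))  # identical masks score (almost) 1, clipped by epsilon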
54,535
def upload_mission(aFileName): missionlist = readmission(aFileName) print ('\nUpload mission from a file: %s' % import_mission_filename) print ' Clear mission' cmds = vehicle.commands cmds.clear() for command in missionlist: cmds.add(command) print ' Upload mission' vehicle.commands.upload()
[ "def", "upload_mission", "(", "aFileName", ")", ":", "missionlist", "=", "readmission", "(", "aFileName", ")", "print", "(", "'\\nUpload mission from a file: %s'", "%", "import_mission_filename", ")", "print", "' Clear mission'", "cmds", "=", "vehicle", ".", "commands", "cmds", ".", "clear", "(", ")", "for", "command", "in", "missionlist", ":", "cmds", ".", "add", "(", "command", ")", "print", "' Upload mission'", "vehicle", ".", "commands", ".", "upload", "(", ")" ]
upload a mission from a file .
train
true
54,536
def evaluator(variables, functions, math_expr, case_sensitive=False): if (math_expr.strip() == ''): return float('nan') math_interpreter = ParseAugmenter(math_expr, case_sensitive) math_interpreter.parse_algebra() (all_variables, all_functions) = add_defaults(variables, functions, case_sensitive) math_interpreter.check_variables(all_variables, all_functions) if case_sensitive: casify = (lambda x: x) else: casify = (lambda x: x.lower()) evaluate_actions = {'number': eval_number, 'variable': (lambda x: all_variables[casify(x[0])]), 'function': (lambda x: all_functions[casify(x[0])](x[1])), 'atom': eval_atom, 'power': eval_power, 'parallel': eval_parallel, 'product': eval_product, 'sum': eval_sum} return math_interpreter.reduce_tree(evaluate_actions)
[ "def", "evaluator", "(", "variables", ",", "functions", ",", "math_expr", ",", "case_sensitive", "=", "False", ")", ":", "if", "(", "math_expr", ".", "strip", "(", ")", "==", "''", ")", ":", "return", "float", "(", "'nan'", ")", "math_interpreter", "=", "ParseAugmenter", "(", "math_expr", ",", "case_sensitive", ")", "math_interpreter", ".", "parse_algebra", "(", ")", "(", "all_variables", ",", "all_functions", ")", "=", "add_defaults", "(", "variables", ",", "functions", ",", "case_sensitive", ")", "math_interpreter", ".", "check_variables", "(", "all_variables", ",", "all_functions", ")", "if", "case_sensitive", ":", "casify", "=", "(", "lambda", "x", ":", "x", ")", "else", ":", "casify", "=", "(", "lambda", "x", ":", "x", ".", "lower", "(", ")", ")", "evaluate_actions", "=", "{", "'number'", ":", "eval_number", ",", "'variable'", ":", "(", "lambda", "x", ":", "all_variables", "[", "casify", "(", "x", "[", "0", "]", ")", "]", ")", ",", "'function'", ":", "(", "lambda", "x", ":", "all_functions", "[", "casify", "(", "x", "[", "0", "]", ")", "]", "(", "x", "[", "1", "]", ")", ")", ",", "'atom'", ":", "eval_atom", ",", "'power'", ":", "eval_power", ",", "'parallel'", ":", "eval_parallel", ",", "'product'", ":", "eval_product", ",", "'sum'", ":", "eval_sum", "}", "return", "math_interpreter", ".", "reduce_tree", "(", "evaluate_actions", ")" ]
evaluate an expression; that is, take a string of math and return a numeric result .
train
false
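A usage sketch for the evaluator record above, assuming the capa-style grammar where '^' is exponentiation and '||' is the parallel-resistance operator; the variable names and values are made up.
print(evaluator({'x': 2}, {}, 'x^2 + 1'))           # 5.0
print(evaluator({'R1': 2, 'R2': 2}, {}, 'R1||R2'))  # 1.0, the parallel combination
print(evaluator({}, {}, ''))                        # nan for an empty expression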
54,538
def combine_path_lists(*path_seqs): results = [] for path in combine_lists(*path_seqs): expanded = expand_path(path) paths = (sorted(glob.glob(expanded)) or [expanded]) results.extend(paths) return results
[ "def", "combine_path_lists", "(", "*", "path_seqs", ")", ":", "results", "=", "[", "]", "for", "path", "in", "combine_lists", "(", "*", "path_seqs", ")", ":", "expanded", "=", "expand_path", "(", "path", ")", "paths", "=", "(", "sorted", "(", "glob", ".", "glob", "(", "expanded", ")", ")", "or", "[", "expanded", "]", ")", "results", ".", "extend", "(", "paths", ")", "return", "results" ]
concatenate the given sequences of paths into a single list, expanding and globbing each path .
train
false
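A sketch of calling combine_path_lists, runnable only where its combine_lists and expand_path helpers (mrjob-style) are importable alongside it; the input paths are hypothetical.
# each path is user/env-expanded and globbed; globs that match nothing
# fall through as the expanded literal rather than being dropped
paths = combine_path_lists(['~/logs/*.log'], ['/etc/mrjob.conf'])
print(paths)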
54,539
def sum_parts(data): arr = np.asarray(data, dtype=np.float32) out = cuda.device_array(1, dtype=np.float32) gpu_single_block_sum[(1, gpu_block_sum_max_blockdim)](arr, out) return out.copy_to_host()[0]
[ "def", "sum_parts", "(", "data", ")", ":", "arr", "=", "np", ".", "asarray", "(", "data", ",", "dtype", "=", "np", ".", "float32", ")", "out", "=", "cuda", ".", "device_array", "(", "1", ",", "dtype", "=", "np", ".", "float32", ")", "gpu_single_block_sum", "[", "(", "1", ",", "gpu_block_sum_max_blockdim", ")", "]", "(", "arr", ",", "out", ")", "return", "out", ".", "copy_to_host", "(", ")", "[", "0", "]" ]
driver for gpu_single_block_sum kernel .
train
false
54,540
def test_contains_one_of(): assert hug.validate.contains_one_of('no', 'way')(TEST_SCHEMA) assert (not hug.validate.contains_one_of('last', 'place')(TEST_SCHEMA))
[ "def", "test_contains_one_of", "(", ")", ":", "assert", "hug", ".", "validate", ".", "contains_one_of", "(", "'no'", ",", "'way'", ")", "(", "TEST_SCHEMA", ")", "assert", "(", "not", "hug", ".", "validate", ".", "contains_one_of", "(", "'last'", ",", "'place'", ")", "(", "TEST_SCHEMA", ")", ")" ]
test to ensure hug's contains_one_of validation function works as expected to ensure presence of a field .
train
false
54,541
def getTransferClosestNestedRing(extrusionHalfWidth, nestedRings, oldOrderedLocation, skein, threadSequence): if (len(nestedRings) > 0): oldOrderedLocation.z = nestedRings[0].z closestDistance = 9.876543219876543e+17 closestNestedRing = None for remainingNestedRing in nestedRings: distance = getClosestDistanceIndexToLine(oldOrderedLocation.dropAxis(), remainingNestedRing.boundary).distance if (distance < closestDistance): closestDistance = distance closestNestedRing = remainingNestedRing nestedRings.remove(closestNestedRing) closestNestedRing.addToThreads(extrusionHalfWidth, oldOrderedLocation, skein, threadSequence) return closestNestedRing
[ "def", "getTransferClosestNestedRing", "(", "extrusionHalfWidth", ",", "nestedRings", ",", "oldOrderedLocation", ",", "skein", ",", "threadSequence", ")", ":", "if", "(", "len", "(", "nestedRings", ")", ">", "0", ")", ":", "oldOrderedLocation", ".", "z", "=", "nestedRings", "[", "0", "]", ".", "z", "closestDistance", "=", "9.876543219876543e+17", "closestNestedRing", "=", "None", "for", "remainingNestedRing", "in", "nestedRings", ":", "distance", "=", "getClosestDistanceIndexToLine", "(", "oldOrderedLocation", ".", "dropAxis", "(", ")", ",", "remainingNestedRing", ".", "boundary", ")", ".", "distance", "if", "(", "distance", "<", "closestDistance", ")", ":", "closestDistance", "=", "distance", "closestNestedRing", "=", "remainingNestedRing", "nestedRings", ".", "remove", "(", "closestNestedRing", ")", "closestNestedRing", ".", "addToThreads", "(", "extrusionHalfWidth", ",", "oldOrderedLocation", ",", "skein", ",", "threadSequence", ")", "return", "closestNestedRing" ]
get and transfer the closest remaining nested ring .
train
false
54,543
def set_network(ip, netmask, gateway): return __execute_cmd('setniccfg -s {0} {1} {2}'.format(ip, netmask, gateway))
[ "def", "set_network", "(", "ip", ",", "netmask", ",", "gateway", ")", ":", "return", "__execute_cmd", "(", "'setniccfg -s {0} {1} {2}'", ".", "format", "(", "ip", ",", "netmask", ",", "gateway", ")", ")" ]
configure the network settings .
train
false
54,544
def pickle(obj, fname, protocol=2): with smart_open(fname, 'wb') as fout: _pickle.dump(obj, fout, protocol=protocol)
[ "def", "pickle", "(", "obj", ",", "fname", ",", "protocol", "=", "2", ")", ":", "with", "smart_open", "(", "fname", ",", "'wb'", ")", "as", "fout", ":", "_pickle", ".", "dump", "(", "obj", ",", "fout", ",", "protocol", "=", "protocol", ")" ]
pickle object obj to file fname .
train
false
54,545
def split_file_dummy(changed_file): return (None, changed_file)
[ "def", "split_file_dummy", "(", "changed_file", ")", ":", "return", "(", "None", ",", "changed_file", ")" ]
split the repository-relative filename into a tuple of ( branch , filename ) .
train
false
54,546
def register_onaccept(form): req_vars = form.request_vars position = req_vars.get('position', '') reason = req_vars.get('reason', '') db = current.db table = db.auth_user db((table.id == form.vars.id)).update(comments=('%s | %s' % (position, reason)))
[ "def", "register_onaccept", "(", "form", ")", ":", "req_vars", "=", "form", ".", "request_vars", "position", "=", "req_vars", ".", "get", "(", "'position'", ",", "''", ")", "reason", "=", "req_vars", ".", "get", "(", "'reason'", ",", "''", ")", "db", "=", "current", ".", "db", "table", "=", "db", ".", "auth_user", "db", "(", "(", "table", ".", "id", "==", "form", ".", "vars", ".", "id", ")", ")", ".", "update", "(", "comments", "=", "(", "'%s | %s'", "%", "(", "position", ",", "reason", ")", ")", ")" ]
tasks to be performed after a new user registers .
train
false
54,547
def gf_mul_ground(f, a, p, K): if (not a): return [] else: return [((a * b) % p) for b in f]
[ "def", "gf_mul_ground", "(", "f", ",", "a", ",", "p", ",", "K", ")", ":", "if", "(", "not", "a", ")", ":", "return", "[", "]", "else", ":", "return", "[", "(", "(", "a", "*", "b", ")", "%", "p", ")", "for", "b", "in", "f", "]" ]
compute f * a where f in gf(p)[x] and a in gf(p) .
train
false
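A worked example for gf_mul_ground above. The domain argument K is unused by this particular body, so the sketch passes None; real callers would pass a ground domain such as ZZ.
# multiply f(x) = 3*x**2 + 2*x + 4 by a = 2 over GF(5): each coefficient
# becomes (a * b) % p, i.e. [6 % 5, 4 % 5, 8 % 5]
print(gf_mul_ground([3, 2, 4], 2, 5, None))  # [1, 4, 3]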
54,548
def close_review_requests(payload, server_url): review_request_id_to_commits_map = defaultdict(list) branch_name = payload.get(u'repository_path') if (not branch_name): return review_request_id_to_commits_map revisions = payload.get(u'revisions', []) for revision in revisions: revision_id = revision.get(u'revision') if (len(revision_id) > 7): revision_id = revision_id[:7] commit_message = revision.get(u'message') review_request_id = get_review_request_id(commit_message, server_url) review_request_id_to_commits_map[review_request_id].append((u'%s (%s)' % (branch_name, revision_id))) return review_request_id_to_commits_map
[ "def", "close_review_requests", "(", "payload", ",", "server_url", ")", ":", "review_request_id_to_commits_map", "=", "defaultdict", "(", "list", ")", "branch_name", "=", "payload", ".", "get", "(", "u'repository_path'", ")", "if", "(", "not", "branch_name", ")", ":", "return", "review_request_id_to_commits_map", "revisions", "=", "payload", ".", "get", "(", "u'revisions'", ",", "[", "]", ")", "for", "revision", "in", "revisions", ":", "revision_id", "=", "revision", ".", "get", "(", "u'revision'", ")", "if", "(", "len", "(", "revision_id", ")", ">", "7", ")", ":", "revision_id", "=", "revision_id", "[", ":", "7", "]", "commit_message", "=", "revision", ".", "get", "(", "u'message'", ")", "review_request_id", "=", "get_review_request_id", "(", "commit_message", ",", "server_url", ")", "review_request_id_to_commits_map", "[", "review_request_id", "]", ".", "append", "(", "(", "u'%s (%s)'", "%", "(", "branch_name", ",", "revision_id", ")", ")", ")", "return", "review_request_id_to_commits_map" ]
closes all review requests for the google code repository .
train
false
54,549
def encoding(argument): try: codecs.lookup(argument) except LookupError: raise ValueError(('unknown encoding: "%s"' % argument)) return argument
[ "def", "encoding", "(", "argument", ")", ":", "try", ":", "codecs", ".", "lookup", "(", "argument", ")", "except", "LookupError", ":", "raise", "ValueError", "(", "(", "'unknown encoding: \"%s\"'", "%", "argument", ")", ")", "return", "argument" ]
verifies the encoding argument by lookup .
train
false
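A quick sketch of both branches of the encoding validator above; the bogus codec name is made up.
print(encoding('utf-8'))  # returned unchanged once codecs.lookup accepts it
try:
    encoding('no-such-codec')
except ValueError as err:
    print(err)            # unknown encoding: "no-such-codec"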