id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
24,728
@home_routes.route('/reset_password/<hash>', methods=('GET', 'POST'))
def change_password_view(hash):
    """Serve the change-password form on GET; apply the password reset on POST.

    `hash` is the reset token from the emailed URL; it is forwarded to
    DataManager.reset_password together with the submitted form data.
    """
    method = request.method
    if method == 'GET':
        return render_template('gentelella/admin/login/change_password.html')
    if method == 'POST':
        DataManager.reset_password(request.form, hash)
        return redirect(url_for('.index'))
[ "@", "home_routes", ".", "route", "(", "'/reset_password/<hash>'", ",", "methods", "=", "(", "'GET'", ",", "'POST'", ")", ")", "def", "change_password_view", "(", "hash", ")", ":", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "return", "render_template", "(", "'gentelella/admin/login/change_password.html'", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "DataManager", ".", "reset_password", "(", "request", ".", "form", ",", "hash", ")", "return", "redirect", "(", "url_for", "(", "'.index'", ")", ")" ]
change password view .
train
false
24,729
def test_world_should_be_able_to_absorb_functions():
    """world.absorb should attach a function as a world attribute and
    world.spew should remove it again."""
    # Precondition: world carries no 'function1' attribute yet.
    assert (not hasattr(world, 'function1'))

    @world.absorb
    def function1():
        return 'absorbed'
    # After absorption the function is reachable and callable via world.
    assert hasattr(world, 'function1')
    assert callable(world.function1)
    assert_equals(world.function1(), 'absorbed')
    # spew() detaches the previously absorbed attribute by name.
    world.spew('function1')
    assert (not hasattr(world, 'function1'))
[ "def", "test_world_should_be_able_to_absorb_functions", "(", ")", ":", "assert", "(", "not", "hasattr", "(", "world", ",", "'function1'", ")", ")", "@", "world", ".", "absorb", "def", "function1", "(", ")", ":", "return", "'absorbed'", "assert", "hasattr", "(", "world", ",", "'function1'", ")", "assert", "callable", "(", "world", ".", "function1", ")", "assert_equals", "(", "world", ".", "function1", "(", ")", ",", "'absorbed'", ")", "world", ".", "spew", "(", "'function1'", ")", "assert", "(", "not", "hasattr", "(", "world", ",", "'function1'", ")", ")" ]
world should be able to absorb functions .
train
false
24,730
@utils.arg('monitor', metavar='<monitor>', help='ID of the monitor to backup.')
@utils.arg('--container', metavar='<container>', help='Optional Backup container name. (Default=None)', default=None)
@utils.arg('--display-name', metavar='<display-name>', help='Optional backup name. (Default=None)', default=None)
@utils.arg('--display-description', metavar='<display-description>', help='Optional backup description. (Default=None)', default=None)
@utils.service_type('monitor')
def do_backup_create(cs, args):
    # Creates a backup of the given monitor; optional container/name/description
    # come from the CLI flags declared above and default to None.
    cs.backups.create(args.monitor, args.container, args.display_name, args.display_description)
[ "@", "utils", ".", "arg", "(", "'monitor'", ",", "metavar", "=", "'<monitor>'", ",", "help", "=", "'ID of the monitor to backup.'", ")", "@", "utils", ".", "arg", "(", "'--container'", ",", "metavar", "=", "'<container>'", ",", "help", "=", "'Optional Backup container name. (Default=None)'", ",", "default", "=", "None", ")", "@", "utils", ".", "arg", "(", "'--display-name'", ",", "metavar", "=", "'<display-name>'", ",", "help", "=", "'Optional backup name. (Default=None)'", ",", "default", "=", "None", ")", "@", "utils", ".", "arg", "(", "'--display-description'", ",", "metavar", "=", "'<display-description>'", ",", "help", "=", "'Optional backup description. (Default=None)'", ",", "default", "=", "None", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_backup_create", "(", "cs", ",", "args", ")", ":", "cs", ".", "backups", ".", "create", "(", "args", ".", "monitor", ",", "args", ".", "container", ",", "args", ".", "display_name", ",", "args", ".", "display_description", ")" ]
creates a backup .
train
false
24,731
def round_rectangle(size, radius, fill):
    """Draw a rounded rectangle as a new 'L'-mode PIL image.

    Parameters
    ----------
    size : (width, height) tuple of the output image in pixels.
    radius : corner radius in pixels.
    fill : greyscale fill value (0-255) for the rectangle body and corners.

    Returns the filled rectangle image with all four corners rounded.
    """
    (width, height) = size
    # Bug fix: `fill` was previously ignored — 255 was hard-coded for both the
    # body and the corners, so every rectangle came out fully white regardless
    # of the caller's fill value.
    rectangle = Image.new(u'L', size, fill)
    corner = round_corner(radius, fill)
    rectangle.paste(corner, (0, 0))
    rectangle.paste(corner.rotate(90), (0, (height - radius)))
    rectangle.paste(corner.rotate(180), ((width - radius), (height - radius)))
    rectangle.paste(corner.rotate(270), ((width - radius), 0))
    return rectangle
[ "def", "round_rectangle", "(", "size", ",", "radius", ",", "fill", ")", ":", "(", "width", ",", "height", ")", "=", "size", "rectangle", "=", "Image", ".", "new", "(", "u'L'", ",", "size", ",", "255", ")", "corner", "=", "round_corner", "(", "radius", ",", "255", ")", "rectangle", ".", "paste", "(", "corner", ",", "(", "0", ",", "0", ")", ")", "rectangle", ".", "paste", "(", "corner", ".", "rotate", "(", "90", ")", ",", "(", "0", ",", "(", "height", "-", "radius", ")", ")", ")", "rectangle", ".", "paste", "(", "corner", ".", "rotate", "(", "180", ")", ",", "(", "(", "width", "-", "radius", ")", ",", "(", "height", "-", "radius", ")", ")", ")", "rectangle", ".", "paste", "(", "corner", ".", "rotate", "(", "270", ")", ",", "(", "(", "width", "-", "radius", ")", ",", "0", ")", ")", "return", "rectangle" ]
draw a rounded rectangle .
train
true
24,732
def get_configured_ui():
    """Build and return a Mercurial UI object with quiet output enabled."""
    configured = ui.ui()
    configured.setconfig('ui', 'quiet', True)
    return configured
[ "def", "get_configured_ui", "(", ")", ":", "_ui", "=", "ui", ".", "ui", "(", ")", "_ui", ".", "setconfig", "(", "'ui'", ",", "'quiet'", ",", "True", ")", "return", "_ui" ]
configure any desired ui settings .
train
false
24,733
def GetTag(node):
    """Return the local part of ``node.tag``, stripping any ``{namespace}`` prefix.

    Tags without a namespace prefix are returned unchanged.
    """
    # rpartition yields ('', '', tag) when no '}' is present, so the
    # full tag falls through untouched in the un-namespaced case.
    return node.tag.rpartition('}')[2]
[ "def", "GetTag", "(", "node", ")", ":", "return", "node", ".", "tag", ".", "rsplit", "(", "'}'", ",", "1", ")", "[", "(", "-", "1", ")", "]" ]
strips namespace prefix .
train
false
24,734
def test_handshake_protocol_agreement(message, transport):
    """The handshake should agree on a subprotocol both sides support.

    NOTE(review): the expected winner is 'worse_proto' — presumably the
    server's preference order decides among the client's wanted protocols,
    despite the variable being named best_proto; confirm against the
    do_handshake implementation.
    """
    best_proto = 'worse_proto'
    wanted_protos = ['best', 'chat', 'worse_proto']
    server_protos = 'worse_proto,chat'
    # Inject the server's Sec-WebSocket-Protocol header into the request.
    message.headers.extend(gen_ws_headers(server_protos)[0])
    (_, resp_headers, _, _, protocol) = do_handshake(message.method, message.headers, transport, protocols=wanted_protos)
    assert (protocol == best_proto)
[ "def", "test_handshake_protocol_agreement", "(", "message", ",", "transport", ")", ":", "best_proto", "=", "'worse_proto'", "wanted_protos", "=", "[", "'best'", ",", "'chat'", ",", "'worse_proto'", "]", "server_protos", "=", "'worse_proto,chat'", "message", ".", "headers", ".", "extend", "(", "gen_ws_headers", "(", "server_protos", ")", "[", "0", "]", ")", "(", "_", ",", "resp_headers", ",", "_", ",", "_", ",", "protocol", ")", "=", "do_handshake", "(", "message", ".", "method", ",", "message", ".", "headers", ",", "transport", ",", "protocols", "=", "wanted_protos", ")", "assert", "(", "protocol", "==", "best_proto", ")" ]
tests if the right protocol is selected given multiple .
train
false
24,735
def compute_wcs(key, challenge):
    """Compute a WAMP-CRA authentication signature.

    Both `key` and `challenge` may be text or bytes; text is encoded as
    UTF-8 before signing. Returns the base64-encoded HMAC-SHA256 digest
    with surrounding whitespace stripped.
    """
    accepted = (six.text_type, six.binary_type)
    assert type(key) in accepted
    assert type(challenge) in accepted
    if type(key) == six.text_type:
        key = key.encode('utf8')
    if type(challenge) == six.text_type:
        challenge = challenge.encode('utf8')
    digest = hmac.new(key, challenge, hashlib.sha256).digest()
    return binascii.b2a_base64(digest).strip()
[ "def", "compute_wcs", "(", "key", ",", "challenge", ")", ":", "assert", "(", "type", "(", "key", ")", "in", "[", "six", ".", "text_type", ",", "six", ".", "binary_type", "]", ")", "assert", "(", "type", "(", "challenge", ")", "in", "[", "six", ".", "text_type", ",", "six", ".", "binary_type", "]", ")", "if", "(", "type", "(", "key", ")", "==", "six", ".", "text_type", ")", ":", "key", "=", "key", ".", "encode", "(", "'utf8'", ")", "if", "(", "type", "(", "challenge", ")", "==", "six", ".", "text_type", ")", ":", "challenge", "=", "challenge", ".", "encode", "(", "'utf8'", ")", "sig", "=", "hmac", ".", "new", "(", "key", ",", "challenge", ",", "hashlib", ".", "sha256", ")", ".", "digest", "(", ")", "return", "binascii", ".", "b2a_base64", "(", "sig", ")", ".", "strip", "(", ")" ]
compute an wamp-cra authentication signature from an authentication challenge and a key .
train
false
24,737
def _get_twilio(profile):
    """Return a TwilioRestClient authenticated from the named config profile.

    The profile is resolved through Salt's config.option and is expected to
    carry 'twilio.account_sid' and 'twilio.auth_token' keys.
    """
    creds = __salt__['config.option'](profile)
    account_sid = creds.get('twilio.account_sid')
    auth_token = creds.get('twilio.auth_token')
    return TwilioRestClient(account_sid, auth_token)
[ "def", "_get_twilio", "(", "profile", ")", ":", "creds", "=", "__salt__", "[", "'config.option'", "]", "(", "profile", ")", "client", "=", "TwilioRestClient", "(", "creds", ".", "get", "(", "'twilio.account_sid'", ")", ",", "creds", ".", "get", "(", "'twilio.auth_token'", ")", ")", "return", "client" ]
return the twilio connection .
train
true
24,738
def get_root_path(import_name):
    """Return the directory containing a module/package, or cwd as a fallback.

    Resolution order: an already-imported module's __file__; otherwise the
    loader's get_filename(); otherwise import the module and read __file__.
    '__main__' and loader-less names resolve to the current working directory.

    Raises RuntimeError when no file path can be determined (e.g. namespace
    packages or import hooks without filename information).
    """
    # Fast path: module already imported and knows where it lives.
    module = sys.modules.get(import_name)
    if module is not None and hasattr(module, '__file__'):
        return os.path.dirname(os.path.abspath(module.__file__))

    loader = pkgutil.get_loader(import_name)
    if loader is None or import_name == '__main__':
        # No loader (interactive session) or the main script itself.
        return os.getcwd()

    if hasattr(loader, 'get_filename'):
        filepath = loader.get_filename(import_name)
    else:
        # Loader cannot report a filename; import and inspect the module.
        __import__(import_name)
        module = sys.modules[import_name]
        filepath = getattr(module, '__file__', None)
        if (filepath is None):
            raise RuntimeError(('No root path can be found for the provided module "%s". This can happen because the module came from an import hook that does not provide file name information or because it\'s a namespace package. In this case the root path needs to be explicitly provided.' % import_name))
    return os.path.dirname(os.path.abspath(filepath))
[ "def", "get_root_path", "(", "import_name", ")", ":", "mod", "=", "sys", ".", "modules", ".", "get", "(", "import_name", ")", "if", "(", "(", "mod", "is", "not", "None", ")", "and", "hasattr", "(", "mod", ",", "'__file__'", ")", ")", ":", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "mod", ".", "__file__", ")", ")", "loader", "=", "pkgutil", ".", "get_loader", "(", "import_name", ")", "if", "(", "(", "loader", "is", "None", ")", "or", "(", "import_name", "==", "'__main__'", ")", ")", ":", "return", "os", ".", "getcwd", "(", ")", "if", "hasattr", "(", "loader", ",", "'get_filename'", ")", ":", "filepath", "=", "loader", ".", "get_filename", "(", "import_name", ")", "else", ":", "__import__", "(", "import_name", ")", "mod", "=", "sys", ".", "modules", "[", "import_name", "]", "filepath", "=", "getattr", "(", "mod", ",", "'__file__'", ",", "None", ")", "if", "(", "filepath", "is", "None", ")", ":", "raise", "RuntimeError", "(", "(", "'No root path can be found for the provided module \"%s\". This can happen because the module came from an import hook that does not provide file name information or because it\\'s a namespace package. In this case the root path needs to be explicitly provided.'", "%", "import_name", ")", ")", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "filepath", ")", ")" ]
returns the path to a package or cwd if that cannot be found .
train
true
24,739
def compute_log_moment(q, sigma, steps, lmbd, verify=False, verbose=False):
    """Compute the log moment of the Gaussian mechanism composed over steps.

    Args:
      q, sigma, lmbd: parameters forwarded to compute_a (moment computation).
      steps: number of composition steps; the single-step log moment is
        multiplied by this count.
      verify: when True, cross-check compute_a against high-precision mpmath
        computations of the A and B moments.
      verbose: forwarded to the underlying moment computations.

    Returns:
      steps * log(moment), or np.inf when the single-step moment diverges.
    """
    moment = compute_a(sigma, q, lmbd, verbose=verbose)
    if verify:
        mp.dps = 50  # raise mpmath decimal precision for the cross-check
        moment_a_mp = compute_a_mp(sigma, q, lmbd, verbose=verbose)
        moment_b_mp = compute_b_mp(sigma, q, lmbd, verbose=verbose)
        # Fast computation must agree tightly with the high-precision one.
        np.testing.assert_allclose(moment, moment_a_mp, rtol=1e-10)
        if (not np.isinf(moment_a_mp)):
            # The B moment must be strictly dominated by the A moment.
            np.testing.assert_array_less(moment_b_mp, moment_a_mp)
    if np.isinf(moment):
        return np.inf
    else:
        return (np.log(moment) * steps)
[ "def", "compute_log_moment", "(", "q", ",", "sigma", ",", "steps", ",", "lmbd", ",", "verify", "=", "False", ",", "verbose", "=", "False", ")", ":", "moment", "=", "compute_a", "(", "sigma", ",", "q", ",", "lmbd", ",", "verbose", "=", "verbose", ")", "if", "verify", ":", "mp", ".", "dps", "=", "50", "moment_a_mp", "=", "compute_a_mp", "(", "sigma", ",", "q", ",", "lmbd", ",", "verbose", "=", "verbose", ")", "moment_b_mp", "=", "compute_b_mp", "(", "sigma", ",", "q", ",", "lmbd", ",", "verbose", "=", "verbose", ")", "np", ".", "testing", ".", "assert_allclose", "(", "moment", ",", "moment_a_mp", ",", "rtol", "=", "1e-10", ")", "if", "(", "not", "np", ".", "isinf", "(", "moment_a_mp", ")", ")", ":", "np", ".", "testing", ".", "assert_array_less", "(", "moment_b_mp", ",", "moment_a_mp", ")", "if", "np", ".", "isinf", "(", "moment", ")", ":", "return", "np", ".", "inf", "else", ":", "return", "(", "np", ".", "log", "(", "moment", ")", "*", "steps", ")" ]
compute the log moment of gaussian mechanism for given parameters .
train
false
24,740
def get_public_rsa_fingerprint(key_location=None, key_file_obj=None, passphrase=None):
    """Return the fingerprint of the public portion of an RSA key.

    The key is loaded either from a path (`key_location`) or an open file
    object (`key_file_obj`), optionally decrypted with `passphrase`. The
    fingerprint is the MD5 digest of the DER-encoded public key rendered
    as colon-separated hex pairs (a 47-character string).
    """
    privkey = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj,
                          passphrase=passphrase, use_pycrypto=True)
    der_public = privkey.publickey().exportKey('DER')
    digest = hashlib.md5(der_public).hexdigest()
    fingerprint = insert_char_every_n_chars(digest, ':', 2)
    source = (key_location or key_file_obj)
    log.debug(('rsa public key fingerprint (%s): %s' % (source, fingerprint)))
    return fingerprint
[ "def", "get_public_rsa_fingerprint", "(", "key_location", "=", "None", ",", "key_file_obj", "=", "None", ",", "passphrase", "=", "None", ")", ":", "privkey", "=", "get_rsa_key", "(", "key_location", "=", "key_location", ",", "key_file_obj", "=", "key_file_obj", ",", "passphrase", "=", "passphrase", ",", "use_pycrypto", "=", "True", ")", "pubkey", "=", "privkey", ".", "publickey", "(", ")", "md5digest", "=", "hashlib", ".", "md5", "(", "pubkey", ".", "exportKey", "(", "'DER'", ")", ")", ".", "hexdigest", "(", ")", "fingerprint", "=", "insert_char_every_n_chars", "(", "md5digest", ",", "':'", ",", "2", ")", "key", "=", "(", "key_location", "or", "key_file_obj", ")", "log", ".", "debug", "(", "(", "'rsa public key fingerprint (%s): %s'", "%", "(", "key", ",", "fingerprint", ")", ")", ")", "return", "fingerprint" ]
returns the fingerprint of the public portion of an rsa key as a 47-character string .
train
false
24,741
def _create_achalls(plugin):
    """Return a list of annotated TLS-SNI-01 challenges to test on plugin.

    For each domain the plugin declares testable, its challenge-type
    preferences are scanned and a KeyAuthorizationAnnotatedChallenge is
    built for every TLS-SNI-01 entry, using a fresh random token.
    """
    achalls = list()
    names = plugin.get_testable_domain_names()
    for domain in names:
        prefs = plugin.get_chall_pref(domain)
        for chall_type in prefs:
            if (chall_type == challenges.TLSSNI01):
                # Fresh random token of the protocol-mandated size.
                chall = challenges.TLSSNI01(token=os.urandom(challenges.TLSSNI01.TOKEN_SIZE))
                challb = acme_util.chall_to_challb(chall, messages.STATUS_PENDING)
                achall = achallenges.KeyAuthorizationAnnotatedChallenge(challb=challb, domain=domain, account_key=util.JWK)
                achalls.append(achall)
    return achalls
[ "def", "_create_achalls", "(", "plugin", ")", ":", "achalls", "=", "list", "(", ")", "names", "=", "plugin", ".", "get_testable_domain_names", "(", ")", "for", "domain", "in", "names", ":", "prefs", "=", "plugin", ".", "get_chall_pref", "(", "domain", ")", "for", "chall_type", "in", "prefs", ":", "if", "(", "chall_type", "==", "challenges", ".", "TLSSNI01", ")", ":", "chall", "=", "challenges", ".", "TLSSNI01", "(", "token", "=", "os", ".", "urandom", "(", "challenges", ".", "TLSSNI01", ".", "TOKEN_SIZE", ")", ")", "challb", "=", "acme_util", ".", "chall_to_challb", "(", "chall", ",", "messages", ".", "STATUS_PENDING", ")", "achall", "=", "achallenges", ".", "KeyAuthorizationAnnotatedChallenge", "(", "challb", "=", "challb", ",", "domain", "=", "domain", ",", "account_key", "=", "util", ".", "JWK", ")", "achalls", ".", "append", "(", "achall", ")", "return", "achalls" ]
returns a list of annotated challenges to test on plugin .
train
false
24,742
def check_working_directory():
    """Verify the configured root exists and that Cuckoo runs from its root.

    Raises CuckooStartupError when CUCKOO_ROOT does not exist or when the
    current working directory does not contain cuckoo.py.
    """
    if not os.path.exists(CUCKOO_ROOT):
        raise CuckooStartupError('You specified a non-existing root directory: {0}'.format(CUCKOO_ROOT))
    # The entry script must be present in the directory we were launched from.
    entry_script = os.path.join(os.getcwd(), 'cuckoo.py')
    if not os.path.exists(entry_script):
        raise CuckooStartupError("You are not running Cuckoo from it's root directory")
[ "def", "check_working_directory", "(", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "CUCKOO_ROOT", ")", ")", ":", "raise", "CuckooStartupError", "(", "'You specified a non-existing root directory: {0}'", ".", "format", "(", "CUCKOO_ROOT", ")", ")", "cwd", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'cuckoo.py'", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "cwd", ")", ")", ":", "raise", "CuckooStartupError", "(", "\"You are not running Cuckoo from it's root directory\"", ")" ]
checks if working directories are ready .
train
false
24,743
def MERGE_SMALL(writer, segments):
    """Merge policy: absorb "small" segments into the writer, keep the rest.

    Segments are visited smallest-first; a segment is absorbed while the
    running document total stays below the Fibonacci threshold fib(i + 5),
    so progressively larger totals are tolerated for later (larger)
    segments. Returns the list of segments left unmerged.
    """
    from whoosh.filedb.filereading import SegmentReader
    newsegments = []
    # Sort segments by ascending document count.
    sorted_segment_list = sorted(((s.doc_count_all(), s) for s in segments))
    total_docs = 0
    for (i, (count, seg)) in enumerate(sorted_segment_list):
        if (count > 0):
            total_docs += count
        if (total_docs < fib((i + 5))):
            # Small enough: stream the segment's contents into the writer.
            reader = SegmentReader(writer.storage, writer.schema, seg)
            writer.add_reader(reader)
            reader.close()
        else:
            newsegments.append(seg)
    return newsegments
[ "def", "MERGE_SMALL", "(", "writer", ",", "segments", ")", ":", "from", "whoosh", ".", "filedb", ".", "filereading", "import", "SegmentReader", "newsegments", "=", "[", "]", "sorted_segment_list", "=", "sorted", "(", "(", "(", "s", ".", "doc_count_all", "(", ")", ",", "s", ")", "for", "s", "in", "segments", ")", ")", "total_docs", "=", "0", "for", "(", "i", ",", "(", "count", ",", "seg", ")", ")", "in", "enumerate", "(", "sorted_segment_list", ")", ":", "if", "(", "count", ">", "0", ")", ":", "total_docs", "+=", "count", "if", "(", "total_docs", "<", "fib", "(", "(", "i", "+", "5", ")", ")", ")", ":", "reader", "=", "SegmentReader", "(", "writer", ".", "storage", ",", "writer", ".", "schema", ",", "seg", ")", "writer", ".", "add_reader", "(", "reader", ")", "reader", ".", "close", "(", ")", "else", ":", "newsegments", ".", "append", "(", "seg", ")", "return", "newsegments" ]
this policy merges small segments .
train
false
24,744
def lineLineIntersection(p0, p1, p2, p3):
    """Intersect the infinite line through p0,p1 with the line through p2,p3.

    Uses the standard-form representation A*x + B*y = C for each line and
    solves by Cramer's rule. Returns [x, y]; when the lines are parallel
    (zero determinant) the point p0 is returned unchanged.
    """
    # Line 1 in standard form.
    a1 = p1[1] - p0[1]
    b1 = p0[0] - p1[0]
    c1 = a1 * p0[0] + b1 * p0[1]
    # Line 2 in standard form.
    a2 = p3[1] - p2[1]
    b2 = p2[0] - p3[0]
    c2 = a2 * p2[0] + b2 * p2[1]
    det = a1 * b2 - a2 * b1
    if det == 0:
        # Parallel (or coincident) lines: no unique intersection.
        return p0
    x = (b2 * c1 - b1 * c2) / det
    y = (a1 * c2 - a2 * c1) / det
    return [x, y]
[ "def", "lineLineIntersection", "(", "p0", ",", "p1", ",", "p2", ",", "p3", ")", ":", "A1", "=", "(", "p1", "[", "1", "]", "-", "p0", "[", "1", "]", ")", "B1", "=", "(", "p0", "[", "0", "]", "-", "p1", "[", "0", "]", ")", "C1", "=", "(", "(", "A1", "*", "p0", "[", "0", "]", ")", "+", "(", "B1", "*", "p0", "[", "1", "]", ")", ")", "A2", "=", "(", "p3", "[", "1", "]", "-", "p2", "[", "1", "]", ")", "B2", "=", "(", "p2", "[", "0", "]", "-", "p3", "[", "0", "]", ")", "C2", "=", "(", "(", "A2", "*", "p2", "[", "0", "]", ")", "+", "(", "B2", "*", "p2", "[", "1", "]", ")", ")", "det", "=", "(", "(", "A1", "*", "B2", ")", "-", "(", "A2", "*", "B1", ")", ")", "if", "(", "det", "==", "0", ")", ":", "return", "p0", "return", "[", "(", "(", "(", "B2", "*", "C1", ")", "-", "(", "B1", "*", "C2", ")", ")", "/", "det", ")", ",", "(", "(", "(", "A1", "*", "C2", ")", "-", "(", "A2", "*", "C1", ")", ")", "/", "det", ")", "]" ]
return the intersection of the infinite line trough points p0 and p1 and infinite line trough points p2 and p3 .
train
false
24,745
def prespi_create_container_table(self, conn):
    """Create the legacy `container` table schema (pre storage-policy-index).

    Copied from AccountBroker before the storage_policy_index column was
    added; used for testing with TestAccountBrokerBeforeSPI. Installs the
    table, its deleted/name index, and the insert/update/delete triggers
    that keep account_stat aggregates in sync.
    """
    conn.executescript("\n CREATE TABLE container (\n ROWID INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT,\n put_timestamp TEXT,\n delete_timestamp TEXT,\n object_count INTEGER,\n bytes_used INTEGER,\n deleted INTEGER DEFAULT 0\n );\n\n CREATE INDEX ix_container_deleted_name ON\n container (deleted, name);\n\n CREATE TRIGGER container_insert AFTER INSERT ON container\n BEGIN\n UPDATE account_stat\n SET container_count = container_count + (1 - new.deleted),\n object_count = object_count + new.object_count,\n bytes_used = bytes_used + new.bytes_used,\n hash = chexor(hash, new.name,\n new.put_timestamp || '-' ||\n new.delete_timestamp || '-' ||\n new.object_count || '-' || new.bytes_used);\n END;\n\n CREATE TRIGGER container_update BEFORE UPDATE ON container\n BEGIN\n SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');\n END;\n\n\n CREATE TRIGGER container_delete AFTER DELETE ON container\n BEGIN\n UPDATE account_stat\n SET container_count = container_count - (1 - old.deleted),\n object_count = object_count - old.object_count,\n bytes_used = bytes_used - old.bytes_used,\n hash = chexor(hash, old.name,\n old.put_timestamp || '-' ||\n old.delete_timestamp || '-' ||\n old.object_count || '-' || old.bytes_used);\n END;\n ")
[ "def", "prespi_create_container_table", "(", "self", ",", "conn", ")", ":", "conn", ".", "executescript", "(", "\"\\n CREATE TABLE container (\\n ROWID INTEGER PRIMARY KEY AUTOINCREMENT,\\n name TEXT,\\n put_timestamp TEXT,\\n delete_timestamp TEXT,\\n object_count INTEGER,\\n bytes_used INTEGER,\\n deleted INTEGER DEFAULT 0\\n );\\n\\n CREATE INDEX ix_container_deleted_name ON\\n container (deleted, name);\\n\\n CREATE TRIGGER container_insert AFTER INSERT ON container\\n BEGIN\\n UPDATE account_stat\\n SET container_count = container_count + (1 - new.deleted),\\n object_count = object_count + new.object_count,\\n bytes_used = bytes_used + new.bytes_used,\\n hash = chexor(hash, new.name,\\n new.put_timestamp || '-' ||\\n new.delete_timestamp || '-' ||\\n new.object_count || '-' || new.bytes_used);\\n END;\\n\\n CREATE TRIGGER container_update BEFORE UPDATE ON container\\n BEGIN\\n SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');\\n END;\\n\\n\\n CREATE TRIGGER container_delete AFTER DELETE ON container\\n BEGIN\\n UPDATE account_stat\\n SET container_count = container_count - (1 - old.deleted),\\n object_count = object_count - old.object_count,\\n bytes_used = bytes_used - old.bytes_used,\\n hash = chexor(hash, old.name,\\n old.put_timestamp || '-' ||\\n old.delete_timestamp || '-' ||\\n old.object_count || '-' || old.bytes_used);\\n END;\\n \"", ")" ]
copied from accountbroker before the storage_policy_index column was added; used for testing with testaccountbrokerbeforespi .
train
false
24,746
def to_vertex_cover(G, matching):
    """Return the minimum vertex cover for a maximum matching of bipartite G.

    Implements the König's-theorem construction: starting from the exposed
    (unmatched) vertices of the left set, find everything reachable by
    alternating paths; the cover is the unreachable left vertices plus the
    reachable right vertices.
    """
    (left, right) = bipartite_sets(G)
    # Left-side vertices not saturated by the matching.
    exposed_left = (set(G) - set(matching)) & left
    # Vertices connected to exposed_left via alternating paths.
    reachable = _connected_by_alternating_paths(G, matching, exposed_left)
    return (left - reachable) | (right & reachable)
[ "def", "to_vertex_cover", "(", "G", ",", "matching", ")", ":", "(", "L", ",", "R", ")", "=", "bipartite_sets", "(", "G", ")", "unmatched_vertices", "=", "(", "set", "(", "G", ")", "-", "set", "(", "matching", ")", ")", "U", "=", "(", "unmatched_vertices", "&", "L", ")", "Z", "=", "_connected_by_alternating_paths", "(", "G", ",", "matching", ",", "U", ")", "return", "(", "(", "L", "-", "Z", ")", "|", "(", "R", "&", "Z", ")", ")" ]
returns the minimum vertex cover corresponding to the given maximum matching of the bipartite graph g .
train
false
24,747
def _read_assoc_file(fid):
    """Read a BTi PDF assoc-file record from the open file handle `fid`."""
    record = {
        'file_id': read_int16(fid),
        'length': read_int16(fid),
    }
    # Skip 32 bytes (relative seek) before the trailing checksum.
    fid.seek(32, 1)
    record['checksum'] = read_int32(fid)
    return record
[ "def", "_read_assoc_file", "(", "fid", ")", ":", "out", "=", "{", "'file_id'", ":", "read_int16", "(", "fid", ")", ",", "'length'", ":", "read_int16", "(", "fid", ")", "}", "fid", ".", "seek", "(", "32", ",", "1", ")", "out", "[", "'checksum'", "]", "=", "read_int32", "(", "fid", ")", "return", "out" ]
read bti pdf assocfile .
train
false
24,748
def get_placeholder_cache(placeholder, lang, site_id, request):
    """Return the cached placeholder content, honouring its vary headers.

    Builds a soft cache key from the placeholder, language, site and
    request, then fetches it from Django's cache (None on a miss).
    """
    from django.core.cache import cache
    cache_key = _get_placeholder_cache_key(placeholder, lang, site_id, request, soft=True)
    return cache.get(cache_key)
[ "def", "get_placeholder_cache", "(", "placeholder", ",", "lang", ",", "site_id", ",", "request", ")", ":", "from", "django", ".", "core", ".", "cache", "import", "cache", "key", "=", "_get_placeholder_cache_key", "(", "placeholder", ",", "lang", ",", "site_id", ",", "request", ",", "soft", "=", "True", ")", "content", "=", "cache", ".", "get", "(", "key", ")", "return", "content" ]
returns the placeholder from cache respecting the placeholders vary headers .
train
false
24,749
def CDLXSIDEGAP3METHODS(barDs, count):
    # Upside/Downside Gap Three Methods candlestick pattern — thin wrapper
    # delegating OHLC extraction and the TA-Lib call to the shared helper.
    return call_talib_with_ohlc(barDs, count, talib.CDLXSIDEGAP3METHODS)
[ "def", "CDLXSIDEGAP3METHODS", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLXSIDEGAP3METHODS", ")" ]
upside/downside gap three methods .
train
false
24,750
def _make_menu_item(menu_item, title, **kw):
    """Build a navigation <li> element for a named route (e.g. breadcrumbs).

    Looks up `menu_item` in config['routes.named_routes'], overlays any
    keyword arguments, validates that every route parameter listed under
    the item's 'needed' key was supplied, and returns the literal <li>
    markup wrapping the link — with class "active" when the link matches
    the current page.

    Raises Exception when the menu item is unknown or a needed parameter
    is missing.
    """
    _menu_items = config['routes.named_routes']
    if (menu_item not in _menu_items):
        raise Exception(('menu item `%s` cannot be found' % menu_item))
    # Copy so keyword overrides don't mutate the shared config entry.
    item = copy.copy(_menu_items[menu_item])
    item.update(kw)
    active = _link_active(item)
    # 'needed' lists the route parameters callers must provide via kw.
    needed = item.pop('needed')
    for need in needed:
        if (need not in kw):
            raise Exception(('menu item `%s` need parameter `%s`' % (menu_item, need)))
    link = _link_to(title, menu_item, suppress_active_class=True, **item)
    if active:
        return ((literal('<li class="active">') + link) + literal('</li>'))
    return ((literal('<li>') + link) + literal('</li>'))
[ "def", "_make_menu_item", "(", "menu_item", ",", "title", ",", "**", "kw", ")", ":", "_menu_items", "=", "config", "[", "'routes.named_routes'", "]", "if", "(", "menu_item", "not", "in", "_menu_items", ")", ":", "raise", "Exception", "(", "(", "'menu item `%s` cannot be found'", "%", "menu_item", ")", ")", "item", "=", "copy", ".", "copy", "(", "_menu_items", "[", "menu_item", "]", ")", "item", ".", "update", "(", "kw", ")", "active", "=", "_link_active", "(", "item", ")", "needed", "=", "item", ".", "pop", "(", "'needed'", ")", "for", "need", "in", "needed", ":", "if", "(", "need", "not", "in", "kw", ")", ":", "raise", "Exception", "(", "(", "'menu item `%s` need parameter `%s`'", "%", "(", "menu_item", ",", "need", ")", ")", ")", "link", "=", "_link_to", "(", "title", ",", "menu_item", ",", "suppress_active_class", "=", "True", ",", "**", "item", ")", "if", "active", ":", "return", "(", "(", "literal", "(", "'<li class=\"active\">'", ")", "+", "link", ")", "+", "literal", "(", "'</li>'", ")", ")", "return", "(", "(", "literal", "(", "'<li>'", ")", "+", "link", ")", "+", "literal", "(", "'</li>'", ")", ")" ]
build a navigation item used for example breadcrumbs outputs <li><a href=" .
train
false
24,751
def django_auth_setup():
    """Prepare the connection to the Django authentication framework.

    Resolves the model named by external_auth:django:^model into the
    module-level DJANGO_AUTH_CLASS, then calls django.setup() on
    Django >= 1.7. Idempotent: returns immediately once the class is set.
    """
    global DJANGO_AUTH_CLASS
    if (DJANGO_AUTH_CLASS is not None):
        return
    if ('^model' in __opts__['external_auth']['django']):
        django_model_fullname = __opts__['external_auth']['django']['^model']
        django_model_name = django_model_fullname.split('.')[(-1)]
        django_module_name = '.'.join(django_model_fullname.split('.')[0:(-1)])
        # Bug fix: the __import__ result was previously discarded and the
        # class recovered with eval('django_auth_module.<name>') against a
        # name that was never bound (NameError at runtime). Bind the module
        # and use getattr(); this also removes eval on config-supplied text.
        django_auth_module = __import__(django_module_name, globals(), locals(), 'SaltExternalAuthModel')
        DJANGO_AUTH_CLASS = getattr(django_auth_module, django_model_name)
    if (django.VERSION >= (1, 7)):
        django.setup()
[ "def", "django_auth_setup", "(", ")", ":", "global", "DJANGO_AUTH_CLASS", "if", "(", "DJANGO_AUTH_CLASS", "is", "not", "None", ")", ":", "return", "if", "(", "'^model'", "in", "__opts__", "[", "'external_auth'", "]", "[", "'django'", "]", ")", ":", "django_model_fullname", "=", "__opts__", "[", "'external_auth'", "]", "[", "'django'", "]", "[", "'^model'", "]", "django_model_name", "=", "django_model_fullname", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", "django_module_name", "=", "'.'", ".", "join", "(", "django_model_fullname", ".", "split", "(", "'.'", ")", "[", "0", ":", "(", "-", "1", ")", "]", ")", "__import__", "(", "django_module_name", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "'SaltExternalAuthModel'", ")", "DJANGO_AUTH_CLASS_str", "=", "'django_auth_module.{0}'", ".", "format", "(", "django_model_name", ")", "DJANGO_AUTH_CLASS", "=", "eval", "(", "DJANGO_AUTH_CLASS_str", ")", "if", "(", "django", ".", "VERSION", ">=", "(", "1", ",", "7", ")", ")", ":", "django", ".", "setup", "(", ")" ]
prepare the connection to the django authentication framework .
train
true
24,754
def _rows_page_start(iterator, page, response): total_rows = response.get('totalRows') if (total_rows is not None): total_rows = int(total_rows) iterator.total_rows = total_rows
[ "def", "_rows_page_start", "(", "iterator", ",", "page", ",", "response", ")", ":", "total_rows", "=", "response", ".", "get", "(", "'totalRows'", ")", "if", "(", "total_rows", "is", "not", "None", ")", ":", "total_rows", "=", "int", "(", "total_rows", ")", "iterator", ".", "total_rows", "=", "total_rows" ]
grab total rows after a :class:~google .
train
true
24,755
def _get_human_name(problem_class): return getattr(problem_class, 'human_name', problem_class.__name__)
[ "def", "_get_human_name", "(", "problem_class", ")", ":", "return", "getattr", "(", "problem_class", ",", "'human_name'", ",", "problem_class", ".", "__name__", ")" ]
get the human-friendly name for a problem type .
train
false
24,756
def concatenate_axes(arrays, axes):
    """Recursively np.concatenate a nested list of arrays along `axes`.

    The nesting depth of `arrays` must equal len(axes); leaf-array
    dimensions beyond max(axes) are preserved as trailing dimensions.

    Raises ValueError when the axes list and nesting depth disagree.
    """
    if (len(axes) != ndimlist(arrays)):
        raise ValueError('Length of axes should equal depth of nested arrays')
    # Leaf dimensions past the largest concatenation axis are carried through.
    extradims = max(0, (deepfirst(arrays).ndim - (max(axes) + 1)))
    return concatenate3(transposelist(arrays, axes, extradims=extradims))
[ "def", "concatenate_axes", "(", "arrays", ",", "axes", ")", ":", "if", "(", "len", "(", "axes", ")", "!=", "ndimlist", "(", "arrays", ")", ")", ":", "raise", "ValueError", "(", "'Length of axes should equal depth of nested arrays'", ")", "extradims", "=", "max", "(", "0", ",", "(", "deepfirst", "(", "arrays", ")", ".", "ndim", "-", "(", "max", "(", "axes", ")", "+", "1", ")", ")", ")", "return", "concatenate3", "(", "transposelist", "(", "arrays", ",", "axes", ",", "extradims", "=", "extradims", ")", ")" ]
recursively call np .
train
false
24,757
def pwnstallerGenerateUtilsH(methodSubs):
    """Generate an obfuscated version of Pwnstaller's utils.h.

    Emits the launcher's C prototypes with freshly randomized parameter
    names, then applies the name substitutions in `methodSubs`.
    Output is non-deterministic due to helpers.randomString().
    """
    code = '#include "launch.h"\n'
    code += 'void init_launcher(void);\n'
    code += ('int get_thisfile(char *%s, const char *%s);\n' % (helpers.randomString(), helpers.randomString()))
    code += ('int CreateActContext(char *%s, char *%s);\n' % (helpers.randomString(), helpers.randomString()))
    code += 'void ReleaseActContext(void);\n'
    code += ('int get_thisfilew(LPWSTR %s);\n' % helpers.randomString())
    code += ('void get_homepath(char *%s, const char *%s);\n' % (helpers.randomString(), helpers.randomString()))
    code += ('void get_archivefile(char *%s, const char *%s);\n' % (helpers.randomString(), helpers.randomString()))
    code += ('int set_environment(const ARCHIVE_STATUS *%s);\n' % helpers.randomString())
    code += ('int spawn(LPWSTR %s);\n' % helpers.randomString())
    for m in methodSubs:
        # Each m is an (original_name, replacement_name) pair.
        code = code.replace(m[0], m[1])
    return code
[ "def", "pwnstallerGenerateUtilsH", "(", "methodSubs", ")", ":", "code", "=", "'#include \"launch.h\"\\n'", "code", "+=", "'void init_launcher(void);\\n'", "code", "+=", "(", "'int get_thisfile(char *%s, const char *%s);\\n'", "%", "(", "helpers", ".", "randomString", "(", ")", ",", "helpers", ".", "randomString", "(", ")", ")", ")", "code", "+=", "(", "'int CreateActContext(char *%s, char *%s);\\n'", "%", "(", "helpers", ".", "randomString", "(", ")", ",", "helpers", ".", "randomString", "(", ")", ")", ")", "code", "+=", "'void ReleaseActContext(void);\\n'", "code", "+=", "(", "'int get_thisfilew(LPWSTR %s);\\n'", "%", "helpers", ".", "randomString", "(", ")", ")", "code", "+=", "(", "'void get_homepath(char *%s, const char *%s);\\n'", "%", "(", "helpers", ".", "randomString", "(", ")", ",", "helpers", ".", "randomString", "(", ")", ")", ")", "code", "+=", "(", "'void get_archivefile(char *%s, const char *%s);\\n'", "%", "(", "helpers", ".", "randomString", "(", ")", ",", "helpers", ".", "randomString", "(", ")", ")", ")", "code", "+=", "(", "'int set_environment(const ARCHIVE_STATUS *%s);\\n'", "%", "helpers", ".", "randomString", "(", ")", ")", "code", "+=", "(", "'int spawn(LPWSTR %s);\\n'", "%", "helpers", ".", "randomString", "(", ")", ")", "for", "m", "in", "methodSubs", ":", "code", "=", "code", ".", "replace", "(", "m", "[", "0", "]", ",", "m", "[", "1", "]", ")", "return", "code" ]
generate an obfuscated version of pwnstallers utils .
train
false
24,758
def load_module(name, file, filename, details): (suffix, mode, type_) = details if (mode and ((not mode.startswith(('r', 'U'))) or ('+' in mode))): raise ValueError('invalid file open mode {!r}'.format(mode)) elif ((file is None) and (type_ in {PY_SOURCE, PY_COMPILED})): msg = 'file object required for import (type code {})'.format(type_) raise ValueError(msg) elif (type_ == PY_SOURCE): return load_source(name, filename, file) elif (type_ == PY_COMPILED): return load_compiled(name, filename, file) elif ((type_ == C_EXTENSION) and (load_dynamic is not None)): if (file is None): with open(filename, 'rb') as opened_file: return load_dynamic(name, filename, opened_file) else: return load_dynamic(name, filename, file) elif (type_ == PKG_DIRECTORY): return load_package(name, filename) elif (type_ == C_BUILTIN): return init_builtin(name) elif (type_ == PY_FROZEN): return init_frozen(name) else: msg = "Don't know how to import {} (type code {})".format(name, type_) raise ImportError(msg, name=name)
[ "def", "load_module", "(", "name", ",", "file", ",", "filename", ",", "details", ")", ":", "(", "suffix", ",", "mode", ",", "type_", ")", "=", "details", "if", "(", "mode", "and", "(", "(", "not", "mode", ".", "startswith", "(", "(", "'r'", ",", "'U'", ")", ")", ")", "or", "(", "'+'", "in", "mode", ")", ")", ")", ":", "raise", "ValueError", "(", "'invalid file open mode {!r}'", ".", "format", "(", "mode", ")", ")", "elif", "(", "(", "file", "is", "None", ")", "and", "(", "type_", "in", "{", "PY_SOURCE", ",", "PY_COMPILED", "}", ")", ")", ":", "msg", "=", "'file object required for import (type code {})'", ".", "format", "(", "type_", ")", "raise", "ValueError", "(", "msg", ")", "elif", "(", "type_", "==", "PY_SOURCE", ")", ":", "return", "load_source", "(", "name", ",", "filename", ",", "file", ")", "elif", "(", "type_", "==", "PY_COMPILED", ")", ":", "return", "load_compiled", "(", "name", ",", "filename", ",", "file", ")", "elif", "(", "(", "type_", "==", "C_EXTENSION", ")", "and", "(", "load_dynamic", "is", "not", "None", ")", ")", ":", "if", "(", "file", "is", "None", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "opened_file", ":", "return", "load_dynamic", "(", "name", ",", "filename", ",", "opened_file", ")", "else", ":", "return", "load_dynamic", "(", "name", ",", "filename", ",", "file", ")", "elif", "(", "type_", "==", "PKG_DIRECTORY", ")", ":", "return", "load_package", "(", "name", ",", "filename", ")", "elif", "(", "type_", "==", "C_BUILTIN", ")", ":", "return", "init_builtin", "(", "name", ")", "elif", "(", "type_", "==", "PY_FROZEN", ")", ":", "return", "init_frozen", "(", "name", ")", "else", ":", "msg", "=", "\"Don't know how to import {} (type code {})\"", ".", "format", "(", "name", ",", "type_", ")", "raise", "ImportError", "(", "msg", ",", "name", "=", "name", ")" ]
imports a module given its name and returns a handle to it .
train
false
24,760
def rotate_async(image_data, degrees, output_encoding=PNG, quality=None, correct_orientation=UNCHANGED_ORIENTATION, rpc=None, transparent_substitution_rgb=None): image = Image(image_data) image.rotate(degrees) image.set_correct_orientation(correct_orientation) return image.execute_transforms_async(output_encoding=output_encoding, quality=quality, rpc=rpc, transparent_substitution_rgb=transparent_substitution_rgb)
[ "def", "rotate_async", "(", "image_data", ",", "degrees", ",", "output_encoding", "=", "PNG", ",", "quality", "=", "None", ",", "correct_orientation", "=", "UNCHANGED_ORIENTATION", ",", "rpc", "=", "None", ",", "transparent_substitution_rgb", "=", "None", ")", ":", "image", "=", "Image", "(", "image_data", ")", "image", ".", "rotate", "(", "degrees", ")", "image", ".", "set_correct_orientation", "(", "correct_orientation", ")", "return", "image", ".", "execute_transforms_async", "(", "output_encoding", "=", "output_encoding", ",", "quality", "=", "quality", ",", "rpc", "=", "rpc", ",", "transparent_substitution_rgb", "=", "transparent_substitution_rgb", ")" ]
rotate a given image a given number of degrees clockwise - async version .
train
false
24,761
def test_special_cases(): import foo AreEqual(foo.Foo().BAR, 4) for partial_ns in ['ZERO', 'ONE', 'a', 'UNDERSCORE', 'WHITESPACE', 'BIGFILENAME']: mod_name = ('foo' + partial_ns) exec ('import ' + mod_name) exec (('AreEqual(' + mod_name) + '.Foo().BAR, 1)')
[ "def", "test_special_cases", "(", ")", ":", "import", "foo", "AreEqual", "(", "foo", ".", "Foo", "(", ")", ".", "BAR", ",", "4", ")", "for", "partial_ns", "in", "[", "'ZERO'", ",", "'ONE'", ",", "'a'", ",", "'UNDERSCORE'", ",", "'WHITESPACE'", ",", "'BIGFILENAME'", "]", ":", "mod_name", "=", "(", "'foo'", "+", "partial_ns", ")", "exec", "(", "'import '", "+", "mod_name", ")", "exec", "(", "(", "'AreEqual('", "+", "mod_name", ")", "+", "'.Foo().BAR, 1)'", ")" ]
extraordinary cases that should still be supported by ip .
train
false
24,762
def doctest_skip_parser(func): lines = func.__doc__.split('\n') new_lines = [] for line in lines: match = SKIP_RE.match(line) if (match is None): new_lines.append(line) continue (code, space, expr) = match.groups() try: if eval(expr, func.__globals__): code = ((code + space) + '# doctest: +SKIP') except AttributeError: if eval(expr, func.__init__.__globals__): code = ((code + space) + '# doctest: +SKIP') new_lines.append(code) func.__doc__ = '\n'.join(new_lines) return func
[ "def", "doctest_skip_parser", "(", "func", ")", ":", "lines", "=", "func", ".", "__doc__", ".", "split", "(", "'\\n'", ")", "new_lines", "=", "[", "]", "for", "line", "in", "lines", ":", "match", "=", "SKIP_RE", ".", "match", "(", "line", ")", "if", "(", "match", "is", "None", ")", ":", "new_lines", ".", "append", "(", "line", ")", "continue", "(", "code", ",", "space", ",", "expr", ")", "=", "match", ".", "groups", "(", ")", "try", ":", "if", "eval", "(", "expr", ",", "func", ".", "__globals__", ")", ":", "code", "=", "(", "(", "code", "+", "space", ")", "+", "'# doctest: +SKIP'", ")", "except", "AttributeError", ":", "if", "eval", "(", "expr", ",", "func", ".", "__init__", ".", "__globals__", ")", ":", "code", "=", "(", "(", "code", "+", "space", ")", "+", "'# doctest: +SKIP'", ")", "new_lines", ".", "append", "(", "code", ")", "func", ".", "__doc__", "=", "'\\n'", ".", "join", "(", "new_lines", ")", "return", "func" ]
decorator replaces custom skip test markup in doctests say a function has a docstring:: .
train
false
24,764
def extract_params(raw): if (isinstance(raw, bytes_type) or isinstance(raw, unicode_type)): try: params = urldecode(raw) except ValueError: params = None elif hasattr(raw, u'__iter__'): try: dict(raw) except ValueError: params = None except TypeError: params = None else: params = list((raw.items() if isinstance(raw, dict) else raw)) params = decode_params_utf8(params) else: params = None return params
[ "def", "extract_params", "(", "raw", ")", ":", "if", "(", "isinstance", "(", "raw", ",", "bytes_type", ")", "or", "isinstance", "(", "raw", ",", "unicode_type", ")", ")", ":", "try", ":", "params", "=", "urldecode", "(", "raw", ")", "except", "ValueError", ":", "params", "=", "None", "elif", "hasattr", "(", "raw", ",", "u'__iter__'", ")", ":", "try", ":", "dict", "(", "raw", ")", "except", "ValueError", ":", "params", "=", "None", "except", "TypeError", ":", "params", "=", "None", "else", ":", "params", "=", "list", "(", "(", "raw", ".", "items", "(", ")", "if", "isinstance", "(", "raw", ",", "dict", ")", "else", "raw", ")", ")", "params", "=", "decode_params_utf8", "(", "params", ")", "else", ":", "params", "=", "None", "return", "params" ]
extract request params .
train
true
24,765
def expanding_count(arg, freq=None): return ensure_compat('expanding', 'count', arg, freq=freq)
[ "def", "expanding_count", "(", "arg", ",", "freq", "=", "None", ")", ":", "return", "ensure_compat", "(", "'expanding'", ",", "'count'", ",", "arg", ",", "freq", "=", "freq", ")" ]
expanding count of number of non-nan observations .
train
false
24,766
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): target_name = spec.get('target_name') if ((target_extras is not None) and re.search(target_extras, target_name)): return True if ((int(spec.get('mac_xctest_bundle', 0)) != 0) or ((spec.get('type', '') == 'executable') and (spec.get('product_extension', '') != 'bundle'))): if (executable_target_pattern is not None): if (not re.search(executable_target_pattern, target_name)): return False return True return False
[ "def", "IsValidTargetForWrapper", "(", "target_extras", ",", "executable_target_pattern", ",", "spec", ")", ":", "target_name", "=", "spec", ".", "get", "(", "'target_name'", ")", "if", "(", "(", "target_extras", "is", "not", "None", ")", "and", "re", ".", "search", "(", "target_extras", ",", "target_name", ")", ")", ":", "return", "True", "if", "(", "(", "int", "(", "spec", ".", "get", "(", "'mac_xctest_bundle'", ",", "0", ")", ")", "!=", "0", ")", "or", "(", "(", "spec", ".", "get", "(", "'type'", ",", "''", ")", "==", "'executable'", ")", "and", "(", "spec", ".", "get", "(", "'product_extension'", ",", "''", ")", "!=", "'bundle'", ")", ")", ")", ":", "if", "(", "executable_target_pattern", "is", "not", "None", ")", ":", "if", "(", "not", "re", ".", "search", "(", "executable_target_pattern", ",", "target_name", ")", ")", ":", "return", "False", "return", "True", "return", "False" ]
limit targets for xcode wrapper .
train
false
24,767
def points_in_poly(points, verts): return _points_in_poly(points, verts)
[ "def", "points_in_poly", "(", "points", ",", "verts", ")", ":", "return", "_points_in_poly", "(", "points", ",", "verts", ")" ]
test whether points lie inside a polygon .
train
false
24,768
def delval(filename, keyword, *args, **kwargs): if ('do_not_scale_image_data' not in kwargs): kwargs['do_not_scale_image_data'] = True closed = fileobj_closed(filename) (hdulist, extidx) = _getext(filename, 'update', *args, **kwargs) try: del hdulist[extidx].header[keyword] finally: hdulist._close(closed=closed)
[ "def", "delval", "(", "filename", ",", "keyword", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "'do_not_scale_image_data'", "not", "in", "kwargs", ")", ":", "kwargs", "[", "'do_not_scale_image_data'", "]", "=", "True", "closed", "=", "fileobj_closed", "(", "filename", ")", "(", "hdulist", ",", "extidx", ")", "=", "_getext", "(", "filename", ",", "'update'", ",", "*", "args", ",", "**", "kwargs", ")", "try", ":", "del", "hdulist", "[", "extidx", "]", ".", "header", "[", "keyword", "]", "finally", ":", "hdulist", ".", "_close", "(", "closed", "=", "closed", ")" ]
delete all instances of keyword from a header in a fits file .
train
false
24,769
def setup_mainloop(extension, iteration_scheme=None): features = [numpy.array(f, dtype=theano.config.floatX) for f in ([[1, 2]] * 101)] dataset = IterableDataset(dict(features=features)) data_stream = DataStream(dataset, iteration_scheme=iteration_scheme) W = shared_floatx([0, 0], name='W') x = tensor.vector('features') cost = tensor.sum(((x - W) ** 2)) cost.name = 'cost' algorithm = GradientDescent(cost=cost, parameters=[W], step_rule=Scale(0.001)) main_loop = MainLoop(model=None, data_stream=data_stream, algorithm=algorithm, extensions=[FinishAfter(after_n_epochs=1), extension]) return main_loop
[ "def", "setup_mainloop", "(", "extension", ",", "iteration_scheme", "=", "None", ")", ":", "features", "=", "[", "numpy", ".", "array", "(", "f", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", "for", "f", "in", "(", "[", "[", "1", ",", "2", "]", "]", "*", "101", ")", "]", "dataset", "=", "IterableDataset", "(", "dict", "(", "features", "=", "features", ")", ")", "data_stream", "=", "DataStream", "(", "dataset", ",", "iteration_scheme", "=", "iteration_scheme", ")", "W", "=", "shared_floatx", "(", "[", "0", ",", "0", "]", ",", "name", "=", "'W'", ")", "x", "=", "tensor", ".", "vector", "(", "'features'", ")", "cost", "=", "tensor", ".", "sum", "(", "(", "(", "x", "-", "W", ")", "**", "2", ")", ")", "cost", ".", "name", "=", "'cost'", "algorithm", "=", "GradientDescent", "(", "cost", "=", "cost", ",", "parameters", "=", "[", "W", "]", ",", "step_rule", "=", "Scale", "(", "0.001", ")", ")", "main_loop", "=", "MainLoop", "(", "model", "=", "None", ",", "data_stream", "=", "data_stream", ",", "algorithm", "=", "algorithm", ",", "extensions", "=", "[", "FinishAfter", "(", "after_n_epochs", "=", "1", ")", ",", "extension", "]", ")", "return", "main_loop" ]
set up a simple main loop for progress bar tests .
train
false
24,770
def get_info_on_inputs(named_inputs, n_unnamed_inputs): n_named_inputs = len(named_inputs) def get_plural(n): if (n > 1): return 's' else: return '' if (n_named_inputs == 0): if (n_unnamed_inputs == 0): msg = 'The function is supposed to have no input.' elif (n_unnamed_inputs == 1): msg = "The function has a single input variable which has no name, and thus cannot be assigned through a keyword argument (use 'name=...' in a Variable's constructor to give it a name)." else: msg = ("The function has %s inputs, but none of them is named, and thus they cannot be assigned through keyword arguments (use 'name=...' in a Variable's constructor to give it a name)." % n_unnamed_inputs) elif (n_unnamed_inputs == 0): msg = ('The function has %s named input%s (%s).' % (n_named_inputs, get_plural(n_named_inputs), ', '.join(named_inputs))) else: msg = ("The function has %s named input%s (%s), and %s unnamed input%s which thus cannot be accessed through keyword argument%s (use 'name=...' in a variable's constructor to give it a name)." % (n_named_inputs, get_plural(n_named_inputs), ', '.join(named_inputs), n_unnamed_inputs, get_plural(n_unnamed_inputs), get_plural(n_unnamed_inputs))) return msg
[ "def", "get_info_on_inputs", "(", "named_inputs", ",", "n_unnamed_inputs", ")", ":", "n_named_inputs", "=", "len", "(", "named_inputs", ")", "def", "get_plural", "(", "n", ")", ":", "if", "(", "n", ">", "1", ")", ":", "return", "'s'", "else", ":", "return", "''", "if", "(", "n_named_inputs", "==", "0", ")", ":", "if", "(", "n_unnamed_inputs", "==", "0", ")", ":", "msg", "=", "'The function is supposed to have no input.'", "elif", "(", "n_unnamed_inputs", "==", "1", ")", ":", "msg", "=", "\"The function has a single input variable which has no name, and thus cannot be assigned through a keyword argument (use 'name=...' in a Variable's constructor to give it a name).\"", "else", ":", "msg", "=", "(", "\"The function has %s inputs, but none of them is named, and thus they cannot be assigned through keyword arguments (use 'name=...' in a Variable's constructor to give it a name).\"", "%", "n_unnamed_inputs", ")", "elif", "(", "n_unnamed_inputs", "==", "0", ")", ":", "msg", "=", "(", "'The function has %s named input%s (%s).'", "%", "(", "n_named_inputs", ",", "get_plural", "(", "n_named_inputs", ")", ",", "', '", ".", "join", "(", "named_inputs", ")", ")", ")", "else", ":", "msg", "=", "(", "\"The function has %s named input%s (%s), and %s unnamed input%s which thus cannot be accessed through keyword argument%s (use 'name=...' in a variable's constructor to give it a name).\"", "%", "(", "n_named_inputs", ",", "get_plural", "(", "n_named_inputs", ")", ",", "', '", ".", "join", "(", "named_inputs", ")", ",", "n_unnamed_inputs", ",", "get_plural", "(", "n_unnamed_inputs", ")", ",", "get_plural", "(", "n_unnamed_inputs", ")", ")", ")", "return", "msg" ]
return a human-readable description of named and un-named inputs .
train
false
24,771
def get_alphabetical_topics(course_module): return sorted(course_module.teams_topics, key=(lambda t: t['name'].lower()))
[ "def", "get_alphabetical_topics", "(", "course_module", ")", ":", "return", "sorted", "(", "course_module", ".", "teams_topics", ",", "key", "=", "(", "lambda", "t", ":", "t", "[", "'name'", "]", ".", "lower", "(", ")", ")", ")" ]
return a list of team topics sorted alphabetically .
train
false
24,772
def get_color(colour, return_formatted=True): colour = colour.lower() if (colour not in IRC_COLOUR_DICT): raise KeyError("The colour '{}' is not in the list of available colours.".format(colour)) if (colour == 'random'): rand = randint(0, 15) if (rand < 10): rand = ('0' + str(rand)) rand = str(rand) if return_formatted: return (get_format('colour') + rand) return rand if return_formatted: return (get_format('colour') + IRC_COLOUR_DICT[colour]) return IRC_COLOUR_DICT[colour]
[ "def", "get_color", "(", "colour", ",", "return_formatted", "=", "True", ")", ":", "colour", "=", "colour", ".", "lower", "(", ")", "if", "(", "colour", "not", "in", "IRC_COLOUR_DICT", ")", ":", "raise", "KeyError", "(", "\"The colour '{}' is not in the list of available colours.\"", ".", "format", "(", "colour", ")", ")", "if", "(", "colour", "==", "'random'", ")", ":", "rand", "=", "randint", "(", "0", ",", "15", ")", "if", "(", "rand", "<", "10", ")", ":", "rand", "=", "(", "'0'", "+", "str", "(", "rand", ")", ")", "rand", "=", "str", "(", "rand", ")", "if", "return_formatted", ":", "return", "(", "get_format", "(", "'colour'", ")", "+", "rand", ")", "return", "rand", "if", "return_formatted", ":", "return", "(", "get_format", "(", "'colour'", ")", "+", "IRC_COLOUR_DICT", "[", "colour", "]", ")", "return", "IRC_COLOUR_DICT", "[", "colour", "]" ]
gets a color by looking up its name or initializing with name+data .
train
false
24,773
def test_completion_for_zsh(script): zsh_completion = 'function _pip_completion {\n local words cword\n read -Ac words\n read -cn cword\n reply=( $( COMP_WORDS="$words[*]" \\\n COMP_CWORD=$(( cword-1 )) \\\n PIP_AUTO_COMPLETE=1 $words[1] ) )\n}\ncompctl -K _pip_completion pip' result = script.pip('completion', '--zsh') assert (zsh_completion in result.stdout), 'zsh completion is wrong'
[ "def", "test_completion_for_zsh", "(", "script", ")", ":", "zsh_completion", "=", "'function _pip_completion {\\n local words cword\\n read -Ac words\\n read -cn cword\\n reply=( $( COMP_WORDS=\"$words[*]\" \\\\\\n COMP_CWORD=$(( cword-1 )) \\\\\\n PIP_AUTO_COMPLETE=1 $words[1] ) )\\n}\\ncompctl -K _pip_completion pip'", "result", "=", "script", ".", "pip", "(", "'completion'", ",", "'--zsh'", ")", "assert", "(", "zsh_completion", "in", "result", ".", "stdout", ")", ",", "'zsh completion is wrong'" ]
test getting completion for zsh shell .
train
false
24,774
def _end_of_set_index(string, start_index): length = len(string) closing_index = start_index if ((closing_index < length) and (string[closing_index] == '!')): closing_index += 1 if (closing_index < length): closing_index += 1 while ((closing_index < length) and (string[closing_index] != ']')): closing_index += 1 return closing_index
[ "def", "_end_of_set_index", "(", "string", ",", "start_index", ")", ":", "length", "=", "len", "(", "string", ")", "closing_index", "=", "start_index", "if", "(", "(", "closing_index", "<", "length", ")", "and", "(", "string", "[", "closing_index", "]", "==", "'!'", ")", ")", ":", "closing_index", "+=", "1", "if", "(", "closing_index", "<", "length", ")", ":", "closing_index", "+=", "1", "while", "(", "(", "closing_index", "<", "length", ")", "and", "(", "string", "[", "closing_index", "]", "!=", "']'", ")", ")", ":", "closing_index", "+=", "1", "return", "closing_index" ]
returns the position of the appropriate closing bracket for a glob set in string .
train
false
24,776
def acceptable_mime_type(accept_patterns, mime_type): if ('/' not in mime_type): raise ValueError(('Invalid MIME type: "%s"' % mime_type)) unsupported_patterns = [p for p in accept_patterns if (';' in p)] if unsupported_patterns: raise ValueError(('MIME patterns with parameter unsupported: "%s"' % ', '.join(unsupported_patterns))) def _match(pattern, mime_type): 'Return True iff mime_type is acceptable for pattern.' return all(((accept in ('*', provided)) for (accept, provided) in zip(pattern.split('/'), mime_type.split('/')))) return any((_match(pattern, mime_type) for pattern in accept_patterns))
[ "def", "acceptable_mime_type", "(", "accept_patterns", ",", "mime_type", ")", ":", "if", "(", "'/'", "not", "in", "mime_type", ")", ":", "raise", "ValueError", "(", "(", "'Invalid MIME type: \"%s\"'", "%", "mime_type", ")", ")", "unsupported_patterns", "=", "[", "p", "for", "p", "in", "accept_patterns", "if", "(", "';'", "in", "p", ")", "]", "if", "unsupported_patterns", ":", "raise", "ValueError", "(", "(", "'MIME patterns with parameter unsupported: \"%s\"'", "%", "', '", ".", "join", "(", "unsupported_patterns", ")", ")", ")", "def", "_match", "(", "pattern", ",", "mime_type", ")", ":", "return", "all", "(", "(", "(", "accept", "in", "(", "'*'", ",", "provided", ")", ")", "for", "(", "accept", ",", "provided", ")", "in", "zip", "(", "pattern", ".", "split", "(", "'/'", ")", ",", "mime_type", ".", "split", "(", "'/'", ")", ")", ")", ")", "return", "any", "(", "(", "_match", "(", "pattern", ",", "mime_type", ")", "for", "pattern", "in", "accept_patterns", ")", ")" ]
check that mime_type matches one of accept_patterns .
train
false
24,777
def pwRecode(epw, oldPassword, newPassword): if (epw == u''): return epw if (newPassword == u''): (plaintext, ok) = pwDecrypt(epw) return (pwEncode(plaintext) if ok else epw) elif (oldPassword == u''): plaintext = pwDecode(epw) (cipher, ok) = pwEncrypt(plaintext, newPassword) return (cipher if ok else epw) else: (npw, ok) = pwReencrypt(epw, oldPassword, newPassword) return (npw if ok else epw)
[ "def", "pwRecode", "(", "epw", ",", "oldPassword", ",", "newPassword", ")", ":", "if", "(", "epw", "==", "u''", ")", ":", "return", "epw", "if", "(", "newPassword", "==", "u''", ")", ":", "(", "plaintext", ",", "ok", ")", "=", "pwDecrypt", "(", "epw", ")", "return", "(", "pwEncode", "(", "plaintext", ")", "if", "ok", "else", "epw", ")", "elif", "(", "oldPassword", "==", "u''", ")", ":", "plaintext", "=", "pwDecode", "(", "epw", ")", "(", "cipher", ",", "ok", ")", "=", "pwEncrypt", "(", "plaintext", ",", "newPassword", ")", "return", "(", "cipher", "if", "ok", "else", "epw", ")", "else", ":", "(", "npw", ",", "ok", ")", "=", "pwReencrypt", "(", "epw", ",", "oldPassword", ",", "newPassword", ")", "return", "(", "npw", "if", "ok", "else", "epw", ")" ]
module function to re-encode a password .
train
false
24,781
def test_area_base_values(test_data): x = pd.Series(test_data.array_data[0]) y = pd.Series(test_data.array_data[1]) ag = AreaGlyph(x=x, y=y) assert (ag.source.data['y_values'][0][0] == 0) assert (ag.source.data['y_values'][0][(-1)] == 0)
[ "def", "test_area_base_values", "(", "test_data", ")", ":", "x", "=", "pd", ".", "Series", "(", "test_data", ".", "array_data", "[", "0", "]", ")", "y", "=", "pd", ".", "Series", "(", "test_data", ".", "array_data", "[", "1", "]", ")", "ag", "=", "AreaGlyph", "(", "x", "=", "x", ",", "y", "=", "y", ")", "assert", "(", "ag", ".", "source", ".", "data", "[", "'y_values'", "]", "[", "0", "]", "[", "0", "]", "==", "0", ")", "assert", "(", "ag", ".", "source", ".", "data", "[", "'y_values'", "]", "[", "0", "]", "[", "(", "-", "1", ")", "]", "==", "0", ")" ]
test creating chart data source from array-like list data .
train
false
24,783
def set_color_scheme(name, color_scheme, replace=True): section = 'color_schemes' names = CONF.get('color_schemes', 'names', []) for key in sh.COLOR_SCHEME_KEYS: option = ('%s/%s' % (name, key)) value = CONF.get(section, option, default=None) if ((value is None) or replace or (name not in names)): CONF.set(section, option, color_scheme[key]) names.append(to_text_string(name)) CONF.set(section, 'names', sorted(list(set(names))))
[ "def", "set_color_scheme", "(", "name", ",", "color_scheme", ",", "replace", "=", "True", ")", ":", "section", "=", "'color_schemes'", "names", "=", "CONF", ".", "get", "(", "'color_schemes'", ",", "'names'", ",", "[", "]", ")", "for", "key", "in", "sh", ".", "COLOR_SCHEME_KEYS", ":", "option", "=", "(", "'%s/%s'", "%", "(", "name", ",", "key", ")", ")", "value", "=", "CONF", ".", "get", "(", "section", ",", "option", ",", "default", "=", "None", ")", "if", "(", "(", "value", "is", "None", ")", "or", "replace", "or", "(", "name", "not", "in", "names", ")", ")", ":", "CONF", ".", "set", "(", "section", ",", "option", ",", "color_scheme", "[", "key", "]", ")", "names", ".", "append", "(", "to_text_string", "(", "name", ")", ")", "CONF", ".", "set", "(", "section", ",", "'names'", ",", "sorted", "(", "list", "(", "set", "(", "names", ")", ")", ")", ")" ]
set syntax color scheme .
train
true
24,784
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) @utils.arg('address', metavar='<address>', help=_('IP Address.')) @utils.arg('--fixed-address', metavar='<fixed_address>', default=None, help=_('Fixed IP Address to associate with.')) def do_floating_ip_associate(cs, args): _associate_floating_ip(cs, args)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'address'", ",", "metavar", "=", "'<address>'", ",", "help", "=", "_", "(", "'IP Address.'", ")", ")", "@", "utils", ".", "arg", "(", "'--fixed-address'", ",", "metavar", "=", "'<fixed_address>'", ",", "default", "=", "None", ",", "help", "=", "_", "(", "'Fixed IP Address to associate with.'", ")", ")", "def", "do_floating_ip_associate", "(", "cs", ",", "args", ")", ":", "_associate_floating_ip", "(", "cs", ",", "args", ")" ]
associate a floating ip address to a server .
train
false
24,788
def save_instance_type_info(metadata, instance_type, prefix=''): for key in system_metadata_instance_type_props.keys(): to_key = ('%sinstance_type_%s' % (prefix, key)) metadata[to_key] = instance_type[key] return metadata
[ "def", "save_instance_type_info", "(", "metadata", ",", "instance_type", ",", "prefix", "=", "''", ")", ":", "for", "key", "in", "system_metadata_instance_type_props", ".", "keys", "(", ")", ":", "to_key", "=", "(", "'%sinstance_type_%s'", "%", "(", "prefix", ",", "key", ")", ")", "metadata", "[", "to_key", "]", "=", "instance_type", "[", "key", "]", "return", "metadata" ]
save properties from instance_type into instances system_metadata .
train
false
24,789
@pytest.mark.skipif('not HAS_BEAUTIFUL_SOUP') def test_missing_data(): table_in = ['<table>', '<tr><th>A</th></tr>', '<tr><td></td></tr>', '<tr><td>1</td></tr>', '</table>'] dat = Table.read(table_in, format='ascii.html') assert (dat.masked is True) assert np.all((dat['A'].mask == [True, False])) assert (dat['A'].dtype.kind == 'i') table_in = ['<table>', '<tr><th>A</th></tr>', '<tr><td>...</td></tr>', '<tr><td>1</td></tr>', '</table>'] dat = Table.read(table_in, format='ascii.html', fill_values=[('...', '0')]) assert (dat.masked is True) assert np.all((dat['A'].mask == [True, False])) assert (dat['A'].dtype.kind == 'i')
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "'not HAS_BEAUTIFUL_SOUP'", ")", "def", "test_missing_data", "(", ")", ":", "table_in", "=", "[", "'<table>'", ",", "'<tr><th>A</th></tr>'", ",", "'<tr><td></td></tr>'", ",", "'<tr><td>1</td></tr>'", ",", "'</table>'", "]", "dat", "=", "Table", ".", "read", "(", "table_in", ",", "format", "=", "'ascii.html'", ")", "assert", "(", "dat", ".", "masked", "is", "True", ")", "assert", "np", ".", "all", "(", "(", "dat", "[", "'A'", "]", ".", "mask", "==", "[", "True", ",", "False", "]", ")", ")", "assert", "(", "dat", "[", "'A'", "]", ".", "dtype", ".", "kind", "==", "'i'", ")", "table_in", "=", "[", "'<table>'", ",", "'<tr><th>A</th></tr>'", ",", "'<tr><td>...</td></tr>'", ",", "'<tr><td>1</td></tr>'", ",", "'</table>'", "]", "dat", "=", "Table", ".", "read", "(", "table_in", ",", "format", "=", "'ascii.html'", ",", "fill_values", "=", "[", "(", "'...'", ",", "'0'", ")", "]", ")", "assert", "(", "dat", ".", "masked", "is", "True", ")", "assert", "np", ".", "all", "(", "(", "dat", "[", "'A'", "]", ".", "mask", "==", "[", "True", ",", "False", "]", ")", ")", "assert", "(", "dat", "[", "'A'", "]", ".", "dtype", ".", "kind", "==", "'i'", ")" ]
test reading a table with missing data .
train
false
24,790
@receiver(COURSE_CERT_AWARDED, sender=GeneratedCertificate) def handle_course_cert_awarded(sender, user, course_key, **kwargs): if is_prerequisite_courses_enabled(): fulfill_course_milestone(course_key, user)
[ "@", "receiver", "(", "COURSE_CERT_AWARDED", ",", "sender", "=", "GeneratedCertificate", ")", "def", "handle_course_cert_awarded", "(", "sender", ",", "user", ",", "course_key", ",", "**", "kwargs", ")", ":", "if", "is_prerequisite_courses_enabled", "(", ")", ":", "fulfill_course_milestone", "(", "course_key", ",", "user", ")" ]
if programs is enabled and a learner is awarded a course certificate .
train
false
24,791
def latestFunction(oldFunc): dictID = id(oldFunc.func_globals) module = _modDictIDMap.get(dictID) if (module is None): return oldFunc return getattr(module, oldFunc.__name__)
[ "def", "latestFunction", "(", "oldFunc", ")", ":", "dictID", "=", "id", "(", "oldFunc", ".", "func_globals", ")", "module", "=", "_modDictIDMap", ".", "get", "(", "dictID", ")", "if", "(", "module", "is", "None", ")", ":", "return", "oldFunc", "return", "getattr", "(", "module", ",", "oldFunc", ".", "__name__", ")" ]
get the latest version of a function .
train
false
24,792
def send_draft(account, draft, db_session): update_draft_on_send(account, draft, db_session) response_on_success = APIEncoder().jsonify(draft) try: sendmail_client = get_sendmail_client(account) sendmail_client.send(draft) except SendMailException as exc: kwargs = {} if exc.failures: kwargs['failures'] = exc.failures if exc.server_error: kwargs['server_error'] = exc.server_error return err(exc.http_code, exc.message, **kwargs) return response_on_success
[ "def", "send_draft", "(", "account", ",", "draft", ",", "db_session", ")", ":", "update_draft_on_send", "(", "account", ",", "draft", ",", "db_session", ")", "response_on_success", "=", "APIEncoder", "(", ")", ".", "jsonify", "(", "draft", ")", "try", ":", "sendmail_client", "=", "get_sendmail_client", "(", "account", ")", "sendmail_client", ".", "send", "(", "draft", ")", "except", "SendMailException", "as", "exc", ":", "kwargs", "=", "{", "}", "if", "exc", ".", "failures", ":", "kwargs", "[", "'failures'", "]", "=", "exc", ".", "failures", "if", "exc", ".", "server_error", ":", "kwargs", "[", "'server_error'", "]", "=", "exc", ".", "server_error", "return", "err", "(", "exc", ".", "http_code", ",", "exc", ".", "message", ",", "**", "kwargs", ")", "return", "response_on_success" ]
send the draft with id = draft_id .
train
false
24,793
def get_template_from_string(source, origin=None, name=None): return Template(source, origin, name)
[ "def", "get_template_from_string", "(", "source", ",", "origin", "=", "None", ",", "name", "=", "None", ")", ":", "return", "Template", "(", "source", ",", "origin", ",", "name", ")" ]
returns a compiled template object for the given template code .
train
false
24,794
def first_true_index(iterable, pred=None, default=None): if (pred is None): func = operator.itemgetter(1) else: func = (lambda x: pred(x[1])) ii = next(filter(func, enumerate(iterable)), default) return (ii[0] if ii else default)
[ "def", "first_true_index", "(", "iterable", ",", "pred", "=", "None", ",", "default", "=", "None", ")", ":", "if", "(", "pred", "is", "None", ")", ":", "func", "=", "operator", ".", "itemgetter", "(", "1", ")", "else", ":", "func", "=", "(", "lambda", "x", ":", "pred", "(", "x", "[", "1", "]", ")", ")", "ii", "=", "next", "(", "filter", "(", "func", ",", "enumerate", "(", "iterable", ")", ")", ",", "default", ")", "return", "(", "ii", "[", "0", "]", "if", "ii", "else", "default", ")" ]
find the first index position for the which the callable pred returns true .
train
false
24,795
def extract_seqs_by_sample_id(seqs, sample_ids, negate=False): sample_ids = {}.fromkeys(sample_ids) if (not negate): def f(s): return (s in sample_ids) else: def f(s): return (s not in sample_ids) for (seq_id, seq) in seqs: sample_id = seq_id.split('_')[0] if f(sample_id): (yield (seq_id, seq))
[ "def", "extract_seqs_by_sample_id", "(", "seqs", ",", "sample_ids", ",", "negate", "=", "False", ")", ":", "sample_ids", "=", "{", "}", ".", "fromkeys", "(", "sample_ids", ")", "if", "(", "not", "negate", ")", ":", "def", "f", "(", "s", ")", ":", "return", "(", "s", "in", "sample_ids", ")", "else", ":", "def", "f", "(", "s", ")", ":", "return", "(", "s", "not", "in", "sample_ids", ")", "for", "(", "seq_id", ",", "seq", ")", "in", "seqs", ":", "sample_id", "=", "seq_id", ".", "split", "(", "'_'", ")", "[", "0", "]", "if", "f", "(", "sample_id", ")", ":", "(", "yield", "(", "seq_id", ",", "seq", ")", ")" ]
returns pairs if sample_id is in sample_ids .
train
false
24,796
def setup_rheader(r, tabs=[]): if (r.representation == 'html'): T = current.T tabs = [(T('Deployment Details'), None), (T('Servers'), 'server'), (T('Instances'), 'instance')] rheader_tabs = s3_rheader_tabs(r, tabs) rheader = DIV(rheader_tabs) return rheader
[ "def", "setup_rheader", "(", "r", ",", "tabs", "=", "[", "]", ")", ":", "if", "(", "r", ".", "representation", "==", "'html'", ")", ":", "T", "=", "current", ".", "T", "tabs", "=", "[", "(", "T", "(", "'Deployment Details'", ")", ",", "None", ")", ",", "(", "T", "(", "'Servers'", ")", ",", "'server'", ")", ",", "(", "T", "(", "'Instances'", ")", ",", "'instance'", ")", "]", "rheader_tabs", "=", "s3_rheader_tabs", "(", "r", ",", "tabs", ")", "rheader", "=", "DIV", "(", "rheader_tabs", ")", "return", "rheader" ]
resource component page header .
train
false
24,797
def addLinesToCString(cString, lines): for line in lines: if (line != ''): cString.write((line + '\n'))
[ "def", "addLinesToCString", "(", "cString", ",", "lines", ")", ":", "for", "line", "in", "lines", ":", "if", "(", "line", "!=", "''", ")", ":", "cString", ".", "write", "(", "(", "line", "+", "'\\n'", ")", ")" ]
add lines which have something to cstringio .
train
false
24,798
def get_driver_names(): return drivers.keys()
[ "def", "get_driver_names", "(", ")", ":", "return", "drivers", ".", "keys", "(", ")" ]
get the list of driver names currently registered with this api .
train
false
24,802
def is_shopping_cart_enabled(): enable_paid_course_registration = configuration_helpers.get_value('ENABLE_PAID_COURSE_REGISTRATION', settings.FEATURES.get('ENABLE_PAID_COURSE_REGISTRATION')) enable_shopping_cart = configuration_helpers.get_value('ENABLE_SHOPPING_CART', settings.FEATURES.get('ENABLE_SHOPPING_CART')) return (enable_paid_course_registration and enable_shopping_cart)
[ "def", "is_shopping_cart_enabled", "(", ")", ":", "enable_paid_course_registration", "=", "configuration_helpers", ".", "get_value", "(", "'ENABLE_PAID_COURSE_REGISTRATION'", ",", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_PAID_COURSE_REGISTRATION'", ")", ")", "enable_shopping_cart", "=", "configuration_helpers", ".", "get_value", "(", "'ENABLE_SHOPPING_CART'", ",", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_SHOPPING_CART'", ")", ")", "return", "(", "enable_paid_course_registration", "and", "enable_shopping_cart", ")" ]
utility method to check the various configuration to verify that all of the settings have been enabled .
train
false
24,803
def get_projection_names(): return projection_registry.get_projection_names()
[ "def", "get_projection_names", "(", ")", ":", "return", "projection_registry", ".", "get_projection_names", "(", ")" ]
get a list of acceptable projection names .
train
false
24,804
def cloud(tgt, provider=None): if (not isinstance(tgt, six.string_types)): return {} ret = {} opts = salt.config.cloud_config('/etc/salt/cloud') cloud_cache = __utils__['cloud.list_cache_nodes_full'](opts=opts, provider=provider) for (driver, providers) in cloud_cache.items(): for (provider, servers) in providers.items(): for (name, data) in servers.items(): if fnmatch.fnmatch(name, tgt): ret[name] = data ret['name']['provider'] = provider return ret
[ "def", "cloud", "(", "tgt", ",", "provider", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "tgt", ",", "six", ".", "string_types", ")", ")", ":", "return", "{", "}", "ret", "=", "{", "}", "opts", "=", "salt", ".", "config", ".", "cloud_config", "(", "'/etc/salt/cloud'", ")", "cloud_cache", "=", "__utils__", "[", "'cloud.list_cache_nodes_full'", "]", "(", "opts", "=", "opts", ",", "provider", "=", "provider", ")", "for", "(", "driver", ",", "providers", ")", "in", "cloud_cache", ".", "items", "(", ")", ":", "for", "(", "provider", ",", "servers", ")", "in", "providers", ".", "items", "(", ")", ":", "for", "(", "name", ",", "data", ")", "in", "servers", ".", "items", "(", ")", ":", "if", "fnmatch", ".", "fnmatch", "(", "name", ",", "tgt", ")", ":", "ret", "[", "name", "]", "=", "data", "ret", "[", "'name'", "]", "[", "'provider'", "]", "=", "provider", "return", "ret" ]
return cloud cache data for target .
train
false
24,805
def getRemovedFloatByKeys(keys, prefix, xmlElement): removedFloat = 0.0 for key in keys: prefixKey = (prefix + key) if (prefixKey in xmlElement.attributeDictionary): floatValue = evaluate.getEvaluatedFloat(prefixKey, xmlElement) if (floatValue == None): print 'Warning, evaluated value in getEvaluatedFloatByPrefixes in matrix is None for key:' print prefixKey print 'for xmlElement dictionary value:' print xmlElement.attributeDictionary[prefixKey] print 'for xmlElement dictionary:' print xmlElement.attributeDictionary else: removedFloat += floatValue del xmlElement.attributeDictionary[prefixKey] return removedFloat
[ "def", "getRemovedFloatByKeys", "(", "keys", ",", "prefix", ",", "xmlElement", ")", ":", "removedFloat", "=", "0.0", "for", "key", "in", "keys", ":", "prefixKey", "=", "(", "prefix", "+", "key", ")", "if", "(", "prefixKey", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "floatValue", "=", "evaluate", ".", "getEvaluatedFloat", "(", "prefixKey", ",", "xmlElement", ")", "if", "(", "floatValue", "==", "None", ")", ":", "print", "'Warning, evaluated value in getEvaluatedFloatByPrefixes in matrix is None for key:'", "print", "prefixKey", "print", "'for xmlElement dictionary value:'", "print", "xmlElement", ".", "attributeDictionary", "[", "prefixKey", "]", "print", "'for xmlElement dictionary:'", "print", "xmlElement", ".", "attributeDictionary", "else", ":", "removedFloat", "+=", "floatValue", "del", "xmlElement", ".", "attributeDictionary", "[", "prefixKey", "]", "return", "removedFloat" ]
get the float by the keys and the prefix .
train
false
24,806
def TreeNet(depth=1, fanout=2, **kwargs): topo = TreeTopo(depth, fanout) return Mininet(topo, **kwargs)
[ "def", "TreeNet", "(", "depth", "=", "1", ",", "fanout", "=", "2", ",", "**", "kwargs", ")", ":", "topo", "=", "TreeTopo", "(", "depth", ",", "fanout", ")", "return", "Mininet", "(", "topo", ",", "**", "kwargs", ")" ]
convenience function for creating tree networks .
train
false
24,807
def test_biweight_midvariance_axis(): with NumpyRNGContext(12345): ny = 100 nx = 200 data = normal(5, 2, (ny, nx)) bw = funcs.biweight_midvariance(data, axis=0) bwi = [] for i in range(nx): bwi.append(funcs.biweight_midvariance(data[:, i])) bwi = np.array(bwi) assert_allclose(bw, bwi) bw = funcs.biweight_midvariance(data, axis=1) bwi = [] for i in range(ny): bwi.append(funcs.biweight_midvariance(data[i, :])) bwi = np.array(bwi) assert_allclose(bw, bwi)
[ "def", "test_biweight_midvariance_axis", "(", ")", ":", "with", "NumpyRNGContext", "(", "12345", ")", ":", "ny", "=", "100", "nx", "=", "200", "data", "=", "normal", "(", "5", ",", "2", ",", "(", "ny", ",", "nx", ")", ")", "bw", "=", "funcs", ".", "biweight_midvariance", "(", "data", ",", "axis", "=", "0", ")", "bwi", "=", "[", "]", "for", "i", "in", "range", "(", "nx", ")", ":", "bwi", ".", "append", "(", "funcs", ".", "biweight_midvariance", "(", "data", "[", ":", ",", "i", "]", ")", ")", "bwi", "=", "np", ".", "array", "(", "bwi", ")", "assert_allclose", "(", "bw", ",", "bwi", ")", "bw", "=", "funcs", ".", "biweight_midvariance", "(", "data", ",", "axis", "=", "1", ")", "bwi", "=", "[", "]", "for", "i", "in", "range", "(", "ny", ")", ":", "bwi", ".", "append", "(", "funcs", ".", "biweight_midvariance", "(", "data", "[", "i", ",", ":", "]", ")", ")", "bwi", "=", "np", ".", "array", "(", "bwi", ")", "assert_allclose", "(", "bw", ",", "bwi", ")" ]
test a 2d array with the axis keyword .
train
false
24,808
@core_helper def view_resource_url(resource_view, resource, package, **kw): return resource['url']
[ "@", "core_helper", "def", "view_resource_url", "(", "resource_view", ",", "resource", ",", "package", ",", "**", "kw", ")", ":", "return", "resource", "[", "'url'", "]" ]
returns url for resource .
train
false
24,810
@register.tag def get_legalpages(parser, token): bits = token.split_contents() syntax_message = ('%(tag_name)s expects a syntax of %(tag_name)s as context_name' % dict(tag_name=bits[0])) if (len(bits) == 3): if (bits[1] != 'as'): raise template.TemplateSyntaxError(syntax_message) context_name = bits[2] return LegalPageNode(context_name) else: raise template.TemplateSyntaxError(syntax_message)
[ "@", "register", ".", "tag", "def", "get_legalpages", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "syntax_message", "=", "(", "'%(tag_name)s expects a syntax of %(tag_name)s as context_name'", "%", "dict", "(", "tag_name", "=", "bits", "[", "0", "]", ")", ")", "if", "(", "len", "(", "bits", ")", "==", "3", ")", ":", "if", "(", "bits", "[", "1", "]", "!=", "'as'", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "syntax_message", ")", "context_name", "=", "bits", "[", "2", "]", "return", "LegalPageNode", "(", "context_name", ")", "else", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "syntax_message", ")" ]
retrieves all active legalpage objects .
train
false
24,812
def _monkeypatch_unicode_mask_fill_values(): if (NUMPY_LT_1_8 and six.PY2): import numpy as np from numpy.ma import core as ma_core _check_fill_value_original = ma_core._check_fill_value def _check_fill_value(fill_value, ndtype): if ((not ndtype.fields) and isinstance(fill_value, six.string_types) and (ndtype.char in u'SVU')): return np.array(fill_value, copy=False, dtype=ndtype) return _check_fill_value_original(fill_value, ndtype) ma_core._check_fill_value = _check_fill_value
[ "def", "_monkeypatch_unicode_mask_fill_values", "(", ")", ":", "if", "(", "NUMPY_LT_1_8", "and", "six", ".", "PY2", ")", ":", "import", "numpy", "as", "np", "from", "numpy", ".", "ma", "import", "core", "as", "ma_core", "_check_fill_value_original", "=", "ma_core", ".", "_check_fill_value", "def", "_check_fill_value", "(", "fill_value", ",", "ndtype", ")", ":", "if", "(", "(", "not", "ndtype", ".", "fields", ")", "and", "isinstance", "(", "fill_value", ",", "six", ".", "string_types", ")", "and", "(", "ndtype", ".", "char", "in", "u'SVU'", ")", ")", ":", "return", "np", ".", "array", "(", "fill_value", ",", "copy", "=", "False", ",", "dtype", "=", "ndtype", ")", "return", "_check_fill_value_original", "(", "fill_value", ",", "ndtype", ")", "ma_core", ".", "_check_fill_value", "=", "_check_fill_value" ]
numpy < 1 .
train
false
24,814
def _GetMSVSConfigurationType(spec, build_file): try: config_type = {'executable': '1', 'shared_library': '2', 'loadable_module': '2', 'static_library': '4', 'none': '10'}[spec['type']] except KeyError: if spec.get('type'): raise GypError(('Target type %s is not a valid target type for target %s in %s.' % (spec['type'], spec['target_name'], build_file))) else: raise GypError(('Missing type field for target %s in %s.' % (spec['target_name'], build_file))) return config_type
[ "def", "_GetMSVSConfigurationType", "(", "spec", ",", "build_file", ")", ":", "try", ":", "config_type", "=", "{", "'executable'", ":", "'1'", ",", "'shared_library'", ":", "'2'", ",", "'loadable_module'", ":", "'2'", ",", "'static_library'", ":", "'4'", ",", "'none'", ":", "'10'", "}", "[", "spec", "[", "'type'", "]", "]", "except", "KeyError", ":", "if", "spec", ".", "get", "(", "'type'", ")", ":", "raise", "GypError", "(", "(", "'Target type %s is not a valid target type for target %s in %s.'", "%", "(", "spec", "[", "'type'", "]", ",", "spec", "[", "'target_name'", "]", ",", "build_file", ")", ")", ")", "else", ":", "raise", "GypError", "(", "(", "'Missing type field for target %s in %s.'", "%", "(", "spec", "[", "'target_name'", "]", ",", "build_file", ")", ")", ")", "return", "config_type" ]
returns the configuration type for this project .
train
false
24,815
def get_login_failed_count(name): ret = _get_account_policy_data_value(name, 'failedLoginCount') return salt.utils.mac_utils.parse_return(ret)
[ "def", "get_login_failed_count", "(", "name", ")", ":", "ret", "=", "_get_account_policy_data_value", "(", "name", ",", "'failedLoginCount'", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "parse_return", "(", "ret", ")" ]
get the the number of failed login attempts .
train
true
24,818
def RekallStringRenderer(x): try: return x['str'] except KeyError: return x['b64']
[ "def", "RekallStringRenderer", "(", "x", ")", ":", "try", ":", "return", "x", "[", "'str'", "]", "except", "KeyError", ":", "return", "x", "[", "'b64'", "]" ]
function used to render rekall str objects .
train
false
24,819
def _dig_sort_key(dig): return ((10000 * dig['kind']) + dig['ident'])
[ "def", "_dig_sort_key", "(", "dig", ")", ":", "return", "(", "(", "10000", "*", "dig", "[", "'kind'", "]", ")", "+", "dig", "[", "'ident'", "]", ")" ]
helper for sorting .
train
false
24,820
def matchPatterns(patterns, keys): results = [] if patterns: for pattern in patterns: prog = re.compile(pattern) for key in keys: if prog.match(key): results.append(key) else: return None return results
[ "def", "matchPatterns", "(", "patterns", ",", "keys", ")", ":", "results", "=", "[", "]", "if", "patterns", ":", "for", "pattern", "in", "patterns", ":", "prog", "=", "re", ".", "compile", "(", "pattern", ")", "for", "key", "in", "keys", ":", "if", "prog", ".", "match", "(", "key", ")", ":", "results", ".", "append", "(", "key", ")", "else", ":", "return", "None", "return", "results" ]
returns a subset of the keys that match any of the given patterns parameters: patterns: a list of regular expressions to match keys: a list of keys to search for matches .
train
true
24,821
def __has_required_azure(): if HAS_AZURE: if hasattr(azure, '__version__'): version = LooseVersion(azure.__version__) else: version = LooseVersion(azure.common.__version__) if (REQUIRED_AZURE <= version): return True return False
[ "def", "__has_required_azure", "(", ")", ":", "if", "HAS_AZURE", ":", "if", "hasattr", "(", "azure", ",", "'__version__'", ")", ":", "version", "=", "LooseVersion", "(", "azure", ".", "__version__", ")", "else", ":", "version", "=", "LooseVersion", "(", "azure", ".", "common", ".", "__version__", ")", "if", "(", "REQUIRED_AZURE", "<=", "version", ")", ":", "return", "True", "return", "False" ]
returns true/false if the required version of the azure sdk is installed .
train
false
24,822
def _get_action_user_name(kwargs): if ('user' in kwargs): user = kwargs['user'] else: user = helpers.call_action('get_site_user') if (user is None): user_name = None else: user_name = user['name'] return user_name
[ "def", "_get_action_user_name", "(", "kwargs", ")", ":", "if", "(", "'user'", "in", "kwargs", ")", ":", "user", "=", "kwargs", "[", "'user'", "]", "else", ":", "user", "=", "helpers", ".", "call_action", "(", "'get_site_user'", ")", "if", "(", "user", "is", "None", ")", ":", "user_name", "=", "None", "else", ":", "user_name", "=", "user", "[", "'name'", "]", "return", "user_name" ]
return the name of the user in kwargs .
train
false
24,823
def set_recommendations(exp_id, new_recommendations): recommendations_models.ExplorationRecommendationsModel(id=exp_id, recommended_exploration_ids=new_recommendations).put()
[ "def", "set_recommendations", "(", "exp_id", ",", "new_recommendations", ")", ":", "recommendations_models", ".", "ExplorationRecommendationsModel", "(", "id", "=", "exp_id", ",", "recommended_exploration_ids", "=", "new_recommendations", ")", ".", "put", "(", ")" ]
stores a list of exploration ids of recommended explorations to play after completing the exploration keyed by exp_id .
train
false
24,825
def get_formatted_wwn(wwn_str): if (len(wwn_str) != 16): return wwn_str.lower() else: return ':'.join([wwn_str[i:(i + 2)] for i in range(0, len(wwn_str), 2)]).lower()
[ "def", "get_formatted_wwn", "(", "wwn_str", ")", ":", "if", "(", "len", "(", "wwn_str", ")", "!=", "16", ")", ":", "return", "wwn_str", ".", "lower", "(", ")", "else", ":", "return", "':'", ".", "join", "(", "[", "wwn_str", "[", "i", ":", "(", "i", "+", "2", ")", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "wwn_str", ")", ",", "2", ")", "]", ")", ".", "lower", "(", ")" ]
utility api that formats wwn to insert : .
train
false
24,826
def displayhosts_json(recordsgen, out=sys.stdout): out.write(json.dumps(recordsgen, default=utils.serialize)) out.write('\n')
[ "def", "displayhosts_json", "(", "recordsgen", ",", "out", "=", "sys", ".", "stdout", ")", ":", "out", ".", "write", "(", "json", ".", "dumps", "(", "recordsgen", ",", "default", "=", "utils", ".", "serialize", ")", ")", "out", ".", "write", "(", "'\\n'", ")" ]
displays the nmap scan result contained in record as json .
train
false
24,827
def merge_similar_guesses(guesses, prop, choose): similar = [guess for guess in guesses if (prop in guess)] if (len(similar) < 2): return if (len(similar) == 2): _merge_similar_guesses_nocheck(guesses, prop, choose) if (len(similar) > 2): log.debug(u'complex merge, trying our best...') before = len(guesses) _merge_similar_guesses_nocheck(guesses, prop, choose) after = len(guesses) if (after < before): merge_similar_guesses(guesses, prop, choose)
[ "def", "merge_similar_guesses", "(", "guesses", ",", "prop", ",", "choose", ")", ":", "similar", "=", "[", "guess", "for", "guess", "in", "guesses", "if", "(", "prop", "in", "guess", ")", "]", "if", "(", "len", "(", "similar", ")", "<", "2", ")", ":", "return", "if", "(", "len", "(", "similar", ")", "==", "2", ")", ":", "_merge_similar_guesses_nocheck", "(", "guesses", ",", "prop", ",", "choose", ")", "if", "(", "len", "(", "similar", ")", ">", "2", ")", ":", "log", ".", "debug", "(", "u'complex merge, trying our best...'", ")", "before", "=", "len", "(", "guesses", ")", "_merge_similar_guesses_nocheck", "(", "guesses", ",", "prop", ",", "choose", ")", "after", "=", "len", "(", "guesses", ")", "if", "(", "after", "<", "before", ")", ":", "merge_similar_guesses", "(", "guesses", ",", "prop", ",", "choose", ")" ]
take a list of guesses and merge those which have the same properties .
train
false
24,828
def survey_save_answers_for_series(series_id, complete_id, rvars): questions = survey_getAllQuestionsForSeries(series_id) return saveAnswers(questions, series_id, complete_id, rvars)
[ "def", "survey_save_answers_for_series", "(", "series_id", ",", "complete_id", ",", "rvars", ")", ":", "questions", "=", "survey_getAllQuestionsForSeries", "(", "series_id", ")", "return", "saveAnswers", "(", "questions", ",", "series_id", ",", "complete_id", ",", "rvars", ")" ]
function to save the list of answers for a completed series @todo: remove wrapper .
train
false
24,829
def detach_devices_from_vm(session, vm_ref, devices): client_factory = session.vim.client.factory config_spec = _detach_and_delete_devices_config_spec(client_factory, devices) reconfigure_vm(session, vm_ref, config_spec)
[ "def", "detach_devices_from_vm", "(", "session", ",", "vm_ref", ",", "devices", ")", ":", "client_factory", "=", "session", ".", "vim", ".", "client", ".", "factory", "config_spec", "=", "_detach_and_delete_devices_config_spec", "(", "client_factory", ",", "devices", ")", "reconfigure_vm", "(", "session", ",", "vm_ref", ",", "config_spec", ")" ]
detach specified devices from vm .
train
false
24,830
def gf_irreducible_p(f, p, K): method = query('GF_IRRED_METHOD') if (method is not None): irred = _irred_methods[method](f, p, K) else: irred = gf_irred_p_rabin(f, p, K) return irred
[ "def", "gf_irreducible_p", "(", "f", ",", "p", ",", "K", ")", ":", "method", "=", "query", "(", "'GF_IRRED_METHOD'", ")", "if", "(", "method", "is", "not", "None", ")", ":", "irred", "=", "_irred_methods", "[", "method", "]", "(", "f", ",", "p", ",", "K", ")", "else", ":", "irred", "=", "gf_irred_p_rabin", "(", "f", ",", "p", ",", "K", ")", "return", "irred" ]
test irreducibility of a polynomial f in gf(p)[x] .
train
false
24,831
def truncate_text(text, maxlen=128, suffix='...'): if (len(text) >= maxlen): return (text[:maxlen].rsplit(' ', 1)[0] + suffix) return text
[ "def", "truncate_text", "(", "text", ",", "maxlen", "=", "128", ",", "suffix", "=", "'...'", ")", ":", "if", "(", "len", "(", "text", ")", ">=", "maxlen", ")", ":", "return", "(", "text", "[", ":", "maxlen", "]", ".", "rsplit", "(", "' '", ",", "1", ")", "[", "0", "]", "+", "suffix", ")", "return", "text" ]
return as many characters as possible without going over the limit .
train
false
24,833
def window_hanning(x): return (np.hanning(len(x)) * x)
[ "def", "window_hanning", "(", "x", ")", ":", "return", "(", "np", ".", "hanning", "(", "len", "(", "x", ")", ")", "*", "x", ")" ]
return x times the hanning window of len(x) .
train
false
24,835
def _ma_invtransparams(macoefs): tmp = macoefs.copy() for j in range((len(macoefs) - 1), 0, (-1)): b = macoefs[j] for kiter in range(j): tmp[kiter] = ((macoefs[kiter] - (b * macoefs[((j - kiter) - 1)])) / (1 - (b ** 2))) macoefs[:j] = tmp[:j] invmacoefs = (- np.log(((1 - macoefs) / (1 + macoefs)))) return invmacoefs
[ "def", "_ma_invtransparams", "(", "macoefs", ")", ":", "tmp", "=", "macoefs", ".", "copy", "(", ")", "for", "j", "in", "range", "(", "(", "len", "(", "macoefs", ")", "-", "1", ")", ",", "0", ",", "(", "-", "1", ")", ")", ":", "b", "=", "macoefs", "[", "j", "]", "for", "kiter", "in", "range", "(", "j", ")", ":", "tmp", "[", "kiter", "]", "=", "(", "(", "macoefs", "[", "kiter", "]", "-", "(", "b", "*", "macoefs", "[", "(", "(", "j", "-", "kiter", ")", "-", "1", ")", "]", ")", ")", "/", "(", "1", "-", "(", "b", "**", "2", ")", ")", ")", "macoefs", "[", ":", "j", "]", "=", "tmp", "[", ":", "j", "]", "invmacoefs", "=", "(", "-", "np", ".", "log", "(", "(", "(", "1", "-", "macoefs", ")", "/", "(", "1", "+", "macoefs", ")", ")", ")", ")", "return", "invmacoefs" ]
inverse of the jones reparameterization parameters params : array the transformed ma coefficients .
train
false
24,836
def time_pyexcelerate(): start_time = clock() workbook = pyexcelerate.Workbook() worksheet = workbook.new_sheet('Sheet1') for row in range((row_max // 2)): for col in range(col_max): worksheet.set_cell_value(((row * 2) + 1), (col + 1), ('Row: %d Col: %d' % (row, col))) for col in range(col_max): worksheet.set_cell_value(((row * 2) + 2), (col + 1), (row + col)) workbook.save('pyexcelerate.xlsx') elapsed = (clock() - start_time) print_elapsed_time('pyexcelerate', elapsed)
[ "def", "time_pyexcelerate", "(", ")", ":", "start_time", "=", "clock", "(", ")", "workbook", "=", "pyexcelerate", ".", "Workbook", "(", ")", "worksheet", "=", "workbook", ".", "new_sheet", "(", "'Sheet1'", ")", "for", "row", "in", "range", "(", "(", "row_max", "//", "2", ")", ")", ":", "for", "col", "in", "range", "(", "col_max", ")", ":", "worksheet", ".", "set_cell_value", "(", "(", "(", "row", "*", "2", ")", "+", "1", ")", ",", "(", "col", "+", "1", ")", ",", "(", "'Row: %d Col: %d'", "%", "(", "row", ",", "col", ")", ")", ")", "for", "col", "in", "range", "(", "col_max", ")", ":", "worksheet", ".", "set_cell_value", "(", "(", "(", "row", "*", "2", ")", "+", "2", ")", ",", "(", "col", "+", "1", ")", ",", "(", "row", "+", "col", ")", ")", "workbook", ".", "save", "(", "'pyexcelerate.xlsx'", ")", "elapsed", "=", "(", "clock", "(", ")", "-", "start_time", ")", "print_elapsed_time", "(", "'pyexcelerate'", ",", "elapsed", ")" ]
run pyexcelerate in "faster" mode .
train
false
24,837
def shell_history(attrs=None, where=None): return _osquery_cmd(table='shell_history', attrs=attrs, where=where)
[ "def", "shell_history", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'shell_history'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return shell_history information from osquery cli example: .
train
false
24,838
@decorator def x_runtime(f, *args, **kwargs): _t0 = now() r = f(*args, **kwargs) _t1 = now() r.headers['X-Runtime'] = '{0}s'.format(Decimal(str((_t1 - _t0)))) return r
[ "@", "decorator", "def", "x_runtime", "(", "f", ",", "*", "args", ",", "**", "kwargs", ")", ":", "_t0", "=", "now", "(", ")", "r", "=", "f", "(", "*", "args", ",", "**", "kwargs", ")", "_t1", "=", "now", "(", ")", "r", ".", "headers", "[", "'X-Runtime'", "]", "=", "'{0}s'", ".", "format", "(", "Decimal", "(", "str", "(", "(", "_t1", "-", "_t0", ")", ")", ")", ")", "return", "r" ]
x-runtime flask response decorator .
train
true
24,839
def update_build_queue(apps, schema): Project = apps.get_model(u'projects', u'Project') for project in Project.objects.all(): if (project.build_queue is not None): if (not project.build_queue.startswith(u'build-')): project.build_queue = u'build-{0}'.format(project.build_queue) project.save()
[ "def", "update_build_queue", "(", "apps", ",", "schema", ")", ":", "Project", "=", "apps", ".", "get_model", "(", "u'projects'", ",", "u'Project'", ")", "for", "project", "in", "Project", ".", "objects", ".", "all", "(", ")", ":", "if", "(", "project", ".", "build_queue", "is", "not", "None", ")", ":", "if", "(", "not", "project", ".", "build_queue", ".", "startswith", "(", "u'build-'", ")", ")", ":", "project", ".", "build_queue", "=", "u'build-{0}'", ".", "format", "(", "project", ".", "build_queue", ")", "project", ".", "save", "(", ")" ]
update project build queue to include the previously implied build- prefix .
train
false
24,840
def runnable_payloads(shell_obj): result = [] for payload_name in get_payload_list(): payload = get_payload_instance(payload_name, shell_obj) if payload.can_run(): result.append(payload_name) return result
[ "def", "runnable_payloads", "(", "shell_obj", ")", ":", "result", "=", "[", "]", "for", "payload_name", "in", "get_payload_list", "(", ")", ":", "payload", "=", "get_payload_instance", "(", "payload_name", ",", "shell_obj", ")", "if", "payload", ".", "can_run", "(", ")", ":", "result", ".", "append", "(", "payload_name", ")", "return", "result" ]
the payloads that can be run using this shell object .
train
false
24,841
def special_fixer(p): if p: p = p.replace('\\"', '"') if ((not p) or isinstance(p, unicode)): return p try: p.decode('utf-8') if (sabnzbd.DARWIN and ('&#' in p)): p = fixup_ff4(p) return p.decode('utf-8') except: return p.decode(codepage)
[ "def", "special_fixer", "(", "p", ")", ":", "if", "p", ":", "p", "=", "p", ".", "replace", "(", "'\\\\\"'", ",", "'\"'", ")", "if", "(", "(", "not", "p", ")", "or", "isinstance", "(", "p", ",", "unicode", ")", ")", ":", "return", "p", "try", ":", "p", ".", "decode", "(", "'utf-8'", ")", "if", "(", "sabnzbd", ".", "DARWIN", "and", "(", "'&#'", "in", "p", ")", ")", ":", "p", "=", "fixup_ff4", "(", "p", ")", "return", "p", ".", "decode", "(", "'utf-8'", ")", "except", ":", "return", "p", ".", "decode", "(", "codepage", ")" ]
return string appropriate for the platform .
train
false
24,844
def libvlc_media_list_insert_media(p_ml, p_md, i_pos): f = (_Cfunctions.get('libvlc_media_list_insert_media', None) or _Cfunction('libvlc_media_list_insert_media', ((1,), (1,), (1,)), None, ctypes.c_int, MediaList, Media, ctypes.c_int)) return f(p_ml, p_md, i_pos)
[ "def", "libvlc_media_list_insert_media", "(", "p_ml", ",", "p_md", ",", "i_pos", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_insert_media'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_insert_media'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaList", ",", "Media", ",", "ctypes", ".", "c_int", ")", ")", "return", "f", "(", "p_ml", ",", "p_md", ",", "i_pos", ")" ]
insert media instance in media list on a position the l{libvlc_media_list_lock} should be held upon entering this function .
train
true
24,845
def update_account(account_id, key=None, value=None, items=None): if (items is None): if ((key is None) or (value is None)): return {'Error': 'At least one key/value pair is required'} items = {key: value} (status, result) = _query(action='accounts', command=account_id, data=json.dumps(items), header_dict={'Content-Type': 'application/json;charset=UTF-8'}, method='POST') return show_account(account_id)
[ "def", "update_account", "(", "account_id", ",", "key", "=", "None", ",", "value", "=", "None", ",", "items", "=", "None", ")", ":", "if", "(", "items", "is", "None", ")", ":", "if", "(", "(", "key", "is", "None", ")", "or", "(", "value", "is", "None", ")", ")", ":", "return", "{", "'Error'", ":", "'At least one key/value pair is required'", "}", "items", "=", "{", "key", ":", "value", "}", "(", "status", ",", "result", ")", "=", "_query", "(", "action", "=", "'accounts'", ",", "command", "=", "account_id", ",", "data", "=", "json", ".", "dumps", "(", "items", ")", ",", "header_dict", "=", "{", "'Content-Type'", ":", "'application/json;charset=UTF-8'", "}", ",", "method", "=", "'POST'", ")", "return", "show_account", "(", "account_id", ")" ]
update one or more items for this account .
train
false
24,846
def find_uptime_field(a_pattern, uptime_str): a_check = re.search(a_pattern, uptime_str) if a_check: return int(a_check.group(1)) else: return 0
[ "def", "find_uptime_field", "(", "a_pattern", ",", "uptime_str", ")", ":", "a_check", "=", "re", ".", "search", "(", "a_pattern", ",", "uptime_str", ")", "if", "a_check", ":", "return", "int", "(", "a_check", ".", "group", "(", "1", ")", ")", "else", ":", "return", "0" ]
if there is a match return the match group(1) else return 0 .
train
false
24,847
def _get_xunit_func(obj, name): meth = getattr(obj, name, None) if (fixtures.getfixturemarker(meth) is None): return meth
[ "def", "_get_xunit_func", "(", "obj", ",", "name", ")", ":", "meth", "=", "getattr", "(", "obj", ",", "name", ",", "None", ")", "if", "(", "fixtures", ".", "getfixturemarker", "(", "meth", ")", "is", "None", ")", ":", "return", "meth" ]
return the attribute from the given object to be used as a setup/teardown xunit-style function .
train
false
24,849
def onBaseAppShutDown(state): INFO_MSG(('onBaseAppShutDown: state=%i' % state))
[ "def", "onBaseAppShutDown", "(", "state", ")", ":", "INFO_MSG", "(", "(", "'onBaseAppShutDown: state=%i'", "%", "state", ")", ")" ]
kbengine method .
train
false
24,850
def cleaned_string(val): return urllib.quote_plus(smart_str(val))
[ "def", "cleaned_string", "(", "val", ")", ":", "return", "urllib", ".", "quote_plus", "(", "smart_str", "(", "val", ")", ")" ]
converts val to unicode and url-encodes special characters .
train
false
24,851
def get_real_ip(request): if (KEY_REAL_IP in request): return request[KEY_REAL_IP] if (request.app[KEY_USE_X_FORWARDED_FOR] and (HTTP_HEADER_X_FORWARDED_FOR in request.headers)): request[KEY_REAL_IP] = ip_address(request.headers.get(HTTP_HEADER_X_FORWARDED_FOR).split(',')[0]) else: peername = request.transport.get_extra_info('peername') if peername: request[KEY_REAL_IP] = ip_address(peername[0]) else: request[KEY_REAL_IP] = None return request[KEY_REAL_IP]
[ "def", "get_real_ip", "(", "request", ")", ":", "if", "(", "KEY_REAL_IP", "in", "request", ")", ":", "return", "request", "[", "KEY_REAL_IP", "]", "if", "(", "request", ".", "app", "[", "KEY_USE_X_FORWARDED_FOR", "]", "and", "(", "HTTP_HEADER_X_FORWARDED_FOR", "in", "request", ".", "headers", ")", ")", ":", "request", "[", "KEY_REAL_IP", "]", "=", "ip_address", "(", "request", ".", "headers", ".", "get", "(", "HTTP_HEADER_X_FORWARDED_FOR", ")", ".", "split", "(", "','", ")", "[", "0", "]", ")", "else", ":", "peername", "=", "request", ".", "transport", ".", "get_extra_info", "(", "'peername'", ")", "if", "peername", ":", "request", "[", "KEY_REAL_IP", "]", "=", "ip_address", "(", "peername", "[", "0", "]", ")", "else", ":", "request", "[", "KEY_REAL_IP", "]", "=", "None", "return", "request", "[", "KEY_REAL_IP", "]" ]
get ip address of client .
train
false
24,853
def _output_to_dict(cmdoutput, values_mapper=None): if isinstance(cmdoutput, dict): if ((cmdoutput['retcode'] != 0) or cmdoutput['stderr']): raise CommandExecutionError('RabbitMQ command failed: {0}'.format(cmdoutput['stderr'])) cmdoutput = cmdoutput['stdout'] ret = {} if (values_mapper is None): values_mapper = (lambda string: string.split(' DCTB ')) data_rows = _strip_listing_to_done(cmdoutput.splitlines()) for row in data_rows: try: (key, values) = row.split(' DCTB ', 1) except ValueError: log.debug("Could not find any values for key '{0}'. Setting to '{0}' to an empty string.".format(row)) ret[row] = '' continue ret[key] = values_mapper(values) return ret
[ "def", "_output_to_dict", "(", "cmdoutput", ",", "values_mapper", "=", "None", ")", ":", "if", "isinstance", "(", "cmdoutput", ",", "dict", ")", ":", "if", "(", "(", "cmdoutput", "[", "'retcode'", "]", "!=", "0", ")", "or", "cmdoutput", "[", "'stderr'", "]", ")", ":", "raise", "CommandExecutionError", "(", "'RabbitMQ command failed: {0}'", ".", "format", "(", "cmdoutput", "[", "'stderr'", "]", ")", ")", "cmdoutput", "=", "cmdoutput", "[", "'stdout'", "]", "ret", "=", "{", "}", "if", "(", "values_mapper", "is", "None", ")", ":", "values_mapper", "=", "(", "lambda", "string", ":", "string", ".", "split", "(", "' DCTB '", ")", ")", "data_rows", "=", "_strip_listing_to_done", "(", "cmdoutput", ".", "splitlines", "(", ")", ")", "for", "row", "in", "data_rows", ":", "try", ":", "(", "key", ",", "values", ")", "=", "row", ".", "split", "(", "' DCTB '", ",", "1", ")", "except", "ValueError", ":", "log", ".", "debug", "(", "\"Could not find any values for key '{0}'. Setting to '{0}' to an empty string.\"", ".", "format", "(", "row", ")", ")", "ret", "[", "row", "]", "=", "''", "continue", "ret", "[", "key", "]", "=", "values_mapper", "(", "values", ")", "return", "ret" ]
convert rabbitmqctl output to a dict of data cmdoutput: string output of rabbitmqctl commands values_mapper: function object to process the values part of each line .
train
true
24,854
def _has_dig(): return (salt.utils.which('dig') is not None)
[ "def", "_has_dig", "(", ")", ":", "return", "(", "salt", ".", "utils", ".", "which", "(", "'dig'", ")", "is", "not", "None", ")" ]
the dig-specific functions have been moved into their own module .
train
false
24,855
def template_summary(): tablename = 'survey_template' s3db[tablename] s3db.survey_complete crud_strings = s3.crud_strings[tablename] def postp(r, output): if r.interactive: if (len(get_vars) > 0): (dummy, template_id) = get_vars.viewing.split('.') else: template_id = r.id form = s3db.survey_build_template_summary(template_id) output['items'] = form output['sortby'] = [[0, 'asc']] output['title'] = crud_strings.title_analysis_summary output['subtitle'] = crud_strings.subtitle_analysis_summary return output s3.postp = postp s3db.configure(tablename, listadd=False, deletable=False) output = s3_rest_controller('survey', 'template', method='list', rheader=s3db.survey_template_rheader) s3.actions = None return output
[ "def", "template_summary", "(", ")", ":", "tablename", "=", "'survey_template'", "s3db", "[", "tablename", "]", "s3db", ".", "survey_complete", "crud_strings", "=", "s3", ".", "crud_strings", "[", "tablename", "]", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "r", ".", "interactive", ":", "if", "(", "len", "(", "get_vars", ")", ">", "0", ")", ":", "(", "dummy", ",", "template_id", ")", "=", "get_vars", ".", "viewing", ".", "split", "(", "'.'", ")", "else", ":", "template_id", "=", "r", ".", "id", "form", "=", "s3db", ".", "survey_build_template_summary", "(", "template_id", ")", "output", "[", "'items'", "]", "=", "form", "output", "[", "'sortby'", "]", "=", "[", "[", "0", ",", "'asc'", "]", "]", "output", "[", "'title'", "]", "=", "crud_strings", ".", "title_analysis_summary", "output", "[", "'subtitle'", "]", "=", "crud_strings", ".", "subtitle_analysis_summary", "return", "output", "s3", ".", "postp", "=", "postp", "s3db", ".", "configure", "(", "tablename", ",", "listadd", "=", "False", ",", "deletable", "=", "False", ")", "output", "=", "s3_rest_controller", "(", "'survey'", ",", "'template'", ",", "method", "=", "'list'", ",", "rheader", "=", "s3db", ".", "survey_template_rheader", ")", "s3", ".", "actions", "=", "None", "return", "output" ]
show section-wise summary of questions of a template .
train
false